summaryrefslogtreecommitdiff
path: root/chromium/third_party/webrtc
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@theqtcompany.com>2016-07-14 17:41:05 +0200
committerAllan Sandfeld Jensen <allan.jensen@qt.io>2016-08-04 12:37:36 +0000
commit399c965b6064c440ddcf4015f5f8e9d131c7a0a6 (patch)
tree6b06b60ff365abef0e13b3503d593a0df48d20e8 /chromium/third_party/webrtc
parent7366110654eec46f21b6824f302356426f48cd74 (diff)
downloadqtwebengine-chromium-399c965b6064c440ddcf4015f5f8e9d131c7a0a6.tar.gz
BASELINE: Update Chromium to 52.0.2743.76 and Ninja to 1.7.1
Change-Id: I382f51b959689505a60f8b707255ecb344f7d8b4 Reviewed-by: Michael BrĂ¼ning <michael.bruning@qt.io>
Diffstat (limited to 'chromium/third_party/webrtc')
-rw-r--r--chromium/third_party/webrtc/BUILD.gn44
-rw-r--r--chromium/third_party/webrtc/DEPS5
-rw-r--r--chromium/third_party/webrtc/OWNERS6
-rw-r--r--chromium/third_party/webrtc/api/BUILD.gn101
-rw-r--r--chromium/third_party/webrtc/api/androidvideocapturer.cc16
-rw-r--r--chromium/third_party/webrtc/api/api.gyp144
-rw-r--r--chromium/third_party/webrtc/api/api_tests.gyp72
-rw-r--r--chromium/third_party/webrtc/api/audiotrack.h2
-rw-r--r--chromium/third_party/webrtc/api/datachannel.cc7
-rw-r--r--chromium/third_party/webrtc/api/datachannel.h4
-rw-r--r--chromium/third_party/webrtc/api/datachannel_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/api/dtlsidentitystore.cc17
-rw-r--r--chromium/third_party/webrtc/api/dtlsidentitystore.h31
-rw-r--r--chromium/third_party/webrtc/api/dtlsidentitystore_unittest.cc16
-rw-r--r--chromium/third_party/webrtc/api/dtmfsender.h7
-rw-r--r--chromium/third_party/webrtc/api/dtmfsender_unittest.cc11
-rw-r--r--chromium/third_party/webrtc/api/java/jni/OWNERS9
-rw-r--r--chromium/third_party/webrtc/api/java/jni/androidmediacodeccommon.h5
-rw-r--r--chromium/third_party/webrtc/api/java/jni/native_handle_impl.cc56
-rw-r--r--chromium/third_party/webrtc/api/java/jni/native_handle_impl.h7
-rw-r--r--chromium/third_party/webrtc/api/jsepsessiondescription.cc6
-rw-r--r--chromium/third_party/webrtc/api/jsepsessiondescription.h6
-rw-r--r--chromium/third_party/webrtc/api/jsepsessiondescription_unittest.cc18
-rw-r--r--chromium/third_party/webrtc/api/localaudiosource.cc2
-rw-r--r--chromium/third_party/webrtc/api/localaudiosource.h1
-rw-r--r--chromium/third_party/webrtc/api/mediaconstraintsinterface.cc36
-rw-r--r--chromium/third_party/webrtc/api/mediaconstraintsinterface.h1
-rw-r--r--chromium/third_party/webrtc/api/mediaconstraintsinterface_unittest.cc17
-rw-r--r--chromium/third_party/webrtc/api/mediacontroller.cc5
-rw-r--r--chromium/third_party/webrtc/api/mediastream.h8
-rw-r--r--chromium/third_party/webrtc/api/mediastream_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/api/mediastreaminterface.h13
-rw-r--r--chromium/third_party/webrtc/api/mediastreamprovider.h35
-rw-r--r--chromium/third_party/webrtc/api/mediastreamproxy.h4
-rw-r--r--chromium/third_party/webrtc/api/mediastreamtrackproxy.h10
-rw-r--r--chromium/third_party/webrtc/api/objc/README3
-rw-r--r--chromium/third_party/webrtc/api/peerconnection.cc166
-rw-r--r--chromium/third_party/webrtc/api/peerconnection.h35
-rw-r--r--chromium/third_party/webrtc/api/peerconnection_unittest.cc150
-rw-r--r--chromium/third_party/webrtc/api/peerconnection_unittests.isolate30
-rw-r--r--chromium/third_party/webrtc/api/peerconnection_unittests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/api/peerconnectionendtoend_unittest.cc27
-rw-r--r--chromium/third_party/webrtc/api/peerconnectionfactory.cc87
-rw-r--r--chromium/third_party/webrtc/api/peerconnectionfactory.h35
-rw-r--r--chromium/third_party/webrtc/api/peerconnectionfactory_unittest.cc29
-rw-r--r--chromium/third_party/webrtc/api/peerconnectionfactoryproxy.h40
-rw-r--r--chromium/third_party/webrtc/api/peerconnectioninterface.h112
-rw-r--r--chromium/third_party/webrtc/api/peerconnectioninterface_unittest.cc436
-rw-r--r--chromium/third_party/webrtc/api/peerconnectionproxy.h4
-rw-r--r--chromium/third_party/webrtc/api/proxy.h99
-rw-r--r--chromium/third_party/webrtc/api/proxy_unittest.cc171
-rw-r--r--chromium/third_party/webrtc/api/quicdatachannel.cc391
-rw-r--r--chromium/third_party/webrtc/api/quicdatachannel.h215
-rw-r--r--chromium/third_party/webrtc/api/quicdatachannel_unittest.cc659
-rw-r--r--chromium/third_party/webrtc/api/quicdatatransport.cc146
-rw-r--r--chromium/third_party/webrtc/api/quicdatatransport.h90
-rw-r--r--chromium/third_party/webrtc/api/quicdatatransport_unittest.cc356
-rw-r--r--chromium/third_party/webrtc/api/remoteaudiosource.cc4
-rw-r--r--chromium/third_party/webrtc/api/rtpparameters.h28
-rw-r--r--chromium/third_party/webrtc/api/rtpreceiver.cc28
-rw-r--r--chromium/third_party/webrtc/api/rtpreceiver.h6
-rw-r--r--chromium/third_party/webrtc/api/rtpreceiverinterface.h12
-rw-r--r--chromium/third_party/webrtc/api/rtpsender.cc43
-rw-r--r--chromium/third_party/webrtc/api/rtpsender.h12
-rw-r--r--chromium/third_party/webrtc/api/rtpsenderinterface.h4
-rw-r--r--chromium/third_party/webrtc/api/rtpsenderreceiver_unittest.cc139
-rw-r--r--chromium/third_party/webrtc/api/statscollector.cc66
-rw-r--r--chromium/third_party/webrtc/api/statscollector_unittest.cc278
-rw-r--r--chromium/third_party/webrtc/api/statstypes.cc2
-rw-r--r--chromium/third_party/webrtc/api/statstypes.h3
-rw-r--r--chromium/third_party/webrtc/api/test/fakeaudiocapturemodule.cc19
-rw-r--r--chromium/third_party/webrtc/api/test/fakeaudiocapturemodule.h21
-rw-r--r--chromium/third_party/webrtc/api/test/fakeaudiocapturemodule_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/api/test/fakedtlsidentitystore.h255
-rw-r--r--chromium/third_party/webrtc/api/test/fakevideotrackrenderer.h18
-rw-r--r--chromium/third_party/webrtc/api/test/fakevideotracksource.h1
-rw-r--r--chromium/third_party/webrtc/api/test/mockpeerconnectionobservers.h3
-rw-r--r--chromium/third_party/webrtc/api/test/peerconnectiontestwrapper.cc27
-rw-r--r--chromium/third_party/webrtc/api/test/peerconnectiontestwrapper.h10
-rw-r--r--chromium/third_party/webrtc/api/videocapturertracksource.cc9
-rw-r--r--chromium/third_party/webrtc/api/videocapturertracksource.h10
-rw-r--r--chromium/third_party/webrtc/api/videocapturertracksource_unittest.cc8
-rw-r--r--chromium/third_party/webrtc/api/videosourceproxy.h31
-rw-r--r--chromium/third_party/webrtc/api/videotrack.cc13
-rw-r--r--chromium/third_party/webrtc/api/videotrack.h9
-rw-r--r--chromium/third_party/webrtc/api/videotrack_unittest.cc10
-rw-r--r--chromium/third_party/webrtc/api/videotracksource.cc20
-rw-r--r--chromium/third_party/webrtc/api/videotracksource.h13
-rw-r--r--chromium/third_party/webrtc/api/webrtcsdp.cc113
-rw-r--r--chromium/third_party/webrtc/api/webrtcsdp_unittest.cc44
-rw-r--r--chromium/third_party/webrtc/api/webrtcsession.cc238
-rw-r--r--chromium/third_party/webrtc/api/webrtcsession.h83
-rw-r--r--chromium/third_party/webrtc/api/webrtcsession_unittest.cc387
-rw-r--r--chromium/third_party/webrtc/api/webrtcsessiondescriptionfactory.cc10
-rw-r--r--chromium/third_party/webrtc/api/webrtcsessiondescriptionfactory.h11
-rw-r--r--chromium/third_party/webrtc/audio/audio_receive_stream.cc87
-rw-r--r--chromium/third_party/webrtc/audio/audio_receive_stream.h16
-rw-r--r--chromium/third_party/webrtc/audio/audio_receive_stream_unittest.cc47
-rw-r--r--chromium/third_party/webrtc/audio/audio_send_stream.cc27
-rw-r--r--chromium/third_party/webrtc/audio/audio_send_stream.h9
-rw-r--r--chromium/third_party/webrtc/audio/audio_send_stream_unittest.cc8
-rw-r--r--chromium/third_party/webrtc/audio_receive_stream.h15
-rw-r--r--chromium/third_party/webrtc/audio_send_stream.h17
-rw-r--r--chromium/third_party/webrtc/base/BUILD.gn184
-rw-r--r--chromium/third_party/webrtc/base/DEPS1
-rw-r--r--chromium/third_party/webrtc/base/OWNERS2
-rw-r--r--chromium/third_party/webrtc/base/array_view.h2
-rw-r--r--chromium/third_party/webrtc/base/asyncinvoker.h1
-rw-r--r--chromium/third_party/webrtc/base/asyncpacketsocket.h1
-rw-r--r--chromium/third_party/webrtc/base/asynctcpsocket.cc5
-rw-r--r--chromium/third_party/webrtc/base/asynctcpsocket.h6
-rw-r--r--chromium/third_party/webrtc/base/asynctcpsocket_unittest.cc8
-rw-r--r--chromium/third_party/webrtc/base/asyncudpsocket.cc6
-rw-r--r--chromium/third_party/webrtc/base/asyncudpsocket.h5
-rw-r--r--chromium/third_party/webrtc/base/asyncudpsocket_unittest.cc8
-rw-r--r--chromium/third_party/webrtc/base/base.gyp97
-rw-r--r--chromium/third_party/webrtc/base/base_tests.gyp5
-rw-r--r--chromium/third_party/webrtc/base/buffer.cc37
-rw-r--r--chromium/third_party/webrtc/base/buffer.h258
-rw-r--r--chromium/third_party/webrtc/base/buffer_unittest.cc60
-rw-r--r--chromium/third_party/webrtc/base/bufferqueue.h1
-rw-r--r--chromium/third_party/webrtc/base/bytebuffer.cc37
-rw-r--r--chromium/third_party/webrtc/base/bytebuffer.h2
-rw-r--r--chromium/third_party/webrtc/base/bytebuffer_unittest.cc60
-rw-r--r--chromium/third_party/webrtc/base/callback_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/base/checks.cc16
-rw-r--r--chromium/third_party/webrtc/base/copyonwritebuffer.h49
-rw-r--r--chromium/third_party/webrtc/base/criticalsection_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/base/cryptstring.h12
-rw-r--r--chromium/third_party/webrtc/base/dbus_unittest.cc18
-rw-r--r--chromium/third_party/webrtc/base/diskcache.cc6
-rw-r--r--chromium/third_party/webrtc/base/fakenetwork.h7
-rw-r--r--chromium/third_party/webrtc/base/fakesslidentity.h33
-rw-r--r--chromium/third_party/webrtc/base/filerotatingstream.h3
-rw-r--r--chromium/third_party/webrtc/base/filerotatingstream_unittest.cc28
-rw-r--r--chromium/third_party/webrtc/base/fileutils.cc2
-rw-r--r--chromium/third_party/webrtc/base/fileutils.h2
-rw-r--r--chromium/third_party/webrtc/base/fileutils_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/base/flags.h1
-rw-r--r--chromium/third_party/webrtc/base/gunit.h21
-rw-r--r--chromium/third_party/webrtc/base/helpers.cc10
-rw-r--r--chromium/third_party/webrtc/base/httpbase.cc6
-rw-r--r--chromium/third_party/webrtc/base/httpclient.cc15
-rw-r--r--chromium/third_party/webrtc/base/httpclient.h5
-rw-r--r--chromium/third_party/webrtc/base/httpcommon.h4
-rw-r--r--chromium/third_party/webrtc/base/httpserver.h4
-rw-r--r--chromium/third_party/webrtc/base/latebindingsymboltable.h1
-rw-r--r--chromium/third_party/webrtc/base/linux.h4
-rw-r--r--chromium/third_party/webrtc/base/logging.cc13
-rw-r--r--chromium/third_party/webrtc/base/logging.h13
-rw-r--r--chromium/third_party/webrtc/base/logging_mac.mm22
-rw-r--r--chromium/third_party/webrtc/base/logging_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/base/logsinks.h4
-rw-r--r--chromium/third_party/webrtc/base/macasyncsocket.h1
-rw-r--r--chromium/third_party/webrtc/base/maccocoasocketserver.h1
-rw-r--r--chromium/third_party/webrtc/base/maccocoasocketserver_unittest.mm1
-rw-r--r--chromium/third_party/webrtc/base/macifaddrs_converter.cc5
-rw-r--r--chromium/third_party/webrtc/base/macsocketserver_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/base/macutils.cc4
-rw-r--r--chromium/third_party/webrtc/base/messagedigest.cc23
-rw-r--r--chromium/third_party/webrtc/base/messagehandler.h10
-rw-r--r--chromium/third_party/webrtc/base/messagequeue.cc75
-rw-r--r--chromium/third_party/webrtc/base/messagequeue.h34
-rw-r--r--chromium/third_party/webrtc/base/messagequeue_unittest.cc8
-rw-r--r--chromium/third_party/webrtc/base/multipart.h1
-rw-r--r--chromium/third_party/webrtc/base/multipart_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/base/nat_unittest.cc47
-rw-r--r--chromium/third_party/webrtc/base/natserver.cc4
-rw-r--r--chromium/third_party/webrtc/base/natserver.h1
-rw-r--r--chromium/third_party/webrtc/base/natsocketfactory.cc2
-rw-r--r--chromium/third_party/webrtc/base/natsocketfactory.h6
-rw-r--r--chromium/third_party/webrtc/base/nethelpers.cc4
-rw-r--r--chromium/third_party/webrtc/base/network.cc44
-rw-r--r--chromium/third_party/webrtc/base/network.h10
-rw-r--r--chromium/third_party/webrtc/base/network_unittest.cc32
-rw-r--r--chromium/third_party/webrtc/base/networkmonitor.h1
-rw-r--r--chromium/third_party/webrtc/base/networkroute.h4
-rw-r--r--chromium/third_party/webrtc/base/nullsocketserver.cc49
-rw-r--r--chromium/third_party/webrtc/base/nullsocketserver.h45
-rw-r--r--chromium/third_party/webrtc/base/nullsocketserver_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/base/objc/OWNERS1
-rw-r--r--chromium/third_party/webrtc/base/onetimeevent.h61
-rw-r--r--chromium/third_party/webrtc/base/onetimeevent_unittest.cc33
-rw-r--r--chromium/third_party/webrtc/base/opensslidentity.cc127
-rw-r--r--chromium/third_party/webrtc/base/opensslidentity.h25
-rw-r--r--chromium/third_party/webrtc/base/opensslstreamadapter.cc12
-rw-r--r--chromium/third_party/webrtc/base/opensslstreamadapter.h7
-rw-r--r--chromium/third_party/webrtc/base/optional.h116
-rw-r--r--chromium/third_party/webrtc/base/optional_unittest.cc111
-rw-r--r--chromium/third_party/webrtc/base/optionsfile_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/base/pathutils.cc12
-rw-r--r--chromium/third_party/webrtc/base/pathutils.h5
-rw-r--r--chromium/third_party/webrtc/base/physicalsocketserver.cc21
-rw-r--r--chromium/third_party/webrtc/base/physicalsocketserver.h4
-rw-r--r--chromium/third_party/webrtc/base/physicalsocketserver_unittest.cc24
-rw-r--r--chromium/third_party/webrtc/base/platform_thread.cc30
-rw-r--r--chromium/third_party/webrtc/base/platform_thread.h14
-rw-r--r--chromium/third_party/webrtc/base/platform_thread_unittest.cc42
-rw-r--r--chromium/third_party/webrtc/base/profiler.h1
-rw-r--r--chromium/third_party/webrtc/base/proxy_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/base/proxydetect.cc5
-rw-r--r--chromium/third_party/webrtc/base/proxyserver.h8
-rw-r--r--chromium/third_party/webrtc/base/rate_statistics.cc17
-rw-r--r--chromium/third_party/webrtc/base/rate_statistics.h5
-rw-r--r--chromium/third_party/webrtc/base/rate_statistics_unittest.cc69
-rw-r--r--chromium/third_party/webrtc/base/ratetracker.cc70
-rw-r--r--chromium/third_party/webrtc/base/ratetracker.h14
-rw-r--r--chromium/third_party/webrtc/base/ratetracker_unittest.cc32
-rw-r--r--chromium/third_party/webrtc/base/referencecountedsingletonfactory.h8
-rw-r--r--chromium/third_party/webrtc/base/rollingaccumulator.h1
-rw-r--r--chromium/third_party/webrtc/base/rtccertificate.cc24
-rw-r--r--chromium/third_party/webrtc/base/rtccertificate.h36
-rw-r--r--chromium/third_party/webrtc/base/rtccertificate_unittest.cc (renamed from chromium/third_party/webrtc/base/rtccertificate_unittests.cc)34
-rw-r--r--chromium/third_party/webrtc/base/rtccertificategenerator.cc158
-rw-r--r--chromium/third_party/webrtc/base/rtccertificategenerator.h69
-rw-r--r--chromium/third_party/webrtc/base/rtccertificategenerator_unittest.cc152
-rw-r--r--chromium/third_party/webrtc/base/scoped_autorelease_pool.h1
-rw-r--r--chromium/third_party/webrtc/base/scoped_ptr.h626
-rw-r--r--chromium/third_party/webrtc/base/scopedptrcollection_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/base/sharedexclusivelock_unittest.cc33
-rw-r--r--chromium/third_party/webrtc/base/signalthread_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/base/socket_unittest.cc91
-rw-r--r--chromium/third_party/webrtc/base/socketadapters.h1
-rw-r--r--chromium/third_party/webrtc/base/socketserver.h2
-rw-r--r--chromium/third_party/webrtc/base/socketstream.h1
-rw-r--r--chromium/third_party/webrtc/base/ssladapter_unittest.cc13
-rw-r--r--chromium/third_party/webrtc/base/sslfingerprint.cc2
-rw-r--r--chromium/third_party/webrtc/base/sslfingerprint.h2
-rw-r--r--chromium/third_party/webrtc/base/sslidentity.cc8
-rw-r--r--chromium/third_party/webrtc/base/sslidentity.h30
-rw-r--r--chromium/third_party/webrtc/base/sslidentity_unittest.cc181
-rw-r--r--chromium/third_party/webrtc/base/sslsocketfactory.cc5
-rw-r--r--chromium/third_party/webrtc/base/sslstreamadapter.h7
-rw-r--r--chromium/third_party/webrtc/base/sslstreamadapter_unittest.cc24
-rw-r--r--chromium/third_party/webrtc/base/stream.h7
-rw-r--r--chromium/third_party/webrtc/base/swap_queue.h1
-rw-r--r--chromium/third_party/webrtc/base/task.h1
-rw-r--r--chromium/third_party/webrtc/base/task_queue.h277
-rw-r--r--chromium/third_party/webrtc/base/task_queue_gcd.cc167
-rw-r--r--chromium/third_party/webrtc/base/task_queue_libevent.cc318
-rw-r--r--chromium/third_party/webrtc/base/task_queue_posix.cc40
-rw-r--r--chromium/third_party/webrtc/base/task_queue_posix.h36
-rw-r--r--chromium/third_party/webrtc/base/task_queue_unittest.cc261
-rw-r--r--chromium/third_party/webrtc/base/task_queue_win.cc184
-rw-r--r--chromium/third_party/webrtc/base/task_unittest.cc3
-rw-r--r--chromium/third_party/webrtc/base/taskparent.h5
-rw-r--r--chromium/third_party/webrtc/base/taskrunner.cc1
-rw-r--r--chromium/third_party/webrtc/base/testclient.h1
-rw-r--r--chromium/third_party/webrtc/base/testechoserver.h4
-rw-r--r--chromium/third_party/webrtc/base/testutils.h5
-rw-r--r--chromium/third_party/webrtc/base/thread.cc35
-rw-r--r--chromium/third_party/webrtc/base/thread.h13
-rw-r--r--chromium/third_party/webrtc/base/thread_checker_unittest.cc16
-rw-r--r--chromium/third_party/webrtc/base/thread_unittest.cc15
-rw-r--r--chromium/third_party/webrtc/base/timeutils.cc23
-rw-r--r--chromium/third_party/webrtc/base/timeutils.h44
-rw-r--r--chromium/third_party/webrtc/base/timeutils_unittest.cc77
-rw-r--r--chromium/third_party/webrtc/base/virtualsocket_unittest.cc33
-rw-r--r--chromium/third_party/webrtc/base/virtualsocketserver.cc47
-rw-r--r--chromium/third_party/webrtc/base/virtualsocketserver.h12
-rw-r--r--chromium/third_party/webrtc/base/win32filesystem.cc9
-rw-r--r--chromium/third_party/webrtc/base/win32regkey.cc9
-rw-r--r--chromium/third_party/webrtc/base/win32socketserver.cc4
-rw-r--r--chromium/third_party/webrtc/base/win32socketserver.h2
-rw-r--r--chromium/third_party/webrtc/base/windowpicker_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/base/x11windowpicker.cc1
-rw-r--r--chromium/third_party/webrtc/base/x11windowpicker.h5
-rw-r--r--chromium/third_party/webrtc/build/apk_tests.gyp75
-rw-r--r--chromium/third_party/webrtc/build/common.gypi38
-rw-r--r--[-rwxr-xr-x]chromium/third_party/webrtc/build/gyp_webrtc110
-rwxr-xr-x[-rw-r--r--]chromium/third_party/webrtc/build/gyp_webrtc.py128
-rw-r--r--chromium/third_party/webrtc/build/ios/SDK/Framework/WebRTC.xcodeproj/project.pbxproj910
-rw-r--r--chromium/third_party/webrtc/build/ios/SDK/Framework/WebRTC.xcodeproj/xcshareddata/xcschemes/WebRTC.xcscheme80
-rw-r--r--chromium/third_party/webrtc/build/ios/SDK/PodTest/Podfile2
-rw-r--r--chromium/third_party/webrtc/build/ios/SDK/README50
-rwxr-xr-xchromium/third_party/webrtc/build/ios/build_ios_framework.sh85
-rwxr-xr-xchromium/third_party/webrtc/build/ios/build_ios_libs.sh301
-rwxr-xr-xchromium/third_party/webrtc/build/ios/export_headers95
-rwxr-xr-xchromium/third_party/webrtc/build/ios/flatten_ios_headers46
-rwxr-xr-xchromium/third_party/webrtc/build/ios/generate_licenses.py145
-rw-r--r--chromium/third_party/webrtc/build/ios/merge_ios_libs.gyp4
-rwxr-xr-xchromium/third_party/webrtc/build/ios/merge_ios_libs.py (renamed from chromium/third_party/webrtc/build/ios/merge_ios_libs)53
-rw-r--r--chromium/third_party/webrtc/build/isolate.gypi1
-rw-r--r--chromium/third_party/webrtc/build/objc_common.gypi2
-rw-r--r--chromium/third_party/webrtc/build/protoc.gypi7
-rw-r--r--chromium/third_party/webrtc/build/sanitizers/tsan_suppressions_webrtc.cc2
-rw-r--r--chromium/third_party/webrtc/build/webrtc.gni14
-rw-r--r--chromium/third_party/webrtc/call.h5
-rw-r--r--chromium/third_party/webrtc/call/bitrate_allocator.cc170
-rw-r--r--chromium/third_party/webrtc/call/bitrate_allocator.h70
-rw-r--r--chromium/third_party/webrtc/call/bitrate_allocator_unittest.cc73
-rw-r--r--chromium/third_party/webrtc/call/bitrate_estimator_tests.cc3
-rw-r--r--chromium/third_party/webrtc/call/call.cc100
-rw-r--r--chromium/third_party/webrtc/call/call_perf_tests.cc114
-rw-r--r--chromium/third_party/webrtc/call/mock/mock_rtc_event_log.h7
-rw-r--r--chromium/third_party/webrtc/call/ringbuffer.h100
-rw-r--r--chromium/third_party/webrtc/call/ringbuffer_unittest.cc170
-rw-r--r--chromium/third_party/webrtc/call/rtc_event_log.cc383
-rw-r--r--chromium/third_party/webrtc/call/rtc_event_log.h52
-rw-r--r--chromium/third_party/webrtc/call/rtc_event_log2rtp_dump.cc157
-rw-r--r--chromium/third_party/webrtc/call/rtc_event_log_helper_thread.cc285
-rw-r--r--chromium/third_party/webrtc/call/rtc_event_log_helper_thread.h123
-rw-r--r--chromium/third_party/webrtc/call/rtc_event_log_parser.cc394
-rw-r--r--chromium/third_party/webrtc/call/rtc_event_log_parser.h114
-rw-r--r--chromium/third_party/webrtc/call/rtc_event_log_unittest.cc460
-rw-r--r--chromium/third_party/webrtc/call/rtc_event_log_unittest_helper.cc409
-rw-r--r--chromium/third_party/webrtc/call/rtc_event_log_unittest_helper.h58
-rw-r--r--chromium/third_party/webrtc/common.h1
-rw-r--r--chromium/third_party/webrtc/common_audio/audio_ring_buffer.h2
-rw-r--r--chromium/third_party/webrtc/common_audio/common_audio.gyp23
-rw-r--r--chromium/third_party/webrtc/common_audio/common_audio_unittests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/common_audio/fir_filter.cc7
-rw-r--r--chromium/third_party/webrtc/common_audio/lapped_transform.cc3
-rw-r--r--chromium/third_party/webrtc/common_audio/real_fourier.cc6
-rw-r--r--chromium/third_party/webrtc/common_audio/real_fourier.h8
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler_unittest.cc12
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.cc6
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.h2
-rw-r--r--chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc20
-rw-r--r--chromium/third_party/webrtc/common_audio/ring_buffer.c14
-rw-r--r--chromium/third_party/webrtc/common_audio/ring_buffer.h11
-rw-r--r--chromium/third_party/webrtc/common_audio/signal_processing/include/signal_processing_library.h22
-rw-r--r--chromium/third_party/webrtc/common_audio/signal_processing/spl_init.c15
-rw-r--r--chromium/third_party/webrtc/common_audio/vad/vad.cc2
-rw-r--r--chromium/third_party/webrtc/common_types.h17
-rw-r--r--chromium/third_party/webrtc/common_video/BUILD.gn3
-rw-r--r--chromium/third_party/webrtc/common_video/DEPS1
-rw-r--r--chromium/third_party/webrtc/common_video/bitrate_adjuster.cc (renamed from chromium/third_party/webrtc/modules/video_coding/bitrate_adjuster.cc)3
-rw-r--r--chromium/third_party/webrtc/common_video/bitrate_adjuster_unittest.cc (renamed from chromium/third_party/webrtc/modules/video_coding/bitrate_adjuster_unittest.cc)4
-rw-r--r--chromium/third_party/webrtc/common_video/common_video.gyp3
-rw-r--r--chromium/third_party/webrtc/common_video/common_video_unittests.gyp24
-rw-r--r--chromium/third_party/webrtc/common_video/common_video_unittests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/common_video/i420_buffer_pool.cc30
-rw-r--r--chromium/third_party/webrtc/common_video/i420_buffer_pool_unittest.cc26
-rw-r--r--chromium/third_party/webrtc/common_video/i420_video_frame_unittest.cc87
-rw-r--r--chromium/third_party/webrtc/common_video/include/bitrate_adjuster.h (renamed from chromium/third_party/webrtc/modules/video_coding/include/bitrate_adjuster.h)6
-rw-r--r--chromium/third_party/webrtc/common_video/include/frame_callback.h (renamed from chromium/third_party/webrtc/frame_callback.h)6
-rw-r--r--chromium/third_party/webrtc/common_video/include/incoming_video_stream.h41
-rw-r--r--chromium/third_party/webrtc/common_video/include/video_frame_buffer.h77
-rw-r--r--chromium/third_party/webrtc/common_video/incoming_video_stream.cc68
-rw-r--r--chromium/third_party/webrtc/common_video/libyuv/libyuv_unittest.cc56
-rw-r--r--chromium/third_party/webrtc/common_video/libyuv/scaler.cc55
-rw-r--r--chromium/third_party/webrtc/common_video/libyuv/scaler_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/common_video/libyuv/webrtc_libyuv.cc186
-rw-r--r--chromium/third_party/webrtc/common_video/video_frame.cc94
-rw-r--r--chromium/third_party/webrtc/common_video/video_frame_buffer.cc231
-rw-r--r--chromium/third_party/webrtc/common_video/video_render_frames.cc6
-rw-r--r--chromium/third_party/webrtc/examples/DEPS1
-rw-r--r--chromium/third_party/webrtc/examples/OWNERS2
-rw-r--r--chromium/third_party/webrtc/examples/androidapp/AndroidManifest.xml8
-rw-r--r--chromium/third_party/webrtc/examples/androidapp/README6
-rw-r--r--chromium/third_party/webrtc/examples/androidapp/res/drawable-hdpi/ic_add_white_24dp.pngbin0 -> 127 bytes
-rw-r--r--chromium/third_party/webrtc/examples/androidapp/res/drawable-mdpi/ic_add_white_24dp.pngbin0 -> 88 bytes
-rw-r--r--chromium/third_party/webrtc/examples/androidapp/res/drawable-xhdpi/ic_add_white_24dp.pngbin0 -> 97 bytes
-rw-r--r--chromium/third_party/webrtc/examples/androidapp/res/drawable-xxhdpi/ic_add_white_24dp.pngbin0 -> 97 bytes
-rw-r--r--chromium/third_party/webrtc/examples/androidapp/res/drawable-xxxhdpi/ic_add_white_24dp.pngbin0 -> 102 bytes
-rw-r--r--chromium/third_party/webrtc/examples/androidapp/res/layout/activity_connect.xml124
-rw-r--r--chromium/third_party/webrtc/examples/androidapp/res/layout/dialog_add_favorite.xml14
-rw-r--r--chromium/third_party/webrtc/examples/androidapp/res/menu/connect_menu.xml10
-rw-r--r--chromium/third_party/webrtc/examples/androidapp/res/values/arrays.xml4
-rw-r--r--chromium/third_party/webrtc/examples/androidapp/res/values/strings.xml14
-rw-r--r--chromium/third_party/webrtc/examples/androidjunit/README8
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient+Internal.h2
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient.h5
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient.m62
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppEngineClient.m2
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSDPUtils.m4
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSignalingMessage.h5
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSignalingMessage.m2
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDStatsBuilder.m2
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDWebSocketChannel.m2
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICECandidate+JSON.h2
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICECandidate+JSON.m2
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICEServer+JSON.h2
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCMediaConstraints+JSON.h2
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCSessionDescription+JSON.h2
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/common/ARDUtilities.m2
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m8
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.h8
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m50
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainViewController.m92
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDStatsView.m2
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h4
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.h12
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m16
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.m2
-rw-r--r--chromium/third_party/webrtc/examples/objc/AppRTCDemo/mac/APPRTCViewController.m5
-rw-r--r--chromium/third_party/webrtc/examples/peerconnection/client/conductor.cc3
-rw-r--r--chromium/third_party/webrtc/examples/peerconnection/client/conductor.h1
-rw-r--r--chromium/third_party/webrtc/examples/peerconnection/client/linux/main_wnd.h9
-rw-r--r--chromium/third_party/webrtc/examples/peerconnection/client/main_wnd.h7
-rw-r--r--chromium/third_party/webrtc/examples/peerconnection/client/peer_connection_client.h6
-rw-r--r--chromium/third_party/webrtc/examples/relayserver/relayserver_main.cc6
-rw-r--r--chromium/third_party/webrtc/libjingle/xmllite/xmlbuilder.h6
-rw-r--r--chromium/third_party/webrtc/libjingle/xmllite/xmlelement.h1
-rw-r--r--chromium/third_party/webrtc/libjingle/xmllite/xmlnsstack.h6
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/chatroommoduleimpl.cc7
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/discoitemsquerytask.cc1
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/fakexmppclient.h1
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/hangoutpubsubclient.h18
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/hangoutpubsubclient_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/iqtask.h3
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/jingleinfotask.cc4
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/mucroomconfigtask.cc1
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/mucroomlookuptask.cc1
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/pingtask.cc7
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/pingtask.h4
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/pubsub_task.cc3
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/pubsubclient_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/pubsubstateclient.h7
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/pubsubtasks_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/rostermodule_unittest.cc24
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/rostermoduleimpl.h12
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/xmpp.gyp6
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/xmppclient.cc7
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/xmppclient.h4
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/xmppengine_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/xmppengineimpl.h14
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/xmpplogintask.h8
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/xmpplogintask_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/xmpppump.cc2
-rw-r--r--chromium/third_party/webrtc/libjingle/xmpp/xmpptask.h5
-rw-r--r--chromium/third_party/webrtc/media/base/audioframe.h45
-rw-r--r--chromium/third_party/webrtc/media/base/codec.cc58
-rw-r--r--chromium/third_party/webrtc/media/base/codec.h35
-rw-r--r--chromium/third_party/webrtc/media/base/codec_unittest.cc160
-rw-r--r--chromium/third_party/webrtc/media/base/fakemediaengine.h112
-rw-r--r--chromium/third_party/webrtc/media/base/fakevideocapturer.h6
-rw-r--r--chromium/third_party/webrtc/media/base/fakevideorenderer.h16
-rw-r--r--chromium/third_party/webrtc/media/base/mediachannel.h43
-rw-r--r--chromium/third_party/webrtc/media/base/mediaconstants.cc2
-rw-r--r--chromium/third_party/webrtc/media/base/mediaengine.h12
-rw-r--r--chromium/third_party/webrtc/media/base/rtpdataengine.cc7
-rw-r--r--chromium/third_party/webrtc/media/base/rtpdump.cc26
-rw-r--r--chromium/third_party/webrtc/media/base/rtpdump.h13
-rw-r--r--chromium/third_party/webrtc/media/base/videoadapter.cc276
-rw-r--r--chromium/third_party/webrtc/media/base/videoadapter.h36
-rw-r--r--chromium/third_party/webrtc/media/base/videoadapter_unittest.cc592
-rw-r--r--chromium/third_party/webrtc/media/base/videobroadcaster.cc10
-rw-r--r--chromium/third_party/webrtc/media/base/videobroadcaster.h3
-rw-r--r--chromium/third_party/webrtc/media/base/videobroadcaster_unittest.cc16
-rw-r--r--chromium/third_party/webrtc/media/base/videocapturer.cc166
-rw-r--r--chromium/third_party/webrtc/media/base/videocapturer.h4
-rw-r--r--chromium/third_party/webrtc/media/base/videocapturer_unittest.cc32
-rw-r--r--chromium/third_party/webrtc/media/base/videocommon.cc147
-rw-r--r--chromium/third_party/webrtc/media/base/videocommon.h25
-rw-r--r--chromium/third_party/webrtc/media/base/videocommon_unittest.cc217
-rw-r--r--chromium/third_party/webrtc/media/base/videoengine_unittest.h119
-rw-r--r--chromium/third_party/webrtc/media/base/videoframe.cc60
-rw-r--r--chromium/third_party/webrtc/media/base/videoframe.h40
-rw-r--r--chromium/third_party/webrtc/media/base/videoframe_unittest.h210
-rw-r--r--chromium/third_party/webrtc/media/engine/fakewebrtccall.cc15
-rw-r--r--chromium/third_party/webrtc/media/engine/fakewebrtccall.h48
-rw-r--r--chromium/third_party/webrtc/media/engine/fakewebrtcvideocapturemodule.h13
-rw-r--r--chromium/third_party/webrtc/media/engine/fakewebrtcvideoengine.h50
-rw-r--r--chromium/third_party/webrtc/media/engine/fakewebrtcvoiceengine.h94
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtcvideocapturer.cc2
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtcvideocapturer.h10
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtcvideoengine2.cc414
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtcvideoengine2.h52
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtcvideoengine2_unittest.cc425
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtcvideoframe.cc128
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtcvideoframe.h56
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtcvideoframe_unittest.cc35
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtcvideoframefactory_unittest.cc12
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtcvoe.h11
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtcvoiceengine.cc506
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtcvoiceengine.h42
-rw-r--r--chromium/third_party/webrtc/media/engine/webrtcvoiceengine_unittest.cc581
-rw-r--r--chromium/third_party/webrtc/media/media.gyp20
-rw-r--r--chromium/third_party/webrtc/media/sctp/sctpdataengine.cc255
-rw-r--r--chromium/third_party/webrtc/media/sctp/sctpdataengine.h28
-rw-r--r--chromium/third_party/webrtc/media/sctp/sctpdataengine_unittest.cc113
-rw-r--r--chromium/third_party/webrtc/modules/audio_codec_speed_tests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/BUILD.gn41
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.cc7
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver_unittest_oldapi.cc22
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.cc59
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h11
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest_oldapi.cc72
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.cc64
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.h22
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.cc53
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_coding.gypi44
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/audio_coding_tests.gypi13
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.h5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory.h36
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory_unittest.cc127
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.cc59
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.h75
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder_unittest.cc64
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.cc59
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.h53
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.cc152
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h26
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc92
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng.gypi4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.c48
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.h25
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc329
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.c603
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.cc442
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h222
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc15
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h9
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/interfaces.gypi8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc12
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_decoder_factory.h37
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.cc22
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h49
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc25
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h3
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc16
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc3
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/include/audio_coding_module.h14
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc101
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h38
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc347
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h38
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc17
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc42
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h3
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.cc62
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.h50
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc61
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h28
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc11
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h18
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc24
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h31
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc43
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc127
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h53
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc37
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc33
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h17
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc19
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc50
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h20
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc20
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc18
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h7
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc88
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc36
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/include/neteq.h7
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc75
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h13
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h3
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc46
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi21
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc205
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h57
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc489
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc23
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc763
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc23
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc50
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet.cc (renamed from chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI_android.cc)16
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h23
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc15
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h9
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc33
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc9
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc47
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/test/RTPencode.cc41
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.cc25
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.h110
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer_unittest.cc135
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc14
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc51
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc112
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h11
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/APITest.cc12
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/Channel.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/EncodeDecodeTest.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.cc196
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.h48
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/TestAllCodecs.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/TestRedFec.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/TestStereo.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/TestVADDTX.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/TwoWayCommunication.cc13
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/delay_test.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/iSACTest.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/insert_packet_with_timing.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/opus_test.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/test/target_delay_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h29
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc110
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h8
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.cc17
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_decoder_unittests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/audio_device_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/audio_manager.cc12
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/build_info.cc7
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/opensles_player.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device.gypi5
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc17
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h6
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc5
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/include/audio_device.h6
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h31
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.mm340
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/audio_session_observer.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Configuration.mm136
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h30
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.h76
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm217
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.m18
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.mm11
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionTest.mm39
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/voice_processing_audio_unit.mm17
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/test/audio_device_test_api.cc45
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/test/func_test_manager.cc21
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc6
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/BUILD.gn46
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.cc725
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h192
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h89
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.cc336
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.cc206
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h79
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_sse2.cc219
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c)4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.c)0
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_neon.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_neon.c)0
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_sse2.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_sse2.c)0
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.cc56
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.h54
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h71
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c)20
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c)11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_mips.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_mips.c)1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_neon.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_neon.c)13
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c)24
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi50
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc69
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h15
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_unittest.cc)9
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/debug.proto6
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h3
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h6
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc72
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h10
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer_unittest.cc32
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_utils.cc11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging.h108
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.cc57
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.h41
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.cc65
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.h129
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c13
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c12
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c12
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/audio_file_processor.h14
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_replayer.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_test.cc1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/process_test.cc78
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/test/unpack.cc1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c)43
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.cc (renamed from chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c)19
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc1
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc30
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h24
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc9
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h19
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h6
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc8
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h3
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/congestion_controller.cc157
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/congestion_controller_unittest.cc157
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/include/congestion_controller.h61
-rw-r--r--chromium/third_party/webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h15
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/cropped_desktop_frame.cc2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.cc2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.h3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h4
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h10
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.cc3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.h7
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.cc6
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_region.cc3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/differ.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.cc12
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h4
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc56
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_shape.h17
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc44
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.h36
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.cc36
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h26
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm15
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc14
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.cc4
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h5
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc36
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h7
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc14
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h6
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/window_capturer.cc22
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm2
-rwxr-xr-xchromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc1
-rwxr-xr-xchromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/x11/shared_x_display.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h1
-rw-r--r--chromium/third_party/webrtc/modules/include/module_common_types.h74
-rw-r--r--chromium/third_party/webrtc/modules/media_file/media_file_impl.cc1
-rw-r--r--chromium/third_party/webrtc/modules/modules.gyp127
-rw-r--r--chromium/third_party/webrtc/modules/modules_java.gyp9
-rw-r--r--chromium/third_party/webrtc/modules/modules_java_chromium.gyp8
-rw-r--r--chromium/third_party/webrtc/modules/modules_tests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/modules/modules_unittests.isolate13
-rw-r--r--chromium/third_party/webrtc/modules/modules_unittests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/modules/pacing/bitrate_prober.cc54
-rw-r--r--chromium/third_party/webrtc/modules/pacing/bitrate_prober.h18
-rw-r--r--chromium/third_party/webrtc/modules/pacing/bitrate_prober_unittest.cc3
-rw-r--r--chromium/third_party/webrtc/modules/pacing/mock/mock_paced_sender.h4
-rw-r--r--chromium/third_party/webrtc/modules/pacing/paced_sender.cc67
-rw-r--r--chromium/third_party/webrtc/modules/pacing/paced_sender.h45
-rw-r--r--chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc488
-rw-r--r--chromium/third_party/webrtc/modules/pacing/packet_router.cc3
-rw-r--r--chromium/third_party/webrtc/modules/pacing/packet_router.h5
-rw-r--r--chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc12
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h3
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/send_time_history.h2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/inter_arrival.cc2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc55
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h14
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc40
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc8
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h4
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc37
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h3
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc3
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h3
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history.cc6
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history_unittest.cc76
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.cc1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.h1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc10
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc4
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.h1
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/send_side.cc7
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet.h3
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc17
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.h5
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc19
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h4
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc91
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h41
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_receiver.h6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h28
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h22
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h14
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi14
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.cc14
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.cc16
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.h4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc29
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc12
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.cc8
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc11
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec.cc11
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc9
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc60
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h13
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc9
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.cc29
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.h19
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc53
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rpsi.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sender_report.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc11
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc63
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc24
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc36
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc24
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc64
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc20
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc11
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc16
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h6
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc203
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h75
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc14
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.cc509
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.h187
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc16
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h16
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h56
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h33
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc252
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc48
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc11
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc23
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h14
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc38
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h16
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.cc9
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc9
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h7
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc53
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h12
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc118
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h31
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc39
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc147
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc65
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h7
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc7
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc56
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.h9
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc13
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.h1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc13
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc17
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc8
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc9
-rw-r--r--chromium/third_party/webrtc/modules/utility/OWNERS1
-rw-r--r--chromium/third_party/webrtc/modules/utility/include/file_recorder.h4
-rw-r--r--chromium/third_party/webrtc/modules/utility/include/jvm_android.h15
-rw-r--r--chromium/third_party/webrtc/modules/utility/include/mock/mock_process_thread.h6
-rw-r--r--chromium/third_party/webrtc/modules/utility/include/process_thread.h7
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/coder.cc145
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/coder.h60
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/file_player_impl.h1
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc3
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h23
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/jvm_android.cc16
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc14
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h2
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/test/video_capture_unittest.cc73
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/video_capture.gypi1
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc52
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h14
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/BUILD.gn13
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/OWNERS3
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codec_database.cc2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264.gypi2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc43
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc30
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc6
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc44
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h6
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.cc20
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.h8
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc18
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc27
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc18
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h82
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc10
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc46
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h20
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc6
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.gyp21
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc31
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_noop.cc39
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.cc126
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.h72
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_buffer2.cc154
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_buffer2.h83
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_buffer2_unittest.cc329
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_object.cc45
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/frame_object.h40
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/generic_encoder.cc228
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/generic_encoder.h102
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/include/video_coding.h42
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/include/video_coding_defines.h49
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/jitter_buffer.cc22
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/jitter_buffer.h4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/jitter_buffer_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/media_opt_util.cc29
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/media_opt_util.h9
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/media_optimization.cc160
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/media_optimization.h25
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/media_optimization_unittest.cc12
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/nack_module.cc64
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/nack_module.h15
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc24
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc87
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/packet_buffer.h53
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/packet_buffer_unittest.cc1414
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/qm_select.cc953
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/qm_select.h356
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/qm_select_data.h227
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/qm_select_unittest.cc1307
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/receiver.cc35
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/receiver.h5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/receiver_unittest.cc60
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.cc486
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.h152
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/test/rtp_player.cc8
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/test/stream_generator.h1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/test/vcm_payload_sink_factory.cc1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/timing.h6
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/frame_dropper.cc14
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.cc196
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.h56
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer_unittest.cc176
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.cc124
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.h20
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler_unittest.cc212
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/video_coding_utility.gyp2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/vp8_header_parser.cc3
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_coding.gypi11
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_coding_impl.cc61
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_coding_impl.h67
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_receiver.cc158
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_sender.cc90
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/video_sender_unittest.cc75
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/BUILD.gn7
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/brightness_detection.cc136
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/brightness_detection.h35
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/content_analysis.cc281
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/content_analysis.h87
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/content_analysis_sse2.cc271
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/deflickering.cc402
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/deflickering.h55
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.cc53
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.h17
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/include/video_processing.h44
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/test/brightness_detection_test.cc122
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/test/content_metrics_test.cc50
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/test/deflickering_test.cc100
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/test/denoiser_test.cc58
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/test/video_processing_unittest.cc143
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.cc8
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.h13
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.cc72
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.h7
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.cc112
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.h7
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc94
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.h7
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.cc48
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.h22
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/video_decimator.cc8
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/video_denoiser.cc524
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/video_denoiser.h49
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/video_processing.gypi7
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/video_processing_impl.cc111
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/video_processing_impl.h14
-rw-r--r--chromium/third_party/webrtc/modules/video_render/BUILD.gn178
-rw-r--r--chromium/third_party/webrtc/modules/video_render/OWNERS12
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.cc316
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.h154
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc450
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.h95
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.cc474
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.h83
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.cc397
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.h57
-rw-r--r--chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.cc195
-rw-r--r--chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.h128
-rw-r--r--chromium/third_party/webrtc/modules/video_render/i_video_render.h129
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/open_gles20.h64
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm330
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.h45
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm61
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.h87
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm285
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h105
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm170
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h34
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm163
-rw-r--r--chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.cc261
-rw-r--r--chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.h128
-rw-r--r--chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.cc315
-rw-r--r--chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.h96
-rw-r--r--chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.cc153
-rw-r--r--chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.h58
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.h33
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.mm87
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.h32
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.mm55
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.cc1987
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.h178
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc280
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h146
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h141
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm253
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.h192
-rw-r--r--chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.mm1247
-rw-r--r--chromium/third_party/webrtc/modules/video_render/test/testAPI/renderStartImage.bmpbin304182 -> 0 bytes
-rw-r--r--chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.cc645
-rw-r--r--chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.h18
-rw-r--r--chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm69
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render.gypi218
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render.h268
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_defines.h70
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_impl.cc602
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_impl.h215
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_internal.h27
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_internal_impl.cc825
-rw-r--r--chromium/third_party/webrtc/modules/video_render/windows/i_video_render_win.h110
-rw-r--r--chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.cc1160
-rw-r--r--chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.h256
-rw-r--r--chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.cc337
-rw-r--r--chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.h137
-rw-r--r--chromium/third_party/webrtc/p2p/base/asyncstuntcpsocket.h2
-rw-r--r--chromium/third_party/webrtc/p2p/base/asyncstuntcpsocket_unittest.cc10
-rw-r--r--chromium/third_party/webrtc/p2p/base/basicpacketsocketfactory.cc1
-rw-r--r--chromium/third_party/webrtc/p2p/base/candidate.h16
-rw-r--r--chromium/third_party/webrtc/p2p/base/dtlstransport.h107
-rw-r--r--chromium/third_party/webrtc/p2p/base/dtlstransportchannel.cc44
-rw-r--r--chromium/third_party/webrtc/p2p/base/dtlstransportchannel.h13
-rw-r--r--chromium/third_party/webrtc/p2p/base/dtlstransportchannel_unittest.cc190
-rw-r--r--chromium/third_party/webrtc/p2p/base/fakeportallocator.h (renamed from chromium/third_party/webrtc/p2p/client/fakeportallocator.h)116
-rw-r--r--chromium/third_party/webrtc/p2p/base/faketransportcontroller.h104
-rw-r--r--chromium/third_party/webrtc/p2p/base/p2ptransport.h2
-rw-r--r--chromium/third_party/webrtc/p2p/base/p2ptransportchannel.cc71
-rw-r--r--chromium/third_party/webrtc/p2p/base/p2ptransportchannel.h11
-rw-r--r--chromium/third_party/webrtc/p2p/base/p2ptransportchannel_unittest.cc178
-rw-r--r--chromium/third_party/webrtc/p2p/base/packetsocketfactory.h1
-rw-r--r--chromium/third_party/webrtc/p2p/base/port.cc48
-rw-r--r--chromium/third_party/webrtc/p2p/base/port.h41
-rw-r--r--chromium/third_party/webrtc/p2p/base/port_unittest.cc204
-rw-r--r--chromium/third_party/webrtc/p2p/base/portallocator.cc76
-rw-r--r--chromium/third_party/webrtc/p2p/base/portallocator.h114
-rw-r--r--chromium/third_party/webrtc/p2p/base/portallocator_unittest.cc205
-rw-r--r--chromium/third_party/webrtc/p2p/base/pseudotcp.cc49
-rw-r--r--chromium/third_party/webrtc/p2p/base/pseudotcp_unittest.cc9
-rw-r--r--chromium/third_party/webrtc/p2p/base/relayport.cc4
-rw-r--r--chromium/third_party/webrtc/p2p/base/relayport_unittest.cc14
-rw-r--r--chromium/third_party/webrtc/p2p/base/relayserver.cc4
-rw-r--r--chromium/third_party/webrtc/p2p/base/relayserver_unittest.cc61
-rw-r--r--chromium/third_party/webrtc/p2p/base/stun.cc5
-rw-r--r--chromium/third_party/webrtc/p2p/base/stun_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/p2p/base/stunport.cc10
-rw-r--r--chromium/third_party/webrtc/p2p/base/stunport.h3
-rw-r--r--chromium/third_party/webrtc/p2p/base/stunport_unittest.cc15
-rw-r--r--chromium/third_party/webrtc/p2p/base/stunrequest.cc8
-rw-r--r--chromium/third_party/webrtc/p2p/base/stunrequest_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/p2p/base/stunserver.h5
-rw-r--r--chromium/third_party/webrtc/p2p/base/stunserver_unittest.cc9
-rw-r--r--chromium/third_party/webrtc/p2p/base/tcpport.cc39
-rw-r--r--chromium/third_party/webrtc/p2p/base/tcpport.h4
-rw-r--r--chromium/third_party/webrtc/p2p/base/tcpport_unittest.cc87
-rw-r--r--chromium/third_party/webrtc/p2p/base/testrelayserver.h11
-rw-r--r--chromium/third_party/webrtc/p2p/base/transport.cc122
-rw-r--r--chromium/third_party/webrtc/p2p/base/transport.h23
-rw-r--r--chromium/third_party/webrtc/p2p/base/transport_unittest.cc189
-rw-r--r--chromium/third_party/webrtc/p2p/base/transportchannel.h4
-rw-r--r--chromium/third_party/webrtc/p2p/base/transportchannelimpl.h2
-rw-r--r--chromium/third_party/webrtc/p2p/base/transportcontroller.cc251
-rw-r--r--chromium/third_party/webrtc/p2p/base/transportcontroller.h74
-rw-r--r--chromium/third_party/webrtc/p2p/base/transportcontroller_unittest.cc18
-rw-r--r--chromium/third_party/webrtc/p2p/base/transportdescription.h4
-rw-r--r--chromium/third_party/webrtc/p2p/base/transportdescriptionfactory.cc6
-rw-r--r--chromium/third_party/webrtc/p2p/base/transportdescriptionfactory_unittest.cc71
-rw-r--r--chromium/third_party/webrtc/p2p/base/turnport.cc72
-rw-r--r--chromium/third_party/webrtc/p2p/base/turnport.h11
-rw-r--r--chromium/third_party/webrtc/p2p/base/turnport_unittest.cc22
-rw-r--r--chromium/third_party/webrtc/p2p/base/turnserver.cc4
-rw-r--r--chromium/third_party/webrtc/p2p/base/turnserver.h7
-rw-r--r--chromium/third_party/webrtc/p2p/client/basicportallocator.cc165
-rw-r--r--chromium/third_party/webrtc/p2p/client/basicportallocator.h47
-rw-r--r--chromium/third_party/webrtc/p2p/client/basicportallocator_unittest.cc (renamed from chromium/third_party/webrtc/p2p/client/portallocator_unittest.cc)476
-rw-r--r--chromium/third_party/webrtc/p2p/p2p.gyp21
-rw-r--r--chromium/third_party/webrtc/p2p/quic/quicconnectionhelper_unittest.cc10
-rw-r--r--chromium/third_party/webrtc/p2p/quic/quicsession.cc49
-rw-r--r--chromium/third_party/webrtc/p2p/quic/quicsession.h13
-rw-r--r--chromium/third_party/webrtc/p2p/quic/quicsession_unittest.cc70
-rw-r--r--chromium/third_party/webrtc/p2p/quic/quictransport.cc114
-rw-r--r--chromium/third_party/webrtc/p2p/quic/quictransport.h64
-rw-r--r--chromium/third_party/webrtc/p2p/quic/quictransport_unittest.cc160
-rw-r--r--chromium/third_party/webrtc/p2p/quic/quictransportchannel.cc48
-rw-r--r--chromium/third_party/webrtc/p2p/quic/quictransportchannel.h37
-rw-r--r--chromium/third_party/webrtc/p2p/quic/quictransportchannel_unittest.cc106
-rw-r--r--chromium/third_party/webrtc/p2p/quic/reliablequicstream.cc19
-rw-r--r--chromium/third_party/webrtc/p2p/quic/reliablequicstream.h13
-rw-r--r--chromium/third_party/webrtc/p2p/quic/reliablequicstream_unittest.cc28
-rw-r--r--chromium/third_party/webrtc/p2p/stunprober/main.cc7
-rw-r--r--chromium/third_party/webrtc/p2p/stunprober/stunprober.cc16
-rw-r--r--chromium/third_party/webrtc/p2p/stunprober/stunprober.h2
-rw-r--r--chromium/third_party/webrtc/p2p/stunprober/stunprober_unittest.cc15
-rw-r--r--chromium/third_party/webrtc/pc/audiomonitor.cc2
-rw-r--r--chromium/third_party/webrtc/pc/audiomonitor.h12
-rw-r--r--chromium/third_party/webrtc/pc/bundlefilter.h6
-rw-r--r--chromium/third_party/webrtc/pc/channel.cc629
-rw-r--r--chromium/third_party/webrtc/pc/channel.h295
-rw-r--r--chromium/third_party/webrtc/pc/channel_unittest.cc1914
-rw-r--r--chromium/third_party/webrtc/pc/channelmanager.cc73
-rw-r--r--chromium/third_party/webrtc/pc/channelmanager.h29
-rw-r--r--chromium/third_party/webrtc/pc/channelmanager_unittest.cc74
-rw-r--r--chromium/third_party/webrtc/pc/currentspeakermonitor.cc6
-rw-r--r--chromium/third_party/webrtc/pc/currentspeakermonitor.h16
-rw-r--r--chromium/third_party/webrtc/pc/externalhmac.h6
-rw-r--r--chromium/third_party/webrtc/pc/mediamonitor.cc2
-rw-r--r--chromium/third_party/webrtc/pc/mediamonitor.h6
-rw-r--r--chromium/third_party/webrtc/pc/mediasession.cc244
-rw-r--r--chromium/third_party/webrtc/pc/mediasession.h55
-rw-r--r--chromium/third_party/webrtc/pc/mediasession_unittest.cc153
-rw-r--r--chromium/third_party/webrtc/pc/mediasink.h6
-rwxr-xr-xchromium/third_party/webrtc/pc/pc.gyp10
-rw-r--r--chromium/third_party/webrtc/pc/rtcpmuxfilter.h6
-rw-r--r--chromium/third_party/webrtc/pc/srtpfilter.cc8
-rw-r--r--chromium/third_party/webrtc/pc/srtpfilter.h21
-rw-r--r--chromium/third_party/webrtc/pc/srtpfilter_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/pc/voicechannel.h6
-rw-r--r--chromium/third_party/webrtc/pc/yuvscaler_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/rtc_unittests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/sdk/BUILD.gn155
-rw-r--r--chromium/third_party/webrtc/sdk/DEPS (renamed from chromium/third_party/webrtc/modules/video_render/DEPS)5
-rw-r--r--chromium/third_party/webrtc/sdk/OWNERS (renamed from chromium/third_party/webrtc/api/objc/OWNERS)0
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/NSString+StdString.h (renamed from chromium/third_party/webrtc/base/objc/NSString+StdString.h)0
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/NSString+StdString.mm (renamed from chromium/third_party/webrtc/base/objc/NSString+StdString.mm)0
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCAVFoundationVideoSource+Private.h)4
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoSource.mm (renamed from chromium/third_party/webrtc/api/objc/RTCAVFoundationVideoSource.mm)26
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCAudioTrack+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCAudioTrack+Private.h)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCAudioTrack.mm (renamed from chromium/third_party/webrtc/api/objc/RTCAudioTrack.mm)9
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCCameraPreviewView.m (renamed from chromium/third_party/webrtc/base/objc/RTCCameraPreviewView.m)8
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCConfiguration+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCConfiguration+Private.h)16
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCConfiguration.mm (renamed from chromium/third_party/webrtc/api/objc/RTCConfiguration.mm)107
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDataChannel+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCDataChannel+Private.h)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDataChannel.mm (renamed from chromium/third_party/webrtc/api/objc/RTCDataChannel.mm)29
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDataChannelConfiguration+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCDataChannelConfiguration+Private.h)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDataChannelConfiguration.mm (renamed from chromium/third_party/webrtc/api/objc/RTCDataChannelConfiguration.mm)5
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDispatcher+Private.h (renamed from chromium/third_party/webrtc/base/objc/RTCDispatcher+Private.h)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDispatcher.m (renamed from chromium/third_party/webrtc/base/objc/RTCDispatcher.m)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m (renamed from chromium/third_party/webrtc/api/objc/RTCEAGLVideoView.m)6
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCFieldTrials.mm (renamed from chromium/third_party/webrtc/base/objc/RTCFieldTrials.mm)7
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCFileLogger.mm (renamed from chromium/third_party/webrtc/base/objc/RTCFileLogger.mm)11
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCIceCandidate+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCIceCandidate+Private.h)7
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCIceCandidate.mm (renamed from chromium/third_party/webrtc/api/objc/RTCIceCandidate.mm)13
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCIceServer+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCIceServer+Private.h)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCIceServer.mm (renamed from chromium/third_party/webrtc/api/objc/RTCIceServer.mm)5
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCLogging.mm (renamed from chromium/third_party/webrtc/base/objc/RTCLogging.mm)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaConstraints+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCMediaConstraints+Private.h)7
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaConstraints.mm (renamed from chromium/third_party/webrtc/api/objc/RTCMediaConstraints.mm)11
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaStream+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCMediaStream+Private.h)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaStream.mm (renamed from chromium/third_party/webrtc/api/objc/RTCMediaStream.mm)13
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaStreamTrack+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCMediaStreamTrack+Private.h)8
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaStreamTrack.mm (renamed from chromium/third_party/webrtc/api/objc/RTCMediaStreamTrack.mm)47
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCNSGLVideoView.m (renamed from chromium/third_party/webrtc/api/objc/RTCNSGLVideoView.m)9
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.h (renamed from chromium/third_party/webrtc/api/objc/RTCOpenGLVideoRenderer.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm (renamed from chromium/third_party/webrtc/api/objc/RTCOpenGLVideoRenderer.mm)11
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+DataChannel.mm (renamed from chromium/third_party/webrtc/api/objc/RTCPeerConnection+DataChannel.mm)8
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCPeerConnection+Private.h)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+Stats.mm (renamed from chromium/third_party/webrtc/api/objc/RTCPeerConnection+Stats.mm)10
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection.mm (renamed from chromium/third_party/webrtc/api/objc/RTCPeerConnection.mm)90
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCPeerConnectionFactory+Private.h)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory.mm (renamed from chromium/third_party/webrtc/api/objc/RTCPeerConnectionFactory.mm)41
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpCodecParameters+Private.h28
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpCodecParameters.mm65
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpEncodingParameters+Private.h28
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpEncodingParameters.mm46
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpParameters+Private.h28
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpParameters.mm56
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpReceiver+Private.h29
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpReceiver.mm88
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpSender+Private.h29
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpSender.mm94
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCSSLAdapter.mm (renamed from chromium/third_party/webrtc/base/objc/RTCSSLAdapter.mm)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCSessionDescription+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCSessionDescription+Private.h)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCSessionDescription.mm (renamed from chromium/third_party/webrtc/api/objc/RTCSessionDescription.mm)9
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCStatsReport+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCStatsReport+Private.h)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCStatsReport.mm (renamed from chromium/third_party/webrtc/api/objc/RTCStatsReport.mm)9
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCTracing.mm (renamed from chromium/third_party/webrtc/base/objc/RTCTracing.mm)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCUIApplication.h (renamed from chromium/third_party/webrtc/base/objc/RTCUIApplication.h)4
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCUIApplication.mm (renamed from chromium/third_party/webrtc/base/objc/RTCUIApplication.mm)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCVideoFrame+Private.h)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame.mm (renamed from chromium/third_party/webrtc/api/objc/RTCVideoFrame.mm)12
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCVideoRendererAdapter+Private.h)4
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.h (renamed from chromium/third_party/webrtc/api/objc/RTCVideoRendererAdapter.h)0
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.mm (renamed from chromium/third_party/webrtc/api/objc/RTCVideoRendererAdapter.mm)37
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoSource+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCVideoSource+Private.h)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoSource.mm (renamed from chromium/third_party/webrtc/api/objc/RTCVideoSource.mm)4
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoTrack+Private.h (renamed from chromium/third_party/webrtc/api/objc/RTCVideoTrack+Private.h)2
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoTrack.mm (renamed from chromium/third_party/webrtc/api/objc/RTCVideoTrack.mm)13
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.h (renamed from chromium/third_party/webrtc/api/objc/avfoundationvideocapturer.h)5
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm (renamed from chromium/third_party/webrtc/api/objc/avfoundationvideocapturer.mm)14
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h (renamed from chromium/third_party/webrtc/api/objc/RTCAVFoundationVideoSource.h)4
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCAudioTrack.h (renamed from chromium/third_party/webrtc/api/objc/RTCAudioTrack.h)4
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCCameraPreviewView.h (renamed from chromium/third_party/webrtc/base/objc/RTCCameraPreviewView.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCConfiguration.h (renamed from chromium/third_party/webrtc/api/objc/RTCConfiguration.h)11
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCDataChannel.h (renamed from chromium/third_party/webrtc/api/objc/RTCDataChannel.h)6
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h (renamed from chromium/third_party/webrtc/api/objc/RTCDataChannelConfiguration.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCDispatcher.h (renamed from chromium/third_party/webrtc/base/objc/RTCDispatcher.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h (renamed from chromium/third_party/webrtc/api/objc/RTCEAGLVideoView.h)5
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCFieldTrials.h (renamed from chromium/third_party/webrtc/base/objc/RTCFieldTrials.h)6
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCFileLogger.h (renamed from chromium/third_party/webrtc/base/objc/RTCFileLogger.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCIceCandidate.h (renamed from chromium/third_party/webrtc/api/objc/RTCIceCandidate.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCIceServer.h (renamed from chromium/third_party/webrtc/api/objc/RTCIceServer.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCLogging.h (renamed from chromium/third_party/webrtc/base/objc/RTCLogging.h)16
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMacros.h (renamed from chromium/third_party/webrtc/base/objc/RTCMacros.h)6
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMediaConstraints.h (renamed from chromium/third_party/webrtc/api/objc/RTCMediaConstraints.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMediaStream.h (renamed from chromium/third_party/webrtc/api/objc/RTCMediaStream.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMediaStreamTrack.h (renamed from chromium/third_party/webrtc/api/objc/RTCMediaStreamTrack.h)6
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h (renamed from chromium/third_party/webrtc/api/objc/RTCNSGLVideoView.h)8
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnection.h (renamed from chromium/third_party/webrtc/api/objc/RTCPeerConnection.h)29
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnectionFactory.h (renamed from chromium/third_party/webrtc/api/objc/RTCPeerConnectionFactory.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpCodecParameters.h58
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpEncodingParameters.h32
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpParameters.h32
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpReceiver.h50
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpSender.h46
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCSSLAdapter.h (renamed from chromium/third_party/webrtc/base/objc/RTCSSLAdapter.h)6
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCSessionDescription.h (renamed from chromium/third_party/webrtc/api/objc/RTCSessionDescription.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCStatsReport.h (renamed from chromium/third_party/webrtc/api/objc/RTCStatsReport.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCTracing.h (renamed from chromium/third_party/webrtc/base/objc/RTCTracing.h)10
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h (renamed from chromium/third_party/webrtc/api/objc/RTCVideoFrame.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoRenderer.h (renamed from chromium/third_party/webrtc/api/objc/RTCVideoRenderer.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoSource.h (renamed from chromium/third_party/webrtc/api/objc/RTCVideoSource.h)3
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoTrack.h (renamed from chromium/third_party/webrtc/api/objc/RTCVideoTrack.h)5
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/WebRTC.h (renamed from chromium/third_party/webrtc/build/ios/SDK/Framework/WebRTC/WebRTC.h)19
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Info.plist (renamed from chromium/third_party/webrtc/build/ios/SDK/Framework/WebRTC/Info.plist)8
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/Modules/module.modulemap6
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCConfigurationTest.mm (renamed from chromium/third_party/webrtc/api/objctests/RTCConfigurationTest.mm)35
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCDataChannelConfigurationTest.mm (renamed from chromium/third_party/webrtc/api/objctests/RTCDataChannelConfigurationTest.mm)6
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCIceCandidateTest.mm (renamed from chromium/third_party/webrtc/api/objctests/RTCIceCandidateTest.mm)10
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCIceServerTest.mm (renamed from chromium/third_party/webrtc/api/objctests/RTCIceServerTest.mm)6
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCMediaConstraintsTest.mm (renamed from chromium/third_party/webrtc/api/objctests/RTCMediaConstraintsTest.mm)10
-rw-r--r--chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCSessionDescriptionTest.mm (renamed from chromium/third_party/webrtc/api/objctests/RTCSessionDescriptionTest.mm)6
-rw-r--r--chromium/third_party/webrtc/sdk/objc/WebRTC-Prefix.pch (renamed from chromium/third_party/webrtc/build/WebRTC-Prefix.pch)6
-rw-r--r--chromium/third_party/webrtc/sdk/objc/WebRTC.podspec (renamed from chromium/third_party/webrtc/build/ios/SDK/WebRTC.podspec)2
-rw-r--r--chromium/third_party/webrtc/sdk/sdk.gyp274
-rw-r--r--chromium/third_party/webrtc/sdk/sdk.gypi26
-rw-r--r--chromium/third_party/webrtc/sdk/sdk_tests.gyp42
-rw-r--r--chromium/third_party/webrtc/stream.h56
-rw-r--r--chromium/third_party/webrtc/supplement.gypi8
-rw-r--r--chromium/third_party/webrtc/system_wrappers/BUILD.gn2
-rw-r--r--chromium/third_party/webrtc/system_wrappers/include/aligned_malloc.h4
-rw-r--r--chromium/third_party/webrtc/system_wrappers/include/clock.h5
-rw-r--r--chromium/third_party/webrtc/system_wrappers/include/data_log_impl.h8
-rw-r--r--chromium/third_party/webrtc/system_wrappers/include/metrics.h14
-rw-r--r--chromium/third_party/webrtc/system_wrappers/include/tick_util.h190
-rw-r--r--chromium/third_party/webrtc/system_wrappers/include/utf_util_win.h7
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/aligned_malloc_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/clock.cc6
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/condition_variable_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/event_timer_posix.h8
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/file_impl.h5
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/logging_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp.cc5
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp_unittest.cc40
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/tick_util.cc42
-rw-r--r--chromium/third_party/webrtc/system_wrappers/source/trace_impl.h5
-rw-r--r--chromium/third_party/webrtc/system_wrappers/system_wrappers.gyp2
-rw-r--r--chromium/third_party/webrtc/system_wrappers/system_wrappers_tests.gyp25
-rw-r--r--chromium/third_party/webrtc/system_wrappers/system_wrappers_unittests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/system_wrappers/test/TestSort/TestSort.cc10
-rw-r--r--chromium/third_party/webrtc/test/call_test.cc29
-rw-r--r--chromium/third_party/webrtc/test/call_test.h25
-rw-r--r--chromium/third_party/webrtc/test/channel_transport/udp_transport_impl.cc25
-rw-r--r--chromium/third_party/webrtc/test/configurable_frame_size_encoder.h4
-rw-r--r--chromium/third_party/webrtc/test/direct_transport.h1
-rw-r--r--chromium/third_party/webrtc/test/fake_audio_device.h8
-rw-r--r--chromium/third_party/webrtc/test/fake_network_pipe.h4
-rw-r--r--chromium/third_party/webrtc/test/fake_network_pipe_unittest.cc21
-rw-r--r--chromium/third_party/webrtc/test/fake_texture_frame.h6
-rw-r--r--chromium/third_party/webrtc/test/frame_generator.cc31
-rw-r--r--chromium/third_party/webrtc/test/frame_generator_capturer.cc6
-rw-r--r--chromium/third_party/webrtc/test/frame_generator_capturer.h9
-rw-r--r--chromium/third_party/webrtc/test/frame_generator_unittest.cc23
-rw-r--r--chromium/third_party/webrtc/test/frame_utils.cc59
-rw-r--r--chromium/third_party/webrtc/test/frame_utils.h11
-rw-r--r--chromium/third_party/webrtc/test/fuzzers/BUILD.gn9
-rw-r--r--chromium/third_party/webrtc/test/fuzzers/producer_fec_fuzzer.cc13
-rw-r--r--chromium/third_party/webrtc/test/fuzzers/rtcp_receiver_fuzzer.cc1
-rw-r--r--chromium/third_party/webrtc/test/fuzzers/rtp_packet_fuzzer.cc29
-rw-r--r--chromium/third_party/webrtc/test/layer_filtering_transport.cc4
-rw-r--r--chromium/third_party/webrtc/test/mock_voe_channel_proxy.h6
-rw-r--r--chromium/third_party/webrtc/test/mock_voice_engine.h9
-rw-r--r--chromium/third_party/webrtc/test/rtp_file_reader.cc4
-rw-r--r--chromium/third_party/webrtc/test/rtp_file_reader_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/test/rtp_file_writer_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/test/rtp_rtcp_observer.h3
-rw-r--r--chromium/third_party/webrtc/test/test.gyp28
-rw-r--r--chromium/third_party/webrtc/test/test_suite.h5
-rw-r--r--chromium/third_party/webrtc/test/test_support_unittests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/test/testsupport/fileutils.cc5
-rw-r--r--chromium/third_party/webrtc/tools/BUILD.gn3
-rw-r--r--chromium/third_party/webrtc/tools/agc/activity_metric.cc10
-rw-r--r--chromium/third_party/webrtc/tools/agc/agc_harness.cc9
-rw-r--r--chromium/third_party/webrtc/tools/e2e_quality/audio/audio_e2e_harness.cc5
-rw-r--r--chromium/third_party/webrtc/tools/force_mic_volume_max/force_mic_volume_max.cc1
-rw-r--r--chromium/third_party/webrtc/tools/frame_editing/frame_editing_lib.cc4
-rw-r--r--chromium/third_party/webrtc/tools/frame_editing/frame_editing_unittest.cc10
-rw-r--r--chromium/third_party/webrtc/tools/tools.gyp21
-rw-r--r--chromium/third_party/webrtc/tools/tools_unittests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/typedefs.h3
-rw-r--r--chromium/third_party/webrtc/video/BUILD.gn11
-rw-r--r--chromium/third_party/webrtc/video/DEPS2
-rw-r--r--chromium/third_party/webrtc/video/call_stats.cc2
-rw-r--r--chromium/third_party/webrtc/video/call_stats_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/video/encoded_frame_callback_adapter.cc4
-rw-r--r--chromium/third_party/webrtc/video/encoded_frame_callback_adapter.h4
-rw-r--r--chromium/third_party/webrtc/video/encoder_state_feedback.cc65
-rw-r--r--chromium/third_party/webrtc/video/encoder_state_feedback.h31
-rw-r--r--chromium/third_party/webrtc/video/encoder_state_feedback_unittest.cc84
-rw-r--r--chromium/third_party/webrtc/video/end_to_end_tests.cc302
-rw-r--r--chromium/third_party/webrtc/video/full_stack.cc4
-rw-r--r--chromium/third_party/webrtc/video/overuse_frame_detector.cc10
-rw-r--r--chromium/third_party/webrtc/video/overuse_frame_detector_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/video/payload_router.cc155
-rw-r--r--chromium/third_party/webrtc/video/payload_router.h44
-rw-r--r--chromium/third_party/webrtc/video/payload_router_unittest.cc212
-rw-r--r--chromium/third_party/webrtc/video/receive_statistics_proxy.cc29
-rw-r--r--chromium/third_party/webrtc/video/receive_statistics_proxy.h7
-rw-r--r--chromium/third_party/webrtc/video/rtp_stream_receiver.cc542
-rw-r--r--chromium/third_party/webrtc/video/rtp_stream_receiver.h (renamed from chromium/third_party/webrtc/video/vie_receiver.h)95
-rw-r--r--chromium/third_party/webrtc/video/screenshare_loopback.cc6
-rw-r--r--chromium/third_party/webrtc/video/send_delay_stats.cc118
-rw-r--r--chromium/third_party/webrtc/video/send_delay_stats.h93
-rw-r--r--chromium/third_party/webrtc/video/send_delay_stats_unittest.cc122
-rw-r--r--chromium/third_party/webrtc/video/send_statistics_proxy.cc85
-rw-r--r--chromium/third_party/webrtc/video/send_statistics_proxy.h10
-rw-r--r--chromium/third_party/webrtc/video/send_statistics_proxy_unittest.cc168
-rw-r--r--chromium/third_party/webrtc/video/video_capture_input.cc12
-rw-r--r--chromium/third_party/webrtc/video/video_capture_input.h3
-rw-r--r--chromium/third_party/webrtc/video/video_capture_input_unittest.cc74
-rw-r--r--chromium/third_party/webrtc/video/video_decoder.cc1
-rw-r--r--chromium/third_party/webrtc/video/video_encoder.cc7
-rw-r--r--chromium/third_party/webrtc/video/video_encoder_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/video/video_loopback.cc6
-rw-r--r--chromium/third_party/webrtc/video/video_quality_test.cc75
-rw-r--r--chromium/third_party/webrtc/video/video_quality_test.h1
-rw-r--r--chromium/third_party/webrtc/video/video_receive_stream.cc236
-rw-r--r--chromium/third_party/webrtc/video/video_receive_stream.h41
-rw-r--r--chromium/third_party/webrtc/video/video_send_stream.cc663
-rw-r--r--chromium/third_party/webrtc/video/video_send_stream.h66
-rw-r--r--chromium/third_party/webrtc/video/video_send_stream_tests.cc154
-rw-r--r--chromium/third_party/webrtc/video/video_stream_decoder.cc139
-rw-r--r--chromium/third_party/webrtc/video/video_stream_decoder.h111
-rw-r--r--chromium/third_party/webrtc/video/vie_channel.cc515
-rw-r--r--chromium/third_party/webrtc/video/vie_channel.h311
-rw-r--r--chromium/third_party/webrtc/video/vie_encoder.cc295
-rw-r--r--chromium/third_party/webrtc/video/vie_encoder.h80
-rw-r--r--chromium/third_party/webrtc/video/vie_receiver.cc389
-rw-r--r--chromium/third_party/webrtc/video/vie_remb.cc1
-rw-r--r--chromium/third_party/webrtc/video/vie_remb_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/video/vie_sync_module.cc29
-rw-r--r--chromium/third_party/webrtc/video/vie_sync_module.h16
-rw-r--r--chromium/third_party/webrtc/video/webrtc_video.gypi11
-rw-r--r--chromium/third_party/webrtc/video_encoder.h3
-rw-r--r--chromium/third_party/webrtc/video_engine_tests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/video_frame.h26
-rw-r--r--chromium/third_party/webrtc/video_receive_stream.h17
-rw-r--r--chromium/third_party/webrtc/video_send_stream.h18
-rw-r--r--chromium/third_party/webrtc/voice_engine/channel.cc52
-rw-r--r--chromium/third_party/webrtc/voice_engine/channel.h10
-rw-r--r--chromium/third_party/webrtc/voice_engine/channel_manager.cc2
-rw-r--r--chromium/third_party/webrtc/voice_engine/channel_proxy.cc23
-rw-r--r--chromium/third_party/webrtc/voice_engine/channel_proxy.h9
-rw-r--r--chromium/third_party/webrtc/voice_engine/monitor_module.cc8
-rw-r--r--chromium/third_party/webrtc/voice_engine/shared_data.cc2
-rw-r--r--chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/conference_transport.cc2
-rw-r--r--chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/conference_transport.h4
-rw-r--r--chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/loudest_filter.cc7
-rw-r--r--chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/loudest_filter.h6
-rw-r--r--chromium/third_party/webrtc/voice_engine/test/auto_test/voe_conference_test.cc6
-rw-r--r--chromium/third_party/webrtc/voice_engine/test/auto_test/voe_output_test.cc2
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_base_impl.cc7
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_external_media_impl.cc6
-rw-r--r--chromium/third_party/webrtc/voice_engine/voe_network_impl.cc9
-rw-r--r--chromium/third_party/webrtc/voice_engine/voice_engine.gyp21
-rw-r--r--chromium/third_party/webrtc/voice_engine/voice_engine_unittests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/webrtc.gyp43
-rwxr-xr-xchromium/third_party/webrtc/webrtc_examples.gyp40
-rw-r--r--chromium/third_party/webrtc/webrtc_nonparallel_tests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/webrtc_perf_tests_apk.isolate26
-rw-r--r--chromium/third_party/webrtc/webrtc_tests.gypi75
1493 files changed, 40677 insertions, 44538 deletions
diff --git a/chromium/third_party/webrtc/BUILD.gn b/chromium/third_party/webrtc/BUILD.gn
index 9f4fd5c7a66..01fa042af36 100644
--- a/chromium/third_party/webrtc/BUILD.gn
+++ b/chromium/third_party/webrtc/BUILD.gn
@@ -114,10 +114,18 @@ config("common_config") {
}
if (is_clang) {
- cflags += [ "-Wthread-safety" ]
+ cflags += [
+ "-Wimplicit-fallthrough",
+ "-Wthread-safety",
+ "-Winconsistent-missing-override",
+ ]
}
}
+ if (rtc_enable_libevent) {
+ defines += [ "WEBRTC_BUILD_LIBEVENT" ]
+ }
+
if (current_cpu == "arm64") {
defines += [ "WEBRTC_ARCH_ARM64" ]
defines += [ "WEBRTC_HAS_NEON" ]
@@ -129,8 +137,6 @@ config("common_config") {
defines += [ "WEBRTC_ARCH_ARM_V7" ]
if (arm_use_neon) {
defines += [ "WEBRTC_HAS_NEON" ]
- } else if (arm_optionally_use_neon) {
- defines += [ "WEBRTC_DETECT_NEON" ]
}
}
}
@@ -169,7 +175,6 @@ source_set("webrtc") {
sources = [
"call.h",
"config.h",
- "frame_callback.h",
"transport.h",
]
@@ -202,10 +207,7 @@ source_set("webrtc") {
]
if (build_with_chromium) {
- deps += [
- "modules/video_capture",
- "modules/video_render",
- ]
+ deps += [ "modules/video_capture" ]
}
if (rtc_enable_protobuf) {
@@ -220,7 +222,6 @@ if (!build_with_chromium) {
deps = [
":webrtc",
"modules/video_capture:video_capture_internal_impl",
- "modules/video_render:video_render_internal_impl",
"test",
]
}
@@ -260,6 +261,8 @@ source_set("rtc_event_log") {
sources = [
"call/rtc_event_log.cc",
"call/rtc_event_log.h",
+ "call/rtc_event_log_helper_thread.cc",
+ "call/rtc_event_log_helper_thread.h",
]
defines = []
@@ -281,6 +284,29 @@ source_set("rtc_event_log") {
}
}
+if (rtc_enable_protobuf) {
+ source_set("rtc_event_log_parser") {
+ sources = [
+ "call/rtc_event_log_parser.cc",
+ "call/rtc_event_log_parser.h",
+ ]
+
+ configs += [ ":common_config" ]
+ public_configs = [ ":common_inherited_config" ]
+
+ deps = [
+ ":rtc_event_log_proto",
+ ":webrtc_common",
+ ]
+
+ if (is_clang && !is_nacl) {
+ # Suppress warnings from Chrome's Clang plugins.
+ # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
+ configs -= [ "//build/config/clang:find_bad_constructs" ]
+ }
+ }
+}
+
if (use_libfuzzer || use_drfuzz) {
# This target is only here for gn to discover fuzzer build targets under
# webrtc/test/fuzzers/.
diff --git a/chromium/third_party/webrtc/DEPS b/chromium/third_party/webrtc/DEPS
index 7f15817e370..40ec7a3d622 100644
--- a/chromium/third_party/webrtc/DEPS
+++ b/chromium/third_party/webrtc/DEPS
@@ -19,8 +19,6 @@ include_rules = [
"+webrtc/common_types.h",
"+webrtc/config.h",
"+webrtc/engine_configurations.h",
- "+webrtc/frame_callback.h",
- "+webrtc/stream.h",
"+webrtc/transport.h",
"+webrtc/typedefs.h",
"+webrtc/video_decoder.h",
@@ -30,6 +28,7 @@ include_rules = [
"+webrtc/video_renderer.h",
"+webrtc/video_send_stream.h",
+ "+WebRTC",
"+webrtc/base",
"+webrtc/modules/include",
"+webrtc/test",
@@ -45,9 +44,11 @@ specific_include_rules = {
"+webrtc/common_video",
],
"video_receive_stream\.h": [
+ "+webrtc/common_video/include",
"+webrtc/media/base",
],
"video_send_stream\.h": [
+ "+webrtc/common_video/include",
"+webrtc/media/base",
],
}
diff --git a/chromium/third_party/webrtc/OWNERS b/chromium/third_party/webrtc/OWNERS
index 057b5636e86..613031f5cd4 100644
--- a/chromium/third_party/webrtc/OWNERS
+++ b/chromium/third_party/webrtc/OWNERS
@@ -1,3 +1,9 @@
+henrika@webrtc.org
+mflodman@webrtc.org
+niklas.enbom@webrtc.org
+tina.legrand@webrtc.org
+tommi@webrtc.org
+
per-file *.isolate=kjellander@webrtc.org
# These are for the common case of adding or renaming files. If you're doing
diff --git a/chromium/third_party/webrtc/api/BUILD.gn b/chromium/third_party/webrtc/api/BUILD.gn
index 784c048905d..6dc52174ece 100644
--- a/chromium/third_party/webrtc/api/BUILD.gn
+++ b/chromium/third_party/webrtc/api/BUILD.gn
@@ -7,104 +7,3 @@
# be found in the AUTHORS file in the root of the source tree.
import("../build/webrtc.gni")
-
-config("ios_config") {
- libs = [
- "CoreGraphics.framework",
- "GLKit.framework",
- "OpenGLES.framework",
- "QuartzCore.framework",
- ]
-}
-
-if (is_ios) {
- source_set("rtc_api_objc") {
- deps = [
- "../base:rtc_base_objc",
- #"../../talk/libjingle:libjingle_peerconnection",
- ]
- cflags = [
- "-fobjc-arc",
- "-Wobjc-missing-property-synthesis",
- ]
- sources = [
- # Add these when there's a BUILD.gn for peer connection APIs
- #"objc/RTCAVFoundationVideoSource+Private.h",
- #"objc/RTCAVFoundationVideoSource.h",
- #"objc/RTCAVFoundationVideoSource.mm",
- #"objc/RTCAudioTrack+Private.h",
- #"objc/RTCAudioTrack.h",
- #"objc/RTCAudioTrack.mm",
- #"objc/RTCConfiguration+Private.h",
- #"objc/RTCConfiguration.h",
- #"objc/RTCConfiguration.mm",
- #"objc/RTCDataChannel+Private.h",
- #"objc/RTCDataChannel.h",
- #"objc/RTCDataChannel.mm",
- #"objc/RTCDataChannelConfiguration+Private.h",
- #"objc/RTCDataChannelConfiguration.h",
- #"objc/RTCDataChannelConfiguration.mm",
- #"objc/RTCIceCandidate+Private.h",
- #"objc/RTCIceCandidate.h",
- #"objc/RTCIceCandidate.mm",
- #"objc/RTCMediaStream+Private.h",
- #"objc/RTCMediaStream.h",
- #"objc/RTCMediaStream.mm",
- #"objc/RTCMediaStreamTrack+Private.h",
- #"objc/RTCMediaStreamTrack.h",
- #"objc/RTCMediaStreamTrack.mm",
- #"objc/RTCPeerConnection+DataChannel.h",
- #"objc/RTCPeerConnection+Private.h",
- #"objc/RTCPeerConnection+Stats.h",
- #"objc/RTCPeerConnection.h",
- #"objc/RTCPeerConnection.mm",
- #"objc/RTCPeerConnectionFactory+Private.h",
- #"objc/RTCPeerConnectionFactory.h",
- #"objc/RTCPeerConnectionFactory.mm",
- #"objc/RTCVideoSource+Private.h",
- #"objc/RTCVideoSource.h",
- #"objc/RTCVideoSource.mm",
- #"objc/RTCVideoTrack+Private.h",
- #"objc/RTCVideoTrack.h",
- #"objc/RTCVideoTrack.mm",
- "objc/RTCIceServer+Private.h",
- "objc/RTCIceServer.h",
- "objc/RTCIceServer.mm",
- "objc/RTCMediaConstraints+Private.h",
- "objc/RTCMediaConstraints.h",
- "objc/RTCMediaConstraints.mm",
- "objc/RTCOpenGLVideoRenderer.h",
- "objc/RTCOpenGLVideoRenderer.mm",
- "objc/RTCSessionDescription+Private.h",
- "objc/RTCSessionDescription.h",
- "objc/RTCSessionDescription.mm",
- "objc/RTCStatsReport+Private.h",
- "objc/RTCStatsReport.h",
- "objc/RTCStatsReport.mm",
- "objc/RTCVideoFrame+Private.h",
- "objc/RTCVideoFrame.h",
- "objc/RTCVideoFrame.mm",
- "objc/RTCVideoRenderer.h",
- "objc/RTCVideoRendererAdapter+Private.h",
- "objc/RTCVideoRendererAdapter.h",
- "objc/RTCVideoRendererAdapter.mm",
- "objc/WebRTC-Prefix.pch",
- "objc/avfoundationvideocapturer.h",
- "objc/avfoundationvideocapturer.mm",
- ]
-
- if (is_ios) {
- sources += [
- "objc/RTCEAGLVideoView.h",
- "objc/RTCEAGLVideoView.m",
- ]
- }
-
- if (is_mac) {
- sources += [
- "objc/RTCNSGLVideoView.h",
- "objc/RTCNSGLVideoView.m",
- ]
- }
- }
-}
diff --git a/chromium/third_party/webrtc/api/androidvideocapturer.cc b/chromium/third_party/webrtc/api/androidvideocapturer.cc
index e98a4be573d..71a94fedb06 100644
--- a/chromium/third_party/webrtc/api/androidvideocapturer.cc
+++ b/chromium/third_party/webrtc/api/androidvideocapturer.cc
@@ -10,6 +10,8 @@
#include "webrtc/api/androidvideocapturer.h"
+#include <memory>
+
#include "webrtc/api/java/jni/native_handle_impl.h"
#include "webrtc/base/common.h"
#include "webrtc/base/timeutils.h"
@@ -70,11 +72,11 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
RTC_CHECK(captured_frame == &captured_frame_);
RTC_CHECK(buffer_->native_handle() == nullptr);
- rtc::scoped_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
+ std::unique_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
ShallowCenterCrop(buffer_, dst_width, dst_height),
captured_frame->time_stamp, captured_frame->rotation));
// Caller takes ownership.
- // TODO(magjed): Change CreateAliasedFrame() to return a rtc::scoped_ptr.
+ // TODO(magjed): Change CreateAliasedFrame() to return a std::unique_ptr.
return apply_rotation_ ? frame->GetCopyWithRotationApplied()->Copy()
: frame.release();
}
@@ -86,14 +88,12 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
int output_width,
int output_height) const override {
if (buffer_->native_handle() != nullptr) {
- // TODO(perkj) Implement cropping.
- RTC_CHECK_EQ(cropped_input_width, buffer_->width());
- RTC_CHECK_EQ(cropped_input_height, buffer_->height());
rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
static_cast<webrtc_jni::AndroidTextureBuffer*>(buffer_.get())
- ->ScaleAndRotate(output_width, output_height,
- apply_rotation_ ? input_frame->rotation :
- webrtc::kVideoRotation_0));
+ ->CropScaleAndRotate(cropped_input_width, cropped_input_height,
+ output_width, output_height,
+ apply_rotation_ ? input_frame->rotation
+ : webrtc::kVideoRotation_0));
return new cricket::WebRtcVideoFrame(
scaled_buffer, input_frame->time_stamp,
apply_rotation_ ? webrtc::kVideoRotation_0 : input_frame->rotation);
diff --git a/chromium/third_party/webrtc/api/api.gyp b/chromium/third_party/webrtc/api/api.gyp
index aa6ef38e5ef..377ef8f70be 100644
--- a/chromium/third_party/webrtc/api/api.gyp
+++ b/chromium/third_party/webrtc/api/api.gyp
@@ -66,7 +66,6 @@
'-Wextra',
],
'cflags_cc!': [
- '-Wnon-virtual-dtor',
'-Woverloaded-virtual',
],
'msvs_disabled_warnings': [
@@ -115,139 +114,13 @@
'java/android',
'<(webrtc_base_dir)/java/src',
'<(webrtc_modules_dir)/audio_device/android/java/src',
- '<(webrtc_modules_dir)/video_render/android/java/src',
+
],
},
'includes': ['../../build/java.gypi'],
}, # libjingle_peerconnection_java
]
}],
- ['OS=="ios" or (OS=="mac" and mac_deployment_target=="10.7")', {
- 'targets': [
- {
- 'target_name': 'rtc_api_objc',
- 'type': 'static_library',
- 'includes': [
- '../build/objc_common.gypi',
- ],
- 'dependencies': [
- '<(webrtc_root)/base/base.gyp:rtc_base_objc',
- 'libjingle_peerconnection',
- ],
- 'sources': [
- 'objc/RTCAudioTrack+Private.h',
- 'objc/RTCAudioTrack.h',
- 'objc/RTCAudioTrack.mm',
- 'objc/RTCConfiguration+Private.h',
- 'objc/RTCConfiguration.h',
- 'objc/RTCConfiguration.mm',
- 'objc/RTCDataChannel+Private.h',
- 'objc/RTCDataChannel.h',
- 'objc/RTCDataChannel.mm',
- 'objc/RTCDataChannelConfiguration+Private.h',
- 'objc/RTCDataChannelConfiguration.h',
- 'objc/RTCDataChannelConfiguration.mm',
- 'objc/RTCIceCandidate+Private.h',
- 'objc/RTCIceCandidate.h',
- 'objc/RTCIceCandidate.mm',
- 'objc/RTCIceServer+Private.h',
- 'objc/RTCIceServer.h',
- 'objc/RTCIceServer.mm',
- 'objc/RTCMediaConstraints+Private.h',
- 'objc/RTCMediaConstraints.h',
- 'objc/RTCMediaConstraints.mm',
- 'objc/RTCMediaStream+Private.h',
- 'objc/RTCMediaStream.h',
- 'objc/RTCMediaStream.mm',
- 'objc/RTCMediaStreamTrack+Private.h',
- 'objc/RTCMediaStreamTrack.h',
- 'objc/RTCMediaStreamTrack.mm',
- 'objc/RTCOpenGLVideoRenderer.h',
- 'objc/RTCOpenGLVideoRenderer.mm',
- 'objc/RTCPeerConnection+DataChannel.mm',
- 'objc/RTCPeerConnection+Private.h',
- 'objc/RTCPeerConnection+Stats.mm',
- 'objc/RTCPeerConnection.h',
- 'objc/RTCPeerConnection.mm',
- 'objc/RTCPeerConnectionFactory+Private.h',
- 'objc/RTCPeerConnectionFactory.h',
- 'objc/RTCPeerConnectionFactory.mm',
- 'objc/RTCSessionDescription+Private.h',
- 'objc/RTCSessionDescription.h',
- 'objc/RTCSessionDescription.mm',
- 'objc/RTCStatsReport+Private.h',
- 'objc/RTCStatsReport.h',
- 'objc/RTCStatsReport.mm',
- 'objc/RTCVideoFrame+Private.h',
- 'objc/RTCVideoFrame.h',
- 'objc/RTCVideoFrame.mm',
- 'objc/RTCVideoRenderer.h',
- 'objc/RTCVideoRendererAdapter+Private.h',
- 'objc/RTCVideoRendererAdapter.h',
- 'objc/RTCVideoRendererAdapter.mm',
- 'objc/RTCVideoSource+Private.h',
- 'objc/RTCVideoSource.h',
- 'objc/RTCVideoSource.mm',
- 'objc/RTCVideoTrack+Private.h',
- 'objc/RTCVideoTrack.h',
- 'objc/RTCVideoTrack.mm',
- ],
- # TODO(hjon): Make this compile without linking to libstdc++
- # See https://bugs.chromium.org/p/webrtc/issues/detail?id=5593
- 'link_settings': {
- 'libraries': [
- '-lstdc++',
- ],
- },
- 'conditions': [
- ['OS=="ios"', {
- 'sources': [
- 'objc/RTCAVFoundationVideoSource+Private.h',
- 'objc/RTCAVFoundationVideoSource.h',
- 'objc/RTCAVFoundationVideoSource.mm',
- 'objc/RTCEAGLVideoView.h',
- 'objc/RTCEAGLVideoView.m',
- 'objc/avfoundationvideocapturer.h',
- 'objc/avfoundationvideocapturer.mm',
- ],
- 'all_dependent_settings': {
- 'xcode_settings': {
- 'OTHER_LDFLAGS': [
- '-framework CoreGraphics',
- '-framework GLKit',
- '-framework OpenGLES',
- '-framework QuartzCore',
- ]
- }
- },
- # TODO(kjellander): Make the code compile without disabling these.
- # See https://bugs.chromium.org/p/webrtc/issues/detail?id=3307
- 'cflags': [
- '-Wno-return-type',
- ],
- 'xcode_settings': {
- 'WARNING_CFLAGS': [
- '-Wno-return-type',
- ],
- },
- }],
- ['OS=="mac"', {
- 'sources': [
- 'objc/RTCNSGLVideoView.h',
- 'objc/RTCNSGLVideoView.m',
- ],
- 'link_settings': {
- 'xcode_settings': {
- 'OTHER_LDFLAGS': [
- '-framework OpenGL',
- ],
- },
- },
- }],
- ],
- }
- ],
- }], # OS=="ios"
], # conditions
'targets': [
{
@@ -333,7 +206,6 @@
'-Wno-sign-compare',
],
'cflags_cc!': [
- '-Wnon-virtual-dtor',
'-Woverloaded-virtual',
],
'conditions': [
@@ -366,6 +238,20 @@
},
},
}],
+ ['use_quic==1', {
+ 'dependencies': [
+ '<(DEPTH)/third_party/libquic/libquic.gyp:libquic',
+ ],
+ 'sources': [
+ 'quicdatachannel.cc',
+ 'quicdatachannel.h',
+ 'quicdatatransport.cc',
+ 'quicdatatransport.h',
+ ],
+ 'export_dependent_settings': [
+ '<(DEPTH)/third_party/libquic/libquic.gyp:libquic',
+ ],
+ }],
],
}, # target libjingle_peerconnection
], # targets
diff --git a/chromium/third_party/webrtc/api/api_tests.gyp b/chromium/third_party/webrtc/api/api_tests.gyp
index b7cbd687ad9..65bb4614744 100644
--- a/chromium/third_party/webrtc/api/api_tests.gyp
+++ b/chromium/third_party/webrtc/api/api_tests.gyp
@@ -43,7 +43,7 @@
'peerconnectionendtoend_unittest.cc',
'peerconnectionfactory_unittest.cc',
'peerconnectioninterface_unittest.cc',
- # 'peerconnectionproxy_unittest.cc',
+ 'proxy_unittest.cc',
'rtpsenderreceiver_unittest.cc',
'statscollector_unittest.cc',
'test/fakeaudiocapturemodule.cc',
@@ -72,7 +72,6 @@
'-Wextra',
],
'cflags_cc!': [
- '-Wnon-virtual-dtor',
'-Woverloaded-virtual',
],
'msvs_disabled_warnings': [
@@ -113,6 +112,18 @@
},
},
}],
+ ['use_quic==1', {
+ 'dependencies': [
+ '<(DEPTH)/third_party/libquic/libquic.gyp:libquic',
+ ],
+ 'sources': [
+ 'quicdatachannel_unittest.cc',
+ 'quicdatatransport_unittest.cc',
+ ],
+ 'export_dependent_settings': [
+ '<(DEPTH)/third_party/libquic/libquic.gyp:libquic',
+ ],
+ }],
], # conditions
}, # target peerconnection_unittests
], # targets
@@ -131,41 +142,17 @@
'resource_dir': 'androidtests/res',
'native_lib_target': 'libjingle_peerconnection_so',
'is_test_apk': 1,
+ 'test_type': 'instrumentation',
+ 'tested_apk_path': '',
'never_lint': 1,
},
- 'includes': [ '../../build/java_apk.gypi' ],
- },
- ], # targets
- }], # OS=="android"
- ['OS=="ios" or (OS=="mac" and mac_deployment_target=="10.7")', {
- 'targets': [
- {
- 'target_name': 'rtc_api_objc_tests',
- 'type': 'executable',
'includes': [
- '../build/objc_common.gypi',
+ '../../build/java_apk.gypi',
+ '../../build/android/test_runner.gypi',
],
- 'dependencies': [
- '<(webrtc_root)/api/api.gyp:rtc_api_objc',
- '<(webrtc_root)/base/base_tests.gyp:rtc_base_tests_utils',
- ],
- 'sources': [
- 'objctests/RTCConfigurationTest.mm',
- 'objctests/RTCDataChannelConfigurationTest.mm',
- 'objctests/RTCIceCandidateTest.mm',
- 'objctests/RTCIceServerTest.mm',
- 'objctests/RTCMediaConstraintsTest.mm',
- 'objctests/RTCSessionDescriptionTest.mm',
- ],
- 'xcode_settings': {
- # |-ObjC| flag needed to make sure category method implementations
- # are included:
- # https://developer.apple.com/library/mac/qa/qa1490/_index.html
- 'OTHER_LDFLAGS': ['-ObjC'],
- },
},
- ],
- }], # OS=="ios"
+ ], # targets
+ }], # OS=="android"
['OS=="android"', {
'targets': [
{
@@ -176,6 +163,27 @@
],
},
],
+ 'conditions': [
+ ['test_isolation_mode != "noop"',
+ {
+ 'targets': [
+ {
+ 'target_name': 'peerconnection_unittests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):peerconnection_unittests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'peerconnection_unittests_apk.isolate',
+ ],
+ },
+ ]
+ }
+ ],
+ ],
}], # OS=="android"
['test_isolation_mode != "noop"', {
'targets': [
diff --git a/chromium/third_party/webrtc/api/audiotrack.h b/chromium/third_party/webrtc/api/audiotrack.h
index e7ffee5d830..096caf9d0e0 100644
--- a/chromium/third_party/webrtc/api/audiotrack.h
+++ b/chromium/third_party/webrtc/api/audiotrack.h
@@ -16,7 +16,7 @@
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/mediastreamtrack.h"
#include "webrtc/api/notifier.h"
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/thread_checker.h"
diff --git a/chromium/third_party/webrtc/api/datachannel.cc b/chromium/third_party/webrtc/api/datachannel.cc
index 612d7e0b382..452e4b39611 100644
--- a/chromium/third_party/webrtc/api/datachannel.cc
+++ b/chromium/third_party/webrtc/api/datachannel.cc
@@ -10,6 +10,7 @@
#include "webrtc/api/datachannel.h"
+#include <memory>
#include <string>
#include "webrtc/api/mediastreamprovider.h"
@@ -363,7 +364,7 @@ void DataChannel::OnDataReceived(cricket::DataChannel* channel,
}
bool binary = (params.type == cricket::DMT_BINARY);
- rtc::scoped_ptr<DataBuffer> buffer(new DataBuffer(payload, binary));
+ std::unique_ptr<DataBuffer> buffer(new DataBuffer(payload, binary));
if (state_ == kOpen && observer_) {
observer_->OnMessage(*buffer.get());
} else {
@@ -494,7 +495,7 @@ void DataChannel::DeliverQueuedReceivedData() {
}
while (!queued_received_data_.Empty()) {
- rtc::scoped_ptr<DataBuffer> buffer(queued_received_data_.Front());
+ std::unique_ptr<DataBuffer> buffer(queued_received_data_.Front());
observer_->OnMessage(*buffer);
queued_received_data_.Pop();
}
@@ -589,7 +590,7 @@ void DataChannel::SendQueuedControlMessages() {
control_packets.Swap(&queued_control_data_);
while (!control_packets.Empty()) {
- rtc::scoped_ptr<DataBuffer> buf(control_packets.Front());
+ std::unique_ptr<DataBuffer> buf(control_packets.Front());
SendControlMessage(buf->data);
control_packets.Pop();
}
diff --git a/chromium/third_party/webrtc/api/datachannel.h b/chromium/third_party/webrtc/api/datachannel.h
index b8830be300a..3fb400b7ec4 100644
--- a/chromium/third_party/webrtc/api/datachannel.h
+++ b/chromium/third_party/webrtc/api/datachannel.h
@@ -260,7 +260,7 @@ class DataChannel : public DataChannelInterface,
};
// Define proxy for DataChannelInterface.
-BEGIN_PROXY_MAP(DataChannel)
+BEGIN_SIGNALING_PROXY_MAP(DataChannel)
PROXY_METHOD1(void, RegisterObserver, DataChannelObserver*)
PROXY_METHOD0(void, UnregisterObserver)
PROXY_CONSTMETHOD0(std::string, label)
@@ -275,7 +275,7 @@ BEGIN_PROXY_MAP(DataChannel)
PROXY_CONSTMETHOD0(uint64_t, buffered_amount)
PROXY_METHOD0(void, Close)
PROXY_METHOD1(bool, Send, const DataBuffer&)
-END_PROXY()
+END_SIGNALING_PROXY()
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/datachannel_unittest.cc b/chromium/third_party/webrtc/api/datachannel_unittest.cc
index 5958ec02f85..d55ab57b64d 100644
--- a/chromium/third_party/webrtc/api/datachannel_unittest.cc
+++ b/chromium/third_party/webrtc/api/datachannel_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/api/datachannel.h"
#include "webrtc/api/sctputils.h"
#include "webrtc/api/test/fakedatachannelprovider.h"
@@ -85,7 +87,7 @@ class SctpDataChannelTest : public testing::Test {
webrtc::InternalDataChannelInit init_;
FakeDataChannelProvider provider_;
- rtc::scoped_ptr<FakeDataChannelObserver> observer_;
+ std::unique_ptr<FakeDataChannelObserver> observer_;
rtc::scoped_refptr<DataChannel> webrtc_data_channel_;
};
diff --git a/chromium/third_party/webrtc/api/dtlsidentitystore.cc b/chromium/third_party/webrtc/api/dtlsidentitystore.cc
index a4851880650..bdccc10c1b0 100644
--- a/chromium/third_party/webrtc/api/dtlsidentitystore.cc
+++ b/chromium/third_party/webrtc/api/dtlsidentitystore.cc
@@ -51,7 +51,7 @@ class DtlsIdentityStoreImpl::WorkerTask : public sigslot::has_slots<>,
private:
void GenerateIdentity_w() {
LOG(LS_INFO) << "Generating identity, using keytype " << key_type_;
- rtc::scoped_ptr<rtc::SSLIdentity> identity(
+ std::unique_ptr<rtc::SSLIdentity> identity(
rtc::SSLIdentity::Generate(kIdentityName, key_type_));
// Posting to |this| avoids touching |store_| on threads other than
@@ -74,7 +74,7 @@ class DtlsIdentityStoreImpl::WorkerTask : public sigslot::has_slots<>,
case MSG_GENERATE_IDENTITY_RESULT:
RTC_DCHECK(signaling_thread_->IsCurrent());
{
- rtc::scoped_ptr<IdentityResultMessageData> pdata(
+ std::unique_ptr<IdentityResultMessageData> pdata(
static_cast<IdentityResultMessageData*>(msg->pdata));
if (store_) {
store_->OnIdentityGenerated(pdata->data()->key_type_,
@@ -108,12 +108,6 @@ DtlsIdentityStoreImpl::DtlsIdentityStoreImpl(rtc::Thread* signaling_thread,
worker_thread_(worker_thread),
request_info_() {
RTC_DCHECK(signaling_thread_->IsCurrent());
- // Preemptively generate identities unless the worker thread and signaling
- // thread are the same (only do preemptive work in the background).
- if (worker_thread_ != signaling_thread_) {
- // Only necessary for RSA.
- GenerateIdentity(rtc::KT_RSA, nullptr);
- }
}
DtlsIdentityStoreImpl::~DtlsIdentityStoreImpl() {
@@ -137,7 +131,7 @@ void DtlsIdentityStoreImpl::OnMessage(rtc::Message* msg) {
RTC_DCHECK(signaling_thread_->IsCurrent());
switch (msg->message_id) {
case MSG_GENERATE_IDENTITY_RESULT: {
- rtc::scoped_ptr<IdentityResultMessageData> pdata(
+ std::unique_ptr<IdentityResultMessageData> pdata(
static_cast<IdentityResultMessageData*>(msg->pdata));
OnIdentityGenerated(pdata->data()->key_type_,
std::move(pdata->data()->identity_));
@@ -192,7 +186,8 @@ void DtlsIdentityStoreImpl::GenerateIdentity(
}
void DtlsIdentityStoreImpl::OnIdentityGenerated(
- rtc::KeyType key_type, rtc::scoped_ptr<rtc::SSLIdentity> identity) {
+ rtc::KeyType key_type,
+ std::unique_ptr<rtc::SSLIdentity> identity) {
RTC_DCHECK(signaling_thread_->IsCurrent());
RTC_DCHECK(request_info_[key_type].gen_in_progress_counts_);
@@ -226,7 +221,7 @@ void DtlsIdentityStoreImpl::OnIdentityGenerated(
if (worker_thread_ != signaling_thread_ && // Only do in background thread.
key_type == rtc::KT_RSA && // Only necessary for RSA.
!request_info_[key_type].free_identity_.get() &&
- request_info_[key_type].request_observers_.size() <=
+ request_info_[key_type].request_observers_.size() ==
request_info_[key_type].gen_in_progress_counts_) {
GenerateIdentity(key_type, nullptr);
}
diff --git a/chromium/third_party/webrtc/api/dtlsidentitystore.h b/chromium/third_party/webrtc/api/dtlsidentitystore.h
index af4229201cb..e25b79591a7 100644
--- a/chromium/third_party/webrtc/api/dtlsidentitystore.h
+++ b/chromium/third_party/webrtc/api/dtlsidentitystore.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_API_DTLSIDENTITYSTORE_H_
#define WEBRTC_API_DTLSIDENTITYSTORE_H_
+#include <memory>
#include <queue>
#include <string>
#include <utility>
@@ -19,7 +20,6 @@
#include "webrtc/base/messagequeue.h"
#include "webrtc/base/optional.h"
#include "webrtc/base/refcount.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/sslidentity.h"
#include "webrtc/base/thread.h"
@@ -39,9 +39,9 @@ class DtlsIdentityRequestObserver : public rtc::RefCountInterface {
// TODO(hbos): Unify the OnSuccess method once Chrome code is updated.
virtual void OnSuccess(const std::string& der_cert,
const std::string& der_private_key) = 0;
- // |identity| is a scoped_ptr because rtc::SSLIdentity is not copyable and the
+ // |identity| is a unique_ptr because rtc::SSLIdentity is not copyable and the
// client has to get the ownership of the object to make use of it.
- virtual void OnSuccess(rtc::scoped_ptr<rtc::SSLIdentity> identity) = 0;
+ virtual void OnSuccess(std::unique_ptr<rtc::SSLIdentity> identity) = 0;
protected:
virtual ~DtlsIdentityRequestObserver() {}
@@ -57,25 +57,10 @@ class DtlsIdentityStoreInterface {
// The |observer| will be called when the requested identity is ready, or when
// identity generation fails.
- // TODO(torbjorng,hbos): There are currently two versions of RequestIdentity,
- // with default implementation to call the other version of itself (so that a
- // call can be made regardless of which version has been overridden). The 1st
- // version exists because it is currently implemented in chromium. The 2nd
- // version will become the one and only RequestIdentity as soon as chromium
- // implements the correct version. crbug.com/544902, webrtc:5092.
- virtual void RequestIdentity(
- rtc::KeyParams key_params,
- const rtc::scoped_refptr<DtlsIdentityRequestObserver>& observer) {
- // Add default ("null") expiration.
- RequestIdentity(key_params, rtc::Optional<uint64_t>(), observer);
- }
virtual void RequestIdentity(
const rtc::KeyParams& key_params,
const rtc::Optional<uint64_t>& expires_ms,
- const rtc::scoped_refptr<DtlsIdentityRequestObserver>& observer) {
- // Drop |expires|.
- RequestIdentity(key_params, observer);
- }
+ const rtc::scoped_refptr<DtlsIdentityRequestObserver>& observer) = 0;
};
// The WebRTC default implementation of DtlsIdentityStoreInterface.
@@ -106,7 +91,7 @@ class DtlsIdentityStoreImpl : public DtlsIdentityStoreInterface,
rtc::KeyType key_type,
const rtc::scoped_refptr<DtlsIdentityRequestObserver>& observer);
void OnIdentityGenerated(rtc::KeyType key_type,
- rtc::scoped_ptr<rtc::SSLIdentity> identity);
+ std::unique_ptr<rtc::SSLIdentity> identity);
class WorkerTask;
typedef rtc::ScopedMessageData<DtlsIdentityStoreImpl::WorkerTask>
@@ -115,11 +100,11 @@ class DtlsIdentityStoreImpl : public DtlsIdentityStoreInterface,
// A key type-identity pair.
struct IdentityResult {
IdentityResult(rtc::KeyType key_type,
- rtc::scoped_ptr<rtc::SSLIdentity> identity)
+ std::unique_ptr<rtc::SSLIdentity> identity)
: key_type_(key_type), identity_(std::move(identity)) {}
rtc::KeyType key_type_;
- rtc::scoped_ptr<rtc::SSLIdentity> identity_;
+ std::unique_ptr<rtc::SSLIdentity> identity_;
};
typedef rtc::ScopedMessageData<IdentityResult> IdentityResultMessageData;
@@ -139,7 +124,7 @@ class DtlsIdentityStoreImpl : public DtlsIdentityStoreInterface,
std::queue<rtc::scoped_refptr<DtlsIdentityRequestObserver>>
request_observers_;
size_t gen_in_progress_counts_;
- rtc::scoped_ptr<rtc::SSLIdentity> free_identity_;
+ std::unique_ptr<rtc::SSLIdentity> free_identity_;
};
// One RequestInfo per KeyType. Only touch on the |signaling_thread_|.
diff --git a/chromium/third_party/webrtc/api/dtlsidentitystore_unittest.cc b/chromium/third_party/webrtc/api/dtlsidentitystore_unittest.cc
index 809e885216c..31f0113f283 100644
--- a/chromium/third_party/webrtc/api/dtlsidentitystore_unittest.cc
+++ b/chromium/third_party/webrtc/api/dtlsidentitystore_unittest.cc
@@ -10,6 +10,8 @@
#include "webrtc/api/dtlsidentitystore.h"
+#include <memory>
+
#include "webrtc/api/webrtcsessiondescriptionfactory.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/logging.h"
@@ -34,7 +36,7 @@ class MockDtlsIdentityRequestObserver :
LOG(LS_WARNING) << "The string version of OnSuccess is called unexpectedly";
EXPECT_TRUE(false);
}
- void OnSuccess(rtc::scoped_ptr<rtc::SSLIdentity> identity) override {
+ void OnSuccess(std::unique_ptr<rtc::SSLIdentity> identity) override {
EXPECT_FALSE(call_back_called_);
call_back_called_ = true;
last_request_success_ = true;
@@ -77,14 +79,12 @@ class DtlsIdentityStoreTest : public testing::Test {
rtc::CleanupSSL();
}
- rtc::scoped_ptr<rtc::Thread> worker_thread_;
- rtc::scoped_ptr<DtlsIdentityStoreImpl> store_;
+ std::unique_ptr<rtc::Thread> worker_thread_;
+ std::unique_ptr<DtlsIdentityStoreImpl> store_;
rtc::scoped_refptr<MockDtlsIdentityRequestObserver> observer_;
};
TEST_F(DtlsIdentityStoreTest, RequestIdentitySuccessRSA) {
- EXPECT_TRUE_WAIT(store_->HasFreeIdentityForTesting(rtc::KT_RSA), kTimeoutMs);
-
store_->RequestIdentity(rtc::KeyParams(rtc::KT_RSA),
rtc::Optional<uint64_t>(),
observer_.get());
@@ -103,14 +103,14 @@ TEST_F(DtlsIdentityStoreTest, RequestIdentitySuccessRSA) {
}
TEST_F(DtlsIdentityStoreTest, RequestIdentitySuccessECDSA) {
- // Since store currently does not preemptively generate free ECDSA identities
- // we do not invoke HasFreeIdentityForTesting between requests.
-
store_->RequestIdentity(rtc::KeyParams(rtc::KT_ECDSA),
rtc::Optional<uint64_t>(),
observer_.get());
EXPECT_TRUE_WAIT(observer_->LastRequestSucceeded(), kTimeoutMs);
+ // Since store currently does not preemptively generate free ECDSA identities
+ // we do not invoke HasFreeIdentityForTesting between requests.
+
observer_->Reset();
// Verifies that the callback is async when a free identity is ready.
diff --git a/chromium/third_party/webrtc/api/dtmfsender.h b/chromium/third_party/webrtc/api/dtmfsender.h
index ae8aa445410..c85557f2e15 100644
--- a/chromium/third_party/webrtc/api/dtmfsender.h
+++ b/chromium/third_party/webrtc/api/dtmfsender.h
@@ -17,6 +17,7 @@
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/proxy.h"
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/messagehandler.h"
#include "webrtc/base/refcount.h"
@@ -82,7 +83,7 @@ class DtmfSender
DtmfSender();
// Implements MessageHandler.
- virtual void OnMessage(rtc::Message* msg);
+ void OnMessage(rtc::Message* msg) override;
// The DTMF sending task.
void DoInsertDtmf();
@@ -103,7 +104,7 @@ class DtmfSender
};
// Define proxy for DtmfSenderInterface.
-BEGIN_PROXY_MAP(DtmfSender)
+BEGIN_SIGNALING_PROXY_MAP(DtmfSender)
PROXY_METHOD1(void, RegisterObserver, DtmfSenderObserverInterface*)
PROXY_METHOD0(void, UnregisterObserver)
PROXY_METHOD0(bool, CanInsertDtmf)
@@ -112,7 +113,7 @@ BEGIN_PROXY_MAP(DtmfSender)
PROXY_CONSTMETHOD0(std::string, tones)
PROXY_CONSTMETHOD0(int, duration)
PROXY_CONSTMETHOD0(int, inter_tone_gap)
-END_PROXY()
+END_SIGNALING_PROXY()
// Get DTMF code from the DTMF event character.
bool GetDtmfCode(char tone, int* code);
diff --git a/chromium/third_party/webrtc/api/dtmfsender_unittest.cc b/chromium/third_party/webrtc/api/dtmfsender_unittest.cc
index 0a944cb160d..e5fe26171b5 100644
--- a/chromium/third_party/webrtc/api/dtmfsender_unittest.cc
+++ b/chromium/third_party/webrtc/api/dtmfsender_unittest.cc
@@ -10,6 +10,7 @@
#include "webrtc/api/dtmfsender.h"
+#include <memory>
#include <set>
#include <string>
#include <vector>
@@ -84,9 +85,9 @@ class FakeDtmfProvider : public DtmfProviderInterface {
// TODO(ronghuawu): Make the timer (basically the rtc::TimeNanos)
// mockable and use a fake timer in the unit tests.
if (last_insert_dtmf_call_ > 0) {
- gap = static_cast<int>(rtc::Time() - last_insert_dtmf_call_);
+ gap = static_cast<int>(rtc::TimeMillis() - last_insert_dtmf_call_);
}
- last_insert_dtmf_call_ = rtc::Time();
+ last_insert_dtmf_call_ = rtc::TimeMillis();
LOG(LS_VERBOSE) << "FakeDtmfProvider::InsertDtmf code=" << code
<< " duration=" << duration
@@ -95,7 +96,7 @@ class FakeDtmfProvider : public DtmfProviderInterface {
return true;
}
- virtual sigslot::signal0<>* GetOnDestroyedSignal() {
+ sigslot::signal0<>* GetOnDestroyedSignal() override {
return &SignalDestroyed;
}
@@ -214,8 +215,8 @@ class DtmfSenderTest : public testing::Test {
}
rtc::scoped_refptr<AudioTrackInterface> track_;
- rtc::scoped_ptr<FakeDtmfObserver> observer_;
- rtc::scoped_ptr<FakeDtmfProvider> provider_;
+ std::unique_ptr<FakeDtmfObserver> observer_;
+ std::unique_ptr<FakeDtmfProvider> provider_;
rtc::scoped_refptr<DtmfSender> dtmf_;
};
diff --git a/chromium/third_party/webrtc/api/java/jni/OWNERS b/chromium/third_party/webrtc/api/java/jni/OWNERS
index 4d31ffb663f..4178fd6920b 100644
--- a/chromium/third_party/webrtc/api/java/jni/OWNERS
+++ b/chromium/third_party/webrtc/api/java/jni/OWNERS
@@ -1 +1,8 @@
-magjed@webrtc.org
+per-file androidvideocapturer*=magjed@webrtc.org
+per-file androidmediaencoder*=magjed@webrtc.org
+per-file androidmediadecoder*=magjed@webrtc.org
+per-file androidmediacodeccommon.h=magjed@webrtc.org
+per-file surfacetexturehelper*=magjed@webrtc.org
+per-file native_handle_impl*=magjed@webrtc.org
+# Video related parts of peerconnection only.
+per-file peerconnection_jni.cc=magjed@webrtc.org
diff --git a/chromium/third_party/webrtc/api/java/jni/androidmediacodeccommon.h b/chromium/third_party/webrtc/api/java/jni/androidmediacodeccommon.h
index 2f9e05d1f54..db7260ade55 100644
--- a/chromium/third_party/webrtc/api/java/jni/androidmediacodeccommon.h
+++ b/chromium/third_party/webrtc/api/java/jni/androidmediacodeccommon.h
@@ -19,7 +19,6 @@
#include "webrtc/api/java/jni/jni_helpers.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc_jni {
@@ -59,10 +58,6 @@ enum { kMaxDecodedLogFrames = 10 };
// Maximum amount of encoded frames for which per-frame logging is enabled.
enum { kMaxEncodedLogFrames = 10 };
-static inline int64_t GetCurrentTimeMs() {
- return webrtc::TickTime::Now().Ticks() / 1000000LL;
-}
-
static inline void AllowBlockingCalls() {
rtc::Thread* current_thread = rtc::Thread::Current();
if (current_thread != NULL)
diff --git a/chromium/third_party/webrtc/api/java/jni/native_handle_impl.cc b/chromium/third_party/webrtc/api/java/jni/native_handle_impl.cc
index d52584acfe6..1f180ade9cc 100644
--- a/chromium/third_party/webrtc/api/java/jni/native_handle_impl.cc
+++ b/chromium/third_party/webrtc/api/java/jni/native_handle_impl.cc
@@ -10,12 +10,13 @@
#include "webrtc/api/java/jni/native_handle_impl.h"
+#include <memory>
+
#include "webrtc/api/java/jni/jni_helpers.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/keep_ref_until_done.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
using webrtc::NativeHandleBuffer;
@@ -58,6 +59,37 @@ void RotateMatrix(float a[16], webrtc::VideoRotation rotation) {
}
}
+// Calculates result = a * b, in column-major order.
+void MultiplyMatrix(const float a[16], const float b[16], float result[16]) {
+ for (int i = 0; i < 4; ++i) {
+ for (int j = 0; j < 4; ++j) {
+ float sum = 0;
+ for (int k = 0; k < 4; ++k) {
+ sum += a[k * 4 + j] * b[i * 4 + k];
+ }
+ result[i * 4 + j] = sum;
+ }
+ }
+}
+
+// Center crop by keeping xFraction of the width and yFraction of the height,
+// so e.g. cropping from 640x480 to 640x360 would use
+// xFraction=1, yFraction=360/480.
+void CropMatrix(float a[16], float xFraction, float yFraction) {
+ // Move cropped area to the center of the frame by offsetting half the
+ // removed area.
+ const float xOffset = (1 - xFraction) / 2;
+ const float yOffset = (1 - yFraction) / 2;
+ const float crop_matrix[16] = {
+ xFraction, 0, 0, 0,
+ 0, yFraction, 0, 0,
+ 0, 0, 1, 0,
+ xOffset, yOffset, 0, 1};
+ float mul_result[16];
+ MultiplyMatrix(crop_matrix, a, mul_result);
+ memcpy(a, mul_result, sizeof(mul_result));
+}
+
} // anonymouse namespace
namespace webrtc_jni {
@@ -104,7 +136,7 @@ AndroidTextureBuffer::NativeToI420Buffer() {
//
// TODO(nisse): Use an I420BufferPool. We then need to extend that
// class, and I420Buffer, to support our memory layout.
- rtc::scoped_ptr<uint8_t, webrtc::AlignedFreeDeleter> yuv_data(
+ std::unique_ptr<uint8_t, webrtc::AlignedFreeDeleter> yuv_data(
static_cast<uint8_t*>(webrtc::AlignedMalloc(size, kBufferAlignment)));
// See SurfaceTextureHelper.java for the required layout.
uint8_t* y_data = yuv_data.get();
@@ -145,15 +177,18 @@ AndroidTextureBuffer::NativeToI420Buffer() {
}
rtc::scoped_refptr<AndroidTextureBuffer>
-AndroidTextureBuffer::ScaleAndRotate(int dst_widht,
- int dst_height,
- webrtc::VideoRotation rotation) {
- if (width() == dst_widht && height() == dst_height &&
+AndroidTextureBuffer::CropScaleAndRotate(int cropped_width,
+ int cropped_height,
+ int dst_width,
+ int dst_height,
+ webrtc::VideoRotation rotation) {
+ if (cropped_width == dst_width && cropped_height == dst_height &&
+ width() == dst_width && height() == dst_height &&
rotation == webrtc::kVideoRotation_0) {
return this;
}
- int rotated_width = (rotation % 180 == 0) ? dst_widht : dst_height;
- int rotated_height = (rotation % 180 == 0) ? dst_height : dst_widht;
+ int rotated_width = (rotation % 180 == 0) ? dst_width : dst_height;
+ int rotated_height = (rotation % 180 == 0) ? dst_height : dst_width;
// Here we use Bind magic to add a reference count to |this| until the newly
// created AndroidTextureBuffer is destructed
@@ -162,6 +197,11 @@ AndroidTextureBuffer::ScaleAndRotate(int dst_widht,
rotated_width, rotated_height, native_handle_,
surface_texture_helper_, rtc::KeepRefUntilDone(this)));
+ if (cropped_width != width() || cropped_height != height()) {
+ CropMatrix(buffer->native_handle_.sampling_matrix,
+ cropped_width / static_cast<float>(width()),
+ cropped_height / static_cast<float>(height()));
+ }
RotateMatrix(buffer->native_handle_.sampling_matrix, rotation);
return buffer;
}
diff --git a/chromium/third_party/webrtc/api/java/jni/native_handle_impl.h b/chromium/third_party/webrtc/api/java/jni/native_handle_impl.h
index 859c3713b46..b781815718d 100644
--- a/chromium/third_party/webrtc/api/java/jni/native_handle_impl.h
+++ b/chromium/third_party/webrtc/api/java/jni/native_handle_impl.h
@@ -38,8 +38,11 @@ class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
~AndroidTextureBuffer();
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
- rtc::scoped_refptr<AndroidTextureBuffer> ScaleAndRotate(
- int dst_widht,
+ // First crop, then scale to dst resolution, and then rotate.
+ rtc::scoped_refptr<AndroidTextureBuffer> CropScaleAndRotate(
+ int cropped_width,
+ int cropped_height,
+ int dst_width,
int dst_height,
webrtc::VideoRotation rotation);
diff --git a/chromium/third_party/webrtc/api/jsepsessiondescription.cc b/chromium/third_party/webrtc/api/jsepsessiondescription.cc
index eb776c86fa7..547a60f1c3a 100644
--- a/chromium/third_party/webrtc/api/jsepsessiondescription.cc
+++ b/chromium/third_party/webrtc/api/jsepsessiondescription.cc
@@ -10,12 +10,13 @@
#include "webrtc/api/jsepsessiondescription.h"
+#include <memory>
+
#include "webrtc/api/webrtcsdp.h"
#include "webrtc/base/arraysize.h"
#include "webrtc/base/stringencode.h"
#include "webrtc/pc/mediasession.h"
-using rtc::scoped_ptr;
using cricket::SessionDescription;
namespace webrtc {
@@ -57,7 +58,6 @@ const int JsepSessionDescription::kMaxVideoCodecHeight = 1280;
const int JsepSessionDescription::kMaxVideoCodecWidth = 1920;
const int JsepSessionDescription::kMaxVideoCodecHeight = 1080;
#endif
-const int JsepSessionDescription::kDefaultVideoCodecPreference = 1;
SessionDescriptionInterface* CreateSessionDescription(const std::string& type,
const std::string& sdp,
@@ -125,7 +125,7 @@ bool JsepSessionDescription::AddCandidate(
updated_candidate.set_password(transport_info->description.ice_pwd);
}
- scoped_ptr<JsepIceCandidate> updated_candidate_wrapper(
+ std::unique_ptr<JsepIceCandidate> updated_candidate_wrapper(
new JsepIceCandidate(candidate->sdp_mid(),
static_cast<int>(mediasection_index),
updated_candidate));
diff --git a/chromium/third_party/webrtc/api/jsepsessiondescription.h b/chromium/third_party/webrtc/api/jsepsessiondescription.h
index 9a0d8732044..0248a07c726 100644
--- a/chromium/third_party/webrtc/api/jsepsessiondescription.h
+++ b/chromium/third_party/webrtc/api/jsepsessiondescription.h
@@ -13,12 +13,13 @@
#ifndef WEBRTC_API_JSEPSESSIONDESCRIPTION_H_
#define WEBRTC_API_JSEPSESSIONDESCRIPTION_H_
+#include <memory>
#include <string>
#include <vector>
#include "webrtc/api/jsep.h"
#include "webrtc/api/jsepicecandidate.h"
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/p2p/base/candidate.h"
namespace cricket {
@@ -72,10 +73,9 @@ class JsepSessionDescription : public SessionDescriptionInterface {
static const char kDefaultVideoCodecName[];
static const int kMaxVideoCodecWidth;
static const int kMaxVideoCodecHeight;
- static const int kDefaultVideoCodecPreference;
private:
- rtc::scoped_ptr<cricket::SessionDescription> description_;
+ std::unique_ptr<cricket::SessionDescription> description_;
std::string session_id_;
std::string session_version_;
std::string type_;
diff --git a/chromium/third_party/webrtc/api/jsepsessiondescription_unittest.cc b/chromium/third_party/webrtc/api/jsepsessiondescription_unittest.cc
index 3d875137311..6be590faf4d 100644
--- a/chromium/third_party/webrtc/api/jsepsessiondescription_unittest.cc
+++ b/chromium/third_party/webrtc/api/jsepsessiondescription_unittest.cc
@@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "webrtc/api/jsepicecandidate.h"
#include "webrtc/api/jsepsessiondescription.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/helpers.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/stringencode.h"
#include "webrtc/p2p/base/candidate.h"
@@ -27,7 +27,6 @@ using webrtc::IceCandidateInterface;
using webrtc::JsepIceCandidate;
using webrtc::JsepSessionDescription;
using webrtc::SessionDescriptionInterface;
-using rtc::scoped_ptr;
static const char kCandidateUfrag[] = "ufrag";
static const char kCandidatePwd[] = "pwd";
@@ -41,18 +40,18 @@ static const char kCandidatePwdVideo[] = "pwd_video";
static cricket::SessionDescription* CreateCricketSessionDescription() {
cricket::SessionDescription* desc(new cricket::SessionDescription());
// AudioContentDescription
- scoped_ptr<cricket::AudioContentDescription> audio(
+ std::unique_ptr<cricket::AudioContentDescription> audio(
new cricket::AudioContentDescription());
// VideoContentDescription
- scoped_ptr<cricket::VideoContentDescription> video(
+ std::unique_ptr<cricket::VideoContentDescription> video(
new cricket::VideoContentDescription());
- audio->AddCodec(cricket::AudioCodec(103, "ISAC", 16000, 0, 0, 0));
+ audio->AddCodec(cricket::AudioCodec(103, "ISAC", 16000, 0, 0));
desc->AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP,
audio.release());
- video->AddCodec(cricket::VideoCodec(120, "VP8", 640, 480, 30, 0));
+ video->AddCodec(cricket::VideoCodec(120, "VP8", 640, 480, 30));
desc->AddContent(cricket::CN_VIDEO, cricket::NS_JINGLE_RTP,
video.release());
@@ -100,7 +99,7 @@ class JsepSessionDescriptionTest : public testing::Test {
}
cricket::Candidate candidate_;
- rtc::scoped_ptr<JsepSessionDescription> jsep_desc_;
+ std::unique_ptr<JsepSessionDescription> jsep_desc_;
};
// Test that number_of_mediasections() returns the number of media contents in
@@ -204,7 +203,8 @@ TEST_F(JsepSessionDescriptionTest, AddCandidateDuplicates) {
TEST_F(JsepSessionDescriptionTest, SerializeDeserialize) {
std::string sdp = Serialize(jsep_desc_.get());
- scoped_ptr<SessionDescriptionInterface> parsed_jsep_desc(DeSerialize(sdp));
+ std::unique_ptr<SessionDescriptionInterface> parsed_jsep_desc(
+ DeSerialize(sdp));
EXPECT_EQ(2u, parsed_jsep_desc->number_of_mediasections());
std::string parsed_sdp = Serialize(parsed_jsep_desc.get());
@@ -222,7 +222,7 @@ TEST_F(JsepSessionDescriptionTest, SerializeDeserializeWithCandidates) {
std::string sdp_with_candidate = Serialize(jsep_desc_.get());
EXPECT_NE(sdp, sdp_with_candidate);
- scoped_ptr<SessionDescriptionInterface> parsed_jsep_desc(
+ std::unique_ptr<SessionDescriptionInterface> parsed_jsep_desc(
DeSerialize(sdp_with_candidate));
std::string parsed_sdp_with_candidate = Serialize(parsed_jsep_desc.get());
diff --git a/chromium/third_party/webrtc/api/localaudiosource.cc b/chromium/third_party/webrtc/api/localaudiosource.cc
index 3b22ad146ef..9da9fd26123 100644
--- a/chromium/third_party/webrtc/api/localaudiosource.cc
+++ b/chromium/third_party/webrtc/api/localaudiosource.cc
@@ -47,6 +47,8 @@ void FromConstraints(const MediaConstraintsInterface::Constraints& constraints,
options->noise_suppression},
{MediaConstraintsInterface::kExperimentalNoiseSuppression,
options->experimental_ns},
+ {MediaConstraintsInterface::kIntelligibilityEnhancer,
+ options->intelligibility_enhancer},
{MediaConstraintsInterface::kHighpassFilter, options->highpass_filter},
{MediaConstraintsInterface::kTypingNoiseDetection,
options->typing_detection},
diff --git a/chromium/third_party/webrtc/api/localaudiosource.h b/chromium/third_party/webrtc/api/localaudiosource.h
index e4de650537f..e1c023e5424 100644
--- a/chromium/third_party/webrtc/api/localaudiosource.h
+++ b/chromium/third_party/webrtc/api/localaudiosource.h
@@ -14,7 +14,6 @@
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/notifier.h"
#include "webrtc/api/peerconnectioninterface.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/media/base/mediachannel.h"
// LocalAudioSource implements AudioSourceInterface.
diff --git a/chromium/third_party/webrtc/api/mediaconstraintsinterface.cc b/chromium/third_party/webrtc/api/mediaconstraintsinterface.cc
index af258917e7a..6a014a2b039 100644
--- a/chromium/third_party/webrtc/api/mediaconstraintsinterface.cc
+++ b/chromium/third_party/webrtc/api/mediaconstraintsinterface.cc
@@ -46,6 +46,8 @@ const char MediaConstraintsInterface::kNoiseSuppression[] =
"googNoiseSuppression";
const char MediaConstraintsInterface::kExperimentalNoiseSuppression[] =
"googNoiseSuppression2";
+const char MediaConstraintsInterface::kIntelligibilityEnhancer[] =
+ "intelligibilityEnhancer";
const char MediaConstraintsInterface::kHighpassFilter[] =
"googHighpassFilter";
const char MediaConstraintsInterface::kTypingNoiseDetection[] =
@@ -179,29 +181,23 @@ void CopyConstraintsIntoRtcConfiguration(
return;
}
- bool value;
+ bool enable_ipv6;
if (FindConstraint(constraints, MediaConstraintsInterface::kEnableIPv6,
- &value, nullptr)) {
- if (!value) {
- configuration->disable_ipv6 = true;
- }
- }
- ConstraintToOptionalBool(constraints, MediaConstraintsInterface::kEnableDscp,
- &configuration->enable_dscp);
- ConstraintToOptionalBool(constraints,
- MediaConstraintsInterface::kCpuOveruseDetection,
- &configuration->cpu_overuse_detection);
- if (FindConstraint(constraints,
- MediaConstraintsInterface::kEnableRtpDataChannels, &value,
- NULL) &&
- value) {
- configuration->enable_rtp_data_channel = true;
+ &enable_ipv6, nullptr)) {
+ configuration->disable_ipv6 = !enable_ipv6;
}
+ FindConstraint(constraints, MediaConstraintsInterface::kEnableDscp,
+ &configuration->media_config.enable_dscp, nullptr);
+ FindConstraint(
+ constraints, MediaConstraintsInterface::kCpuOveruseDetection,
+ &configuration->media_config.video.enable_cpu_overuse_detection, nullptr);
+ FindConstraint(constraints, MediaConstraintsInterface::kEnableRtpDataChannels,
+ &configuration->enable_rtp_data_channel, nullptr);
// Find Suspend Below Min Bitrate constraint.
- ConstraintToOptionalBool(
- constraints,
- MediaConstraintsInterface::kEnableVideoSuspendBelowMinBitrate,
- &configuration->suspend_below_min_bitrate);
+ FindConstraint(constraints,
+ MediaConstraintsInterface::kEnableVideoSuspendBelowMinBitrate,
+ &configuration->media_config.video.suspend_below_min_bitrate,
+ nullptr);
ConstraintToOptionalInt(constraints,
MediaConstraintsInterface::kScreencastMinBitrate,
&configuration->screencast_min_bitrate);
diff --git a/chromium/third_party/webrtc/api/mediaconstraintsinterface.h b/chromium/third_party/webrtc/api/mediaconstraintsinterface.h
index 3db6e2672bd..13560dd35b9 100644
--- a/chromium/third_party/webrtc/api/mediaconstraintsinterface.h
+++ b/chromium/third_party/webrtc/api/mediaconstraintsinterface.h
@@ -73,6 +73,7 @@ class MediaConstraintsInterface {
static const char kExperimentalAutoGainControl[]; // googAutoGainControl2
static const char kNoiseSuppression[]; // googNoiseSuppression
static const char kExperimentalNoiseSuppression[]; // googNoiseSuppression2
+ static const char kIntelligibilityEnhancer[]; // intelligibilityEnhancer
static const char kHighpassFilter[]; // googHighpassFilter
static const char kTypingNoiseDetection[]; // googTypingNoiseDetection
static const char kAudioMirroring[]; // googAudioMirroring
diff --git a/chromium/third_party/webrtc/api/mediaconstraintsinterface_unittest.cc b/chromium/third_party/webrtc/api/mediaconstraintsinterface_unittest.cc
index 07338c15e82..dcf4bb7fde7 100644
--- a/chromium/third_party/webrtc/api/mediaconstraintsinterface_unittest.cc
+++ b/chromium/third_party/webrtc/api/mediaconstraintsinterface_unittest.cc
@@ -17,11 +17,24 @@ namespace webrtc {
namespace {
+// Checks all settings touched by CopyConstraintsIntoRtcConfiguration,
+// plus audio_jitter_buffer_max_packets.
bool Matches(const PeerConnectionInterface::RTCConfiguration& a,
const PeerConnectionInterface::RTCConfiguration& b) {
- return a.audio_jitter_buffer_max_packets ==
+ return a.disable_ipv6 == b.disable_ipv6 &&
+ a.audio_jitter_buffer_max_packets ==
b.audio_jitter_buffer_max_packets &&
- a.disable_prerenderer_smoothing == b.disable_prerenderer_smoothing;
+ a.enable_rtp_data_channel == b.enable_rtp_data_channel &&
+ a.screencast_min_bitrate == b.screencast_min_bitrate &&
+ a.combined_audio_video_bwe == b.combined_audio_video_bwe &&
+ a.enable_dtls_srtp == b.enable_dtls_srtp &&
+ a.media_config.enable_dscp == b.media_config.enable_dscp &&
+ a.media_config.video.enable_cpu_overuse_detection ==
+ b.media_config.video.enable_cpu_overuse_detection &&
+ a.media_config.video.disable_prerenderer_smoothing ==
+ b.media_config.video.disable_prerenderer_smoothing &&
+ a.media_config.video.suspend_below_min_bitrate ==
+ b.media_config.video.suspend_below_min_bitrate;
}
TEST(MediaConstraintsInterface, CopyConstraintsIntoRtcConfiguration) {
diff --git a/chromium/third_party/webrtc/api/mediacontroller.cc b/chromium/third_party/webrtc/api/mediacontroller.cc
index 71964d52a7e..2e4501b3f1b 100644
--- a/chromium/third_party/webrtc/api/mediacontroller.cc
+++ b/chromium/third_party/webrtc/api/mediacontroller.cc
@@ -10,8 +10,11 @@
#include "webrtc/api/mediacontroller.h"
+#include <memory>
+
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/call.h"
#include "webrtc/pc/channelmanager.h"
#include "webrtc/media/base/mediachannel.h"
@@ -74,7 +77,7 @@ class MediaController : public webrtc::MediaControllerInterface,
const cricket::MediaConfig media_config_;
cricket::ChannelManager* const channel_manager_;
webrtc::Call::Config call_config_;
- rtc::scoped_ptr<webrtc::Call> call_;
+ std::unique_ptr<webrtc::Call> call_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(MediaController);
};
diff --git a/chromium/third_party/webrtc/api/mediastream.h b/chromium/third_party/webrtc/api/mediastream.h
index 2a77f0ddc4b..1f80f25207a 100644
--- a/chromium/third_party/webrtc/api/mediastream.h
+++ b/chromium/third_party/webrtc/api/mediastream.h
@@ -31,10 +31,10 @@ class MediaStream : public Notifier<MediaStreamInterface> {
bool AddTrack(VideoTrackInterface* track) override;
bool RemoveTrack(AudioTrackInterface* track) override;
bool RemoveTrack(VideoTrackInterface* track) override;
- virtual rtc::scoped_refptr<AudioTrackInterface>
- FindAudioTrack(const std::string& track_id);
- virtual rtc::scoped_refptr<VideoTrackInterface>
- FindVideoTrack(const std::string& track_id);
+ rtc::scoped_refptr<AudioTrackInterface>
+ FindAudioTrack(const std::string& track_id) override;
+ rtc::scoped_refptr<VideoTrackInterface>
+ FindVideoTrack(const std::string& track_id) override;
AudioTrackVector GetAudioTracks() override { return audio_tracks_; }
VideoTrackVector GetVideoTracks() override { return video_tracks_; }
diff --git a/chromium/third_party/webrtc/api/mediastream_unittest.cc b/chromium/third_party/webrtc/api/mediastream_unittest.cc
index dd63356e925..1881cceeb0e 100644
--- a/chromium/third_party/webrtc/api/mediastream_unittest.cc
+++ b/chromium/third_party/webrtc/api/mediastream_unittest.cc
@@ -18,7 +18,6 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/refcount.h"
-#include "webrtc/base/scoped_ptr.h"
static const char kStreamLabel1[] = "local_stream_1";
static const char kVideoTrackId[] = "dummy_video_cam_1";
diff --git a/chromium/third_party/webrtc/api/mediastreaminterface.h b/chromium/third_party/webrtc/api/mediastreaminterface.h
index 2a3f3473b02..7f563250249 100644
--- a/chromium/third_party/webrtc/api/mediastreaminterface.h
+++ b/chromium/third_party/webrtc/api/mediastreaminterface.h
@@ -31,7 +31,6 @@
namespace cricket {
class AudioRenderer;
-class VideoCapturer;
class VideoRenderer;
class VideoFrame;
@@ -113,14 +112,6 @@ class VideoTrackSourceInterface
int input_width;
int input_height;
};
- // Get access to the source implementation of cricket::VideoCapturer.
- // This can be used for receiving frames and state notifications.
- // But it should not be used for starting or stopping capturing.
- // TODO(perkj): We are currently trying to replace all internal use of
- // cricket::VideoCapturer with rtc::VideoSourceInterface. Once that
- // refactoring is done,
- // remove this method.
- virtual cricket::VideoCapturer* GetVideoCapturer() = 0;
virtual void Stop() = 0;
virtual void Restart() = 0;
@@ -215,7 +206,8 @@ class AudioProcessorInterface : public rtc::RefCountInterface {
echo_return_loss_enhancement(0),
echo_delay_median_ms(0),
aec_quality_min(0.0),
- echo_delay_std_ms(0) {}
+ echo_delay_std_ms(0),
+ aec_divergent_filter_fraction(0.0) {}
~AudioProcessorStats() {}
bool typing_noise_detected;
@@ -224,6 +216,7 @@ class AudioProcessorInterface : public rtc::RefCountInterface {
int echo_delay_median_ms;
float aec_quality_min;
int echo_delay_std_ms;
+ float aec_divergent_filter_fraction;
};
// Get audio processor statistics.
diff --git a/chromium/third_party/webrtc/api/mediastreamprovider.h b/chromium/third_party/webrtc/api/mediastreamprovider.h
index 6814c416e20..b23e17bd4bb 100644
--- a/chromium/third_party/webrtc/api/mediastreamprovider.h
+++ b/chromium/third_party/webrtc/api/mediastreamprovider.h
@@ -11,17 +11,17 @@
#ifndef WEBRTC_API_MEDIASTREAMPROVIDER_H_
#define WEBRTC_API_MEDIASTREAMPROVIDER_H_
+#include <memory>
+
#include "webrtc/api/rtpsenderinterface.h"
#include "webrtc/base/basictypes.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/media/base/videosinkinterface.h"
+#include "webrtc/media/base/videosourceinterface.h"
namespace cricket {
class AudioSource;
-class VideoCapturer;
class VideoFrame;
-class VideoRenderer;
struct AudioOptions;
struct VideoOptions;
@@ -61,11 +61,16 @@ class AudioProviderInterface {
// passed to the provider.
virtual void SetRawAudioSink(
uint32_t ssrc,
- rtc::scoped_ptr<webrtc::AudioSinkInterface> sink) = 0;
+ std::unique_ptr<webrtc::AudioSinkInterface> sink) = 0;
+
+ virtual RtpParameters GetAudioRtpSendParameters(uint32_t ssrc) const = 0;
+ virtual bool SetAudioRtpSendParameters(uint32_t ssrc,
+ const RtpParameters& parameters) = 0;
- virtual RtpParameters GetAudioRtpParameters(uint32_t ssrc) const = 0;
- virtual bool SetAudioRtpParameters(uint32_t ssrc,
- const RtpParameters& parameters) = 0;
+ virtual RtpParameters GetAudioRtpReceiveParameters(uint32_t ssrc) const = 0;
+ virtual bool SetAudioRtpReceiveParameters(
+ uint32_t ssrc,
+ const RtpParameters& parameters) = 0;
protected:
virtual ~AudioProviderInterface() {}
@@ -75,8 +80,9 @@ class AudioProviderInterface {
// of a video track connected to a certain PeerConnection.
class VideoProviderInterface {
public:
- virtual bool SetCaptureDevice(uint32_t ssrc,
- cricket::VideoCapturer* camera) = 0;
+ virtual bool SetSource(
+ uint32_t ssrc,
+ rtc::VideoSourceInterface<cricket::VideoFrame>* source) = 0;
// Enable/disable the video playout of a remote video track with |ssrc|.
virtual void SetVideoPlayout(
uint32_t ssrc,
@@ -87,9 +93,14 @@ class VideoProviderInterface {
bool enable,
const cricket::VideoOptions* options) = 0;
- virtual RtpParameters GetVideoRtpParameters(uint32_t ssrc) const = 0;
- virtual bool SetVideoRtpParameters(uint32_t ssrc,
- const RtpParameters& parameters) = 0;
+ virtual RtpParameters GetVideoRtpSendParameters(uint32_t ssrc) const = 0;
+ virtual bool SetVideoRtpSendParameters(uint32_t ssrc,
+ const RtpParameters& parameters) = 0;
+
+ virtual RtpParameters GetVideoRtpReceiveParameters(uint32_t ssrc) const = 0;
+ virtual bool SetVideoRtpReceiveParameters(
+ uint32_t ssrc,
+ const RtpParameters& parameters) = 0;
protected:
virtual ~VideoProviderInterface() {}
diff --git a/chromium/third_party/webrtc/api/mediastreamproxy.h b/chromium/third_party/webrtc/api/mediastreamproxy.h
index 645b28a3f79..06f8eb3b2cf 100644
--- a/chromium/third_party/webrtc/api/mediastreamproxy.h
+++ b/chromium/third_party/webrtc/api/mediastreamproxy.h
@@ -16,7 +16,7 @@
namespace webrtc {
-BEGIN_PROXY_MAP(MediaStream)
+BEGIN_SIGNALING_PROXY_MAP(MediaStream)
PROXY_CONSTMETHOD0(std::string, label)
PROXY_METHOD0(AudioTrackVector, GetAudioTracks)
PROXY_METHOD0(VideoTrackVector, GetVideoTracks)
@@ -30,7 +30,7 @@ BEGIN_PROXY_MAP(MediaStream)
PROXY_METHOD1(bool, RemoveTrack, VideoTrackInterface*)
PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
-END_PROXY()
+END_SIGNALING_PROXY()
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/mediastreamtrackproxy.h b/chromium/third_party/webrtc/api/mediastreamtrackproxy.h
index f68773223b9..12fdc36b368 100644
--- a/chromium/third_party/webrtc/api/mediastreamtrackproxy.h
+++ b/chromium/third_party/webrtc/api/mediastreamtrackproxy.h
@@ -19,7 +19,7 @@
namespace webrtc {
-BEGIN_PROXY_MAP(AudioTrack)
+BEGIN_SIGNALING_PROXY_MAP(AudioTrack)
PROXY_CONSTMETHOD0(std::string, kind)
PROXY_CONSTMETHOD0(std::string, id)
PROXY_CONSTMETHOD0(TrackState, state)
@@ -33,7 +33,7 @@ BEGIN_PROXY_MAP(AudioTrack)
PROXY_METHOD1(bool, set_enabled, bool)
PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
-END_PROXY()
+END_SIGNALING_PROXY()
BEGIN_PROXY_MAP(VideoTrack)
PROXY_CONSTMETHOD0(std::string, kind)
@@ -41,11 +41,13 @@ BEGIN_PROXY_MAP(VideoTrack)
PROXY_CONSTMETHOD0(TrackState, state)
PROXY_CONSTMETHOD0(bool, enabled)
PROXY_METHOD1(bool, set_enabled, bool)
- PROXY_METHOD2(void,
+ PROXY_WORKER_METHOD2(void,
AddOrUpdateSink,
rtc::VideoSinkInterface<cricket::VideoFrame>*,
const rtc::VideoSinkWants&)
- PROXY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface<cricket::VideoFrame>*)
+ PROXY_WORKER_METHOD1(void,
+ RemoveSink,
+ rtc::VideoSinkInterface<cricket::VideoFrame>*)
PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource)
PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
diff --git a/chromium/third_party/webrtc/api/objc/README b/chromium/third_party/webrtc/api/objc/README
deleted file mode 100644
index bd33e61921e..00000000000
--- a/chromium/third_party/webrtc/api/objc/README
+++ /dev/null
@@ -1,3 +0,0 @@
-This is a work-in-progress to update the Objective-C API according to the W3C
-specification. The Objective-C API located at talk/app/webrtc/objc is
-deprecated, but will remain for the time being.
diff --git a/chromium/third_party/webrtc/api/peerconnection.cc b/chromium/third_party/webrtc/api/peerconnection.cc
index b2b8062ca84..dda0eebb074 100644
--- a/chromium/third_party/webrtc/api/peerconnection.cc
+++ b/chromium/third_party/webrtc/api/peerconnection.cc
@@ -66,6 +66,9 @@ static const char kTransport[] = "transport";
// NOTE: Must be in the same order as the ServiceType enum.
static const char* kValidIceServiceTypes[] = {"stun", "stuns", "turn", "turns"};
+// The length of RTCP CNAMEs.
+static const int kRtcpCnameLength = 16;
+
// NOTE: A loop below assumes that the first value of this enum is 0 and all
// other values are incremental.
enum ServiceType {
@@ -373,10 +376,37 @@ void AddSendStreams(
}
}
+uint32_t ConvertIceTransportTypeToCandidateFilter(
+ PeerConnectionInterface::IceTransportsType type) {
+ switch (type) {
+ case PeerConnectionInterface::kNone:
+ return cricket::CF_NONE;
+ case PeerConnectionInterface::kRelay:
+ return cricket::CF_RELAY;
+ case PeerConnectionInterface::kNoHost:
+ return (cricket::CF_ALL & ~cricket::CF_HOST);
+ case PeerConnectionInterface::kAll:
+ return cricket::CF_ALL;
+ default:
+ ASSERT(false);
+ }
+ return cricket::CF_NONE;
+}
+
} // namespace
namespace webrtc {
+// Generate a RTCP CNAME when a PeerConnection is created.
+std::string GenerateRtcpCname() {
+ std::string cname;
+ if (!rtc::CreateRandomString(kRtcpCnameLength, &cname)) {
+ LOG(LS_ERROR) << "Failed to generate CNAME.";
+ RTC_DCHECK(false);
+ }
+ return cname;
+}
+
bool ExtractMediaSessionOptions(
const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options,
bool is_offer,
@@ -508,6 +538,7 @@ PeerConnection::PeerConnection(PeerConnectionFactory* factory)
ice_state_(kIceNew),
ice_connection_state_(kIceConnectionNew),
ice_gathering_state_(kIceGatheringNew),
+ rtcp_cname_(GenerateRtcpCname()),
local_streams_(StreamCollection::Create()),
remote_streams_(StreamCollection::Create()) {}
@@ -522,13 +553,20 @@ PeerConnection::~PeerConnection() {
for (const auto& receiver : receivers_) {
receiver->Stop();
}
+ // Destroy stats_ because it depends on session_.
+ stats_.reset(nullptr);
+ // Now destroy session_ before destroying other members,
+ // because its destruction fires signals (such as VoiceChannelDestroyed)
+ // which will trigger some final actions in PeerConnection...
+ session_.reset(nullptr);
+ // port_allocator_ lives on the worker thread and should be destroyed there.
+ worker_thread()->Invoke<void>([this] { port_allocator_.reset(nullptr); });
}
bool PeerConnection::Initialize(
- const cricket::MediaConfig& media_config,
const PeerConnectionInterface::RTCConfiguration& configuration,
- rtc::scoped_ptr<cricket::PortAllocator> allocator,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ std::unique_ptr<cricket::PortAllocator> allocator,
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
PeerConnectionObserver* observer) {
TRACE_EVENT0("webrtc", "PeerConnection::Initialize");
RTC_DCHECK(observer != nullptr);
@@ -539,41 +577,20 @@ bool PeerConnection::Initialize(
port_allocator_ = std::move(allocator);
- cricket::ServerAddresses stun_servers;
- std::vector<cricket::RelayServerConfig> turn_servers;
- if (!ParseIceServers(configuration.servers, &stun_servers, &turn_servers)) {
+ // The port allocator lives on the worker thread and should be initialized
+ // there.
+ if (!worker_thread()->Invoke<bool>(rtc::Bind(
+ &PeerConnection::InitializePortAllocator_w, this, configuration))) {
return false;
}
- port_allocator_->SetIceServers(stun_servers, turn_servers);
-
- // To handle both internal and externally created port allocator, we will
- // enable BUNDLE here.
- int portallocator_flags = port_allocator_->flags();
- portallocator_flags |= cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET |
- cricket::PORTALLOCATOR_ENABLE_IPV6;
- // If the disable-IPv6 flag was specified, we'll not override it
- // by experiment.
- if (configuration.disable_ipv6) {
- portallocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6);
- } else if (webrtc::field_trial::FindFullName("WebRTC-IPv6Default") ==
- "Disabled") {
- portallocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6);
- }
-
- if (configuration.tcp_candidate_policy == kTcpCandidatePolicyDisabled) {
- portallocator_flags |= cricket::PORTALLOCATOR_DISABLE_TCP;
- LOG(LS_INFO) << "TCP candidates are disabled.";
- }
- port_allocator_->set_flags(portallocator_flags);
- // No step delay is used while allocating ports.
- port_allocator_->set_step_delay(cricket::kMinimumStepDelay);
-
- media_controller_.reset(factory_->CreateMediaController(media_config));
+ media_controller_.reset(
+ factory_->CreateMediaController(configuration.media_config));
session_.reset(
- new WebRtcSession(media_controller_.get(), factory_->signaling_thread(),
- factory_->worker_thread(), port_allocator_.get()));
+ new WebRtcSession(media_controller_.get(), factory_->network_thread(),
+ factory_->worker_thread(), factory_->signaling_thread(),
+ port_allocator_.get()));
stats_.reset(new StatsCollector(this));
// Initialize the WebRtcSession. It creates transport channels etc.
@@ -628,7 +645,7 @@ bool PeerConnection::AddStream(MediaStreamInterface* local_stream) {
&PeerConnection::OnVideoTrackAdded);
observer->SignalVideoTrackRemoved.connect(
this, &PeerConnection::OnVideoTrackRemoved);
- stream_observers_.push_back(rtc::scoped_ptr<MediaStreamObserver>(observer));
+ stream_observers_.push_back(std::unique_ptr<MediaStreamObserver>(observer));
for (const auto& track : local_stream->GetAudioTracks()) {
OnAudioTrackAdded(track.get(), local_stream);
@@ -655,7 +672,7 @@ void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) {
stream_observers_.erase(
std::remove_if(
stream_observers_.begin(), stream_observers_.end(),
- [local_stream](const rtc::scoped_ptr<MediaStreamObserver>& observer) {
+ [local_stream](const std::unique_ptr<MediaStreamObserver>& observer) {
return observer->stream()->label().compare(local_stream->label()) ==
0;
}),
@@ -835,7 +852,7 @@ PeerConnection::CreateDataChannel(
TRACE_EVENT0("webrtc", "PeerConnection::CreateDataChannel");
bool first_datachannel = !HasDataChannels();
- rtc::scoped_ptr<InternalDataChannelInit> internal_config;
+ std::unique_ptr<InternalDataChannelInit> internal_config;
if (config) {
internal_config.reset(new InternalDataChannelInit(*config));
}
@@ -1144,18 +1161,19 @@ void PeerConnection::SetRemoteDescription(
signaling_thread()->Post(this, MSG_SET_SESSIONDESCRIPTION_SUCCESS, msg);
}
-bool PeerConnection::SetConfiguration(const RTCConfiguration& config) {
+bool PeerConnection::SetConfiguration(const RTCConfiguration& configuration) {
TRACE_EVENT0("webrtc", "PeerConnection::SetConfiguration");
if (port_allocator_) {
- cricket::ServerAddresses stun_servers;
- std::vector<cricket::RelayServerConfig> turn_servers;
- if (!ParseIceServers(config.servers, &stun_servers, &turn_servers)) {
+ if (!worker_thread()->Invoke<bool>(
+ rtc::Bind(&PeerConnection::ReconfigurePortAllocator_w, this,
+ configuration))) {
return false;
}
- port_allocator_->SetIceServers(stun_servers, turn_servers);
}
- session_->SetIceConfig(session_->ParseIceConfig(config));
- return session_->SetIceTransports(config.type);
+
+ // TODO(deadbeef): Shouldn't have to hop to the worker thread twice...
+ session_->SetIceConfig(session_->ParseIceConfig(configuration));
+ return true;
}
bool PeerConnection::AddIceCandidate(
@@ -1503,6 +1521,8 @@ bool PeerConnection::GetOptionsForOffer(
if (session_->data_channel_type() == cricket::DCT_SCTP && HasDataChannels()) {
session_options->data_channel_type = cricket::DCT_SCTP;
}
+
+ session_options->rtcp_cname = rtcp_cname_;
return true;
}
@@ -1540,6 +1560,8 @@ bool PeerConnection::GetOptionsForAnswer(
if (!ParseConstraintsForAnswer(constraints, session_options)) {
return false;
}
+ session_options->rtcp_cname = rtcp_cname_;
+
FinishOptionsForAnswer(session_options);
return true;
}
@@ -1552,6 +1574,8 @@ bool PeerConnection::GetOptionsForAnswer(
if (!ExtractMediaSessionOptions(options, false, session_options)) {
return false;
}
+ session_options->rtcp_cname = rtcp_cname_;
+
FinishOptionsForAnswer(session_options);
return true;
}
@@ -2064,4 +2088,60 @@ DataChannel* PeerConnection::FindDataChannelBySid(int sid) const {
return nullptr;
}
+bool PeerConnection::InitializePortAllocator_w(
+ const RTCConfiguration& configuration) {
+ cricket::ServerAddresses stun_servers;
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ if (!ParseIceServers(configuration.servers, &stun_servers, &turn_servers)) {
+ return false;
+ }
+
+ // To handle both internal and externally created port allocator, we will
+ // enable BUNDLE here.
+ int portallocator_flags = port_allocator_->flags();
+ portallocator_flags |= cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET |
+ cricket::PORTALLOCATOR_ENABLE_IPV6;
+ // If the disable-IPv6 flag was specified, we'll not override it
+ // by experiment.
+ if (configuration.disable_ipv6) {
+ portallocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6);
+ } else if (webrtc::field_trial::FindFullName("WebRTC-IPv6Default") ==
+ "Disabled") {
+ portallocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6);
+ }
+
+ if (configuration.tcp_candidate_policy == kTcpCandidatePolicyDisabled) {
+ portallocator_flags |= cricket::PORTALLOCATOR_DISABLE_TCP;
+ LOG(LS_INFO) << "TCP candidates are disabled.";
+ }
+
+ port_allocator_->set_flags(portallocator_flags);
+ // No step delay is used while allocating ports.
+ port_allocator_->set_step_delay(cricket::kMinimumStepDelay);
+ port_allocator_->set_candidate_filter(
+ ConvertIceTransportTypeToCandidateFilter(configuration.type));
+
+ // Call this last since it may create pooled allocator sessions using the
+ // properties set above.
+ port_allocator_->SetConfiguration(stun_servers, turn_servers,
+ configuration.ice_candidate_pool_size);
+ return true;
+}
+
+bool PeerConnection::ReconfigurePortAllocator_w(
+ const RTCConfiguration& configuration) {
+ cricket::ServerAddresses stun_servers;
+ std::vector<cricket::RelayServerConfig> turn_servers;
+ if (!ParseIceServers(configuration.servers, &stun_servers, &turn_servers)) {
+ return false;
+ }
+ port_allocator_->set_candidate_filter(
+ ConvertIceTransportTypeToCandidateFilter(configuration.type));
+ // Call this last since it may create pooled allocator sessions using the
+ // candidate filter set above.
+ port_allocator_->SetConfiguration(stun_servers, turn_servers,
+ configuration.ice_candidate_pool_size);
+ return true;
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/peerconnection.h b/chromium/third_party/webrtc/api/peerconnection.h
index d1676733d57..8ba7e583724 100644
--- a/chromium/third_party/webrtc/api/peerconnection.h
+++ b/chromium/third_party/webrtc/api/peerconnection.h
@@ -13,6 +13,7 @@
#include <string>
#include <map>
+#include <memory>
#include <vector>
#include "webrtc/api/dtlsidentitystore.h"
@@ -23,7 +24,6 @@
#include "webrtc/api/statscollector.h"
#include "webrtc/api/streamcollection.h"
#include "webrtc/api/webrtcsession.h"
-#include "webrtc/base/scoped_ptr.h"
namespace webrtc {
@@ -68,10 +68,9 @@ class PeerConnection : public PeerConnectionInterface,
explicit PeerConnection(PeerConnectionFactory* factory);
bool Initialize(
- const cricket::MediaConfig& media_config,
const PeerConnectionInterface::RTCConfiguration& configuration,
- rtc::scoped_ptr<cricket::PortAllocator> allocator,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ std::unique_ptr<cricket::PortAllocator> allocator,
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
PeerConnectionObserver* observer);
rtc::scoped_refptr<StreamCollectionInterface> local_streams() override;
@@ -131,7 +130,7 @@ class PeerConnection : public PeerConnectionInterface,
void SetRemoteDescription(SetSessionDescriptionObserver* observer,
SessionDescriptionInterface* desc) override;
bool SetConfiguration(
- const PeerConnectionInterface::RTCConfiguration& config) override;
+ const PeerConnectionInterface::RTCConfiguration& configuration) override;
bool AddIceCandidate(const IceCandidateInterface* candidate) override;
bool RemoveIceCandidates(
const std::vector<cricket::Candidate>& candidates) override;
@@ -210,6 +209,8 @@ class PeerConnection : public PeerConnectionInterface,
return factory_->signaling_thread();
}
+ rtc::Thread* worker_thread() const { return factory_->worker_thread(); }
+
void PostSetSessionDescriptionFailure(SetSessionDescriptionObserver* observer,
const std::string& error);
void PostCreateSessionDescriptionFailure(
@@ -351,6 +352,12 @@ class PeerConnection : public PeerConnectionInterface,
// or nullptr if not found.
DataChannel* FindDataChannelBySid(int sid) const;
+ // Called when first configuring the port allocator.
+ bool InitializePortAllocator_w(const RTCConfiguration& configuration);
+ // Called when SetConfiguration is called. Only a subset of the configuration
+ // is applied.
+ bool ReconfigurePortAllocator_w(const RTCConfiguration& configuration);
+
// Storing the factory as a scoped reference pointer ensures that the memory
// in the PeerConnectionFactoryImpl remains available as long as the
// PeerConnection is running. It is passed to PeerConnection as a raw pointer.
@@ -366,15 +373,19 @@ class PeerConnection : public PeerConnectionInterface,
IceConnectionState ice_connection_state_;
IceGatheringState ice_gathering_state_;
- rtc::scoped_ptr<cricket::PortAllocator> port_allocator_;
- rtc::scoped_ptr<MediaControllerInterface> media_controller_;
+ std::unique_ptr<cricket::PortAllocator> port_allocator_;
+ std::unique_ptr<MediaControllerInterface> media_controller_;
+
+ // One PeerConnection has only one RTCP CNAME.
+ // https://tools.ietf.org/html/draft-ietf-rtcweb-rtp-usage-26#section-4.9
+ std::string rtcp_cname_;
// Streams added via AddStream.
rtc::scoped_refptr<StreamCollection> local_streams_;
// Streams created as a result of SetRemoteDescription.
rtc::scoped_refptr<StreamCollection> remote_streams_;
- std::vector<rtc::scoped_ptr<MediaStreamObserver>> stream_observers_;
+ std::vector<std::unique_ptr<MediaStreamObserver>> stream_observers_;
// These lists store track info seen in local/remote descriptions.
TrackInfos remote_audio_tracks_;
@@ -393,12 +404,8 @@ class PeerConnection : public PeerConnectionInterface,
std::vector<rtc::scoped_refptr<RtpSenderInterface>> senders_;
std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers_;
- // The session_ scoped_ptr is declared at the bottom of PeerConnection
- // because its destruction fires signals (such as VoiceChannelDestroyed)
- // which will trigger some final actions in PeerConnection...
- rtc::scoped_ptr<WebRtcSession> session_;
- // ... But stats_ depends on session_ so it should be destroyed even earlier.
- rtc::scoped_ptr<StatsCollector> stats_;
+ std::unique_ptr<WebRtcSession> session_;
+ std::unique_ptr<StatsCollector> stats_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/peerconnection_unittest.cc b/chromium/third_party/webrtc/api/peerconnection_unittest.cc
index 67b4efbc73f..24411ab7290 100644
--- a/chromium/third_party/webrtc/api/peerconnection_unittest.cc
+++ b/chromium/third_party/webrtc/api/peerconnection_unittest.cc
@@ -13,6 +13,7 @@
#include <algorithm>
#include <list>
#include <map>
+#include <memory>
#include <utility>
#include <vector>
@@ -31,15 +32,14 @@
#include "webrtc/api/test/mockpeerconnectionobservers.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/physicalsocketserver.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/sslstreamadapter.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/virtualsocketserver.h"
#include "webrtc/media/engine/fakewebrtcvideoengine.h"
+#include "webrtc/p2p/base/fakeportallocator.h"
#include "webrtc/p2p/base/p2pconstants.h"
#include "webrtc/p2p/base/sessiondescription.h"
-#include "webrtc/p2p/client/fakeportallocator.h"
#include "webrtc/pc/mediasession.h"
#define MAYBE_SKIP_TEST(feature) \
@@ -154,11 +154,13 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
const std::string& id,
const MediaConstraintsInterface* constraints,
const PeerConnectionFactory::Options* options,
- rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store,
- bool prefer_constraint_apis) {
+ std::unique_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store,
+ bool prefer_constraint_apis,
+ rtc::Thread* network_thread,
+ rtc::Thread* worker_thread) {
PeerConnectionTestClient* client(new PeerConnectionTestClient(id));
if (!client->Init(constraints, options, std::move(dtls_identity_store),
- prefer_constraint_apis)) {
+ prefer_constraint_apis, network_thread, worker_thread)) {
delete client;
return nullptr;
}
@@ -168,24 +170,30 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
static PeerConnectionTestClient* CreateClient(
const std::string& id,
const MediaConstraintsInterface* constraints,
- const PeerConnectionFactory::Options* options) {
- rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store(
+ const PeerConnectionFactory::Options* options,
+ rtc::Thread* network_thread,
+ rtc::Thread* worker_thread) {
+ std::unique_ptr<FakeDtlsIdentityStore> dtls_identity_store(
rtc::SSLStreamAdapter::HaveDtlsSrtp() ? new FakeDtlsIdentityStore()
: nullptr);
return CreateClientWithDtlsIdentityStore(
- id, constraints, options, std::move(dtls_identity_store), true);
+ id, constraints, options, std::move(dtls_identity_store), true,
+ network_thread, worker_thread);
}
static PeerConnectionTestClient* CreateClientPreferNoConstraints(
const std::string& id,
- const PeerConnectionFactory::Options* options) {
- rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store(
+ const PeerConnectionFactory::Options* options,
+ rtc::Thread* network_thread,
+ rtc::Thread* worker_thread) {
+ std::unique_ptr<FakeDtlsIdentityStore> dtls_identity_store(
rtc::SSLStreamAdapter::HaveDtlsSrtp() ? new FakeDtlsIdentityStore()
: nullptr);
return CreateClientWithDtlsIdentityStore(
- id, nullptr, options, std::move(dtls_identity_store), false);
+ id, nullptr, options, std::move(dtls_identity_store), false,
+ network_thread, worker_thread);
}
~PeerConnectionTestClient() {
@@ -194,7 +202,7 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
void Negotiate() { Negotiate(true, true); }
void Negotiate(bool audio, bool video) {
- rtc::scoped_ptr<SessionDescriptionInterface> offer;
+ std::unique_ptr<SessionDescriptionInterface> offer;
ASSERT_TRUE(DoCreateOffer(&offer));
if (offer->description()->GetContentByName("audio")) {
@@ -226,7 +234,7 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
int sdp_mline_index,
const std::string& msg) override {
LOG(INFO) << id_ << "ReceiveIceMessage";
- rtc::scoped_ptr<webrtc::IceCandidateInterface> candidate(
+ std::unique_ptr<webrtc::IceCandidateInterface> candidate(
webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, msg, nullptr));
EXPECT_TRUE(pc()->AddIceCandidate(candidate.get()));
}
@@ -544,7 +552,7 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
// Verify the CreateDtmfSender interface
void VerifyDtmf() {
- rtc::scoped_ptr<DummyDtmfObserver> observer(new DummyDtmfObserver());
+ std::unique_ptr<DummyDtmfObserver> observer(new DummyDtmfObserver());
rtc::scoped_refptr<DtmfSenderInterface> dtmf_sender;
// We can't create a DTMF sender with an invalid audio track or a non local
@@ -799,8 +807,10 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
bool Init(
const MediaConstraintsInterface* constraints,
const PeerConnectionFactory::Options* options,
- rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store,
- bool prefer_constraint_apis) {
+ std::unique_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store,
+ bool prefer_constraint_apis,
+ rtc::Thread* network_thread,
+ rtc::Thread* worker_thread) {
EXPECT_TRUE(!peer_connection_);
EXPECT_TRUE(!peer_connection_factory_);
if (!prefer_constraint_apis) {
@@ -808,8 +818,8 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
}
prefer_constraint_apis_ = prefer_constraint_apis;
- rtc::scoped_ptr<cricket::PortAllocator> port_allocator(
- new cricket::FakePortAllocator(rtc::Thread::Current(), nullptr));
+ std::unique_ptr<cricket::PortAllocator> port_allocator(
+ new cricket::FakePortAllocator(network_thread, nullptr));
fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
if (fake_audio_capture_module_ == nullptr) {
@@ -817,8 +827,9 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
}
fake_video_decoder_factory_ = new FakeWebRtcVideoDecoderFactory();
fake_video_encoder_factory_ = new FakeWebRtcVideoEncoderFactory();
+ rtc::Thread* const signaling_thread = rtc::Thread::Current();
peer_connection_factory_ = webrtc::CreatePeerConnectionFactory(
- rtc::Thread::Current(), rtc::Thread::Current(),
+ network_thread, worker_thread, signaling_thread,
fake_audio_capture_module_, fake_video_encoder_factory_,
fake_video_decoder_factory_);
if (!peer_connection_factory_) {
@@ -833,9 +844,9 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
}
rtc::scoped_refptr<webrtc::PeerConnectionInterface> CreatePeerConnection(
- rtc::scoped_ptr<cricket::PortAllocator> port_allocator,
+ std::unique_ptr<cricket::PortAllocator> port_allocator,
const MediaConstraintsInterface* constraints,
- rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store) {
+ std::unique_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store) {
// CreatePeerConnection with RTCConfiguration.
webrtc::PeerConnectionInterface::RTCConfiguration config;
webrtc::PeerConnectionInterface::IceServer ice_server;
@@ -853,10 +864,10 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
// If we are not sending any streams ourselves it is time to add some.
AddMediaStream(true, true);
}
- rtc::scoped_ptr<SessionDescriptionInterface> desc(
+ std::unique_ptr<SessionDescriptionInterface> desc(
webrtc::CreateSessionDescription("offer", msg, nullptr));
EXPECT_TRUE(DoSetRemoteDescription(desc.release()));
- rtc::scoped_ptr<SessionDescriptionInterface> answer;
+ std::unique_ptr<SessionDescriptionInterface> answer;
EXPECT_TRUE(DoCreateAnswer(&answer));
std::string sdp;
EXPECT_TRUE(answer->ToString(&sdp));
@@ -869,12 +880,12 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
void HandleIncomingAnswer(const std::string& msg) {
LOG(INFO) << id_ << "HandleIncomingAnswer";
- rtc::scoped_ptr<SessionDescriptionInterface> desc(
+ std::unique_ptr<SessionDescriptionInterface> desc(
webrtc::CreateSessionDescription("answer", msg, nullptr));
EXPECT_TRUE(DoSetRemoteDescription(desc.release()));
}
- bool DoCreateOfferAnswer(rtc::scoped_ptr<SessionDescriptionInterface>* desc,
+ bool DoCreateOfferAnswer(std::unique_ptr<SessionDescriptionInterface>* desc,
bool offer) {
rtc::scoped_refptr<MockCreateSessionDescriptionObserver>
observer(new rtc::RefCountedObject<
@@ -900,11 +911,11 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
return observer->result();
}
- bool DoCreateOffer(rtc::scoped_ptr<SessionDescriptionInterface>* desc) {
+ bool DoCreateOffer(std::unique_ptr<SessionDescriptionInterface>* desc) {
return DoCreateOfferAnswer(desc, true);
}
- bool DoCreateAnswer(rtc::scoped_ptr<SessionDescriptionInterface>* desc) {
+ bool DoCreateAnswer(std::unique_ptr<SessionDescriptionInterface>* desc) {
return DoCreateOfferAnswer(desc, false);
}
@@ -977,10 +988,10 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
// Needed to keep track of number of frames sent.
rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
// Needed to keep track of number of frames received.
- std::map<std::string, rtc::scoped_ptr<webrtc::FakeVideoTrackRenderer>>
+ std::map<std::string, std::unique_ptr<webrtc::FakeVideoTrackRenderer>>
fake_video_renderers_;
// Needed to ensure frames aren't received for removed tracks.
- std::vector<rtc::scoped_ptr<webrtc::FakeVideoTrackRenderer>>
+ std::vector<std::unique_ptr<webrtc::FakeVideoTrackRenderer>>
removed_fake_video_renderers_;
// Needed to keep track of number of frames received when external decoder
// used.
@@ -997,7 +1008,7 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
std::vector<cricket::FakeVideoCapturer*> video_capturers_;
webrtc::VideoRotation capture_rotation_ = webrtc::kVideoRotation_0;
// |local_video_renderer_| attached to the first created local video track.
- rtc::scoped_ptr<webrtc::FakeVideoTrackRenderer> local_video_renderer_;
+ std::unique_ptr<webrtc::FakeVideoTrackRenderer> local_video_renderer_;
webrtc::FakeConstraints offer_answer_constraints_;
PeerConnectionInterface::RTCOfferAnswerOptions offer_answer_options_;
@@ -1011,15 +1022,20 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
bool remove_cvo_ = false;
rtc::scoped_refptr<DataChannelInterface> data_channel_;
- rtc::scoped_ptr<MockDataChannelObserver> data_observer_;
+ std::unique_ptr<MockDataChannelObserver> data_observer_;
};
class P2PTestConductor : public testing::Test {
public:
P2PTestConductor()
- : pss_(new rtc::PhysicalSocketServer),
+ : network_thread_(rtc::Thread::CreateWithSocketServer()),
+ worker_thread_(rtc::Thread::Create()),
+ pss_(new rtc::PhysicalSocketServer),
ss_(new rtc::VirtualSocketServer(pss_.get())),
- ss_scope_(ss_.get()) {}
+ ss_scope_(ss_.get()) {
+ RTC_CHECK(network_thread_->Start());
+ RTC_CHECK(worker_thread_->Start());
+ }
bool SessionActive() {
return initiating_client_->SessionActive() &&
@@ -1127,11 +1143,11 @@ class P2PTestConductor : public testing::Test {
bool CreateTestClientsThatPreferNoConstraints() {
initiating_client_.reset(
- PeerConnectionTestClient::CreateClientPreferNoConstraints("Caller: ",
- nullptr));
+ PeerConnectionTestClient::CreateClientPreferNoConstraints(
+ "Caller: ", nullptr, network_thread_.get(), worker_thread_.get()));
receiving_client_.reset(
- PeerConnectionTestClient::CreateClientPreferNoConstraints("Callee: ",
- nullptr));
+ PeerConnectionTestClient::CreateClientPreferNoConstraints(
+ "Callee: ", nullptr, network_thread_.get(), worker_thread_.get()));
if (!initiating_client_ || !receiving_client_) {
return false;
}
@@ -1151,9 +1167,11 @@ class P2PTestConductor : public testing::Test {
MediaConstraintsInterface* recv_constraints,
PeerConnectionFactory::Options* recv_options) {
initiating_client_.reset(PeerConnectionTestClient::CreateClient(
- "Caller: ", init_constraints, init_options));
+ "Caller: ", init_constraints, init_options, network_thread_.get(),
+ worker_thread_.get()));
receiving_client_.reset(PeerConnectionTestClient::CreateClient(
- "Callee: ", recv_constraints, recv_options));
+ "Callee: ", recv_constraints, recv_options, network_thread_.get(),
+ worker_thread_.get()));
if (!initiating_client_ || !receiving_client_) {
return false;
}
@@ -1246,7 +1264,7 @@ class P2PTestConductor : public testing::Test {
setup_constraints.AddMandatory(MediaConstraintsInterface::kEnableDtlsSrtp,
true);
- rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store(
+ std::unique_ptr<FakeDtlsIdentityStore> dtls_identity_store(
rtc::SSLStreamAdapter::HaveDtlsSrtp() ? new FakeDtlsIdentityStore()
: nullptr);
dtls_identity_store->use_alternate_key();
@@ -1254,7 +1272,8 @@ class P2PTestConductor : public testing::Test {
// Make sure the new client is using a different certificate.
return PeerConnectionTestClient::CreateClientWithDtlsIdentityStore(
"New Peer: ", &setup_constraints, nullptr,
- std::move(dtls_identity_store), prefer_constraint_apis_);
+ std::move(dtls_identity_store), prefer_constraint_apis_,
+ network_thread_.get(), worker_thread_.get());
}
void SendRtpData(webrtc::DataChannelInterface* dc, const std::string& data) {
@@ -1294,11 +1313,15 @@ class P2PTestConductor : public testing::Test {
}
private:
- rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
- rtc::scoped_ptr<rtc::VirtualSocketServer> ss_;
+ // |worker_thread_| is used by both |initiating_client_| and
+ // |receiving_client_|. Must be destroyed last.
+ std::unique_ptr<rtc::Thread> network_thread_;
+ std::unique_ptr<rtc::Thread> worker_thread_;
+ std::unique_ptr<rtc::PhysicalSocketServer> pss_;
+ std::unique_ptr<rtc::VirtualSocketServer> ss_;
rtc::SocketServerScope ss_scope_;
- rtc::scoped_ptr<PeerConnectionTestClient> initiating_client_;
- rtc::scoped_ptr<PeerConnectionTestClient> receiving_client_;
+ std::unique_ptr<PeerConnectionTestClient> initiating_client_;
+ std::unique_ptr<PeerConnectionTestClient> receiving_client_;
bool prefer_constraint_apis_ = true;
};
@@ -1394,7 +1417,7 @@ TEST_F(P2PTestConductor, LocalP2PTestDtlsTransferCallee) {
// Keeping the original peer around which will still send packets to the
// receiving client. These SRTP packets will be dropped.
- rtc::scoped_ptr<PeerConnectionTestClient> original_peer(
+ std::unique_ptr<PeerConnectionTestClient> original_peer(
set_initializing_client(CreateDtlsClientWithAlternateKey()));
original_peer->pc()->Close();
@@ -1432,7 +1455,7 @@ TEST_F(P2PTestConductor, LocalP2PTestDtlsTransferCaller) {
// Keeping the original peer around which will still send packets to the
// receiving client. These SRTP packets will be dropped.
- rtc::scoped_ptr<PeerConnectionTestClient> original_peer(
+ std::unique_ptr<PeerConnectionTestClient> original_peer(
set_receiving_client(CreateDtlsClientWithAlternateKey()));
original_peer->pc()->Close();
@@ -1970,6 +1993,39 @@ TEST_F(P2PTestConductor, EarlyWarmupTest) {
kMaxWaitForFramesMs);
}
+TEST_F(P2PTestConductor, ForwardVideoOnlyStream) {
+ ASSERT_TRUE(CreateTestClients());
+ // One-way stream
+ receiving_client()->set_auto_add_stream(false);
+ // Video only, audio forwarding not expected to work.
+ initializing_client()->AddMediaStream(false, true);
+ initializing_client()->Negotiate();
+
+ ASSERT_TRUE_WAIT(SessionActive(), kMaxWaitForActivationMs);
+ VerifySessionDescriptions();
+
+ ASSERT_TRUE(initializing_client()->can_receive_video());
+ ASSERT_TRUE(receiving_client()->can_receive_video());
+
+ EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted,
+ initializing_client()->ice_connection_state(),
+ kMaxWaitForFramesMs);
+ EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected,
+ receiving_client()->ice_connection_state(),
+ kMaxWaitForFramesMs);
+
+ ASSERT_TRUE(receiving_client()->remote_streams()->count() == 1);
+
+ // Echo the stream back.
+ receiving_client()->pc()->AddStream(
+ receiving_client()->remote_streams()->at(0));
+ receiving_client()->Negotiate();
+
+ EXPECT_TRUE_WAIT(
+ initializing_client()->VideoFramesReceivedCheck(kEndVideoFrameCount),
+ kMaxWaitForFramesMs);
+}
+
class IceServerParsingTest : public testing::Test {
public:
// Convenience for parsing a single URL.
diff --git a/chromium/third_party/webrtc/api/peerconnection_unittests.isolate b/chromium/third_party/webrtc/api/peerconnection_unittests.isolate
index bc7db9084de..ad03ccf5f2f 100644
--- a/chromium/third_party/webrtc/api/peerconnection_unittests.isolate
+++ b/chromium/third_party/webrtc/api/peerconnection_unittests.isolate
@@ -1,28 +1,10 @@
+# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
-# libjingle
-# Copyright 2013 Google Inc.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-# 1. Redistributions of source code must retain the above copyright notice,
-# this list of conditions and the following disclaimer.
-# 2. Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-# 3. The name of the author may not be used to endorse or promote products
-# derived from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
-# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
-# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
-# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
-# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
{
'conditions': [
diff --git a/chromium/third_party/webrtc/api/peerconnection_unittests_apk.isolate b/chromium/third_party/webrtc/api/peerconnection_unittests_apk.isolate
new file mode 100644
index 00000000000..3b895e88e4e
--- /dev/null
+++ b/chromium/third_party/webrtc/api/peerconnection_unittests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'peerconnection_unittests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_peerconnection_unittests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/peerconnection_unittests_apk/',
+ '<(PRODUCT_DIR)/bin/run_peerconnection_unittests',
+ 'peerconnection_unittests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/api/peerconnectionendtoend_unittest.cc b/chromium/third_party/webrtc/api/peerconnectionendtoend_unittest.cc
index be662886d3f..4ca73babdb7 100644
--- a/chromium/third_party/webrtc/api/peerconnectionendtoend_unittest.cc
+++ b/chromium/third_party/webrtc/api/peerconnectionendtoend_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/api/test/peerconnectiontestwrapper.h"
// Notice that mockpeerconnectionobservers.h must be included after the above!
#include "webrtc/api/test/mockpeerconnectionobservers.h"
@@ -17,6 +19,7 @@
#include "webrtc/base/gunit.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/thread.h"
#include "webrtc/base/sslstreamadapter.h"
#include "webrtc/base/stringencode.h"
#include "webrtc/base/stringutils.h"
@@ -35,7 +38,7 @@ using webrtc::PeerConnectionInterface;
namespace {
-const size_t kMaxWait = 10000;
+const int kMaxWait = 10000;
} // namespace
@@ -46,11 +49,13 @@ class PeerConnectionEndToEndTest
typedef std::vector<rtc::scoped_refptr<DataChannelInterface> >
DataChannelList;
- PeerConnectionEndToEndTest()
- : caller_(new rtc::RefCountedObject<PeerConnectionTestWrapper>(
- "caller")),
- callee_(new rtc::RefCountedObject<PeerConnectionTestWrapper>(
- "callee")) {
+ PeerConnectionEndToEndTest() {
+ RTC_CHECK(network_thread_.Start());
+ RTC_CHECK(worker_thread_.Start());
+ caller_ = new rtc::RefCountedObject<PeerConnectionTestWrapper>(
+ "caller", &network_thread_, &worker_thread_);
+ callee_ = new rtc::RefCountedObject<PeerConnectionTestWrapper>(
+ "callee", &network_thread_, &worker_thread_);
#ifdef WEBRTC_ANDROID
webrtc::InitializeAndroidObjects();
#endif
@@ -110,10 +115,10 @@ class PeerConnectionEndToEndTest
// Tests that |dc1| and |dc2| can send to and receive from each other.
void TestDataChannelSendAndReceive(
DataChannelInterface* dc1, DataChannelInterface* dc2) {
- rtc::scoped_ptr<webrtc::MockDataChannelObserver> dc1_observer(
+ std::unique_ptr<webrtc::MockDataChannelObserver> dc1_observer(
new webrtc::MockDataChannelObserver(dc1));
- rtc::scoped_ptr<webrtc::MockDataChannelObserver> dc2_observer(
+ std::unique_ptr<webrtc::MockDataChannelObserver> dc2_observer(
new webrtc::MockDataChannelObserver(dc2));
static const std::string kDummyData = "abcdefg";
@@ -151,6 +156,8 @@ class PeerConnectionEndToEndTest
}
protected:
+ rtc::Thread network_thread_;
+ rtc::Thread worker_thread_;
rtc::scoped_refptr<PeerConnectionTestWrapper> caller_;
rtc::scoped_refptr<PeerConnectionTestWrapper> callee_;
DataChannelList caller_signaled_data_channels_;
@@ -291,10 +298,10 @@ TEST_F(PeerConnectionEndToEndTest,
WaitForDataChannelsToOpen(caller_dc_1, callee_signaled_data_channels_, 0);
WaitForDataChannelsToOpen(caller_dc_2, callee_signaled_data_channels_, 1);
- rtc::scoped_ptr<webrtc::MockDataChannelObserver> dc_1_observer(
+ std::unique_ptr<webrtc::MockDataChannelObserver> dc_1_observer(
new webrtc::MockDataChannelObserver(callee_signaled_data_channels_[0]));
- rtc::scoped_ptr<webrtc::MockDataChannelObserver> dc_2_observer(
+ std::unique_ptr<webrtc::MockDataChannelObserver> dc_2_observer(
new webrtc::MockDataChannelObserver(callee_signaled_data_channels_[1]));
const std::string message_1 = "hello 1";
diff --git a/chromium/third_party/webrtc/api/peerconnectionfactory.cc b/chromium/third_party/webrtc/api/peerconnectionfactory.cc
index 852b7a8cce8..9cb5b46785c 100644
--- a/chromium/third_party/webrtc/api/peerconnectionfactory.cc
+++ b/chromium/third_party/webrtc/api/peerconnectionfactory.cc
@@ -74,19 +74,17 @@ CreatePeerConnectionFactory() {
pc_factory);
}
-rtc::scoped_refptr<PeerConnectionFactoryInterface>
-CreatePeerConnectionFactory(
+rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
+ rtc::Thread* network_thread,
rtc::Thread* worker_thread,
rtc::Thread* signaling_thread,
AudioDeviceModule* default_adm,
cricket::WebRtcVideoEncoderFactory* encoder_factory,
cricket::WebRtcVideoDecoderFactory* decoder_factory) {
rtc::scoped_refptr<PeerConnectionFactory> pc_factory(
- new rtc::RefCountedObject<PeerConnectionFactory>(worker_thread,
- signaling_thread,
- default_adm,
- encoder_factory,
- decoder_factory));
+ new rtc::RefCountedObject<PeerConnectionFactory>(
+ network_thread, worker_thread, signaling_thread, default_adm,
+ encoder_factory, decoder_factory));
// Call Initialize synchronously but make sure its executed on
// |signaling_thread|.
@@ -104,16 +102,19 @@ CreatePeerConnectionFactory(
PeerConnectionFactory::PeerConnectionFactory()
: owns_ptrs_(true),
wraps_current_thread_(false),
- signaling_thread_(rtc::ThreadManager::Instance()->CurrentThread()),
- worker_thread_(new rtc::Thread) {
+ network_thread_(rtc::Thread::CreateWithSocketServer().release()),
+ worker_thread_(rtc::Thread::Create().release()),
+ signaling_thread_(rtc::Thread::Current()) {
if (!signaling_thread_) {
signaling_thread_ = rtc::ThreadManager::Instance()->WrapCurrentThread();
wraps_current_thread_ = true;
}
+ network_thread_->Start();
worker_thread_->Start();
}
PeerConnectionFactory::PeerConnectionFactory(
+ rtc::Thread* network_thread,
rtc::Thread* worker_thread,
rtc::Thread* signaling_thread,
AudioDeviceModule* default_adm,
@@ -121,13 +122,15 @@ PeerConnectionFactory::PeerConnectionFactory(
cricket::WebRtcVideoDecoderFactory* video_decoder_factory)
: owns_ptrs_(false),
wraps_current_thread_(false),
- signaling_thread_(signaling_thread),
+ network_thread_(network_thread),
worker_thread_(worker_thread),
+ signaling_thread_(signaling_thread),
default_adm_(default_adm),
video_encoder_factory_(video_encoder_factory),
video_decoder_factory_(video_decoder_factory) {
- ASSERT(worker_thread != NULL);
- ASSERT(signaling_thread != NULL);
+ RTC_DCHECK(network_thread);
+ RTC_DCHECK(worker_thread);
+ RTC_DCHECK(signaling_thread);
// TODO: Currently there is no way creating an external adm in
// libjingle source tree. So we can 't currently assert if this is NULL.
// ASSERT(default_adm != NULL);
@@ -148,12 +151,13 @@ PeerConnectionFactory::~PeerConnectionFactory() {
if (wraps_current_thread_)
rtc::ThreadManager::Instance()->UnwrapCurrentThread();
delete worker_thread_;
+ delete network_thread_;
}
}
bool PeerConnectionFactory::Initialize() {
RTC_DCHECK(signaling_thread_->IsCurrent());
- rtc::InitRandom(rtc::Time());
+ rtc::InitRandom(rtc::Time32());
default_network_manager_.reset(new rtc::BasicNetworkManager());
if (!default_network_manager_) {
@@ -161,7 +165,7 @@ bool PeerConnectionFactory::Initialize() {
}
default_socket_factory_.reset(
- new rtc::BasicPacketSocketFactory(worker_thread_));
+ new rtc::BasicPacketSocketFactory(network_thread_));
if (!default_socket_factory_) {
return false;
}
@@ -172,16 +176,16 @@ bool PeerConnectionFactory::Initialize() {
worker_thread_->Invoke<cricket::MediaEngineInterface*>(rtc::Bind(
&PeerConnectionFactory::CreateMediaEngine_w, this));
- channel_manager_.reset(
- new cricket::ChannelManager(media_engine, worker_thread_));
+ channel_manager_.reset(new cricket::ChannelManager(
+ media_engine, worker_thread_, network_thread_));
channel_manager_->SetVideoRtxEnabled(true);
if (!channel_manager_->Init()) {
return false;
}
- dtls_identity_store_ = new RefCountedDtlsIdentityStore(
- signaling_thread_, worker_thread_);
+ dtls_identity_store_ =
+ new RefCountedDtlsIdentityStore(signaling_thread_, network_thread_);
return true;
}
@@ -211,7 +215,8 @@ PeerConnectionFactory::CreateVideoSource(
rtc::scoped_refptr<VideoTrackSourceInterface> source(
VideoCapturerTrackSource::Create(worker_thread_, capturer, constraints,
false));
- return VideoTrackSourceProxy::Create(signaling_thread_, source);
+ return VideoTrackSourceProxy::Create(signaling_thread_, worker_thread_,
+ source);
}
rtc::scoped_refptr<VideoTrackSourceInterface>
@@ -219,7 +224,8 @@ PeerConnectionFactory::CreateVideoSource(cricket::VideoCapturer* capturer) {
RTC_DCHECK(signaling_thread_->IsCurrent());
rtc::scoped_refptr<VideoTrackSourceInterface> source(
VideoCapturerTrackSource::Create(worker_thread_, capturer, false));
- return VideoTrackSourceProxy::Create(signaling_thread_, source);
+ return VideoTrackSourceProxy::Create(signaling_thread_, worker_thread_,
+ source);
}
bool PeerConnectionFactory::StartAecDump(rtc::PlatformFile file,
@@ -233,9 +239,10 @@ void PeerConnectionFactory::StopAecDump() {
channel_manager_->StopAecDump();
}
-bool PeerConnectionFactory::StartRtcEventLog(rtc::PlatformFile file) {
+bool PeerConnectionFactory::StartRtcEventLog(rtc::PlatformFile file,
+ int64_t max_size_bytes) {
RTC_DCHECK(signaling_thread_->IsCurrent());
- return channel_manager_->StartRtcEventLog(file);
+ return channel_manager_->StartRtcEventLog(file, max_size_bytes);
}
void PeerConnectionFactory::StopRtcEventLog() {
@@ -247,8 +254,8 @@ rtc::scoped_refptr<PeerConnectionInterface>
PeerConnectionFactory::CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration_in,
const MediaConstraintsInterface* constraints,
- rtc::scoped_ptr<cricket::PortAllocator> allocator,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ std::unique_ptr<cricket::PortAllocator> allocator,
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
PeerConnectionObserver* observer) {
RTC_DCHECK(signaling_thread_->IsCurrent());
@@ -263,8 +270,8 @@ PeerConnectionFactory::CreatePeerConnection(
rtc::scoped_refptr<PeerConnectionInterface>
PeerConnectionFactory::CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
- rtc::scoped_ptr<cricket::PortAllocator> allocator,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ std::unique_ptr<cricket::PortAllocator> allocator,
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
PeerConnectionObserver* observer) {
RTC_DCHECK(signaling_thread_->IsCurrent());
@@ -280,28 +287,14 @@ PeerConnectionFactory::CreatePeerConnection(
allocator.reset(new cricket::BasicPortAllocator(
default_network_manager_.get(), default_socket_factory_.get()));
}
- allocator->SetNetworkIgnoreMask(options_.network_ignore_mask);
+ worker_thread_->Invoke<void>(
+ rtc::Bind(&cricket::PortAllocator::SetNetworkIgnoreMask, allocator.get(),
+ options_.network_ignore_mask));
rtc::scoped_refptr<PeerConnection> pc(
new rtc::RefCountedObject<PeerConnection>(this));
- // We rely on default values when constraints aren't found.
- cricket::MediaConfig media_config;
- media_config.video.disable_prerenderer_smoothing =
- configuration.disable_prerenderer_smoothing;
- if (configuration.enable_dscp) {
- media_config.enable_dscp = *(configuration.enable_dscp);
- }
- if (configuration.cpu_overuse_detection) {
- media_config.video.enable_cpu_overuse_detection =
- *(configuration.cpu_overuse_detection);
- }
- if (configuration.suspend_below_min_bitrate) {
- media_config.video.suspend_below_min_bitrate =
- *(configuration.suspend_below_min_bitrate);
- }
-
- if (!pc->Initialize(media_config, configuration, std::move(allocator),
+ if (!pc->Initialize(configuration, std::move(allocator),
std::move(dtls_identity_store), observer)) {
return nullptr;
}
@@ -321,7 +314,7 @@ rtc::scoped_refptr<VideoTrackInterface> PeerConnectionFactory::CreateVideoTrack(
RTC_DCHECK(signaling_thread_->IsCurrent());
rtc::scoped_refptr<VideoTrackInterface> track(
VideoTrack::Create(id, source));
- return VideoTrackProxy::Create(signaling_thread_, track);
+ return VideoTrackProxy::Create(signaling_thread_, worker_thread_, track);
}
rtc::scoped_refptr<AudioTrackInterface>
@@ -350,6 +343,10 @@ rtc::Thread* PeerConnectionFactory::worker_thread() {
return worker_thread_;
}
+rtc::Thread* PeerConnectionFactory::network_thread() {
+ return network_thread_;
+}
+
cricket::MediaEngineInterface* PeerConnectionFactory::CreateMediaEngine_w() {
ASSERT(worker_thread_ == rtc::Thread::Current());
return cricket::WebRtcMediaEngineFactory::Create(
diff --git a/chromium/third_party/webrtc/api/peerconnectionfactory.h b/chromium/third_party/webrtc/api/peerconnectionfactory.h
index b47f75a7ce9..21165cf3d28 100644
--- a/chromium/third_party/webrtc/api/peerconnectionfactory.h
+++ b/chromium/third_party/webrtc/api/peerconnectionfactory.h
@@ -11,13 +11,13 @@
#ifndef WEBRTC_API_PEERCONNECTIONFACTORY_H_
#define WEBRTC_API_PEERCONNECTIONFACTORY_H_
+#include <memory>
#include <string>
#include "webrtc/api/dtlsidentitystore.h"
#include "webrtc/api/mediacontroller.h"
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/peerconnectioninterface.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/thread.h"
#include "webrtc/pc/channelmanager.h"
@@ -34,7 +34,7 @@ typedef rtc::RefCountedObject<DtlsIdentityStoreImpl>
class PeerConnectionFactory : public PeerConnectionFactoryInterface {
public:
- virtual void SetOptions(const Options& options) {
+ void SetOptions(const Options& options) override {
options_ = options;
}
@@ -42,14 +42,14 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface {
rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
const MediaConstraintsInterface* constraints,
- rtc::scoped_ptr<cricket::PortAllocator> allocator,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ std::unique_ptr<cricket::PortAllocator> allocator,
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
PeerConnectionObserver* observer) override;
virtual rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
- rtc::scoped_ptr<cricket::PortAllocator> allocator,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ std::unique_ptr<cricket::PortAllocator> allocator,
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
PeerConnectionObserver* observer) override;
bool Initialize();
@@ -83,18 +83,24 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface {
bool StartAecDump(rtc::PlatformFile file, int64_t max_size_bytes) override;
void StopAecDump() override;
- bool StartRtcEventLog(rtc::PlatformFile file) override;
+ bool StartRtcEventLog(rtc::PlatformFile file) override {
+ return StartRtcEventLog(file, -1);
+ }
+ bool StartRtcEventLog(rtc::PlatformFile file,
+ int64_t max_size_bytes) override;
void StopRtcEventLog() override;
virtual webrtc::MediaControllerInterface* CreateMediaController(
const cricket::MediaConfig& config) const;
virtual rtc::Thread* signaling_thread();
virtual rtc::Thread* worker_thread();
+ virtual rtc::Thread* network_thread();
const Options& options() const { return options_; }
protected:
PeerConnectionFactory();
PeerConnectionFactory(
+ rtc::Thread* network_thread,
rtc::Thread* worker_thread,
rtc::Thread* signaling_thread,
AudioDeviceModule* default_adm,
@@ -107,22 +113,21 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface {
bool owns_ptrs_;
bool wraps_current_thread_;
- rtc::Thread* signaling_thread_;
+ rtc::Thread* network_thread_;
rtc::Thread* worker_thread_;
+ rtc::Thread* signaling_thread_;
Options options_;
// External Audio device used for audio playback.
rtc::scoped_refptr<AudioDeviceModule> default_adm_;
- rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
+ std::unique_ptr<cricket::ChannelManager> channel_manager_;
// External Video encoder factory. This can be NULL if the client has not
// injected any. In that case, video engine will use the internal SW encoder.
- rtc::scoped_ptr<cricket::WebRtcVideoEncoderFactory>
- video_encoder_factory_;
+ std::unique_ptr<cricket::WebRtcVideoEncoderFactory> video_encoder_factory_;
// External Video decoder factory. This can be NULL if the client has not
// injected any. In that case, video engine will use the internal SW decoder.
- rtc::scoped_ptr<cricket::WebRtcVideoDecoderFactory>
- video_decoder_factory_;
- rtc::scoped_ptr<rtc::BasicNetworkManager> default_network_manager_;
- rtc::scoped_ptr<rtc::BasicPacketSocketFactory> default_socket_factory_;
+ std::unique_ptr<cricket::WebRtcVideoDecoderFactory> video_decoder_factory_;
+ std::unique_ptr<rtc::BasicNetworkManager> default_network_manager_;
+ std::unique_ptr<rtc::BasicPacketSocketFactory> default_socket_factory_;
rtc::scoped_refptr<RefCountedDtlsIdentityStore> dtls_identity_store_;
};
diff --git a/chromium/third_party/webrtc/api/peerconnectionfactory_unittest.cc b/chromium/third_party/webrtc/api/peerconnectionfactory_unittest.cc
index 254e4e0a025..de21e806d55 100644
--- a/chromium/third_party/webrtc/api/peerconnectionfactory_unittest.cc
+++ b/chromium/third_party/webrtc/api/peerconnectionfactory_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include <utility>
@@ -19,12 +20,11 @@
#include "webrtc/api/test/fakedtlsidentitystore.h"
#include "webrtc/api/test/fakevideotrackrenderer.h"
#include "webrtc/base/gunit.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread.h"
#include "webrtc/media/base/fakevideocapturer.h"
#include "webrtc/media/engine/webrtccommon.h"
#include "webrtc/media/engine/webrtcvoe.h"
-#include "webrtc/p2p/client/fakeportallocator.h"
+#include "webrtc/p2p/base/fakeportallocator.h"
using webrtc::DataChannelInterface;
using webrtc::DtlsIdentityStoreInterface;
@@ -64,6 +64,7 @@ static const char kTurnIceServerWithIPv6Address[] =
class NullPeerConnectionObserver : public PeerConnectionObserver {
public:
+ virtual ~NullPeerConnectionObserver() = default;
virtual void OnMessage(const std::string& msg) {}
virtual void OnSignalingMessage(const std::string& msg) {}
virtual void OnSignalingChange(
@@ -86,11 +87,9 @@ class PeerConnectionFactoryTest : public testing::Test {
#ifdef WEBRTC_ANDROID
webrtc::InitializeAndroidObjects();
#endif
- factory_ = webrtc::CreatePeerConnectionFactory(rtc::Thread::Current(),
- rtc::Thread::Current(),
- NULL,
- NULL,
- NULL);
+ factory_ = webrtc::CreatePeerConnectionFactory(
+ rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(),
+ nullptr, nullptr, nullptr);
ASSERT_TRUE(factory_.get() != NULL);
port_allocator_.reset(
@@ -122,7 +121,7 @@ class PeerConnectionFactoryTest : public testing::Test {
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory_;
NullPeerConnectionObserver observer_;
- rtc::scoped_ptr<cricket::FakePortAllocator> port_allocator_;
+ std::unique_ptr<cricket::FakePortAllocator> port_allocator_;
// Since the PC owns the port allocator after it's been initialized,
// this should only be used when known to be safe.
cricket::FakePortAllocator* raw_port_allocator_;
@@ -141,7 +140,7 @@ TEST(PeerConnectionFactoryTestInternal, CreatePCUsingInternalModules) {
NullPeerConnectionObserver observer;
webrtc::PeerConnectionInterface::RTCConfiguration config;
- rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store(
+ std::unique_ptr<FakeDtlsIdentityStore> dtls_identity_store(
new FakeDtlsIdentityStore());
rtc::scoped_refptr<PeerConnectionInterface> pc(factory->CreatePeerConnection(
config, nullptr, nullptr, std::move(dtls_identity_store), &observer));
@@ -162,7 +161,7 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServers) {
ice_server.uri = kTurnIceServerWithTransport;
ice_server.password = kTurnPassword;
config.servers.push_back(ice_server);
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
new FakeDtlsIdentityStore());
rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
config, nullptr, std::move(port_allocator_),
@@ -192,7 +191,7 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServersUrls) {
ice_server.urls.push_back(kTurnIceServerWithTransport);
ice_server.password = kTurnPassword;
config.servers.push_back(ice_server);
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
new FakeDtlsIdentityStore());
rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
config, nullptr, std::move(port_allocator_),
@@ -221,7 +220,7 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingNoUsernameInUri) {
ice_server.username = kTurnUsername;
ice_server.password = kTurnPassword;
config.servers.push_back(ice_server);
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
new FakeDtlsIdentityStore());
rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
config, nullptr, std::move(port_allocator_),
@@ -242,7 +241,7 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingTurnUrlWithTransportParam) {
ice_server.uri = kTurnIceServerWithTransport;
ice_server.password = kTurnPassword;
config.servers.push_back(ice_server);
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
new FakeDtlsIdentityStore());
rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
config, nullptr, std::move(port_allocator_),
@@ -267,7 +266,7 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingSecureTurnUrl) {
ice_server.uri = kSecureTurnIceServerWithoutTransportAndPortParam;
ice_server.password = kTurnPassword;
config.servers.push_back(ice_server);
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
new FakeDtlsIdentityStore());
rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
config, nullptr, std::move(port_allocator_),
@@ -302,7 +301,7 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingIPLiteralAddress) {
ice_server.uri = kTurnIceServerWithIPv6Address;
ice_server.password = kTurnPassword;
config.servers.push_back(ice_server);
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store(
new FakeDtlsIdentityStore());
rtc::scoped_refptr<PeerConnectionInterface> pc(factory_->CreatePeerConnection(
config, nullptr, std::move(port_allocator_),
diff --git a/chromium/third_party/webrtc/api/peerconnectionfactoryproxy.h b/chromium/third_party/webrtc/api/peerconnectionfactoryproxy.h
index 829bf8177e1..c357de9b32b 100644
--- a/chromium/third_party/webrtc/api/peerconnectionfactoryproxy.h
+++ b/chromium/third_party/webrtc/api/peerconnectionfactoryproxy.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_API_PEERCONNECTIONFACTORYPROXY_H_
#define WEBRTC_API_PEERCONNECTIONFACTORYPROXY_H_
+#include <memory>
#include <string>
#include <utility>
@@ -20,28 +21,30 @@
namespace webrtc {
-BEGIN_PROXY_MAP(PeerConnectionFactory)
+BEGIN_SIGNALING_PROXY_MAP(PeerConnectionFactory)
PROXY_METHOD1(void, SetOptions, const Options&)
- // Can't use PROXY_METHOD5 because scoped_ptr must be moved.
- // TODO(tommi,hbos): Use of templates to support scoped_ptr?
+ // Can't use PROXY_METHOD5 because unique_ptr must be moved.
+ // TODO(tommi,hbos): Use of templates to support unique_ptr?
rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& a1,
const MediaConstraintsInterface* a2,
- rtc::scoped_ptr<cricket::PortAllocator> a3,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> a4,
+ std::unique_ptr<cricket::PortAllocator> a3,
+ std::unique_ptr<DtlsIdentityStoreInterface> a4,
PeerConnectionObserver* a5) override {
- return owner_thread_->Invoke<rtc::scoped_refptr<PeerConnectionInterface>>(
- rtc::Bind(&PeerConnectionFactoryProxy::CreatePeerConnection_ot, this,
- a1, a2, a3.release(), a4.release(), a5));
+ return signaling_thread_
+ ->Invoke<rtc::scoped_refptr<PeerConnectionInterface>>(
+ rtc::Bind(&PeerConnectionFactoryProxy::CreatePeerConnection_ot,
+ this, a1, a2, a3.release(), a4.release(), a5));
}
rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& a1,
- rtc::scoped_ptr<cricket::PortAllocator> a3,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> a4,
+ std::unique_ptr<cricket::PortAllocator> a3,
+ std::unique_ptr<DtlsIdentityStoreInterface> a4,
PeerConnectionObserver* a5) override {
- return owner_thread_->Invoke<rtc::scoped_refptr<PeerConnectionInterface>>(
- rtc::Bind(&PeerConnectionFactoryProxy::CreatePeerConnection_ot, this,
- a1, a3.release(), a4.release(), a5));
+ return signaling_thread_
+ ->Invoke<rtc::scoped_refptr<PeerConnectionInterface>>(
+ rtc::Bind(&PeerConnectionFactoryProxy::CreatePeerConnection_ot,
+ this, a1, a3.release(), a4.release(), a5));
}
PROXY_METHOD1(rtc::scoped_refptr<MediaStreamInterface>,
CreateLocalMediaStream, const std::string&)
@@ -66,6 +69,7 @@ BEGIN_PROXY_MAP(PeerConnectionFactory)
PROXY_METHOD2(bool, StartAecDump, rtc::PlatformFile, int64_t)
PROXY_METHOD0(void, StopAecDump)
PROXY_METHOD1(bool, StartRtcEventLog, rtc::PlatformFile)
+ PROXY_METHOD2(bool, StartRtcEventLog, rtc::PlatformFile, int64_t)
PROXY_METHOD0(void, StopRtcEventLog)
private:
@@ -75,8 +79,8 @@ BEGIN_PROXY_MAP(PeerConnectionFactory)
cricket::PortAllocator* a3,
DtlsIdentityStoreInterface* a4,
PeerConnectionObserver* a5) {
- rtc::scoped_ptr<cricket::PortAllocator> ptr_a3(a3);
- rtc::scoped_ptr<DtlsIdentityStoreInterface> ptr_a4(a4);
+ std::unique_ptr<cricket::PortAllocator> ptr_a3(a3);
+ std::unique_ptr<DtlsIdentityStoreInterface> ptr_a4(a4);
return c_->CreatePeerConnection(a1, a2, std::move(ptr_a3),
std::move(ptr_a4), a5);
}
@@ -86,12 +90,12 @@ BEGIN_PROXY_MAP(PeerConnectionFactory)
cricket::PortAllocator* a3,
DtlsIdentityStoreInterface* a4,
PeerConnectionObserver* a5) {
- rtc::scoped_ptr<cricket::PortAllocator> ptr_a3(a3);
- rtc::scoped_ptr<DtlsIdentityStoreInterface> ptr_a4(a4);
+ std::unique_ptr<cricket::PortAllocator> ptr_a3(a3);
+ std::unique_ptr<DtlsIdentityStoreInterface> ptr_a4(a4);
return c_->CreatePeerConnection(a1, std::move(ptr_a3), std::move(ptr_a4),
a5);
}
- END_PROXY()
+ END_SIGNALING_PROXY()
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/peerconnectioninterface.h b/chromium/third_party/webrtc/api/peerconnectioninterface.h
index 9259275b86a..4fa9bf2408d 100644
--- a/chromium/third_party/webrtc/api/peerconnectioninterface.h
+++ b/chromium/third_party/webrtc/api/peerconnectioninterface.h
@@ -51,13 +51,13 @@
#ifndef WEBRTC_API_PEERCONNECTIONINTERFACE_H_
#define WEBRTC_API_PEERCONNECTIONINTERFACE_H_
+#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "webrtc/api/datachannelinterface.h"
#include "webrtc/api/dtlsidentitystore.h"
-#include "webrtc/api/dtlsidentitystore.h"
#include "webrtc/api/dtmfsenderinterface.h"
#include "webrtc/api/jsep.h"
#include "webrtc/api/mediastreaminterface.h"
@@ -70,6 +70,7 @@
#include "webrtc/base/rtccertificate.h"
#include "webrtc/base/socketaddress.h"
#include "webrtc/base/sslstreamadapter.h"
+#include "webrtc/media/base/mediachannel.h"
#include "webrtc/p2p/base/portallocator.h"
namespace rtc {
@@ -222,6 +223,11 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
};
// TODO(hbos): Change into class with private data and public getters.
+ // TODO(nisse): In particular, accessing fields directly from an
+ // application is brittle, since the organization mirrors the
+ // organization of the implementation, which isn't stable. So we
+ // need getters and setters at least for fields which applications
+ // are interested in.
struct RTCConfiguration {
// This struct is subject to reorganization, both for naming
// consistency, and to group settings to match where they are used
@@ -229,28 +235,33 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
// methods for all settings which are of interest to applications,
// Chrome in particular.
- bool dscp() { return enable_dscp.value_or(false); }
- void set_dscp(bool enable) { enable_dscp = rtc::Optional<bool>(enable); }
+ bool dscp() { return media_config.enable_dscp; }
+ void set_dscp(bool enable) { media_config.enable_dscp = enable; }
// TODO(nisse): The corresponding flag in MediaConfig and
// elsewhere should be renamed enable_cpu_adaptation.
- bool cpu_adaptation() { return cpu_overuse_detection.value_or(true); }
+ bool cpu_adaptation() {
+ return media_config.video.enable_cpu_overuse_detection;
+ }
void set_cpu_adaptation(bool enable) {
- cpu_overuse_detection = rtc::Optional<bool>(enable);
+ media_config.video.enable_cpu_overuse_detection = enable;
}
- // TODO(nisse): Currently no getter method, since it collides with
- // the flag itself. Add when the flag is moved to MediaConfig.
+ bool suspend_below_min_bitrate() {
+ return media_config.video.suspend_below_min_bitrate;
+ }
void set_suspend_below_min_bitrate(bool enable) {
- suspend_below_min_bitrate = rtc::Optional<bool>(enable);
+ media_config.video.suspend_below_min_bitrate = enable;
}
// TODO(nisse): The negation in the corresponding MediaConfig
// attribute is inconsistent, and it should be renamed at some
// point.
- bool prerenderer_smoothing() { return !disable_prerenderer_smoothing; }
+ bool prerenderer_smoothing() {
+ return !media_config.video.disable_prerenderer_smoothing;
+ }
void set_prerenderer_smoothing(bool enable) {
- disable_prerenderer_smoothing = !enable;
+ media_config.video.disable_prerenderer_smoothing = !enable;
}
static const int kUndefined = -1;
@@ -258,46 +269,30 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
static const int kAudioJitterBufferMaxPackets = 50;
// TODO(pthatcher): Rename this ice_transport_type, but update
// Chromium at the same time.
- IceTransportsType type;
+ IceTransportsType type = kAll;
// TODO(pthatcher): Rename this ice_servers, but update Chromium
// at the same time.
IceServers servers;
- BundlePolicy bundle_policy;
- RtcpMuxPolicy rtcp_mux_policy;
- TcpCandidatePolicy tcp_candidate_policy;
- int audio_jitter_buffer_max_packets;
- bool audio_jitter_buffer_fast_accelerate;
- int ice_connection_receiving_timeout; // ms
- int ice_backup_candidate_pair_ping_interval; // ms
- ContinualGatheringPolicy continual_gathering_policy;
+ BundlePolicy bundle_policy = kBundlePolicyBalanced;
+ RtcpMuxPolicy rtcp_mux_policy = kRtcpMuxPolicyNegotiate;
+ TcpCandidatePolicy tcp_candidate_policy = kTcpCandidatePolicyEnabled;
+ int audio_jitter_buffer_max_packets = kAudioJitterBufferMaxPackets;
+ bool audio_jitter_buffer_fast_accelerate = false;
+ int ice_connection_receiving_timeout = kUndefined; // ms
+ int ice_backup_candidate_pair_ping_interval = kUndefined; // ms
+ ContinualGatheringPolicy continual_gathering_policy = GATHER_ONCE;
std::vector<rtc::scoped_refptr<rtc::RTCCertificate>> certificates;
- bool disable_prerenderer_smoothing;
- bool prioritize_most_likely_ice_candidate_pairs;
+ bool prioritize_most_likely_ice_candidate_pairs = false;
+ struct cricket::MediaConfig media_config;
// Flags corresponding to values set by constraint flags.
// rtc::Optional flags can be "missing", in which case the webrtc
// default applies.
- bool disable_ipv6;
- rtc::Optional<bool> enable_dscp;
- bool enable_rtp_data_channel;
- rtc::Optional<bool> cpu_overuse_detection;
- rtc::Optional<bool> suspend_below_min_bitrate;
+ bool disable_ipv6 = false;
+ bool enable_rtp_data_channel = false;
rtc::Optional<int> screencast_min_bitrate;
rtc::Optional<bool> combined_audio_video_bwe;
rtc::Optional<bool> enable_dtls_srtp;
- RTCConfiguration()
- : type(kAll),
- bundle_policy(kBundlePolicyBalanced),
- rtcp_mux_policy(kRtcpMuxPolicyNegotiate),
- tcp_candidate_policy(kTcpCandidatePolicyEnabled),
- audio_jitter_buffer_max_packets(kAudioJitterBufferMaxPackets),
- audio_jitter_buffer_fast_accelerate(false),
- ice_connection_receiving_timeout(kUndefined),
- ice_backup_candidate_pair_ping_interval(kUndefined),
- continual_gathering_policy(GATHER_ONCE),
- disable_prerenderer_smoothing(false),
- prioritize_most_likely_ice_candidate_pairs(false),
- disable_ipv6(false),
- enable_rtp_data_channel(false) {}
+ int ice_candidate_pool_size = 0;
};
struct RTCOfferAnswerOptions {
@@ -584,14 +579,14 @@ class PeerConnectionFactoryInterface : public rtc::RefCountInterface {
virtual rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
const MediaConstraintsInterface* constraints,
- rtc::scoped_ptr<cricket::PortAllocator> allocator,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ std::unique_ptr<cricket::PortAllocator> allocator,
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
PeerConnectionObserver* observer) = 0;
virtual rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
- rtc::scoped_ptr<cricket::PortAllocator> allocator,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ std::unique_ptr<cricket::PortAllocator> allocator,
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
PeerConnectionObserver* observer) = 0;
virtual rtc::scoped_refptr<MediaStreamInterface>
@@ -644,13 +639,18 @@ class PeerConnectionFactoryInterface : public rtc::RefCountInterface {
// passes it on to VoiceEngine, which will take the ownership. If the
// operation fails the file will be closed. The logging will stop
// automatically after 10 minutes have passed, or when the StopRtcEventLog
- // function is called.
+ // function is called. A maximum filesize in bytes can be set, the logging
+ // will be stopped before exceeding this limit. If max_size_bytes is set to a
+ // value <= 0, no limit will be used.
// This function as well as the StopRtcEventLog don't really belong on this
// interface, this is a temporary solution until we move the logging object
// from inside voice engine to webrtc::Call, which will happen when the VoE
// restructuring effort is further along.
// TODO(ivoc): Move this into being:
// PeerConnection => MediaController => webrtc::Call.
+ virtual bool StartRtcEventLog(rtc::PlatformFile file,
+ int64_t max_size_bytes) = 0;
+ // Deprecated, use the version above.
virtual bool StartRtcEventLog(rtc::PlatformFile file) = 0;
// Stops logging the RtcEventLog.
@@ -678,19 +678,33 @@ CreatePeerConnectionFactory();
// Create a new instance of PeerConnectionFactoryInterface.
//
-// |worker_thread| and |signaling_thread| are the only mandatory
-// parameters.
+// |network_thread|, |worker_thread| and |signaling_thread| are
+// the only mandatory parameters.
//
// If non-null, ownership of |default_adm|, |encoder_factory| and
// |decoder_factory| are transferred to the returned factory.
-rtc::scoped_refptr<PeerConnectionFactoryInterface>
-CreatePeerConnectionFactory(
+rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
+ rtc::Thread* network_thread,
rtc::Thread* worker_thread,
rtc::Thread* signaling_thread,
AudioDeviceModule* default_adm,
cricket::WebRtcVideoEncoderFactory* encoder_factory,
cricket::WebRtcVideoDecoderFactory* decoder_factory);
+// Create a new instance of PeerConnectionFactoryInterface.
+// Same thread is used as worker and network thread.
+inline rtc::scoped_refptr<PeerConnectionFactoryInterface>
+CreatePeerConnectionFactory(
+ rtc::Thread* worker_and_network_thread,
+ rtc::Thread* signaling_thread,
+ AudioDeviceModule* default_adm,
+ cricket::WebRtcVideoEncoderFactory* encoder_factory,
+ cricket::WebRtcVideoDecoderFactory* decoder_factory) {
+ return CreatePeerConnectionFactory(
+ worker_and_network_thread, worker_and_network_thread, signaling_thread,
+ default_adm, encoder_factory, decoder_factory);
+}
+
} // namespace webrtc
#endif // WEBRTC_API_PEERCONNECTIONINTERFACE_H_
diff --git a/chromium/third_party/webrtc/api/peerconnectioninterface_unittest.cc b/chromium/third_party/webrtc/api/peerconnectioninterface_unittest.cc
index 14a067995b4..445f0257c12 100644
--- a/chromium/third_party/webrtc/api/peerconnectioninterface_unittest.cc
+++ b/chromium/third_party/webrtc/api/peerconnectioninterface_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include <utility>
@@ -32,14 +33,13 @@
#include "webrtc/api/videocapturertracksource.h"
#include "webrtc/api/videotrack.h"
#include "webrtc/base/gunit.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/sslstreamadapter.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/base/thread.h"
#include "webrtc/media/base/fakevideocapturer.h"
#include "webrtc/media/sctp/sctpdataengine.h"
-#include "webrtc/p2p/client/fakeportallocator.h"
+#include "webrtc/p2p/base/fakeportallocator.h"
#include "webrtc/pc/mediasession.h"
static const char kStreamLabel1[] = "local_stream_1";
@@ -239,9 +239,9 @@ static const char kSdpStringMs1Video1[] =
return; \
}
-using rtc::scoped_ptr;
-using rtc::scoped_refptr;
using ::testing::Exactly;
+using cricket::StreamParams;
+using rtc::scoped_refptr;
using webrtc::AudioSourceInterface;
using webrtc::AudioTrack;
using webrtc::AudioTrackInterface;
@@ -249,6 +249,7 @@ using webrtc::DataBuffer;
using webrtc::DataChannelInterface;
using webrtc::FakeConstraints;
using webrtc::IceCandidateInterface;
+using webrtc::JsepSessionDescription;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaStream;
using webrtc::MediaStreamInterface;
@@ -326,12 +327,26 @@ bool ContainsSender(
return false;
}
+// Check if |senders| contains the specified sender, by id and stream id.
+bool ContainsSender(
+ const std::vector<rtc::scoped_refptr<RtpSenderInterface>>& senders,
+ const std::string& id,
+ const std::string& stream_id) {
+ for (const auto& sender : senders) {
+ if (sender->id() == id && sender->stream_id() == stream_id) {
+ return true;
+ }
+ }
+ return false;
+}
+
// Create a collection of streams.
// CreateStreamCollection(1) creates a collection that
// correspond to kSdpStringWithStream1.
// CreateStreamCollection(2) correspond to kSdpStringWithStream1And2.
rtc::scoped_refptr<StreamCollection> CreateStreamCollection(
- int number_of_streams) {
+ int number_of_streams,
+ int tracks_per_stream) {
rtc::scoped_refptr<StreamCollection> local_collection(
StreamCollection::Create());
@@ -339,16 +354,19 @@ rtc::scoped_refptr<StreamCollection> CreateStreamCollection(
rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
webrtc::MediaStream::Create(kStreams[i]));
- // Add a local audio track.
- rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
- webrtc::AudioTrack::Create(kAudioTracks[i], nullptr));
- stream->AddTrack(audio_track);
-
- // Add a local video track.
- rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
- webrtc::VideoTrack::Create(kVideoTracks[i],
- webrtc::FakeVideoTrackSource::Create()));
- stream->AddTrack(video_track);
+ for (int j = 0; j < tracks_per_stream; ++j) {
+ // Add a local audio track.
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ webrtc::AudioTrack::Create(kAudioTracks[i * tracks_per_stream + j],
+ nullptr));
+ stream->AddTrack(audio_track);
+
+ // Add a local video track.
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+ webrtc::VideoTrack::Create(kVideoTracks[i * tracks_per_stream + j],
+ webrtc::FakeVideoTrackSource::Create()));
+ stream->AddTrack(video_track);
+ }
local_collection->AddStream(stream);
}
@@ -417,7 +435,7 @@ class MockTrackObserver : public ObserverInterface {
class MockPeerConnectionObserver : public PeerConnectionObserver {
public:
MockPeerConnectionObserver() : remote_streams_(StreamCollection::Create()) {}
- ~MockPeerConnectionObserver() {
+ virtual ~MockPeerConnectionObserver() {
}
void SetPeerConnectionInterface(PeerConnectionInterface* pc) {
pc_ = pc;
@@ -425,8 +443,8 @@ class MockPeerConnectionObserver : public PeerConnectionObserver {
state_ = pc_->signaling_state();
}
}
- virtual void OnSignalingChange(
- PeerConnectionInterface::SignalingState new_state) {
+ void OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) override {
EXPECT_EQ(pc_->signaling_state(), new_state);
state_ = new_state;
}
@@ -504,7 +522,7 @@ class MockPeerConnectionObserver : public PeerConnectionObserver {
scoped_refptr<PeerConnectionInterface> pc_;
PeerConnectionInterface::SignalingState state_;
- scoped_ptr<IceCandidateInterface> last_candidate_;
+ std::unique_ptr<IceCandidateInterface> last_candidate_;
scoped_refptr<DataChannelInterface> last_datachannel_;
rtc::scoped_refptr<StreamCollection> remote_streams_;
bool renegotiation_needed_ = false;
@@ -527,31 +545,40 @@ class PeerConnectionInterfaceTest : public testing::Test {
virtual void SetUp() {
pc_factory_ = webrtc::CreatePeerConnectionFactory(
- rtc::Thread::Current(), rtc::Thread::Current(), NULL, NULL,
- NULL);
- ASSERT_TRUE(pc_factory_.get() != NULL);
+ rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(),
+ nullptr, nullptr, nullptr);
+ ASSERT_TRUE(pc_factory_);
}
void CreatePeerConnection() {
- CreatePeerConnection("", "", NULL);
+ CreatePeerConnection(PeerConnectionInterface::RTCConfiguration(), nullptr);
}
void CreatePeerConnection(webrtc::MediaConstraintsInterface* constraints) {
- CreatePeerConnection("", "", constraints);
+ CreatePeerConnection(PeerConnectionInterface::RTCConfiguration(),
+ constraints);
}
- void CreatePeerConnection(const std::string& uri,
- const std::string& password,
- webrtc::MediaConstraintsInterface* constraints) {
+ void CreatePeerConnectionWithIceTransportsType(
+ PeerConnectionInterface::IceTransportsType type) {
+ PeerConnectionInterface::RTCConfiguration config;
+ config.type = type;
+ return CreatePeerConnection(config, nullptr);
+ }
+
+ void CreatePeerConnectionWithIceServer(const std::string& uri,
+ const std::string& password) {
PeerConnectionInterface::RTCConfiguration config;
PeerConnectionInterface::IceServer server;
- if (!uri.empty()) {
- server.uri = uri;
- server.password = password;
- config.servers.push_back(server);
- }
+ server.uri = uri;
+ server.password = password;
+ config.servers.push_back(server);
+ CreatePeerConnection(config, nullptr);
+ }
- rtc::scoped_ptr<cricket::FakePortAllocator> port_allocator(
+ void CreatePeerConnection(PeerConnectionInterface::RTCConfiguration config,
+ webrtc::MediaConstraintsInterface* constraints) {
+ std::unique_ptr<cricket::FakePortAllocator> port_allocator(
new cricket::FakePortAllocator(rtc::Thread::Current(), nullptr));
port_allocator_ = port_allocator.get();
@@ -566,7 +593,7 @@ class PeerConnectionInterfaceTest : public testing::Test {
webrtc::MediaConstraintsInterface::kEnableDtlsSrtp, false);
}
- scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store;
+ std::unique_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store;
bool dtls;
if (FindConstraint(constraints,
webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
@@ -595,7 +622,7 @@ class PeerConnectionInterfaceTest : public testing::Test {
}
void CreatePeerConnectionWithDifferentConfigurations() {
- CreatePeerConnection(kStunAddressOnly, "", NULL);
+ CreatePeerConnectionWithIceServer(kStunAddressOnly, "");
EXPECT_EQ(1u, port_allocator_->stun_servers().size());
EXPECT_EQ(0u, port_allocator_->turn_servers().size());
EXPECT_EQ("address", port_allocator_->stun_servers().begin()->hostname());
@@ -606,7 +633,7 @@ class PeerConnectionInterfaceTest : public testing::Test {
CreatePeerConnectionExpectFail(kStunAddressPortAndMore1);
CreatePeerConnectionExpectFail(kStunAddressPortAndMore2);
- CreatePeerConnection(kTurnIceServerUri, kTurnPassword, NULL);
+ CreatePeerConnectionWithIceServer(kTurnIceServerUri, kTurnPassword);
EXPECT_EQ(0u, port_allocator_->stun_servers().size());
EXPECT_EQ(1u, port_allocator_->turn_servers().size());
EXPECT_EQ(kTurnUsername,
@@ -668,7 +695,7 @@ class PeerConnectionInterfaceTest : public testing::Test {
observer_.renegotiation_needed_ = false;
}
- bool DoCreateOfferAnswer(rtc::scoped_ptr<SessionDescriptionInterface>* desc,
+ bool DoCreateOfferAnswer(std::unique_ptr<SessionDescriptionInterface>* desc,
bool offer,
MediaConstraintsInterface* constraints) {
rtc::scoped_refptr<MockCreateSessionDescriptionObserver>
@@ -684,12 +711,12 @@ class PeerConnectionInterfaceTest : public testing::Test {
return observer->result();
}
- bool DoCreateOffer(rtc::scoped_ptr<SessionDescriptionInterface>* desc,
+ bool DoCreateOffer(std::unique_ptr<SessionDescriptionInterface>* desc,
MediaConstraintsInterface* constraints) {
return DoCreateOfferAnswer(desc, true, constraints);
}
- bool DoCreateAnswer(rtc::scoped_ptr<SessionDescriptionInterface>* desc,
+ bool DoCreateAnswer(std::unique_ptr<SessionDescriptionInterface>* desc,
MediaConstraintsInterface* constraints) {
return DoCreateOfferAnswer(desc, false, constraints);
}
@@ -750,7 +777,7 @@ class PeerConnectionInterfaceTest : public testing::Test {
}
void CreateOfferAsRemoteDescription() {
- rtc::scoped_ptr<SessionDescriptionInterface> offer;
+ std::unique_ptr<SessionDescriptionInterface> offer;
ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
std::string sdp;
EXPECT_TRUE(offer->ToString(&sdp));
@@ -770,7 +797,7 @@ class PeerConnectionInterfaceTest : public testing::Test {
}
void CreateAnswerAsLocalDescription() {
- scoped_ptr<SessionDescriptionInterface> answer;
+ std::unique_ptr<SessionDescriptionInterface> answer;
ASSERT_TRUE(DoCreateAnswer(&answer, nullptr));
// TODO(perkj): Currently SetLocalDescription fails if any parameters in an
@@ -790,7 +817,7 @@ class PeerConnectionInterfaceTest : public testing::Test {
}
void CreatePrAnswerAsLocalDescription() {
- scoped_ptr<SessionDescriptionInterface> answer;
+ std::unique_ptr<SessionDescriptionInterface> answer;
ASSERT_TRUE(DoCreateAnswer(&answer, nullptr));
std::string sdp;
@@ -810,7 +837,7 @@ class PeerConnectionInterfaceTest : public testing::Test {
}
void CreateOfferAsLocalDescription() {
- rtc::scoped_ptr<SessionDescriptionInterface> offer;
+ std::unique_ptr<SessionDescriptionInterface> offer;
ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
// TODO(perkj): Currently SetLocalDescription fails if any parameters in an
// audio codec change, even if the parameter has nothing to do with
@@ -880,7 +907,7 @@ class PeerConnectionInterfaceTest : public testing::Test {
// corresponding SessionDescriptionInterface. The SessionDescriptionInterface
// is returned and the MediaStream is stored in
// |reference_collection_|
- rtc::scoped_ptr<SessionDescriptionInterface>
+ std::unique_ptr<SessionDescriptionInterface>
CreateSessionDescriptionAndReference(size_t number_of_audio_tracks,
size_t number_of_video_tracks) {
EXPECT_LE(number_of_audio_tracks, 2u);
@@ -915,7 +942,7 @@ class PeerConnectionInterfaceTest : public testing::Test {
AddVideoTrack(kVideoTracks[1], stream);
}
- return rtc::scoped_ptr<SessionDescriptionInterface>(
+ return std::unique_ptr<SessionDescriptionInterface>(
webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
sdp_ms1, nullptr));
}
@@ -935,6 +962,34 @@ class PeerConnectionInterfaceTest : public testing::Test {
ASSERT_TRUE(stream->AddTrack(video_track));
}
+ std::unique_ptr<SessionDescriptionInterface> CreateOfferWithOneAudioStream() {
+ CreatePeerConnection();
+ AddVoiceStream(kStreamLabel1);
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ EXPECT_TRUE(DoCreateOffer(&offer, nullptr));
+ return offer;
+ }
+
+ std::unique_ptr<SessionDescriptionInterface>
+ CreateAnswerWithOneAudioStream() {
+ std::unique_ptr<SessionDescriptionInterface> offer =
+ CreateOfferWithOneAudioStream();
+ EXPECT_TRUE(DoSetRemoteDescription(offer.release()));
+ std::unique_ptr<SessionDescriptionInterface> answer;
+ EXPECT_TRUE(DoCreateAnswer(&answer, nullptr));
+ return answer;
+ }
+
+ const std::string& GetFirstAudioStreamCname(
+ const SessionDescriptionInterface* desc) {
+ const cricket::ContentInfo* audio_content =
+ cricket::GetFirstAudioContent(desc->description());
+ const cricket::AudioContentDescription* audio_desc =
+ static_cast<const cricket::AudioContentDescription*>(
+ audio_content->description);
+ return audio_desc->streams()[0].cname;
+ }
+
cricket::FakePortAllocator* port_allocator_ = nullptr;
scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory_;
scoped_refptr<PeerConnectionInterface> pc_;
@@ -942,11 +997,70 @@ class PeerConnectionInterfaceTest : public testing::Test {
rtc::scoped_refptr<StreamCollection> reference_collection_;
};
+// Generate different CNAMEs when PeerConnections are created.
+// The CNAMEs are expected to be generated randomly. It is possible
+// that the test fails, though the possibility is very low.
+TEST_F(PeerConnectionInterfaceTest, CnameGenerationInOffer) {
+ std::unique_ptr<SessionDescriptionInterface> offer1 =
+ CreateOfferWithOneAudioStream();
+ std::unique_ptr<SessionDescriptionInterface> offer2 =
+ CreateOfferWithOneAudioStream();
+ EXPECT_NE(GetFirstAudioStreamCname(offer1.get()),
+ GetFirstAudioStreamCname(offer2.get()));
+}
+
+TEST_F(PeerConnectionInterfaceTest, CnameGenerationInAnswer) {
+ std::unique_ptr<SessionDescriptionInterface> answer1 =
+ CreateAnswerWithOneAudioStream();
+ std::unique_ptr<SessionDescriptionInterface> answer2 =
+ CreateAnswerWithOneAudioStream();
+ EXPECT_NE(GetFirstAudioStreamCname(answer1.get()),
+ GetFirstAudioStreamCname(answer2.get()));
+}
+
TEST_F(PeerConnectionInterfaceTest,
CreatePeerConnectionWithDifferentConfigurations) {
CreatePeerConnectionWithDifferentConfigurations();
}
+TEST_F(PeerConnectionInterfaceTest,
+ CreatePeerConnectionWithDifferentIceTransportsTypes) {
+ CreatePeerConnectionWithIceTransportsType(PeerConnectionInterface::kNone);
+ EXPECT_EQ(cricket::CF_NONE, port_allocator_->candidate_filter());
+ CreatePeerConnectionWithIceTransportsType(PeerConnectionInterface::kRelay);
+ EXPECT_EQ(cricket::CF_RELAY, port_allocator_->candidate_filter());
+ CreatePeerConnectionWithIceTransportsType(PeerConnectionInterface::kNoHost);
+ EXPECT_EQ(cricket::CF_ALL & ~cricket::CF_HOST,
+ port_allocator_->candidate_filter());
+ CreatePeerConnectionWithIceTransportsType(PeerConnectionInterface::kAll);
+ EXPECT_EQ(cricket::CF_ALL, port_allocator_->candidate_filter());
+}
+
+// Test that when a PeerConnection is created with a nonzero candidate pool
+// size, the pooled PortAllocatorSession is created with all the attributes
+// in the RTCConfiguration.
+TEST_F(PeerConnectionInterfaceTest, CreatePeerConnectionWithPooledCandidates) {
+ PeerConnectionInterface::RTCConfiguration config;
+ PeerConnectionInterface::IceServer server;
+ server.uri = kStunAddressOnly;
+ config.servers.push_back(server);
+ config.type = PeerConnectionInterface::kRelay;
+ config.disable_ipv6 = true;
+ config.tcp_candidate_policy =
+ PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+ config.ice_candidate_pool_size = 1;
+ CreatePeerConnection(config, nullptr);
+
+ const cricket::FakePortAllocatorSession* session =
+ static_cast<const cricket::FakePortAllocatorSession*>(
+ port_allocator_->GetPooledSession());
+ ASSERT_NE(nullptr, session);
+ EXPECT_EQ(1UL, session->stun_servers().size());
+ EXPECT_EQ(0U, session->flags() & cricket::PORTALLOCATOR_ENABLE_IPV6);
+ EXPECT_LT(0U, session->flags() & cricket::PORTALLOCATOR_DISABLE_TCP);
+ EXPECT_EQ(cricket::CF_RELAY, session->candidate_filter());
+}
+
TEST_F(PeerConnectionInterfaceTest, AddStreams) {
CreatePeerConnection();
AddVideoStream(kStreamLabel1);
@@ -980,7 +1094,7 @@ TEST_F(PeerConnectionInterfaceTest, AddStreams) {
TEST_F(PeerConnectionInterfaceTest, AddedStreamsPresentInOffer) {
CreatePeerConnection();
AddAudioVideoStream(kStreamLabel1, "audio_track", "video_track");
- scoped_ptr<SessionDescriptionInterface> offer;
+ std::unique_ptr<SessionDescriptionInterface> offer;
ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
const cricket::ContentInfo* audio_content =
@@ -1055,7 +1169,7 @@ TEST_F(PeerConnectionInterfaceTest, AddTrackRemoveTrack) {
EXPECT_EQ(video_track, video_sender->track());
// Now create an offer and check for the senders.
- scoped_ptr<SessionDescriptionInterface> offer;
+ std::unique_ptr<SessionDescriptionInterface> offer;
ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
const cricket::ContentInfo* audio_content =
@@ -1193,13 +1307,13 @@ TEST_F(PeerConnectionInterfaceTest, IceCandidates) {
EXPECT_FALSE(pc_->AddIceCandidate(observer_.last_candidate_.get()));
// SetRemoteDescription takes ownership of offer.
- rtc::scoped_ptr<SessionDescriptionInterface> offer;
+ std::unique_ptr<SessionDescriptionInterface> offer;
AddVideoStream(kStreamLabel1);
EXPECT_TRUE(DoCreateOffer(&offer, nullptr));
EXPECT_TRUE(DoSetRemoteDescription(offer.release()));
// SetLocalDescription takes ownership of answer.
- rtc::scoped_ptr<SessionDescriptionInterface> answer;
+ std::unique_ptr<SessionDescriptionInterface> answer;
EXPECT_TRUE(DoCreateAnswer(&answer, nullptr));
EXPECT_TRUE(DoSetLocalDescription(answer.release()));
@@ -1214,7 +1328,7 @@ TEST_F(PeerConnectionInterfaceTest, IceCandidates) {
TEST_F(PeerConnectionInterfaceTest, CreateOfferAnswerWithInvalidStream) {
CreatePeerConnection();
// Create a regular offer for the CreateAnswer test later.
- rtc::scoped_ptr<SessionDescriptionInterface> offer;
+ std::unique_ptr<SessionDescriptionInterface> offer;
EXPECT_TRUE(DoCreateOffer(&offer, nullptr));
EXPECT_TRUE(offer);
offer.reset();
@@ -1226,7 +1340,7 @@ TEST_F(PeerConnectionInterfaceTest, CreateOfferAnswerWithInvalidStream) {
EXPECT_FALSE(DoCreateOffer(&offer, nullptr));
// Test CreateAnswer
- rtc::scoped_ptr<SessionDescriptionInterface> answer;
+ std::unique_ptr<SessionDescriptionInterface> answer;
EXPECT_FALSE(DoCreateAnswer(&answer, nullptr));
}
@@ -1238,7 +1352,7 @@ TEST_F(PeerConnectionInterfaceTest, SsrcInOfferAnswer) {
AddAudioVideoStream(kStreamLabel1, "audio_label", "video_label");
// Test CreateOffer
- scoped_ptr<SessionDescriptionInterface> offer;
+ std::unique_ptr<SessionDescriptionInterface> offer;
ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
int audio_ssrc = 0;
int video_ssrc = 0;
@@ -1250,7 +1364,7 @@ TEST_F(PeerConnectionInterfaceTest, SsrcInOfferAnswer) {
// Test CreateAnswer
EXPECT_TRUE(DoSetRemoteDescription(offer.release()));
- scoped_ptr<SessionDescriptionInterface> answer;
+ std::unique_ptr<SessionDescriptionInterface> answer;
ASSERT_TRUE(DoCreateAnswer(&answer, nullptr));
audio_ssrc = 0;
video_ssrc = 0;
@@ -1276,7 +1390,7 @@ TEST_F(PeerConnectionInterfaceTest, AddTrackAfterAddStream) {
pc_factory_->CreateVideoSource(new cricket::FakeVideoCapturer())));
stream->AddTrack(video_track.get());
- scoped_ptr<SessionDescriptionInterface> offer;
+ std::unique_ptr<SessionDescriptionInterface> offer;
ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
const cricket::MediaContentDescription* video_desc =
@@ -1296,7 +1410,7 @@ TEST_F(PeerConnectionInterfaceTest, RemoveTrackAfterAddStream) {
// Remove the video track.
stream->RemoveTrack(stream->GetVideoTracks()[0]);
- scoped_ptr<SessionDescriptionInterface> offer;
+ std::unique_ptr<SessionDescriptionInterface> offer;
ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
const cricket::MediaContentDescription* video_desc =
@@ -1310,7 +1424,7 @@ TEST_F(PeerConnectionInterfaceTest, CreateSenderWithStream) {
CreatePeerConnection();
pc_->CreateSender("video", kStreamLabel1);
- scoped_ptr<SessionDescriptionInterface> offer;
+ std::unique_ptr<SessionDescriptionInterface> offer;
ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
const cricket::MediaContentDescription* video_desc =
@@ -1373,9 +1487,9 @@ TEST_F(PeerConnectionInterfaceTest, TestDataChannel) {
scoped_refptr<DataChannelInterface> data2 =
pc_->CreateDataChannel("test2", NULL);
ASSERT_TRUE(data1 != NULL);
- rtc::scoped_ptr<MockDataChannelObserver> observer1(
+ std::unique_ptr<MockDataChannelObserver> observer1(
new MockDataChannelObserver(data1));
- rtc::scoped_ptr<MockDataChannelObserver> observer2(
+ std::unique_ptr<MockDataChannelObserver> observer2(
new MockDataChannelObserver(data2));
EXPECT_EQ(DataChannelInterface::kConnecting, data1->state());
@@ -1420,9 +1534,9 @@ TEST_F(PeerConnectionInterfaceTest, TestSendBinaryOnRtpDataChannel) {
scoped_refptr<DataChannelInterface> data2 =
pc_->CreateDataChannel("test2", NULL);
ASSERT_TRUE(data1 != NULL);
- rtc::scoped_ptr<MockDataChannelObserver> observer1(
+ std::unique_ptr<MockDataChannelObserver> observer1(
new MockDataChannelObserver(data1));
- rtc::scoped_ptr<MockDataChannelObserver> observer2(
+ std::unique_ptr<MockDataChannelObserver> observer2(
new MockDataChannelObserver(data2));
EXPECT_EQ(DataChannelInterface::kConnecting, data1->state());
@@ -1447,7 +1561,7 @@ TEST_F(PeerConnectionInterfaceTest, TestSendOnlyDataChannel) {
CreatePeerConnection(&constraints);
scoped_refptr<DataChannelInterface> data1 =
pc_->CreateDataChannel("test1", NULL);
- rtc::scoped_ptr<MockDataChannelObserver> observer1(
+ std::unique_ptr<MockDataChannelObserver> observer1(
new MockDataChannelObserver(data1));
CreateOfferReceiveAnswerWithoutSsrc();
@@ -1662,9 +1776,9 @@ TEST_F(PeerConnectionInterfaceTest, DataChannelCloseWhenPeerConnectionClose) {
scoped_refptr<DataChannelInterface> data2 =
pc_->CreateDataChannel("test2", NULL);
ASSERT_TRUE(data1 != NULL);
- rtc::scoped_ptr<MockDataChannelObserver> observer1(
+ std::unique_ptr<MockDataChannelObserver> observer1(
new MockDataChannelObserver(data1));
- rtc::scoped_ptr<MockDataChannelObserver> observer2(
+ std::unique_ptr<MockDataChannelObserver> observer2(
new MockDataChannelObserver(data2));
CreateOfferReceiveAnswer();
@@ -1769,7 +1883,7 @@ TEST_F(PeerConnectionInterfaceTest, CreateSubsequentRecvOnlyOffer) {
// At this point we should be receiving stream 1, but not sending anything.
// A new offer should be recvonly.
- rtc::scoped_ptr<SessionDescriptionInterface> offer;
+ std::unique_ptr<SessionDescriptionInterface> offer;
DoCreateOffer(&offer, nullptr);
const cricket::ContentInfo* video_content =
@@ -1801,7 +1915,7 @@ TEST_F(PeerConnectionInterfaceTest, CreateSubsequentInactiveOffer) {
// At this point we should be receiving stream 1, but not sending anything.
// A new offer would be recvonly, but we'll set the "no receive" constraints
// to make it inactive.
- rtc::scoped_ptr<SessionDescriptionInterface> offer;
+ std::unique_ptr<SessionDescriptionInterface> offer;
FakeConstraints offer_constraints;
offer_constraints.AddMandatory(
webrtc::MediaConstraintsInterface::kOfferToReceiveVideo, false);
@@ -1840,6 +1954,35 @@ TEST_F(PeerConnectionInterfaceTest, SetConfigurationChangesIceServers) {
port_allocator_->stun_servers().begin()->hostname());
}
+TEST_F(PeerConnectionInterfaceTest, SetConfigurationChangesCandidateFilter) {
+ CreatePeerConnection();
+ PeerConnectionInterface::RTCConfiguration config;
+ config.type = PeerConnectionInterface::kRelay;
+ EXPECT_TRUE(pc_->SetConfiguration(config));
+ EXPECT_EQ(cricket::CF_RELAY, port_allocator_->candidate_filter());
+}
+
+// Test that when SetConfiguration changes both the pool size and other
+// attributes, the pooled session is created with the updated attributes.
+TEST_F(PeerConnectionInterfaceTest,
+ SetConfigurationCreatesPooledSessionCorrectly) {
+ CreatePeerConnection();
+ PeerConnectionInterface::RTCConfiguration config;
+ config.ice_candidate_pool_size = 1;
+ PeerConnectionInterface::IceServer server;
+ server.uri = kStunAddressOnly;
+ config.servers.push_back(server);
+ config.type = PeerConnectionInterface::kRelay;
+ CreatePeerConnection(config, nullptr);
+
+ const cricket::FakePortAllocatorSession* session =
+ static_cast<const cricket::FakePortAllocatorSession*>(
+ port_allocator_->GetPooledSession());
+ ASSERT_NE(nullptr, session);
+ EXPECT_EQ(1UL, session->stun_servers().size());
+ EXPECT_EQ(cricket::CF_RELAY, session->candidate_filter());
+}
+
// Test that PeerConnection::Close changes the states to closed and all remote
// tracks change state to ended.
TEST_F(PeerConnectionInterfaceTest, CloseAndTestStreamsAndStates) {
@@ -1896,9 +2039,9 @@ TEST_F(PeerConnectionInterfaceTest, CloseAndTestMethods) {
EXPECT_TRUE(pc_->local_description() != NULL);
EXPECT_TRUE(pc_->remote_description() != NULL);
- rtc::scoped_ptr<SessionDescriptionInterface> offer;
+ std::unique_ptr<SessionDescriptionInterface> offer;
EXPECT_TRUE(DoCreateOffer(&offer, nullptr));
- rtc::scoped_ptr<SessionDescriptionInterface> answer;
+ std::unique_ptr<SessionDescriptionInterface> answer;
EXPECT_TRUE(DoCreateAnswer(&answer, nullptr));
std::string sdp;
@@ -1936,7 +2079,7 @@ TEST_F(PeerConnectionInterfaceTest, UpdateRemoteStreams) {
CreatePeerConnection(&constraints);
CreateAndSetRemoteOffer(kSdpStringWithStream1);
- rtc::scoped_refptr<StreamCollection> reference(CreateStreamCollection(1));
+ rtc::scoped_refptr<StreamCollection> reference(CreateStreamCollection(1, 1));
EXPECT_TRUE(
CompareStreamCollections(observer_.remote_streams(), reference.get()));
MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0);
@@ -1946,7 +2089,7 @@ TEST_F(PeerConnectionInterfaceTest, UpdateRemoteStreams) {
// MediaStream.
CreateAndSetRemoteOffer(kSdpStringWithStream1And2);
- rtc::scoped_refptr<StreamCollection> reference2(CreateStreamCollection(2));
+ rtc::scoped_refptr<StreamCollection> reference2(CreateStreamCollection(2, 1));
EXPECT_TRUE(
CompareStreamCollections(observer_.remote_streams(), reference2.get()));
}
@@ -1959,14 +2102,14 @@ TEST_F(PeerConnectionInterfaceTest,
constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
true);
CreatePeerConnection(&constraints);
- rtc::scoped_ptr<SessionDescriptionInterface> desc_ms1 =
+ std::unique_ptr<SessionDescriptionInterface> desc_ms1 =
CreateSessionDescriptionAndReference(1, 1);
EXPECT_TRUE(DoSetRemoteDescription(desc_ms1.release()));
EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
reference_collection_));
// Add extra audio and video tracks to the same MediaStream.
- rtc::scoped_ptr<SessionDescriptionInterface> desc_ms1_two_tracks =
+ std::unique_ptr<SessionDescriptionInterface> desc_ms1_two_tracks =
CreateSessionDescriptionAndReference(2, 2);
EXPECT_TRUE(DoSetRemoteDescription(desc_ms1_two_tracks.release()));
EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(),
@@ -1979,7 +2122,7 @@ TEST_F(PeerConnectionInterfaceTest,
EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, video_track2->state());
// Remove the extra audio and video tracks.
- rtc::scoped_ptr<SessionDescriptionInterface> desc_ms2 =
+ std::unique_ptr<SessionDescriptionInterface> desc_ms2 =
CreateSessionDescriptionAndReference(1, 1);
MockTrackObserver audio_track_observer(audio_track2);
MockTrackObserver video_track_observer(video_track2);
@@ -2018,7 +2161,7 @@ TEST_F(PeerConnectionInterfaceTest, RejectMediaContent) {
remote_stream->GetAudioTracks()[0];
EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());
- rtc::scoped_ptr<SessionDescriptionInterface> local_answer;
+ std::unique_ptr<SessionDescriptionInterface> local_answer;
EXPECT_TRUE(DoCreateAnswer(&local_answer, nullptr));
cricket::ContentInfo* video_info =
local_answer->description()->GetContentByName("video");
@@ -2028,7 +2171,7 @@ TEST_F(PeerConnectionInterfaceTest, RejectMediaContent) {
EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());
// Now create an offer where we reject both video and audio.
- rtc::scoped_ptr<SessionDescriptionInterface> local_offer;
+ std::unique_ptr<SessionDescriptionInterface> local_offer;
EXPECT_TRUE(DoCreateOffer(&local_offer, nullptr));
video_info = local_offer->description()->GetContentByName("video");
ASSERT_TRUE(video_info != nullptr);
@@ -2057,7 +2200,7 @@ TEST_F(PeerConnectionInterfaceTest, RemoveTrackThenRejectMediaContent) {
remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]);
remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]);
- rtc::scoped_ptr<SessionDescriptionInterface> local_answer(
+ std::unique_ptr<SessionDescriptionInterface> local_answer(
webrtc::CreateSessionDescription(SessionDescriptionInterface::kAnswer,
kSdpStringWithStream1, nullptr));
cricket::ContentInfo* video_info =
@@ -2212,7 +2355,7 @@ TEST_F(PeerConnectionInterfaceTest, VerifyDefaultStreamIsNotCreated) {
true);
CreatePeerConnection(&constraints);
CreateAndSetRemoteOffer(kSdpStringWithStream1);
- rtc::scoped_refptr<StreamCollection> reference(CreateStreamCollection(1));
+ rtc::scoped_refptr<StreamCollection> reference(CreateStreamCollection(1, 1));
EXPECT_TRUE(
CompareStreamCollections(observer_.remote_streams(), reference.get()));
@@ -2229,16 +2372,15 @@ TEST_F(PeerConnectionInterfaceTest, LocalDescriptionChanged) {
constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
true);
CreatePeerConnection(&constraints);
- // Create an offer just to ensure we have an identity before we manually
- // call SetLocalDescription.
- rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
- ASSERT_TRUE(DoCreateOffer(&throwaway, nullptr));
- rtc::scoped_ptr<SessionDescriptionInterface> desc_1 =
- CreateSessionDescriptionAndReference(2, 2);
+ // Create an offer with 1 stream with 2 tracks of each type.
+ rtc::scoped_refptr<StreamCollection> stream_collection =
+ CreateStreamCollection(1, 2);
+ pc_->AddStream(stream_collection->at(0));
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ EXPECT_TRUE(DoSetLocalDescription(offer.release()));
- pc_->AddStream(reference_collection_->at(0));
- EXPECT_TRUE(DoSetLocalDescription(desc_1.release()));
auto senders = pc_->GetSenders();
EXPECT_EQ(4u, senders.size());
EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
@@ -2247,11 +2389,12 @@ TEST_F(PeerConnectionInterfaceTest, LocalDescriptionChanged) {
EXPECT_TRUE(ContainsSender(senders, kVideoTracks[1]));
// Remove an audio and video track.
- pc_->RemoveStream(reference_collection_->at(0));
- rtc::scoped_ptr<SessionDescriptionInterface> desc_2 =
- CreateSessionDescriptionAndReference(1, 1);
- pc_->AddStream(reference_collection_->at(0));
- EXPECT_TRUE(DoSetLocalDescription(desc_2.release()));
+ pc_->RemoveStream(stream_collection->at(0));
+ stream_collection = CreateStreamCollection(1, 1);
+ pc_->AddStream(stream_collection->at(0));
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ EXPECT_TRUE(DoSetLocalDescription(offer.release()));
+
senders = pc_->GetSenders();
EXPECT_EQ(2u, senders.size());
EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
@@ -2268,19 +2411,20 @@ TEST_F(PeerConnectionInterfaceTest,
constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
true);
CreatePeerConnection(&constraints);
- // Create an offer just to ensure we have an identity before we manually
- // call SetLocalDescription.
- rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
- ASSERT_TRUE(DoCreateOffer(&throwaway, nullptr));
- rtc::scoped_ptr<SessionDescriptionInterface> desc_1 =
- CreateSessionDescriptionAndReference(2, 2);
+ rtc::scoped_refptr<StreamCollection> stream_collection =
+ CreateStreamCollection(1, 2);
+ // Add a stream to create the offer, but remove it afterwards.
+ pc_->AddStream(stream_collection->at(0));
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ pc_->RemoveStream(stream_collection->at(0));
- EXPECT_TRUE(DoSetLocalDescription(desc_1.release()));
+ EXPECT_TRUE(DoSetLocalDescription(offer.release()));
auto senders = pc_->GetSenders();
EXPECT_EQ(0u, senders.size());
- pc_->AddStream(reference_collection_->at(0));
+ pc_->AddStream(stream_collection->at(0));
senders = pc_->GetSenders();
EXPECT_EQ(4u, senders.size());
EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
@@ -2297,37 +2441,44 @@ TEST_F(PeerConnectionInterfaceTest,
constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
true);
CreatePeerConnection(&constraints);
- // Create an offer just to ensure we have an identity before we manually
- // call SetLocalDescription.
- rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
- ASSERT_TRUE(DoCreateOffer(&throwaway, nullptr));
- rtc::scoped_ptr<SessionDescriptionInterface> desc =
- CreateSessionDescriptionAndReference(1, 1);
- std::string sdp;
- desc->ToString(&sdp);
+ rtc::scoped_refptr<StreamCollection> stream_collection =
+ CreateStreamCollection(2, 1);
+ pc_->AddStream(stream_collection->at(0));
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ // Grab a copy of the offer before it gets passed into the PC.
+ std::unique_ptr<JsepSessionDescription> modified_offer(
+ new JsepSessionDescription(JsepSessionDescription::kOffer));
+ modified_offer->Initialize(offer->description()->Copy(), offer->session_id(),
+ offer->session_version());
+ EXPECT_TRUE(DoSetLocalDescription(offer.release()));
- pc_->AddStream(reference_collection_->at(0));
- EXPECT_TRUE(DoSetLocalDescription(desc.release()));
auto senders = pc_->GetSenders();
EXPECT_EQ(2u, senders.size());
EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
// Change the ssrc of the audio and video track.
- std::string ssrc_org = "a=ssrc:1";
- std::string ssrc_to = "a=ssrc:97";
- rtc::replace_substrs(ssrc_org.c_str(), ssrc_org.length(), ssrc_to.c_str(),
- ssrc_to.length(), &sdp);
- ssrc_org = "a=ssrc:2";
- ssrc_to = "a=ssrc:98";
- rtc::replace_substrs(ssrc_org.c_str(), ssrc_org.length(), ssrc_to.c_str(),
- ssrc_to.length(), &sdp);
- rtc::scoped_ptr<SessionDescriptionInterface> updated_desc(
- webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, sdp,
- nullptr));
-
- EXPECT_TRUE(DoSetLocalDescription(updated_desc.release()));
+ cricket::MediaContentDescription* desc =
+ cricket::GetFirstAudioContentDescription(modified_offer->description());
+ ASSERT_TRUE(desc != NULL);
+ for (StreamParams& stream : desc->mutable_streams()) {
+ for (unsigned int& ssrc : stream.ssrcs) {
+ ++ssrc;
+ }
+ }
+
+ desc =
+ cricket::GetFirstVideoContentDescription(modified_offer->description());
+ ASSERT_TRUE(desc != NULL);
+ for (StreamParams& stream : desc->mutable_streams()) {
+ for (unsigned int& ssrc : stream.ssrcs) {
+ ++ssrc;
+ }
+ }
+
+ EXPECT_TRUE(DoSetLocalDescription(modified_offer.release()));
senders = pc_->GetSenders();
EXPECT_EQ(2u, senders.size());
EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
@@ -2344,43 +2495,36 @@ TEST_F(PeerConnectionInterfaceTest,
constraints.AddMandatory(webrtc::MediaConstraintsInterface::kEnableDtlsSrtp,
true);
CreatePeerConnection(&constraints);
- // Create an offer just to ensure we have an identity before we manually
- // call SetLocalDescription.
- rtc::scoped_ptr<SessionDescriptionInterface> throwaway;
- ASSERT_TRUE(DoCreateOffer(&throwaway, nullptr));
- rtc::scoped_ptr<SessionDescriptionInterface> desc =
- CreateSessionDescriptionAndReference(1, 1);
- std::string sdp;
- desc->ToString(&sdp);
+ rtc::scoped_refptr<StreamCollection> stream_collection =
+ CreateStreamCollection(2, 1);
+ pc_->AddStream(stream_collection->at(0));
+ std::unique_ptr<SessionDescriptionInterface> offer;
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ EXPECT_TRUE(DoSetLocalDescription(offer.release()));
- pc_->AddStream(reference_collection_->at(0));
- EXPECT_TRUE(DoSetLocalDescription(desc.release()));
auto senders = pc_->GetSenders();
EXPECT_EQ(2u, senders.size());
- EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
- EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+ EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0], kStreams[0]));
+ EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0], kStreams[0]));
// Add a new MediaStream but with the same tracks as in the first stream.
rtc::scoped_refptr<webrtc::MediaStreamInterface> stream_1(
webrtc::MediaStream::Create(kStreams[1]));
- stream_1->AddTrack(reference_collection_->at(0)->GetVideoTracks()[0]);
- stream_1->AddTrack(reference_collection_->at(0)->GetAudioTracks()[0]);
+ stream_1->AddTrack(stream_collection->at(0)->GetVideoTracks()[0]);
+ stream_1->AddTrack(stream_collection->at(0)->GetAudioTracks()[0]);
pc_->AddStream(stream_1);
- // Replace msid in the original SDP.
- rtc::replace_substrs(kStreams[0], strlen(kStreams[0]), kStreams[1],
- strlen(kStreams[1]), &sdp);
-
- rtc::scoped_ptr<SessionDescriptionInterface> updated_desc(
- webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, sdp,
- nullptr));
+ ASSERT_TRUE(DoCreateOffer(&offer, nullptr));
+ EXPECT_TRUE(DoSetLocalDescription(offer.release()));
- EXPECT_TRUE(DoSetLocalDescription(updated_desc.release()));
- senders = pc_->GetSenders();
- EXPECT_EQ(2u, senders.size());
- EXPECT_TRUE(ContainsSender(senders, kAudioTracks[0]));
- EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0]));
+ auto new_senders = pc_->GetSenders();
+ // Should be the same senders as before, but with updated stream id.
+ // Note that this behavior is subject to change in the future.
+ // We may decide the PC should ignore existing tracks in AddStream.
+ EXPECT_EQ(senders, new_senders);
+ EXPECT_TRUE(ContainsSender(new_senders, kAudioTracks[0], kStreams[1]));
+ EXPECT_TRUE(ContainsSender(new_senders, kVideoTracks[0], kStreams[1]));
}
// The PeerConnectionMediaConfig tests below verify that configuration
diff --git a/chromium/third_party/webrtc/api/peerconnectionproxy.h b/chromium/third_party/webrtc/api/peerconnectionproxy.h
index 1183e619103..d35d5bacded 100644
--- a/chromium/third_party/webrtc/api/peerconnectionproxy.h
+++ b/chromium/third_party/webrtc/api/peerconnectionproxy.h
@@ -17,7 +17,7 @@
namespace webrtc {
// Define proxy for PeerConnectionInterface.
-BEGIN_PROXY_MAP(PeerConnection)
+BEGIN_SIGNALING_PROXY_MAP(PeerConnection)
PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>,
local_streams)
PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>,
@@ -75,7 +75,7 @@ BEGIN_PROXY_MAP(PeerConnection)
PROXY_METHOD0(IceConnectionState, ice_connection_state)
PROXY_METHOD0(IceGatheringState, ice_gathering_state)
PROXY_METHOD0(void, Close)
-END_PROXY()
+END_SIGNALING_PROXY()
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/proxy.h b/chromium/third_party/webrtc/api/proxy.h
index 1351a0427e6..2df85c4bf46 100644
--- a/chromium/third_party/webrtc/api/proxy.h
+++ b/chromium/third_party/webrtc/api/proxy.h
@@ -18,7 +18,7 @@
// public:
// std::string FooA() = 0;
// std::string FooB(bool arg1) const = 0;
-// std::string FooC(bool arg1)= 0;
+// std::string FooC(bool arg1) = 0;
// };
//
// Note that return types can not be a const reference.
@@ -30,14 +30,25 @@
// BEGIN_PROXY_MAP(Test)
// PROXY_METHOD0(std::string, FooA)
// PROXY_CONSTMETHOD1(std::string, FooB, arg1)
-// PROXY_METHOD1(std::string, FooC, arg1)
+// PROXY_WORKER_METHOD1(std::string, FooC, arg1)
// END_PROXY()
//
-// The proxy can be created using TestProxy::Create(Thread*, TestInterface*).
+// where the first two methods are invoked on the signaling thread,
+// and the third is invoked on the worker thread.
+//
+// The proxy can be created using
+//
+// TestProxy::Create(Thread* signaling_thread, Thread* worker_thread,
+// TestInterface*).
+//
+// The variant defined with BEGIN_SIGNALING_PROXY_MAP is unaware of
+// the worker thread, and invokes all methods on the signaling thread.
#ifndef WEBRTC_API_PROXY_H_
#define WEBRTC_API_PROXY_H_
+#include <memory>
+
#include "webrtc/base/event.h"
#include "webrtc/base/thread.h"
@@ -108,7 +119,7 @@ class SynchronousMethodCall
private:
void OnMessage(rtc::Message*) { proxy_->OnMessage(NULL); e_->Set(); }
- rtc::scoped_ptr<rtc::Event> e_;
+ std::unique_ptr<rtc::Event> e_;
rtc::MessageHandler* proxy_;
};
@@ -295,79 +306,125 @@ class MethodCall5 : public rtc::Message,
T5 a5_;
};
-#define BEGIN_PROXY_MAP(c) \
+#define BEGIN_SIGNALING_PROXY_MAP(c) \
class c##Proxy : public c##Interface { \
protected: \
typedef c##Interface C; \
- c##Proxy(rtc::Thread* thread, C* c) : owner_thread_(thread), c_(c) {} \
+ c##Proxy(rtc::Thread* signaling_thread, C* c) \
+ : signaling_thread_(signaling_thread), c_(c) {} \
~c##Proxy() { \
- MethodCall0<c##Proxy, void> call(this, &c##Proxy::Release_s); \
- call.Marshal(owner_thread_); \
+ MethodCall0<c##Proxy, void> call( \
+ this, &c##Proxy::Release_s); \
+ call.Marshal(signaling_thread_); \
} \
\
public: \
- static rtc::scoped_refptr<C> Create(rtc::Thread* thread, C* c) { \
- return new rtc::RefCountedObject<c##Proxy>(thread, c); \
+ static rtc::scoped_refptr<C> Create(rtc::Thread* signaling_thread, C* c) { \
+ return new rtc::RefCountedObject<c##Proxy>( \
+ signaling_thread, c); \
+ }
+
+#define BEGIN_PROXY_MAP(c) \
+ class c##Proxy : public c##Interface { \
+ protected: \
+ typedef c##Interface C; \
+ c##Proxy(rtc::Thread* signaling_thread, rtc::Thread* worker_thread, C* c) \
+ : signaling_thread_(signaling_thread), \
+ worker_thread_(worker_thread), \
+ c_(c) {} \
+ ~c##Proxy() { \
+ MethodCall0<c##Proxy, void> call(this, &c##Proxy::Release_s); \
+ call.Marshal(signaling_thread_); \
+ } \
+ \
+ public: \
+ static rtc::scoped_refptr<C> Create( \
+ rtc::Thread* signaling_thread, rtc::Thread* worker_thread, C* c) { \
+ return new rtc::RefCountedObject<c##Proxy>( \
+ signaling_thread, worker_thread, c); \
}
#define PROXY_METHOD0(r, method) \
r method() override { \
MethodCall0<C, r> call(c_.get(), &C::method); \
- return call.Marshal(owner_thread_); \
+ return call.Marshal(signaling_thread_); \
}
#define PROXY_CONSTMETHOD0(r, method) \
r method() const override { \
ConstMethodCall0<C, r> call(c_.get(), &C::method); \
- return call.Marshal(owner_thread_); \
+ return call.Marshal(signaling_thread_); \
}
#define PROXY_METHOD1(r, method, t1) \
r method(t1 a1) override { \
MethodCall1<C, r, t1> call(c_.get(), &C::method, a1); \
- return call.Marshal(owner_thread_); \
+ return call.Marshal(signaling_thread_); \
}
#define PROXY_CONSTMETHOD1(r, method, t1) \
r method(t1 a1) const override { \
ConstMethodCall1<C, r, t1> call(c_.get(), &C::method, a1); \
- return call.Marshal(owner_thread_); \
+ return call.Marshal(signaling_thread_); \
}
#define PROXY_METHOD2(r, method, t1, t2) \
r method(t1 a1, t2 a2) override { \
MethodCall2<C, r, t1, t2> call(c_.get(), &C::method, a1, a2); \
- return call.Marshal(owner_thread_); \
+ return call.Marshal(signaling_thread_); \
}
#define PROXY_METHOD3(r, method, t1, t2, t3) \
r method(t1 a1, t2 a2, t3 a3) override { \
MethodCall3<C, r, t1, t2, t3> call(c_.get(), &C::method, a1, a2, a3); \
- return call.Marshal(owner_thread_); \
+ return call.Marshal(signaling_thread_); \
}
#define PROXY_METHOD4(r, method, t1, t2, t3, t4) \
r method(t1 a1, t2 a2, t3 a3, t4 a4) override { \
MethodCall4<C, r, t1, t2, t3, t4> call(c_.get(), &C::method, a1, a2, a3, \
a4); \
- return call.Marshal(owner_thread_); \
+ return call.Marshal(signaling_thread_); \
}
#define PROXY_METHOD5(r, method, t1, t2, t3, t4, t5) \
r method(t1 a1, t2 a2, t3 a3, t4 a4, t5 a5) override { \
MethodCall5<C, r, t1, t2, t3, t4, t5> call(c_.get(), &C::method, a1, a2, \
a3, a4, a5); \
- return call.Marshal(owner_thread_); \
+ return call.Marshal(signaling_thread_); \
+ }
+
+// Define methods which should be invoked on the worker thread.
+#define PROXY_WORKER_METHOD1(r, method, t1) \
+ r method(t1 a1) override { \
+ MethodCall1<C, r, t1> call(c_.get(), &C::method, a1); \
+ return call.Marshal(worker_thread_); \
+ }
+
+#define PROXY_WORKER_METHOD2(r, method, t1, t2) \
+ r method(t1 a1, t2 a2) override { \
+ MethodCall2<C, r, t1, t2> call(c_.get(), &C::method, a1, a2); \
+ return call.Marshal(worker_thread_); \
}
-#define END_PROXY() \
+#define END_SIGNALING_PROXY() \
private:\
void Release_s() {\
c_ = NULL;\
}\
- mutable rtc::Thread* owner_thread_;\
+ mutable rtc::Thread* signaling_thread_;\
rtc::scoped_refptr<C> c_;\
- };\
+ };
+
+#define END_PROXY() \
+ private: \
+ void Release_s() { \
+ c_ = NULL; \
+ } \
+ mutable rtc::Thread* signaling_thread_; \
+ mutable rtc::Thread* worker_thread_; \
+ rtc::scoped_refptr<C> c_; \
+ }; \
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/proxy_unittest.cc b/chromium/third_party/webrtc/api/proxy_unittest.cc
index 85e54380768..931ba28eba8 100644
--- a/chromium/third_party/webrtc/api/proxy_unittest.cc
+++ b/chromium/third_party/webrtc/api/proxy_unittest.cc
@@ -10,12 +10,12 @@
#include "webrtc/api/proxy.h"
+#include <memory>
#include <string>
-#include "testing/base/public/gmock.h"
+#include "testing/gmock/include/gmock/gmock.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/refcount.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread.h"
using ::testing::_;
@@ -40,16 +40,6 @@ class FakeInterface : public rtc::RefCountInterface {
~FakeInterface() {}
};
-// Proxy for the test interface.
-BEGIN_PROXY_MAP(Fake)
- PROXY_METHOD0(void, VoidMethod0)
- PROXY_METHOD0(std::string, Method0)
- PROXY_CONSTMETHOD0(std::string, ConstMethod0)
- PROXY_METHOD1(std::string, Method1, std::string)
- PROXY_CONSTMETHOD1(std::string, ConstMethod1, std::string)
- PROXY_METHOD2(std::string, Method2, std::string, std::string)
-END_PROXY()
-
// Implementation of the test interface.
class Fake : public FakeInterface {
public:
@@ -71,60 +61,156 @@ class Fake : public FakeInterface {
~Fake() {}
};
-class ProxyTest: public testing::Test {
+// Proxies for the test interface.
+BEGIN_PROXY_MAP(Fake)
+ PROXY_METHOD0(void, VoidMethod0)
+ PROXY_METHOD0(std::string, Method0)
+ PROXY_CONSTMETHOD0(std::string, ConstMethod0)
+ PROXY_WORKER_METHOD1(std::string, Method1, std::string)
+ PROXY_CONSTMETHOD1(std::string, ConstMethod1, std::string)
+ PROXY_WORKER_METHOD2(std::string, Method2, std::string, std::string)
+END_PROXY()
+
+// Preprocessor hack to get a proxy class a name different than FakeProxy.
+#define FakeProxy FakeSignalingProxy
+BEGIN_SIGNALING_PROXY_MAP(Fake)
+ PROXY_METHOD0(void, VoidMethod0)
+ PROXY_METHOD0(std::string, Method0)
+ PROXY_CONSTMETHOD0(std::string, ConstMethod0)
+ PROXY_METHOD1(std::string, Method1, std::string)
+ PROXY_CONSTMETHOD1(std::string, ConstMethod1, std::string)
+ PROXY_METHOD2(std::string, Method2, std::string, std::string)
+END_SIGNALING_PROXY()
+#undef FakeProxy
+
+class SignalingProxyTest : public testing::Test {
public:
- // Checks that the functions is called on the |signaling_thread_|.
- void CheckThread() {
- EXPECT_EQ(rtc::Thread::Current(), signaling_thread_.get());
- }
+ // Checks that the functions are called on the right thread.
+ void CheckSignalingThread() { EXPECT_TRUE(signaling_thread_->IsCurrent()); }
protected:
- virtual void SetUp() {
+ void SetUp() override {
signaling_thread_.reset(new rtc::Thread());
ASSERT_TRUE(signaling_thread_->Start());
fake_ = Fake::Create();
- fake_proxy_ = FakeProxy::Create(signaling_thread_.get(), fake_.get());
+ fake_signaling_proxy_ =
+ FakeSignalingProxy::Create(signaling_thread_.get(), fake_.get());
}
protected:
- rtc::scoped_ptr<rtc::Thread> signaling_thread_;
- rtc::scoped_refptr<FakeInterface> fake_proxy_;
+ std::unique_ptr<rtc::Thread> signaling_thread_;
+ rtc::scoped_refptr<FakeInterface> fake_signaling_proxy_;
rtc::scoped_refptr<Fake> fake_;
};
+TEST_F(SignalingProxyTest, VoidMethod0) {
+ EXPECT_CALL(*fake_, VoidMethod0())
+ .Times(Exactly(1))
+ .WillOnce(
+ InvokeWithoutArgs(this, &SignalingProxyTest::CheckSignalingThread));
+ fake_signaling_proxy_->VoidMethod0();
+}
+
+TEST_F(SignalingProxyTest, Method0) {
+ EXPECT_CALL(*fake_, Method0())
+ .Times(Exactly(1))
+ .WillOnce(DoAll(
+ InvokeWithoutArgs(this, &SignalingProxyTest::CheckSignalingThread),
+ Return("Method0")));
+ EXPECT_EQ("Method0", fake_signaling_proxy_->Method0());
+}
+
+TEST_F(SignalingProxyTest, ConstMethod0) {
+ EXPECT_CALL(*fake_, ConstMethod0())
+ .Times(Exactly(1))
+ .WillOnce(DoAll(
+ InvokeWithoutArgs(this, &SignalingProxyTest::CheckSignalingThread),
+ Return("ConstMethod0")));
+ EXPECT_EQ("ConstMethod0", fake_signaling_proxy_->ConstMethod0());
+}
+
+TEST_F(SignalingProxyTest, Method1) {
+ const std::string arg1 = "arg1";
+ EXPECT_CALL(*fake_, Method1(arg1))
+ .Times(Exactly(1))
+ .WillOnce(DoAll(
+ InvokeWithoutArgs(this, &SignalingProxyTest::CheckSignalingThread),
+ Return("Method1")));
+ EXPECT_EQ("Method1", fake_signaling_proxy_->Method1(arg1));
+}
+
+TEST_F(SignalingProxyTest, ConstMethod1) {
+ const std::string arg1 = "arg1";
+ EXPECT_CALL(*fake_, ConstMethod1(arg1))
+ .Times(Exactly(1))
+ .WillOnce(DoAll(
+ InvokeWithoutArgs(this, &SignalingProxyTest::CheckSignalingThread),
+ Return("ConstMethod1")));
+ EXPECT_EQ("ConstMethod1", fake_signaling_proxy_->ConstMethod1(arg1));
+}
+
+TEST_F(SignalingProxyTest, Method2) {
+ const std::string arg1 = "arg1";
+ const std::string arg2 = "arg2";
+ EXPECT_CALL(*fake_, Method2(arg1, arg2))
+ .Times(Exactly(1))
+ .WillOnce(DoAll(
+ InvokeWithoutArgs(this, &SignalingProxyTest::CheckSignalingThread),
+ Return("Method2")));
+ EXPECT_EQ("Method2", fake_signaling_proxy_->Method2(arg1, arg2));
+}
+
+class ProxyTest : public SignalingProxyTest {
+ public:
+ // Checks that the functions are called on the right thread.
+ void CheckWorkerThread() { EXPECT_TRUE(worker_thread_->IsCurrent()); }
+
+ protected:
+ void SetUp() override {
+ SignalingProxyTest::SetUp();
+ worker_thread_.reset(new rtc::Thread());
+ ASSERT_TRUE(worker_thread_->Start());
+ fake_proxy_ = FakeProxy::Create(signaling_thread_.get(),
+ worker_thread_.get(), fake_.get());
+ }
+
+ protected:
+ std::unique_ptr<rtc::Thread> worker_thread_;
+ rtc::scoped_refptr<FakeInterface> fake_proxy_;
+};
+
TEST_F(ProxyTest, VoidMethod0) {
EXPECT_CALL(*fake_, VoidMethod0())
- .Times(Exactly(1))
- .WillOnce(InvokeWithoutArgs(this, &ProxyTest::CheckThread));
+ .Times(Exactly(1))
+ .WillOnce(InvokeWithoutArgs(this, &ProxyTest::CheckSignalingThread));
fake_proxy_->VoidMethod0();
}
TEST_F(ProxyTest, Method0) {
EXPECT_CALL(*fake_, Method0())
- .Times(Exactly(1))
- .WillOnce(
- DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
- Return("Method0")));
+ .Times(Exactly(1))
+ .WillOnce(
+ DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckSignalingThread),
+ Return("Method0")));
EXPECT_EQ("Method0",
fake_proxy_->Method0());
}
TEST_F(ProxyTest, ConstMethod0) {
EXPECT_CALL(*fake_, ConstMethod0())
- .Times(Exactly(1))
- .WillOnce(
- DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
- Return("ConstMethod0")));
+ .Times(Exactly(1))
+ .WillOnce(
+ DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckSignalingThread),
+ Return("ConstMethod0")));
EXPECT_EQ("ConstMethod0",
fake_proxy_->ConstMethod0());
}
-TEST_F(ProxyTest, Method1) {
+TEST_F(ProxyTest, WorkerMethod1) {
const std::string arg1 = "arg1";
EXPECT_CALL(*fake_, Method1(arg1))
- .Times(Exactly(1))
- .WillOnce(
- DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
+ .Times(Exactly(1))
+ .WillOnce(DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckWorkerThread),
Return("Method1")));
EXPECT_EQ("Method1", fake_proxy_->Method1(arg1));
}
@@ -132,20 +218,19 @@ TEST_F(ProxyTest, Method1) {
TEST_F(ProxyTest, ConstMethod1) {
const std::string arg1 = "arg1";
EXPECT_CALL(*fake_, ConstMethod1(arg1))
- .Times(Exactly(1))
- .WillOnce(
- DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
- Return("ConstMethod1")));
+ .Times(Exactly(1))
+ .WillOnce(
+ DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckSignalingThread),
+ Return("ConstMethod1")));
EXPECT_EQ("ConstMethod1", fake_proxy_->ConstMethod1(arg1));
}
-TEST_F(ProxyTest, Method2) {
+TEST_F(ProxyTest, WorkerMethod2) {
const std::string arg1 = "arg1";
const std::string arg2 = "arg2";
EXPECT_CALL(*fake_, Method2(arg1, arg2))
- .Times(Exactly(1))
- .WillOnce(
- DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckThread),
+ .Times(Exactly(1))
+ .WillOnce(DoAll(InvokeWithoutArgs(this, &ProxyTest::CheckWorkerThread),
Return("Method2")));
EXPECT_EQ("Method2", fake_proxy_->Method2(arg1, arg2));
}
diff --git a/chromium/third_party/webrtc/api/quicdatachannel.cc b/chromium/third_party/webrtc/api/quicdatachannel.cc
new file mode 100644
index 00000000000..5420da15eab
--- /dev/null
+++ b/chromium/third_party/webrtc/api/quicdatachannel.cc
@@ -0,0 +1,391 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/api/quicdatachannel.h"
+
+#include "webrtc/base/bind.h"
+#include "webrtc/base/bytebuffer.h"
+#include "webrtc/base/copyonwritebuffer.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/p2p/quic/quictransportchannel.h"
+#include "webrtc/p2p/quic/reliablequicstream.h"
+
+namespace webrtc {
+
+void WriteQuicDataChannelMessageHeader(int data_channel_id,
+ uint64_t message_id,
+ rtc::CopyOnWriteBuffer* header) {
+ RTC_DCHECK(header);
+ // 64-bit varints require at most 10 bytes (7*10 == 70), and 32-bit varints
+ // require at most 5 bytes (7*5 == 35).
+ size_t max_length = 15;
+ rtc::ByteBufferWriter byte_buffer(nullptr, max_length,
+ rtc::ByteBuffer::ByteOrder::ORDER_HOST);
+ byte_buffer.WriteUVarint(data_channel_id);
+ byte_buffer.WriteUVarint(message_id);
+ header->SetData(byte_buffer.Data(), byte_buffer.Length());
+}
+
+bool ParseQuicDataMessageHeader(const char* data,
+ size_t len,
+ int* data_channel_id,
+ uint64_t* message_id,
+ size_t* bytes_read) {
+ RTC_DCHECK(data_channel_id);
+ RTC_DCHECK(message_id);
+ RTC_DCHECK(bytes_read);
+
+ rtc::ByteBufferReader byte_buffer(data, len, rtc::ByteBuffer::ORDER_HOST);
+ uint64_t dcid;
+ if (!byte_buffer.ReadUVarint(&dcid)) {
+ LOG(LS_ERROR) << "Could not read the data channel ID";
+ return false;
+ }
+ *data_channel_id = dcid;
+ if (!byte_buffer.ReadUVarint(message_id)) {
+ LOG(LS_ERROR) << "Could not read message ID for data channel "
+ << *data_channel_id;
+ return false;
+ }
+ size_t remaining_bytes = byte_buffer.Length();
+ *bytes_read = len - remaining_bytes;
+ return true;
+}
+
+QuicDataChannel::QuicDataChannel(rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread,
+ const std::string& label,
+ const DataChannelInit& config)
+ : signaling_thread_(signaling_thread),
+ worker_thread_(worker_thread),
+ id_(config.id),
+ state_(kConnecting),
+ buffered_amount_(0),
+ next_message_id_(0),
+ label_(label),
+ protocol_(config.protocol) {}
+
+QuicDataChannel::~QuicDataChannel() {}
+
+void QuicDataChannel::RegisterObserver(DataChannelObserver* observer) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ observer_ = observer;
+}
+
+void QuicDataChannel::UnregisterObserver() {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ observer_ = nullptr;
+}
+
+bool QuicDataChannel::Send(const DataBuffer& buffer) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ if (state_ != kOpen) {
+ LOG(LS_ERROR) << "QUIC data channel " << id_
+ << " is not open so cannot send.";
+ return false;
+ }
+ return worker_thread_->Invoke<bool>(
+ rtc::Bind(&QuicDataChannel::Send_w, this, buffer));
+}
+
+bool QuicDataChannel::Send_w(const DataBuffer& buffer) {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+
+ // Encode and send the header containing the data channel ID and message ID.
+ rtc::CopyOnWriteBuffer header;
+ WriteQuicDataChannelMessageHeader(id_, ++next_message_id_, &header);
+ RTC_DCHECK(quic_transport_channel_);
+ cricket::ReliableQuicStream* stream =
+ quic_transport_channel_->CreateQuicStream();
+ RTC_DCHECK(stream);
+
+ // Send the header with a FIN if the message is empty.
+ bool header_fin = (buffer.size() == 0);
+ rtc::StreamResult header_result =
+ stream->Write(header.data<char>(), header.size(), header_fin);
+
+ if (header_result == rtc::SR_BLOCK) {
+ // The header is write blocked but we should try sending the message. Since
+ // the ReliableQuicStream queues data in order, if the header is write
+ // blocked then the message will be write blocked. Otherwise if the message
+ // is sent then the header is sent.
+ LOG(LS_INFO) << "Stream " << stream->id()
+ << " header is write blocked for QUIC data channel " << id_;
+ } else if (header_result != rtc::SR_SUCCESS) {
+ LOG(LS_ERROR) << "Stream " << stream->id()
+ << " failed to write header for QUIC data channel " << id_
+ << ". Unexpected error " << header_result;
+ return false;
+ }
+
+ // If the message is not empty, then send the message with a FIN.
+ bool message_fin = true;
+ rtc::StreamResult message_result =
+ header_fin ? header_result : stream->Write(buffer.data.data<char>(),
+ buffer.size(), message_fin);
+
+ if (message_result == rtc::SR_SUCCESS) {
+ // The message is sent and we don't need this QUIC stream.
+ LOG(LS_INFO) << "Stream " << stream->id()
+ << " successfully wrote message for QUIC data channel " << id_;
+ stream->Close();
+ return true;
+ }
+ // TODO(mikescarlett): Register the ReliableQuicStream's priority to the
+ // QuicWriteBlockedList so that the QUIC session doesn't drop messages when
+ // the QUIC transport channel becomes unwritable.
+ if (message_result == rtc::SR_BLOCK) {
+ // The QUIC stream is write blocked, so the message is queued by the QUIC
+ // session. If this is due to the QUIC not being writable, it will be sent
+ // once QUIC becomes writable again. Otherwise it may be due to exceeding
+ // the QUIC flow control limit, in which case the remote peer's QUIC session
+ // will tell the QUIC stream to send more data.
+ LOG(LS_INFO) << "Stream " << stream->id()
+ << " message is write blocked for QUIC data channel " << id_;
+ SetBufferedAmount_w(buffered_amount_ + stream->queued_data_bytes());
+ stream->SignalQueuedBytesWritten.connect(
+ this, &QuicDataChannel::OnQueuedBytesWritten);
+ write_blocked_quic_streams_[stream->id()] = stream;
+ // The QUIC stream will be removed from |write_blocked_quic_streams_| once
+ // it closes.
+ stream->SignalClosed.connect(this,
+ &QuicDataChannel::OnWriteBlockedStreamClosed);
+ return true;
+ }
+ LOG(LS_ERROR) << "Stream " << stream->id()
+ << " failed to write message for QUIC data channel " << id_
+ << ". Unexpected error: " << message_result;
+ return false;
+}
+
+void QuicDataChannel::OnQueuedBytesWritten(net::QuicStreamId stream_id,
+ uint64_t queued_bytes_written) {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ SetBufferedAmount_w(buffered_amount_ - queued_bytes_written);
+ const auto& kv = write_blocked_quic_streams_.find(stream_id);
+ if (kv == write_blocked_quic_streams_.end()) {
+ RTC_DCHECK(false);
+ return;
+ }
+ cricket::ReliableQuicStream* stream = kv->second;
+ // True if the QUIC stream is done sending data.
+ if (stream->fin_sent()) {
+ LOG(LS_INFO) << "Stream " << stream->id()
+ << " successfully wrote data for QUIC data channel " << id_;
+ stream->Close();
+ }
+}
+
+void QuicDataChannel::SetBufferedAmount_w(uint64_t buffered_amount) {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ buffered_amount_ = buffered_amount;
+ invoker_.AsyncInvoke<void>(
+ signaling_thread_, rtc::Bind(&QuicDataChannel::OnBufferedAmountChange_s,
+ this, buffered_amount));
+}
+
+void QuicDataChannel::Close() {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ if (state_ == kClosed || state_ == kClosing) {
+ return;
+ }
+ LOG(LS_INFO) << "Closing QUIC data channel.";
+ SetState_s(kClosing);
+ worker_thread_->Invoke<void>(rtc::Bind(&QuicDataChannel::Close_w, this));
+ SetState_s(kClosed);
+}
+
+void QuicDataChannel::Close_w() {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ for (auto& kv : incoming_quic_messages_) {
+ Message& message = kv.second;
+ cricket::ReliableQuicStream* stream = message.stream;
+ stream->Close();
+ }
+
+ for (auto& kv : write_blocked_quic_streams_) {
+ cricket::ReliableQuicStream* stream = kv.second;
+ stream->Close();
+ }
+}
+
+bool QuicDataChannel::SetTransportChannel(
+ cricket::QuicTransportChannel* channel) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+
+ if (!channel) {
+ LOG(LS_ERROR) << "|channel| is NULL. Cannot set transport channel.";
+ return false;
+ }
+ if (quic_transport_channel_) {
+ if (channel == quic_transport_channel_) {
+ LOG(LS_WARNING) << "Ignoring duplicate transport channel.";
+ return true;
+ }
+ LOG(LS_ERROR) << "|channel| does not match existing transport channel.";
+ return false;
+ }
+
+ quic_transport_channel_ = channel;
+ LOG(LS_INFO) << "Setting QuicTransportChannel for QUIC data channel " << id_;
+ DataState data_channel_state = worker_thread_->Invoke<DataState>(
+ rtc::Bind(&QuicDataChannel::SetTransportChannel_w, this));
+ SetState_s(data_channel_state);
+ return true;
+}
+
+DataChannelInterface::DataState QuicDataChannel::SetTransportChannel_w() {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ quic_transport_channel_->SignalReadyToSend.connect(
+ this, &QuicDataChannel::OnReadyToSend);
+ quic_transport_channel_->SignalClosed.connect(
+ this, &QuicDataChannel::OnConnectionClosed);
+ if (quic_transport_channel_->writable()) {
+ return kOpen;
+ }
+ return kConnecting;
+}
+
+void QuicDataChannel::OnIncomingMessage(Message&& message) {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(message.stream);
+ if (!observer_) {
+ LOG(LS_WARNING) << "QUIC data channel " << id_
+ << " received a message but has no observer.";
+ message.stream->Close();
+ return;
+ }
+ // A FIN is received if the message fits into a single QUIC stream frame and
+ // the remote peer is done sending.
+ if (message.stream->fin_received()) {
+ LOG(LS_INFO) << "Stream " << message.stream->id()
+ << " has finished receiving data for QUIC data channel "
+ << id_;
+ DataBuffer final_message(message.buffer, false);
+ invoker_.AsyncInvoke<void>(signaling_thread_,
+ rtc::Bind(&QuicDataChannel::OnMessage_s, this,
+ std::move(final_message)));
+ message.stream->Close();
+ return;
+ }
+ // Otherwise the message is divided across multiple QUIC stream frames, so
+ // queue the data. OnDataReceived() will be called each time the remaining
+ // QUIC stream frames arrive.
+ LOG(LS_INFO) << "QUIC data channel " << id_
+ << " is queuing incoming data for stream "
+ << message.stream->id();
+ incoming_quic_messages_[message.stream->id()] = std::move(message);
+ message.stream->SignalDataReceived.connect(this,
+ &QuicDataChannel::OnDataReceived);
+ // The QUIC stream will be removed from |incoming_quic_messages_| once it
+ // closes.
+ message.stream->SignalClosed.connect(
+ this, &QuicDataChannel::OnIncomingQueuedStreamClosed);
+}
+
+void QuicDataChannel::OnDataReceived(net::QuicStreamId stream_id,
+ const char* data,
+ size_t len) {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(data);
+ const auto& kv = incoming_quic_messages_.find(stream_id);
+ if (kv == incoming_quic_messages_.end()) {
+ RTC_DCHECK(false);
+ return;
+ }
+ Message& message = kv->second;
+ cricket::ReliableQuicStream* stream = message.stream;
+ rtc::CopyOnWriteBuffer& received_data = message.buffer;
+ // If the QUIC stream has not received a FIN, then the remote peer is not
+ // finished sending data.
+ if (!stream->fin_received()) {
+ received_data.AppendData(data, len);
+ return;
+ }
+ // Otherwise we are done receiving and can provide the data channel observer
+ // with the message.
+ LOG(LS_INFO) << "Stream " << stream_id
+ << " has finished receiving data for QUIC data channel " << id_;
+ received_data.AppendData(data, len);
+ DataBuffer final_message(std::move(received_data), false);
+ invoker_.AsyncInvoke<void>(
+ signaling_thread_,
+ rtc::Bind(&QuicDataChannel::OnMessage_s, this, std::move(final_message)));
+ // Once the stream is closed, OnDataReceived will not fire for the stream.
+ stream->Close();
+}
+
+void QuicDataChannel::OnReadyToSend(cricket::TransportChannel* channel) {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(channel == quic_transport_channel_);
+ LOG(LS_INFO) << "QuicTransportChannel is ready to send";
+ invoker_.AsyncInvoke<void>(
+ signaling_thread_, rtc::Bind(&QuicDataChannel::SetState_s, this, kOpen));
+}
+
+void QuicDataChannel::OnWriteBlockedStreamClosed(net::QuicStreamId stream_id,
+ int error) {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ LOG(LS_VERBOSE) << "Write blocked stream " << stream_id << " is closed.";
+ write_blocked_quic_streams_.erase(stream_id);
+}
+
+void QuicDataChannel::OnIncomingQueuedStreamClosed(net::QuicStreamId stream_id,
+ int error) {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ LOG(LS_VERBOSE) << "Incoming queued stream " << stream_id << " is closed.";
+ incoming_quic_messages_.erase(stream_id);
+}
+
+void QuicDataChannel::OnConnectionClosed() {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ invoker_.AsyncInvoke<void>(signaling_thread_,
+ rtc::Bind(&QuicDataChannel::Close, this));
+}
+
+void QuicDataChannel::OnMessage_s(const DataBuffer& received_data) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ if (observer_) {
+ observer_->OnMessage(received_data);
+ }
+}
+
+void QuicDataChannel::SetState_s(DataState state) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ if (state_ == state || state_ == kClosed) {
+ return;
+ }
+ if (state_ == kClosing && state != kClosed) {
+ return;
+ }
+ LOG(LS_INFO) << "Setting state to " << state << " for QUIC data channel "
+ << id_;
+ state_ = state;
+ if (observer_) {
+ observer_->OnStateChange();
+ }
+}
+
+void QuicDataChannel::OnBufferedAmountChange_s(uint64_t buffered_amount) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ if (observer_) {
+ observer_->OnBufferedAmountChange(buffered_amount);
+ }
+}
+
+size_t QuicDataChannel::GetNumWriteBlockedStreams() const {
+ return write_blocked_quic_streams_.size();
+}
+
+size_t QuicDataChannel::GetNumIncomingStreams() const {
+ return incoming_quic_messages_.size();
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/quicdatachannel.h b/chromium/third_party/webrtc/api/quicdatachannel.h
new file mode 100644
index 00000000000..a6b987b144d
--- /dev/null
+++ b/chromium/third_party/webrtc/api/quicdatachannel.h
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_API_QUICDATACHANNEL_H_
+#define WEBRTC_API_QUICDATACHANNEL_H_
+
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+
+#include "webrtc/api/datachannelinterface.h"
+#include "webrtc/base/asyncinvoker.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/thread.h"
+
+namespace cricket {
+class QuicTransportChannel;
+class ReliableQuicStream;
+class TransportChannel;
+} // namespace cricket
+
+namespace net {
+// TODO(mikescarlett): Make this uint64_t once QUIC uses 64-bit ids.
+typedef uint32_t QuicStreamId;
+} // namespace net
+
+namespace rtc {
+class CopyOnWriteBuffer;
+} // namespace rtc
+
+namespace webrtc {
+
+// Encodes a QUIC message header with the data channel ID and message ID, then
+// stores the result in |header|.
+void WriteQuicDataChannelMessageHeader(int data_channel_id,
+ uint64_t message_id,
+ rtc::CopyOnWriteBuffer* header);
+
+// Decodes the data channel ID and message ID from the initial data received by
+// an incoming QUIC stream. The data channel ID is output to |data_channel_id|,
+// the message ID is output to |message_id|, and the number of bytes read is
+// output to |bytes_read|. Returns false if either ID cannot be read.
+bool ParseQuicDataMessageHeader(const char* data,
+ size_t len,
+ int* data_channel_id,
+ uint64_t* message_id,
+ size_t* bytes_read);
+
+// QuicDataChannel is an implementation of DataChannelInterface based on the
+// QUIC protocol. It uses a QuicTransportChannel to establish encryption and
+// transfer data, and a QuicDataTransport to receive incoming messages at
+// the correct data channel. Currently this class implements unordered, reliable
+// delivery and does not send an "OPEN" message.
+//
+// Each time a message is sent:
+//
+// - The QuicDataChannel prepends it with the data channel id and message id.
+// The QuicTransportChannel creates a ReliableQuicStream, then the
+// ReliableQuicStream sends the message with a FIN.
+//
+// - The remote QuicSession creates a ReliableQuicStream to receive the data.
+// The remote QuicDataTransport dispatches the ReliableQuicStream to the
+// QuicDataChannel with the same id as this data channel.
+//
+// - The remote QuicDataChannel queues data from the ReliableQuicStream. Once
+// it receives a QUIC stream frame with a FIN, it provides the message to the
+// DataChannelObserver.
+//
+// TODO(mikescarlett): Implement ordered delivery, unreliable delivery, and
+// an OPEN message similar to the one for SCTP.
+class QuicDataChannel : public rtc::RefCountedObject<DataChannelInterface>,
+ public sigslot::has_slots<> {
+ public:
+ // Message stores buffered data from the incoming QUIC stream. The QUIC stream
+ // is provided so that remaining data can be received from the remote peer.
+ struct Message {
+ uint64_t id;
+ rtc::CopyOnWriteBuffer buffer;
+ cricket::ReliableQuicStream* stream;
+ };
+
+ QuicDataChannel(rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread,
+ const std::string& label,
+ const DataChannelInit& config);
+ ~QuicDataChannel() override;
+
+ // DataChannelInterface overrides.
+ std::string label() const override { return label_; }
+ bool reliable() const override { return true; }
+ bool ordered() const override { return false; }
+ uint16_t maxRetransmitTime() const override { return -1; }
+ uint16_t maxRetransmits() const override { return -1; }
+ bool negotiated() const override { return false; }
+ int id() const override { return id_; }
+ DataState state() const override { return state_; }
+ uint64_t buffered_amount() const override { return buffered_amount_; }
+ std::string protocol() const override { return protocol_; }
+ void RegisterObserver(DataChannelObserver* observer) override;
+ void UnregisterObserver() override;
+ void Close() override;
+ bool Send(const DataBuffer& buffer) override;
+
+ // Called from QuicDataTransport to set the QUIC transport channel that the
+ // QuicDataChannel sends messages with. Returns false if a different QUIC
+ // transport channel is already set or |channel| is NULL.
+ //
+ // The QUIC transport channel is not set in the constructor to allow creating
+ // the QuicDataChannel before the PeerConnection has a QUIC transport channel,
+// such as before the session description is set.
+ bool SetTransportChannel(cricket::QuicTransportChannel* channel);
+
+ // Called from QuicDataTransport when an incoming ReliableQuicStream is
+ // receiving a message received for this data channel. Once this function is
+ // called, |message| is owned by the QuicDataChannel and should not be
+ // accessed by the QuicDataTransport.
+ void OnIncomingMessage(Message&& message);
+
+ // Methods for testing.
+ // Gets the number of outgoing QUIC streams with write blocked data that are
+ // currently open for this data channel and are not finished writing a
+ // message. This is equivalent to the size of |write_blocked_quic_streams_|.
+ size_t GetNumWriteBlockedStreams() const;
+ // Gets the number of incoming QUIC streams with buffered data that are
+ // currently open for this data channel and are not finished receiving a
+ // message. This is equivalent to the size of |incoming_quic_messages_|.
+ size_t GetNumIncomingStreams() const;
+
+ private:
+ // Callbacks from ReliableQuicStream.
+ // Called when an incoming QUIC stream in |incoming_quic_messages_| has
+ // received a QUIC stream frame.
+ void OnDataReceived(net::QuicStreamId stream_id,
+ const char* data,
+ size_t len);
+ // Called when a write blocked QUIC stream that has been added to
+ // |write_blocked_quic_streams_| is closed.
+ void OnWriteBlockedStreamClosed(net::QuicStreamId stream_id, int error);
+ // Called when an incoming QUIC stream that has been added to
+ // |incoming_quic_messages_| is closed.
+ void OnIncomingQueuedStreamClosed(net::QuicStreamId stream_id, int error);
+ // Called when a write blocked QUIC stream in |write_blocked_quic_streams_|
+ // has written previously queued data.
+ void OnQueuedBytesWritten(net::QuicStreamId stream_id,
+ uint64_t queued_bytes_written);
+
+ // Callbacks from |quic_transport_channel_|.
+ void OnReadyToSend(cricket::TransportChannel* channel);
+ void OnConnectionClosed();
+
+ // Worker thread methods.
+ // Sends the data buffer to the remote peer using an outgoing QUIC stream.
+ // Returns true if the data buffer can be successfully sent, or if it is
+ // queued to be sent later.
+ bool Send_w(const DataBuffer& buffer);
+ // Connects the |quic_transport_channel_| signals to this QuicDataChannel,
+ // then returns the new QuicDataChannel state.
+ DataState SetTransportChannel_w();
+ // Closes the QUIC streams associated with this QuicDataChannel.
+ void Close_w();
+ // Sets |buffered_amount_|.
+ void SetBufferedAmount_w(uint64_t buffered_amount);
+
+ // Signaling thread methods.
+ // Triggers DataChannelObserver::OnMessage when a message from the remote
+ // peer is ready to be read.
+ void OnMessage_s(const DataBuffer& received_data);
+ // Triggers DataChannelObserver::OnStateChange if the state change is valid.
+ // Otherwise does nothing if |state| == |state_| or |state| != kClosed when
+ // the data channel is closing.
+ void SetState_s(DataState state);
+ // Triggers DataChannelObserver::OnBufferedAmountChange when the total
+ // buffered data changes for a QUIC stream.
+ void OnBufferedAmountChange_s(uint64_t buffered_amount);
+
+ // QUIC transport channel which owns the QUIC session. It is used to create
+ // a QUIC stream for sending outgoing messages.
+ cricket::QuicTransportChannel* quic_transport_channel_ = nullptr;
+ // Signaling thread for DataChannelInterface methods.
+ rtc::Thread* const signaling_thread_;
+ // Worker thread for sending data and |quic_transport_channel_| callbacks.
+ rtc::Thread* const worker_thread_;
+ rtc::AsyncInvoker invoker_;
+ // Map of QUIC stream ID => ReliableQuicStream* for write blocked QUIC
+ // streams.
+ std::unordered_map<net::QuicStreamId, cricket::ReliableQuicStream*>
+ write_blocked_quic_streams_;
+ // Map of QUIC stream ID => Message for each incoming QUIC stream.
+ std::unordered_map<net::QuicStreamId, Message> incoming_quic_messages_;
+ // Handles received data from the remote peer and data channel state changes.
+ DataChannelObserver* observer_ = nullptr;
+ // QuicDataChannel ID.
+ int id_;
+ // Connectivity state of the QuicDataChannel.
+ DataState state_;
+ // Total bytes that are buffered among the QUIC streams.
+ uint64_t buffered_amount_;
+ // Counter for number of sent messages that is used for message IDs.
+ uint64_t next_message_id_;
+
+ // Variables for application use.
+ const std::string& label_;
+ const std::string& protocol_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_API_QUICDATACHANNEL_H_
diff --git a/chromium/third_party/webrtc/api/quicdatachannel_unittest.cc b/chromium/third_party/webrtc/api/quicdatachannel_unittest.cc
new file mode 100644
index 00000000000..e701c29b4f3
--- /dev/null
+++ b/chromium/third_party/webrtc/api/quicdatachannel_unittest.cc
@@ -0,0 +1,659 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/api/quicdatachannel.h"
+
+#include <map>
+#include <memory>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "webrtc/base/bind.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/p2p/base/faketransportcontroller.h"
+#include "webrtc/p2p/quic/quictransportchannel.h"
+#include "webrtc/p2p/quic/reliablequicstream.h"
+
+using cricket::FakeTransportChannel;
+using cricket::QuicTransportChannel;
+using cricket::ReliableQuicStream;
+
+using webrtc::DataBuffer;
+using webrtc::DataChannelObserver;
+using webrtc::DataChannelInit;
+using webrtc::QuicDataChannel;
+
+namespace {
+
+// Timeout for asynchronous operations.
+static const int kTimeoutMs = 1000; // milliseconds
+
+// Small messages that can be sent within a single QUIC packet.
+static const std::string kSmallMessage1 = "Hello, world!";
+static const std::string kSmallMessage2 = "WebRTC";
+static const std::string kSmallMessage3 = "1";
+static const std::string kSmallMessage4 = "abcdefghijklmnopqrstuvwxyz";
+static const DataBuffer kSmallBuffer1(kSmallMessage1);
+static const DataBuffer kSmallBuffer2(kSmallMessage2);
+static const DataBuffer kSmallBuffer3(kSmallMessage3);
+static const DataBuffer kSmallBuffer4(kSmallMessage4);
+
+// Large messages (> 1350 bytes) that exceed the max size of a QUIC packet.
+// These are < 16 KB so they don't exceed the QUIC stream flow control limit.
+static const std::string kLargeMessage1 = std::string("a", 2000);
+static const std::string kLargeMessage2 = std::string("a", 4000);
+static const std::string kLargeMessage3 = std::string("a", 8000);
+static const std::string kLargeMessage4 = std::string("a", 12000);
+static const DataBuffer kLargeBuffer1(kLargeMessage1);
+static const DataBuffer kLargeBuffer2(kLargeMessage2);
+static const DataBuffer kLargeBuffer3(kLargeMessage3);
+static const DataBuffer kLargeBuffer4(kLargeMessage4);
+
+// Oversized message (> 16 KB) that violates the QUIC stream flow control limit.
+static const std::string kOversizedMessage = std::string("a", 20000);
+static const DataBuffer kOversizedBuffer(kOversizedMessage);
+
+// Creates a fingerprint from a certificate.
+static rtc::SSLFingerprint* CreateFingerprint(rtc::RTCCertificate* cert) {
+ std::string digest_algorithm;
+ cert->ssl_certificate().GetSignatureDigestAlgorithm(&digest_algorithm);
+ std::unique_ptr<rtc::SSLFingerprint> fingerprint(
+ rtc::SSLFingerprint::Create(digest_algorithm, cert->identity()));
+ return fingerprint.release();
+}
+
+// FakeObserver receives messages from the QuicDataChannel.
+class FakeObserver : public DataChannelObserver {
+ public:
+ FakeObserver()
+ : on_state_change_count_(0), on_buffered_amount_change_count_(0) {}
+
+ // DataChannelObserver overrides.
+ void OnStateChange() override { ++on_state_change_count_; }
+ void OnBufferedAmountChange(uint64_t previous_amount) override {
+ ++on_buffered_amount_change_count_;
+ }
+ void OnMessage(const webrtc::DataBuffer& buffer) override {
+ messages_.push_back(std::string(buffer.data.data<char>(), buffer.size()));
+ }
+
+ const std::vector<std::string>& messages() const { return messages_; }
+
+ size_t messages_received() const { return messages_.size(); }
+
+ size_t on_state_change_count() const { return on_state_change_count_; }
+
+ size_t on_buffered_amount_change_count() const {
+ return on_buffered_amount_change_count_;
+ }
+
+ private:
+ std::vector<std::string> messages_;
+ size_t on_state_change_count_;
+ size_t on_buffered_amount_change_count_;
+};
+
+// FakeQuicDataTransport simulates QuicDataTransport by dispatching QUIC
+// stream messages to data channels and encoding/decoding messages.
+class FakeQuicDataTransport : public sigslot::has_slots<> {
+ public:
+ FakeQuicDataTransport() {}
+
+ void ConnectToTransportChannel(QuicTransportChannel* quic_transport_channel) {
+ quic_transport_channel->SignalIncomingStream.connect(
+ this, &FakeQuicDataTransport::OnIncomingStream);
+ }
+
+ rtc::scoped_refptr<QuicDataChannel> CreateDataChannel(
+ int id,
+ const std::string& label,
+ const std::string& protocol) {
+ DataChannelInit config;
+ config.id = id;
+ config.protocol = protocol;
+ rtc::scoped_refptr<QuicDataChannel> data_channel(new QuicDataChannel(
+ rtc::Thread::Current(), rtc::Thread::Current(), label, config));
+ data_channel_by_id_[id] = data_channel;
+ return data_channel;
+ }
+
+ private:
+ void OnIncomingStream(cricket::ReliableQuicStream* stream) {
+ incoming_stream_ = stream;
+ incoming_stream_->SignalDataReceived.connect(
+ this, &FakeQuicDataTransport::OnDataReceived);
+ }
+
+ void OnDataReceived(net::QuicStreamId id, const char* data, size_t len) {
+ ASSERT_EQ(incoming_stream_->id(), id);
+ incoming_stream_->SignalDataReceived.disconnect(this);
+ // Retrieve the data channel ID and message ID.
+ int data_channel_id;
+ uint64_t message_id;
+ size_t bytes_read;
+ ASSERT_TRUE(webrtc::ParseQuicDataMessageHeader(data, len, &data_channel_id,
+ &message_id, &bytes_read));
+ data += bytes_read;
+ len -= bytes_read;
+ // Dispatch the message to the matching QuicDataChannel.
+ const auto& kv = data_channel_by_id_.find(data_channel_id);
+ ASSERT_NE(kv, data_channel_by_id_.end());
+ QuicDataChannel* data_channel = kv->second;
+ QuicDataChannel::Message message;
+ message.id = message_id;
+ message.buffer = rtc::CopyOnWriteBuffer(data, len);
+ message.stream = incoming_stream_;
+ data_channel->OnIncomingMessage(std::move(message));
+ incoming_stream_ = nullptr;
+ }
+
+ // Map of data channel ID => QuicDataChannel.
+ std::map<int, rtc::scoped_refptr<QuicDataChannel>> data_channel_by_id_;
+ // Last incoming QUIC stream which has arrived.
+ cricket::ReliableQuicStream* incoming_stream_ = nullptr;
+};
+
+// A peer who creates a QuicDataChannel to transfer data, and simulates network
+// connectivity with a fake ICE channel wrapped by the QUIC transport channel.
+class QuicDataChannelPeer {
+ public:
+ QuicDataChannelPeer()
+ : ice_transport_channel_(new FakeTransportChannel("data", 0)),
+ quic_transport_channel_(ice_transport_channel_) {
+ ice_transport_channel_->SetAsync(true);
+ fake_quic_data_transport_.ConnectToTransportChannel(
+ &quic_transport_channel_);
+ }
+
+ void GenerateCertificateAndFingerprint() {
+ rtc::scoped_refptr<rtc::RTCCertificate> local_cert =
+ rtc::RTCCertificate::Create(std::unique_ptr<rtc::SSLIdentity>(
+ rtc::SSLIdentity::Generate("cert_name", rtc::KT_DEFAULT)));
+ quic_transport_channel_.SetLocalCertificate(local_cert);
+ local_fingerprint_.reset(CreateFingerprint(local_cert.get()));
+ }
+
+ rtc::scoped_refptr<QuicDataChannel> CreateDataChannelWithTransportChannel(
+ int id,
+ const std::string& label,
+ const std::string& protocol) {
+ rtc::scoped_refptr<QuicDataChannel> data_channel =
+ fake_quic_data_transport_.CreateDataChannel(id, label, protocol);
+ data_channel->SetTransportChannel(&quic_transport_channel_);
+ return data_channel;
+ }
+
+ rtc::scoped_refptr<QuicDataChannel> CreateDataChannelWithoutTransportChannel(
+ int id,
+ const std::string& label,
+ const std::string& protocol) {
+ return fake_quic_data_transport_.CreateDataChannel(id, label, protocol);
+ }
+
+ // Connects |ice_transport_channel_| to that of the other peer.
+ void Connect(QuicDataChannelPeer* other_peer) {
+ ice_transport_channel_->Connect();
+ other_peer->ice_transport_channel_->Connect();
+ ice_transport_channel_->SetDestination(other_peer->ice_transport_channel_);
+ }
+
+ std::unique_ptr<rtc::SSLFingerprint>& local_fingerprint() {
+ return local_fingerprint_;
+ }
+
+ QuicTransportChannel* quic_transport_channel() {
+ return &quic_transport_channel_;
+ }
+
+ FakeTransportChannel* ice_transport_channel() {
+ return ice_transport_channel_;
+ }
+
+ private:
+ FakeTransportChannel* ice_transport_channel_;
+ QuicTransportChannel quic_transport_channel_;
+
+ std::unique_ptr<rtc::SSLFingerprint> local_fingerprint_;
+
+ FakeQuicDataTransport fake_quic_data_transport_;
+};
+
+class QuicDataChannelTest : public testing::Test {
+ public:
+ QuicDataChannelTest() {}
+
+ // Connect the QuicTransportChannels and complete the crypto handshake.
+ void ConnectTransportChannels() {
+ SetCryptoParameters();
+ peer1_.Connect(&peer2_);
+ ASSERT_TRUE_WAIT(peer1_.quic_transport_channel()->writable() &&
+ peer2_.quic_transport_channel()->writable(),
+ kTimeoutMs);
+ }
+
+ // Sets crypto parameters required for the QUIC handshake.
+ void SetCryptoParameters() {
+ peer1_.GenerateCertificateAndFingerprint();
+ peer2_.GenerateCertificateAndFingerprint();
+
+ peer1_.quic_transport_channel()->SetSslRole(rtc::SSL_CLIENT);
+ peer2_.quic_transport_channel()->SetSslRole(rtc::SSL_SERVER);
+
+ std::unique_ptr<rtc::SSLFingerprint>& peer1_fingerprint =
+ peer1_.local_fingerprint();
+ std::unique_ptr<rtc::SSLFingerprint>& peer2_fingerprint =
+ peer2_.local_fingerprint();
+
+ peer1_.quic_transport_channel()->SetRemoteFingerprint(
+ peer2_fingerprint->algorithm,
+ reinterpret_cast<const uint8_t*>(peer2_fingerprint->digest.data()),
+ peer2_fingerprint->digest.size());
+ peer2_.quic_transport_channel()->SetRemoteFingerprint(
+ peer1_fingerprint->algorithm,
+ reinterpret_cast<const uint8_t*>(peer1_fingerprint->digest.data()),
+ peer1_fingerprint->digest.size());
+ }
+
+ protected:
+ QuicDataChannelPeer peer1_;
+ QuicDataChannelPeer peer2_;
+};
+
+// Tests that a QuicDataChannel transitions from connecting to open when
+// the QuicTransportChannel becomes writable for the first time.
+TEST_F(QuicDataChannelTest, DataChannelOpensWhenTransportChannelConnects) {
+ rtc::scoped_refptr<QuicDataChannel> data_channel =
+ peer1_.CreateDataChannelWithTransportChannel(4, "label", "protocol");
+ EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, data_channel->state());
+ ConnectTransportChannels();
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, data_channel->state(),
+ kTimeoutMs);
+}
+
+// Tests that a QuicDataChannel transitions from connecting to open when
+// SetTransportChannel is called with a QuicTransportChannel that is already
+// writable.
+TEST_F(QuicDataChannelTest, DataChannelOpensWhenTransportChannelWritable) {
+ rtc::scoped_refptr<QuicDataChannel> data_channel =
+ peer1_.CreateDataChannelWithoutTransportChannel(4, "label", "protocol");
+ ConnectTransportChannels();
+ EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, data_channel->state());
+ data_channel->SetTransportChannel(peer1_.quic_transport_channel());
+ EXPECT_EQ(webrtc::DataChannelInterface::kOpen, data_channel->state());
+}
+
+// Tests that the QuicDataChannel transfers messages small enough to fit into a
+// single QUIC stream frame.
+TEST_F(QuicDataChannelTest, TransferSmallMessage) {
+ ConnectTransportChannels();
+ int data_channel_id = 2;
+ std::string label = "label";
+ std::string protocol = "protocol";
+ rtc::scoped_refptr<QuicDataChannel> peer1_data_channel =
+ peer1_.CreateDataChannelWithTransportChannel(data_channel_id, label,
+ protocol);
+ ASSERT_TRUE(peer1_data_channel->state() ==
+ webrtc::DataChannelInterface::kOpen);
+ rtc::scoped_refptr<QuicDataChannel> peer2_data_channel =
+ peer2_.CreateDataChannelWithTransportChannel(data_channel_id, label,
+ protocol);
+ ASSERT_TRUE(peer2_data_channel->state() ==
+ webrtc::DataChannelInterface::kOpen);
+
+ FakeObserver peer1_observer;
+ peer1_data_channel->RegisterObserver(&peer1_observer);
+ FakeObserver peer2_observer;
+ peer2_data_channel->RegisterObserver(&peer2_observer);
+
+ // peer1 -> peer2
+ EXPECT_TRUE(peer1_data_channel->Send(kSmallBuffer1));
+ ASSERT_EQ_WAIT(1, peer2_observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ(kSmallMessage1, peer2_observer.messages()[0]);
+ // peer2 -> peer1
+ EXPECT_TRUE(peer2_data_channel->Send(kSmallBuffer2));
+ ASSERT_EQ_WAIT(1, peer1_observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ(kSmallMessage2, peer1_observer.messages()[0]);
+ // peer2 -> peer1
+ EXPECT_TRUE(peer2_data_channel->Send(kSmallBuffer3));
+ ASSERT_EQ_WAIT(2, peer1_observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ(kSmallMessage3, peer1_observer.messages()[1]);
+ // peer1 -> peer2
+ EXPECT_TRUE(peer1_data_channel->Send(kSmallBuffer4));
+ ASSERT_EQ_WAIT(2, peer2_observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ(kSmallMessage4, peer2_observer.messages()[1]);
+}
+
+// Tests that QuicDataChannel transfers messages large enough to fit into
+// multiple QUIC stream frames, which don't violate the QUIC flow control limit.
+// These require buffering by the QuicDataChannel.
+TEST_F(QuicDataChannelTest, TransferLargeMessage) {
+ ConnectTransportChannels();
+ int data_channel_id = 347;
+ std::string label = "label";
+ std::string protocol = "protocol";
+ rtc::scoped_refptr<QuicDataChannel> peer1_data_channel =
+ peer1_.CreateDataChannelWithTransportChannel(data_channel_id, label,
+ protocol);
+ ASSERT_TRUE(peer1_data_channel->state() ==
+ webrtc::DataChannelInterface::kOpen);
+ rtc::scoped_refptr<QuicDataChannel> peer2_data_channel =
+ peer2_.CreateDataChannelWithTransportChannel(data_channel_id, label,
+ protocol);
+ ASSERT_TRUE(peer2_data_channel->state() ==
+ webrtc::DataChannelInterface::kOpen);
+
+ FakeObserver peer1_observer;
+ peer1_data_channel->RegisterObserver(&peer1_observer);
+ FakeObserver peer2_observer;
+ peer2_data_channel->RegisterObserver(&peer2_observer);
+
+ // peer1 -> peer2
+ EXPECT_TRUE(peer1_data_channel->Send(kLargeBuffer1));
+ ASSERT_TRUE_WAIT(peer2_observer.messages_received() == 1, kTimeoutMs);
+ EXPECT_EQ(kLargeMessage1, peer2_observer.messages()[0]);
+ // peer2 -> peer1
+ EXPECT_TRUE(peer2_data_channel->Send(kLargeBuffer2));
+ ASSERT_EQ_WAIT(1, peer1_observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ(kLargeMessage2, peer1_observer.messages()[0]);
+ // peer2 -> peer1
+ EXPECT_TRUE(peer2_data_channel->Send(kLargeBuffer3));
+ ASSERT_EQ_WAIT(2, peer1_observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ(kLargeMessage3, peer1_observer.messages()[1]);
+ // peer1 -> peer2
+ EXPECT_TRUE(peer1_data_channel->Send(kLargeBuffer4));
+ ASSERT_EQ_WAIT(2, peer2_observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ(kLargeMessage4, peer2_observer.messages()[1]);
+}
+
+// Tests that when a message size exceeds the flow control limit (> 16KB), the
+// QuicDataChannel can queue the data and send it after receiving window update
+// frames from the remote peer.
+TEST_F(QuicDataChannelTest, TransferOversizedMessage) {
+ ConnectTransportChannels();
+ int data_channel_id = 189;
+ std::string label = "label";
+ std::string protocol = "protocol";
+ rtc::scoped_refptr<QuicDataChannel> peer1_data_channel =
+ peer1_.CreateDataChannelWithTransportChannel(data_channel_id, label,
+ protocol);
+ rtc::scoped_refptr<QuicDataChannel> peer2_data_channel =
+ peer2_.CreateDataChannelWithTransportChannel(data_channel_id, label,
+ protocol);
+ ASSERT_TRUE(peer2_data_channel->state() ==
+ webrtc::DataChannelInterface::kOpen);
+
+ FakeObserver peer1_observer;
+ peer1_data_channel->RegisterObserver(&peer1_observer);
+ FakeObserver peer2_observer;
+ peer2_data_channel->RegisterObserver(&peer2_observer);
+
+ EXPECT_TRUE(peer1_data_channel->Send(kOversizedBuffer));
+ EXPECT_EQ(1, peer1_data_channel->GetNumWriteBlockedStreams());
+ EXPECT_EQ_WAIT(1, peer2_data_channel->GetNumIncomingStreams(), kTimeoutMs);
+ ASSERT_EQ_WAIT(1, peer2_observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ(kOversizedMessage, peer2_observer.messages()[0]);
+ EXPECT_EQ(0, peer1_data_channel->GetNumWriteBlockedStreams());
+ EXPECT_EQ(0, peer2_data_channel->GetNumIncomingStreams());
+}
+
+// Tests that empty messages can be sent.
+TEST_F(QuicDataChannelTest, TransferEmptyMessage) {
+ ConnectTransportChannels();
+ int data_channel_id = 69;
+ std::string label = "label";
+ std::string protocol = "protocol";
+ rtc::scoped_refptr<QuicDataChannel> peer1_data_channel =
+ peer1_.CreateDataChannelWithTransportChannel(data_channel_id, label,
+ protocol);
+ rtc::scoped_refptr<QuicDataChannel> peer2_data_channel =
+ peer2_.CreateDataChannelWithTransportChannel(data_channel_id, label,
+ protocol);
+ ASSERT_TRUE(peer2_data_channel->state() ==
+ webrtc::DataChannelInterface::kOpen);
+
+ FakeObserver peer1_observer;
+ peer1_data_channel->RegisterObserver(&peer1_observer);
+ FakeObserver peer2_observer;
+ peer2_data_channel->RegisterObserver(&peer2_observer);
+
+ EXPECT_TRUE(peer1_data_channel->Send(DataBuffer("")));
+ ASSERT_EQ_WAIT(1, peer2_observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ("", peer2_observer.messages()[0]);
+}
+
+// Tests that when the QuicDataChannel is open and sends a message while the
+// QuicTransportChannel is unwritable, it gets buffered then received once the
+// QuicTransportChannel becomes writable again.
+TEST_F(QuicDataChannelTest, MessagesReceivedWhenTransportChannelReconnects) {
+ ConnectTransportChannels();
+ int data_channel_id = 401;
+ std::string label = "label";
+ std::string protocol = "protocol";
+ rtc::scoped_refptr<QuicDataChannel> peer1_data_channel =
+ peer1_.CreateDataChannelWithTransportChannel(data_channel_id, label,
+ protocol);
+ ASSERT_TRUE(peer1_data_channel->state() ==
+ webrtc::DataChannelInterface::kOpen);
+ rtc::scoped_refptr<QuicDataChannel> peer2_data_channel =
+ peer2_.CreateDataChannelWithTransportChannel(data_channel_id, label,
+ protocol);
+ ASSERT_TRUE(peer2_data_channel->state() ==
+ webrtc::DataChannelInterface::kOpen);
+
+ FakeObserver peer1_observer;
+ peer1_data_channel->RegisterObserver(&peer1_observer);
+ FakeObserver peer2_observer;
+ peer2_data_channel->RegisterObserver(&peer2_observer);
+ // writable => unwritable
+ peer1_.ice_transport_channel()->SetWritable(false);
+ ASSERT_FALSE(peer1_.quic_transport_channel()->writable());
+ // Verify that sent data is buffered.
+ EXPECT_TRUE(peer1_data_channel->Send(kSmallBuffer1));
+ EXPECT_EQ(1, peer1_data_channel->GetNumWriteBlockedStreams());
+ EXPECT_TRUE(peer1_data_channel->Send(kSmallBuffer2));
+ EXPECT_EQ(2, peer1_data_channel->GetNumWriteBlockedStreams());
+ EXPECT_TRUE(peer1_data_channel->Send(kSmallBuffer3));
+ EXPECT_EQ(3, peer1_data_channel->GetNumWriteBlockedStreams());
+ EXPECT_TRUE(peer1_data_channel->Send(kSmallBuffer4));
+ EXPECT_EQ(4, peer1_data_channel->GetNumWriteBlockedStreams());
+ // unwritable => writable
+ peer1_.ice_transport_channel()->SetWritable(true);
+ ASSERT_TRUE(peer1_.quic_transport_channel()->writable());
+ ASSERT_EQ_WAIT(4, peer2_observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ(0, peer1_data_channel->GetNumWriteBlockedStreams());
+ EXPECT_EQ(0, peer2_data_channel->GetNumIncomingStreams());
+}
+
+// Tests that the QuicDataChannel does not send before it is open.
+TEST_F(QuicDataChannelTest, TransferMessageBeforeChannelOpens) {
+ rtc::scoped_refptr<QuicDataChannel> data_channel =
+ peer1_.CreateDataChannelWithTransportChannel(6, "label", "protocol");
+ ASSERT_TRUE(data_channel->state() ==
+ webrtc::DataChannelInterface::kConnecting);
+ EXPECT_FALSE(data_channel->Send(kSmallBuffer1));
+}
+
+// Tests that the QuicDataChannel does not send after it is closed.
+TEST_F(QuicDataChannelTest, TransferDataAfterChannelClosed) {
+ rtc::scoped_refptr<QuicDataChannel> data_channel =
+ peer1_.CreateDataChannelWithTransportChannel(42, "label", "protocol");
+ data_channel->Close();
+ ASSERT_EQ_WAIT(webrtc::DataChannelInterface::kClosed, data_channel->state(),
+ kTimeoutMs);
+ EXPECT_FALSE(data_channel->Send(kSmallBuffer1));
+}
+
+// Tests that QuicDataChannel state changes fire OnStateChanged() for the
+// observer, with the correct data channel states, when the data channel
+// transitions from kConnecting => kOpen => kClosing => kClosed.
+TEST_F(QuicDataChannelTest, OnStateChangedFired) {
+ rtc::scoped_refptr<QuicDataChannel> data_channel =
+ peer1_.CreateDataChannelWithTransportChannel(7, "label", "protocol");
+ FakeObserver observer;
+ data_channel->RegisterObserver(&observer);
+ EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, data_channel->state());
+ EXPECT_EQ(0, observer.on_state_change_count());
+ ConnectTransportChannels();
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, data_channel->state(),
+ kTimeoutMs);
+ EXPECT_EQ(1, observer.on_state_change_count());
+ data_channel->Close();
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kClosed, data_channel->state(),
+ kTimeoutMs);
+ // 2 state changes due to kClosing and kClosed.
+ EXPECT_EQ(3, observer.on_state_change_count());
+}
+
+// Tests that a QuicDataChannel can be closed without being opened when it
+// is connected to a transport channel.
+TEST_F(QuicDataChannelTest, NeverOpenedWithTransportChannel) {
+ rtc::scoped_refptr<QuicDataChannel> data_channel =
+ peer1_.CreateDataChannelWithTransportChannel(7, "label", "protocol");
+ EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, data_channel->state());
+ data_channel->Close();
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kClosed, data_channel->state(),
+ kTimeoutMs);
+}
+
+// Tests that a QuicDataChannel can be closed without being opened or
+// connected to a transport channel.
+TEST_F(QuicDataChannelTest, NeverOpenedWithoutTransportChannel) {
+ rtc::scoped_refptr<QuicDataChannel> data_channel =
+ peer1_.CreateDataChannelWithoutTransportChannel(7, "label", "protocol");
+ EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, data_channel->state());
+ data_channel->Close();
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kClosed, data_channel->state(),
+ kTimeoutMs);
+}
+
+// Tests that the QuicDataChannel is closed when the QUIC connection closes.
+TEST_F(QuicDataChannelTest, ClosedOnTransportError) {
+ ConnectTransportChannels();
+ rtc::scoped_refptr<QuicDataChannel> data_channel =
+ peer1_.CreateDataChannelWithTransportChannel(1, "label", "protocol");
+ EXPECT_EQ(webrtc::DataChannelInterface::kOpen, data_channel->state());
+ ReliableQuicStream* stream =
+ peer1_.quic_transport_channel()->CreateQuicStream();
+ ASSERT_NE(nullptr, stream);
+ stream->CloseConnectionWithDetails(net::QuicErrorCode::QUIC_NO_ERROR,
+ "Closing QUIC for testing");
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kClosed, data_channel->state(),
+ kTimeoutMs);
+}
+
+// Tests that an already closed QuicDataChannel does not fire onStateChange and
+// remains closed.
+TEST_F(QuicDataChannelTest, DoesNotChangeStateWhenClosed) {
+ rtc::scoped_refptr<QuicDataChannel> data_channel =
+ peer1_.CreateDataChannelWithTransportChannel(4, "label", "protocol");
+ FakeObserver observer;
+ data_channel->RegisterObserver(&observer);
+ data_channel->Close();
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kClosed, data_channel->state(),
+ kTimeoutMs);
+ // OnStateChange called for kClosing and kClosed.
+ EXPECT_EQ(2, observer.on_state_change_count());
+ // Call Close() again to verify that the state cannot be kClosing.
+ data_channel->Close();
+ EXPECT_EQ(webrtc::DataChannelInterface::kClosed, data_channel->state());
+ EXPECT_EQ(2, observer.on_state_change_count());
+ ConnectTransportChannels();
+ EXPECT_EQ(webrtc::DataChannelInterface::kClosed, data_channel->state());
+ EXPECT_EQ(2, observer.on_state_change_count());
+ // writable => unwritable
+ peer1_.ice_transport_channel()->SetWritable(false);
+ ASSERT_FALSE(peer1_.quic_transport_channel()->writable());
+ EXPECT_EQ(webrtc::DataChannelInterface::kClosed, data_channel->state());
+ EXPECT_EQ(2, observer.on_state_change_count());
+ // unwritable => writable
+ peer1_.ice_transport_channel()->SetWritable(true);
+ ASSERT_TRUE(peer1_.quic_transport_channel()->writable());
+ EXPECT_EQ(webrtc::DataChannelInterface::kClosed, data_channel->state());
+ EXPECT_EQ(2, observer.on_state_change_count());
+}
+
+// Tests that when the QuicDataChannel is open and the QuicTransportChannel
+// transitions between writable and unwritable, it does not fire onStateChange
+// and remains open.
+TEST_F(QuicDataChannelTest, DoesNotChangeStateWhenTransportChannelReconnects) {
+ ConnectTransportChannels();
+ rtc::scoped_refptr<QuicDataChannel> data_channel =
+ peer1_.CreateDataChannelWithTransportChannel(4, "label", "protocol");
+ FakeObserver observer;
+ data_channel->RegisterObserver(&observer);
+ EXPECT_EQ(webrtc::DataChannelInterface::kOpen, data_channel->state());
+ EXPECT_EQ(0, observer.on_state_change_count());
+ // writable => unwritable
+ peer1_.ice_transport_channel()->SetWritable(false);
+ ASSERT_FALSE(peer1_.quic_transport_channel()->writable());
+ EXPECT_EQ(webrtc::DataChannelInterface::kOpen, data_channel->state());
+ EXPECT_EQ(0, observer.on_state_change_count());
+ // unwritable => writable
+ peer1_.ice_transport_channel()->SetWritable(true);
+ ASSERT_TRUE(peer1_.quic_transport_channel()->writable());
+ EXPECT_EQ(webrtc::DataChannelInterface::kOpen, data_channel->state());
+ EXPECT_EQ(0, observer.on_state_change_count());
+}
+
+// Tests that SetTransportChannel returns false when setting a NULL transport
+// channel or a transport channel that is not equivalent to the one already set.
+TEST_F(QuicDataChannelTest, SetTransportChannelReturnValue) {
+ rtc::scoped_refptr<QuicDataChannel> data_channel =
+ peer1_.CreateDataChannelWithTransportChannel(4, "label", "protocol");
+ EXPECT_FALSE(data_channel->SetTransportChannel(nullptr));
+ QuicTransportChannel* transport_channel = peer1_.quic_transport_channel();
+ EXPECT_TRUE(data_channel->SetTransportChannel(transport_channel));
+ EXPECT_TRUE(data_channel->SetTransportChannel(transport_channel));
+ QuicTransportChannel* other_transport_channel =
+ peer2_.quic_transport_channel();
+ EXPECT_FALSE(data_channel->SetTransportChannel(other_transport_channel));
+}
+
+// Tests that the QUIC message header is encoded with the correct number of
+// bytes and is properly decoded.
+TEST_F(QuicDataChannelTest, EncodeParseQuicDataMessageHeader) {
+ int data_channel_id1 = 127; // 1 byte
+ uint64_t message_id1 = 0; // 1 byte
+ rtc::CopyOnWriteBuffer header1;
+ webrtc::WriteQuicDataChannelMessageHeader(data_channel_id1, message_id1,
+ &header1);
+ EXPECT_EQ(2u, header1.size());
+
+ int decoded_data_channel_id1;
+ uint64_t decoded_message_id1;
+ size_t bytes_read1;
+ ASSERT_TRUE(webrtc::ParseQuicDataMessageHeader(
+ header1.data<char>(), header1.size(), &decoded_data_channel_id1,
+ &decoded_message_id1, &bytes_read1));
+ EXPECT_EQ(data_channel_id1, decoded_data_channel_id1);
+ EXPECT_EQ(message_id1, decoded_message_id1);
+ EXPECT_EQ(2u, bytes_read1);
+
+ int data_channel_id2 = 4178; // 2 bytes
+ uint64_t message_id2 = 1324921792003; // 6 bytes
+ rtc::CopyOnWriteBuffer header2;
+ webrtc::WriteQuicDataChannelMessageHeader(data_channel_id2, message_id2,
+ &header2);
+ EXPECT_EQ(8u, header2.size());
+
+ int decoded_data_channel_id2;
+ uint64_t decoded_message_id2;
+ size_t bytes_read2;
+ ASSERT_TRUE(webrtc::ParseQuicDataMessageHeader(
+ header2.data<char>(), header2.size(), &decoded_data_channel_id2,
+ &decoded_message_id2, &bytes_read2));
+ EXPECT_EQ(data_channel_id2, decoded_data_channel_id2);
+ EXPECT_EQ(message_id2, decoded_message_id2);
+ EXPECT_EQ(8u, bytes_read2);
+}
+
+} // namespace
diff --git a/chromium/third_party/webrtc/api/quicdatatransport.cc b/chromium/third_party/webrtc/api/quicdatatransport.cc
new file mode 100644
index 00000000000..70ad03dbfdd
--- /dev/null
+++ b/chromium/third_party/webrtc/api/quicdatatransport.cc
@@ -0,0 +1,146 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/api/quicdatatransport.h"
+
+#include "webrtc/base/logging.h"
+#include "webrtc/p2p/quic/quictransportchannel.h"
+#include "webrtc/p2p/quic/reliablequicstream.h"
+
+namespace webrtc {
+
+QuicDataTransport::QuicDataTransport(rtc::Thread* signaling_thread,
+ rtc::Thread* worker_thread)
+ : signaling_thread_(signaling_thread), worker_thread_(worker_thread) {
+ RTC_DCHECK(signaling_thread_);
+ RTC_DCHECK(worker_thread_);
+}
+
+QuicDataTransport::~QuicDataTransport() {}
+
+bool QuicDataTransport::SetTransportChannel(
+ cricket::QuicTransportChannel* channel) {
+ if (!channel) {
+ LOG(LS_ERROR) << "|channel| is NULL. Cannot set transport channel.";
+ return false;
+ }
+ if (quic_transport_channel_) {
+ if (channel == quic_transport_channel_) {
+ LOG(LS_WARNING) << "Ignoring duplicate transport channel.";
+ return true;
+ }
+ LOG(LS_ERROR) << "|channel| does not match existing transport channel.";
+ return false;
+ }
+
+ LOG(LS_INFO) << "Setting QuicTransportChannel for QuicDataTransport";
+ quic_transport_channel_ = channel;
+ quic_transport_channel_->SignalIncomingStream.connect(
+ this, &QuicDataTransport::OnIncomingStream);
+
+ bool success = true;
+ for (const auto& kv : data_channel_by_id_) {
+ rtc::scoped_refptr<QuicDataChannel> data_channel = kv.second;
+ if (!data_channel->SetTransportChannel(quic_transport_channel_)) {
+ LOG(LS_ERROR)
+ << "Cannot set QUIC transport channel for QUIC data channel "
+ << kv.first;
+ success = false;
+ }
+ }
+ return success;
+}
+
+rtc::scoped_refptr<DataChannelInterface> QuicDataTransport::CreateDataChannel(
+ const std::string& label,
+ const DataChannelInit* config) {
+ if (config == nullptr) {
+ return nullptr;
+ }
+ if (data_channel_by_id_.find(config->id) != data_channel_by_id_.end()) {
+ LOG(LS_ERROR) << "QUIC data channel already exists with id " << config->id;
+ return nullptr;
+ }
+ rtc::scoped_refptr<QuicDataChannel> data_channel(
+ new QuicDataChannel(signaling_thread_, worker_thread_, label, *config));
+ if (quic_transport_channel_) {
+ if (!data_channel->SetTransportChannel(quic_transport_channel_)) {
+ LOG(LS_ERROR)
+ << "Cannot set QUIC transport channel for QUIC data channel "
+ << config->id;
+ }
+ }
+
+ data_channel_by_id_[data_channel->id()] = data_channel;
+ return data_channel;
+}
+
+void QuicDataTransport::DestroyDataChannel(int id) {
+ data_channel_by_id_.erase(id);
+}
+
+bool QuicDataTransport::HasDataChannel(int id) const {
+ return data_channel_by_id_.find(id) != data_channel_by_id_.end();
+}
+
+bool QuicDataTransport::HasDataChannels() const {
+ return !data_channel_by_id_.empty();
+}
+
+// Called when a QUIC stream is created for incoming data.
+void QuicDataTransport::OnIncomingStream(cricket::ReliableQuicStream* stream) {
+ RTC_DCHECK(stream != nullptr);
+ quic_stream_by_id_[stream->id()] = stream;
+ stream->SignalDataReceived.connect(this, &QuicDataTransport::OnDataReceived);
+}
+
+// Called when the first QUIC stream frame is received for incoming data.
+void QuicDataTransport::OnDataReceived(net::QuicStreamId id,
+ const char* data,
+ size_t len) {
+ const auto& quic_stream_kv = quic_stream_by_id_.find(id);
+ if (quic_stream_kv == quic_stream_by_id_.end()) {
+ RTC_DCHECK(false);
+ return;
+ }
+ cricket::ReliableQuicStream* stream = quic_stream_kv->second;
+ stream->SignalDataReceived.disconnect(this);
+ quic_stream_by_id_.erase(id);
+ // Read the data channel ID and message ID.
+ int data_channel_id;
+ uint64_t message_id;
+ size_t bytes_read;
+ if (!ParseQuicDataMessageHeader(data, len, &data_channel_id, &message_id,
+ &bytes_read)) {
+ LOG(LS_ERROR) << "Could not read QUIC message header from QUIC stream "
+ << id;
+ return;
+ }
+ data += bytes_read;
+ len -= bytes_read;
+ // Retrieve the data channel which will handle the message.
+ const auto& data_channel_kv = data_channel_by_id_.find(data_channel_id);
+ if (data_channel_kv == data_channel_by_id_.end()) {
+ // TODO(mikescarlett): Implement OPEN message to create a new
+ // QuicDataChannel when messages are received for a nonexistent ID.
+ LOG(LS_ERROR) << "Data was received for QUIC data channel "
+ << data_channel_id
+ << " but it is not registered to the QuicDataTransport.";
+ return;
+ }
+ QuicDataChannel* data_channel = data_channel_kv->second;
+ QuicDataChannel::Message message;
+ message.id = message_id;
+ message.buffer = rtc::CopyOnWriteBuffer(data, len);
+ message.stream = stream;
+ data_channel->OnIncomingMessage(std::move(message));
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/quicdatatransport.h b/chromium/third_party/webrtc/api/quicdatatransport.h
new file mode 100644
index 00000000000..f0c427d1b53
--- /dev/null
+++ b/chromium/third_party/webrtc/api/quicdatatransport.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_API_QUICDATATRANSPORT_H_
+#define WEBRTC_API_QUICDATATRANSPORT_H_
+
+#include <string>
+#include <unordered_map>
+
+#include "webrtc/api/datachannelinterface.h"
+#include "webrtc/api/quicdatachannel.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/sigslot.h"
+#include "webrtc/base/thread.h"
+
+namespace cricket {
+class QuicTransportChannel;
+class ReliableQuicStream;
+}  // namespace cricket
+
+namespace webrtc {
+
+// QuicDataTransport creates QuicDataChannels for the PeerConnection. It also
+// handles QUIC stream demuxing by distributing incoming QUIC streams from the
+// QuicTransportChannel among the QuicDataChannels that it has created.
+//
+// QuicDataTransport reads the data channel ID from the incoming QUIC stream,
+// then looks it up in a map of ID => QuicDataChannel. If the data channel
+// exists, it sends the QUIC stream to the QuicDataChannel.
+class QuicDataTransport : public sigslot::has_slots<> {
+ public:
+ QuicDataTransport(rtc::Thread* signaling_thread, rtc::Thread* worker_thread);
+ ~QuicDataTransport() override;
+
+ // Sets the QUIC transport channel for the QuicDataChannels and the
+ // QuicDataTransport. Returns false if a different QUIC transport channel is
+ // already set, the QUIC transport channel cannot be set for any of the
+ // QuicDataChannels, or |channel| is NULL.
+ bool SetTransportChannel(cricket::QuicTransportChannel* channel);
+
+ // Creates a QuicDataChannel that uses this QuicDataTransport.
+ rtc::scoped_refptr<DataChannelInterface> CreateDataChannel(
+ const std::string& label,
+ const DataChannelInit* config);
+
+ // Removes a QuicDataChannel with the given ID from the QuicDataTransport's
+ // data channel map.
+ void DestroyDataChannel(int id);
+
+ // True if the QuicDataTransport has a data channel with the given ID.
+ bool HasDataChannel(int id) const;
+
+ // True if the QuicDataTransport has data channels.
+ bool HasDataChannels() const;
+
+ private:
+ // Called from the QuicTransportChannel when a ReliableQuicStream is created
+ // to receive incoming data.
+ void OnIncomingStream(cricket::ReliableQuicStream* stream);
+ // Called from the ReliableQuicStream when the first QUIC stream frame is
+ // received for incoming data. The QuicDataTransport reads the data channel ID
+ // and message ID from the incoming data, then dispatches the
+ // ReliableQuicStream to the QuicDataChannel with the same data channel ID.
+ void OnDataReceived(net::QuicStreamId stream_id,
+ const char* data,
+ size_t len);
+
+ // Map of data channel ID => QUIC data channel values.
+ std::unordered_map<int, rtc::scoped_refptr<QuicDataChannel>>
+ data_channel_by_id_;
+ // Map of QUIC stream ID => ReliableQuicStream* values.
+ std::unordered_map<net::QuicStreamId, cricket::ReliableQuicStream*>
+ quic_stream_by_id_;
+ // QuicTransportChannel for sending/receiving data.
+ cricket::QuicTransportChannel* quic_transport_channel_ = nullptr;
+ // Signaling and worker threads for the QUIC data channel.
+ rtc::Thread* const signaling_thread_;
+ rtc::Thread* const worker_thread_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_API_QUICDATATRANSPORT_H_
diff --git a/chromium/third_party/webrtc/api/quicdatatransport_unittest.cc b/chromium/third_party/webrtc/api/quicdatatransport_unittest.cc
new file mode 100644
index 00000000000..d668c55b0bf
--- /dev/null
+++ b/chromium/third_party/webrtc/api/quicdatatransport_unittest.cc
@@ -0,0 +1,356 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/api/quicdatatransport.h"
+
+#include <memory>
+#include <set>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include "webrtc/api/quicdatachannel.h"
+#include "webrtc/base/bytebuffer.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/p2p/base/faketransportcontroller.h"
+#include "webrtc/p2p/quic/quictransportchannel.h"
+#include "webrtc/p2p/quic/reliablequicstream.h"
+
+using webrtc::DataBuffer;
+using webrtc::DataChannelInit;
+using webrtc::DataChannelInterface;
+using webrtc::DataChannelObserver;
+using webrtc::QuicDataChannel;
+using webrtc::QuicDataTransport;
+using cricket::FakeTransportChannel;
+using cricket::QuicTransportChannel;
+using cricket::ReliableQuicStream;
+
+namespace {
+
+// Timeout for asynchronous operations.
+static const int kTimeoutMs = 1000; // milliseconds
+
+// FakeObserver receives messages from the data channel.
+class FakeObserver : public DataChannelObserver {
+ public:
+ FakeObserver() {}
+
+ void OnStateChange() override {}
+
+ void OnBufferedAmountChange(uint64_t previous_amount) override {}
+
+ void OnMessage(const webrtc::DataBuffer& buffer) override {
+ messages_.push_back(std::string(buffer.data.data<char>(), buffer.size()));
+ }
+
+ const std::vector<std::string>& messages() const { return messages_; }
+
+ size_t messages_received() const { return messages_.size(); }
+
+ private:
+ std::vector<std::string> messages_;
+};
+
+// A peer who uses a QUIC transport channel and fake ICE transport channel to
+// send or receive data.
+class QuicDataTransportPeer {
+ public:
+ QuicDataTransportPeer()
+ : quic_data_transport_(rtc::Thread::Current(), rtc::Thread::Current()),
+ ice_transport_channel_(new FakeTransportChannel("data", 0)),
+ quic_transport_channel_(ice_transport_channel_) {
+ ice_transport_channel_->SetAsync(true);
+ }
+
+ void GenerateCertificateAndFingerprint() {
+ rtc::scoped_refptr<rtc::RTCCertificate> local_cert =
+ rtc::RTCCertificate::Create(std::unique_ptr<rtc::SSLIdentity>(
+ rtc::SSLIdentity::Generate("cert_name", rtc::KT_DEFAULT)));
+ quic_transport_channel_.SetLocalCertificate(local_cert);
+ local_fingerprint_.reset(CreateFingerprint(local_cert.get()));
+ }
+
+ // Connects |ice_transport_channel_| to that of the other peer.
+ void Connect(QuicDataTransportPeer* other_peer) {
+ ice_transport_channel_->Connect();
+ other_peer->ice_transport_channel_->Connect();
+ ice_transport_channel_->SetDestination(other_peer->ice_transport_channel_);
+ }
+
+ std::unique_ptr<rtc::SSLFingerprint>& local_fingerprint() {
+ return local_fingerprint_;
+ }
+
+ QuicTransportChannel* quic_transport_channel() {
+ return &quic_transport_channel_;
+ }
+
+  // Write a message directly to the ReliableQuicStream.
+ void WriteMessage(int data_channel_id,
+ uint64_t message_id,
+ const std::string& message) {
+ ReliableQuicStream* stream = quic_transport_channel_.CreateQuicStream();
+ rtc::CopyOnWriteBuffer payload;
+ webrtc::WriteQuicDataChannelMessageHeader(data_channel_id, message_id,
+ &payload);
+ stream->Write(payload.data<char>(), payload.size(), false);
+ stream->Write(message.data(), message.size(), true);
+ }
+
+ rtc::scoped_refptr<DataChannelInterface> CreateDataChannel(
+ const DataChannelInit* config) {
+ return quic_data_transport_.CreateDataChannel("testing", config);
+ }
+
+ QuicDataTransport* quic_data_transport() { return &quic_data_transport_; }
+
+ private:
+ // Creates a fingerprint from a certificate.
+ rtc::SSLFingerprint* CreateFingerprint(rtc::RTCCertificate* cert) {
+ std::string digest_algorithm;
+ cert->ssl_certificate().GetSignatureDigestAlgorithm(&digest_algorithm);
+ std::unique_ptr<rtc::SSLFingerprint> fingerprint(
+ rtc::SSLFingerprint::Create(digest_algorithm, cert->identity()));
+ return fingerprint.release();
+ }
+
+ QuicDataTransport quic_data_transport_;
+ FakeTransportChannel* ice_transport_channel_;
+ QuicTransportChannel quic_transport_channel_;
+ std::unique_ptr<rtc::SSLFingerprint> local_fingerprint_;
+};
+
+class QuicDataTransportTest : public testing::Test {
+ public:
+ QuicDataTransportTest() {}
+
+ void ConnectTransportChannels() {
+ SetCryptoParameters();
+ peer1_.Connect(&peer2_);
+ ASSERT_TRUE_WAIT(peer1_.quic_transport_channel()->writable() &&
+ peer2_.quic_transport_channel()->writable(),
+ kTimeoutMs);
+ }
+
+ void SetTransportChannels() {
+ ASSERT_TRUE(peer1_.quic_data_transport()->SetTransportChannel(
+ peer1_.quic_transport_channel()));
+ ASSERT_TRUE(peer2_.quic_data_transport()->SetTransportChannel(
+ peer2_.quic_transport_channel()));
+ }
+
+ // Sets crypto parameters required for the QUIC handshake.
+ void SetCryptoParameters() {
+ peer1_.GenerateCertificateAndFingerprint();
+ peer2_.GenerateCertificateAndFingerprint();
+
+ peer1_.quic_transport_channel()->SetSslRole(rtc::SSL_CLIENT);
+ peer2_.quic_transport_channel()->SetSslRole(rtc::SSL_SERVER);
+
+ std::unique_ptr<rtc::SSLFingerprint>& peer1_fingerprint =
+ peer1_.local_fingerprint();
+ std::unique_ptr<rtc::SSLFingerprint>& peer2_fingerprint =
+ peer2_.local_fingerprint();
+
+ peer1_.quic_transport_channel()->SetRemoteFingerprint(
+ peer2_fingerprint->algorithm,
+ reinterpret_cast<const uint8_t*>(peer2_fingerprint->digest.data()),
+ peer2_fingerprint->digest.size());
+ peer2_.quic_transport_channel()->SetRemoteFingerprint(
+ peer1_fingerprint->algorithm,
+ reinterpret_cast<const uint8_t*>(peer1_fingerprint->digest.data()),
+ peer1_fingerprint->digest.size());
+ }
+
+ protected:
+ QuicDataTransportPeer peer1_;
+ QuicDataTransportPeer peer2_;
+};
+
+// Tests creation and destruction of data channels.
+TEST_F(QuicDataTransportTest, CreateAndDestroyDataChannels) {
+ QuicDataTransport* quic_data_transport = peer2_.quic_data_transport();
+ EXPECT_FALSE(quic_data_transport->HasDataChannels());
+ for (int data_channel_id = 0; data_channel_id < 5; ++data_channel_id) {
+ EXPECT_FALSE(quic_data_transport->HasDataChannel(data_channel_id));
+ webrtc::DataChannelInit config;
+ config.id = data_channel_id;
+ rtc::scoped_refptr<DataChannelInterface> data_channel =
+ peer2_.CreateDataChannel(&config);
+ EXPECT_NE(nullptr, data_channel);
+ EXPECT_EQ(data_channel_id, data_channel->id());
+ EXPECT_TRUE(quic_data_transport->HasDataChannel(data_channel_id));
+ }
+ EXPECT_TRUE(quic_data_transport->HasDataChannels());
+ for (int data_channel_id = 0; data_channel_id < 5; ++data_channel_id) {
+ quic_data_transport->DestroyDataChannel(data_channel_id);
+ EXPECT_FALSE(quic_data_transport->HasDataChannel(data_channel_id));
+ }
+ EXPECT_FALSE(quic_data_transport->HasDataChannels());
+}
+
+// Tests that the QuicDataTransport does not allow creating multiple
+// QuicDataChannels with the same id.
+TEST_F(QuicDataTransportTest, CannotCreateDataChannelsWithSameId) {
+ webrtc::DataChannelInit config;
+ config.id = 2;
+ EXPECT_NE(nullptr, peer2_.CreateDataChannel(&config));
+ EXPECT_EQ(nullptr, peer2_.CreateDataChannel(&config));
+}
+
+// Tests that any data channels created by the QuicDataTransport are in state
+// kConnecting before the QuicTransportChannel is set, then transition to state
+// kOpen when the transport channel becomes writable.
+TEST_F(QuicDataTransportTest, DataChannelsOpenWhenTransportChannelWritable) {
+ webrtc::DataChannelInit config1;
+ config1.id = 7;
+ rtc::scoped_refptr<DataChannelInterface> data_channel1 =
+ peer2_.CreateDataChannel(&config1);
+ EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, data_channel1->state());
+ SetTransportChannels();
+ EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, data_channel1->state());
+ webrtc::DataChannelInit config2;
+ config2.id = 14;
+ rtc::scoped_refptr<DataChannelInterface> data_channel2 =
+ peer2_.CreateDataChannel(&config2);
+ EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, data_channel2->state());
+ // Existing data channels should open once the transport channel is writable.
+ ConnectTransportChannels();
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, data_channel1->state(),
+ kTimeoutMs);
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen, data_channel2->state(),
+ kTimeoutMs);
+ // Any data channels created afterwards should start in state kOpen.
+ webrtc::DataChannelInit config3;
+ config3.id = 21;
+ rtc::scoped_refptr<DataChannelInterface> data_channel3 =
+ peer2_.CreateDataChannel(&config3);
+ EXPECT_EQ(webrtc::DataChannelInterface::kOpen, data_channel3->state());
+}
+
+// Tests that the QuicTransport dispatches messages for one QuicDataChannel.
+TEST_F(QuicDataTransportTest, ReceiveMessagesForSingleDataChannel) {
+ ConnectTransportChannels();
+ SetTransportChannels();
+
+ int data_channel_id = 1337;
+ webrtc::DataChannelInit config;
+ config.id = data_channel_id;
+ rtc::scoped_refptr<DataChannelInterface> peer2_data_channel =
+ peer2_.CreateDataChannel(&config);
+ FakeObserver observer;
+ peer2_data_channel->RegisterObserver(&observer);
+
+ uint64_t message1_id = 26u;
+ peer1_.WriteMessage(data_channel_id, message1_id, "Testing");
+ ASSERT_EQ_WAIT(1, observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ("Testing", observer.messages()[0]);
+
+ uint64_t message2_id = 402u;
+ peer1_.WriteMessage(data_channel_id, message2_id, "Hello, World!");
+ ASSERT_EQ_WAIT(2, observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ("Hello, World!", observer.messages()[1]);
+
+ uint64_t message3_id = 100260415u;
+ peer1_.WriteMessage(data_channel_id, message3_id, "Third message");
+ ASSERT_EQ_WAIT(3, observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ("Third message", observer.messages()[2]);
+}
+
+// Tests that the QuicTransport dispatches messages to the correct data channel
+// when multiple are in use.
+TEST_F(QuicDataTransportTest, ReceiveMessagesForMultipleDataChannels) {
+ ConnectTransportChannels();
+ SetTransportChannels();
+
+ std::vector<rtc::scoped_refptr<DataChannelInterface>> data_channels;
+ for (int data_channel_id = 0; data_channel_id < 5; ++data_channel_id) {
+ webrtc::DataChannelInit config;
+ config.id = data_channel_id;
+ data_channels.push_back(peer2_.CreateDataChannel(&config));
+ }
+
+ for (int data_channel_id = 0; data_channel_id < 5; ++data_channel_id) {
+ uint64_t message1_id = 48023u;
+ FakeObserver observer;
+ DataChannelInterface* peer2_data_channel =
+ data_channels[data_channel_id].get();
+ peer2_data_channel->RegisterObserver(&observer);
+ peer1_.WriteMessage(data_channel_id, message1_id, "Testing");
+ ASSERT_EQ_WAIT(1, observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ("Testing", observer.messages()[0]);
+
+ uint64_t message2_id = 1372643095u;
+ peer1_.WriteMessage(data_channel_id, message2_id, "Hello, World!");
+ ASSERT_EQ_WAIT(2, observer.messages_received(), kTimeoutMs);
+ EXPECT_EQ("Hello, World!", observer.messages()[1]);
+ }
+}
+
+// Tests end-to-end that both peers can use multiple QuicDataChannels to
+// send/receive messages using a QuicDataTransport.
+TEST_F(QuicDataTransportTest, EndToEndSendReceiveMessages) {
+ ConnectTransportChannels();
+ SetTransportChannels();
+
+ std::vector<rtc::scoped_refptr<DataChannelInterface>> peer1_data_channels;
+ std::vector<rtc::scoped_refptr<DataChannelInterface>> peer2_data_channels;
+
+ for (int data_channel_id = 0; data_channel_id < 5; ++data_channel_id) {
+ webrtc::DataChannelInit config;
+ config.id = data_channel_id;
+ peer1_data_channels.push_back(peer1_.CreateDataChannel(&config));
+ peer2_data_channels.push_back(peer2_.CreateDataChannel(&config));
+ }
+
+ for (int data_channel_id = 0; data_channel_id < 5; ++data_channel_id) {
+ DataChannelInterface* peer1_data_channel =
+ peer1_data_channels[data_channel_id].get();
+ FakeObserver observer1;
+ peer1_data_channel->RegisterObserver(&observer1);
+ DataChannelInterface* peer2_data_channel =
+ peer2_data_channels[data_channel_id].get();
+ FakeObserver observer2;
+ peer2_data_channel->RegisterObserver(&observer2);
+
+ peer1_data_channel->Send(webrtc::DataBuffer("Peer 1 message 1"));
+ ASSERT_EQ_WAIT(1, observer2.messages_received(), kTimeoutMs);
+ EXPECT_EQ("Peer 1 message 1", observer2.messages()[0]);
+
+ peer1_data_channel->Send(webrtc::DataBuffer("Peer 1 message 2"));
+ ASSERT_EQ_WAIT(2, observer2.messages_received(), kTimeoutMs);
+ EXPECT_EQ("Peer 1 message 2", observer2.messages()[1]);
+
+ peer2_data_channel->Send(webrtc::DataBuffer("Peer 2 message 1"));
+ ASSERT_EQ_WAIT(1, observer1.messages_received(), kTimeoutMs);
+ EXPECT_EQ("Peer 2 message 1", observer1.messages()[0]);
+
+ peer2_data_channel->Send(webrtc::DataBuffer("Peer 2 message 2"));
+ ASSERT_EQ_WAIT(2, observer1.messages_received(), kTimeoutMs);
+ EXPECT_EQ("Peer 2 message 2", observer1.messages()[1]);
+ }
+}
+
+// Tests that SetTransportChannel returns false when setting a NULL transport
+// channel or a transport channel that is not equivalent to the one already set.
+TEST_F(QuicDataTransportTest, SetTransportChannelReturnValue) {
+ QuicDataTransport* quic_data_transport = peer1_.quic_data_transport();
+ EXPECT_FALSE(quic_data_transport->SetTransportChannel(nullptr));
+ QuicTransportChannel* transport_channel = peer1_.quic_transport_channel();
+ EXPECT_TRUE(quic_data_transport->SetTransportChannel(transport_channel));
+ EXPECT_TRUE(quic_data_transport->SetTransportChannel(transport_channel));
+ QuicTransportChannel* other_transport_channel =
+ peer2_.quic_transport_channel();
+ EXPECT_FALSE(
+ quic_data_transport->SetTransportChannel(other_transport_channel));
+}
+
+} // namespace
diff --git a/chromium/third_party/webrtc/api/remoteaudiosource.cc b/chromium/third_party/webrtc/api/remoteaudiosource.cc
index 2fedc9d0f94..2d0785a08f1 100644
--- a/chromium/third_party/webrtc/api/remoteaudiosource.cc
+++ b/chromium/third_party/webrtc/api/remoteaudiosource.cc
@@ -12,10 +12,12 @@
#include <algorithm>
#include <functional>
+#include <memory>
#include <utility>
#include "webrtc/api/mediastreamprovider.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
@@ -80,7 +82,7 @@ void RemoteAudioSource::Initialize(uint32_t ssrc,
// we register for callbacks here and not on demand in AddSink.
if (provider) { // May be null in tests.
provider->SetRawAudioSink(
- ssrc, rtc::scoped_ptr<AudioSinkInterface>(new Sink(this)));
+ ssrc, std::unique_ptr<AudioSinkInterface>(new Sink(this)));
}
}
diff --git a/chromium/third_party/webrtc/api/rtpparameters.h b/chromium/third_party/webrtc/api/rtpparameters.h
index 2c29d9843d9..5c79ab4e9df 100644
--- a/chromium/third_party/webrtc/api/rtpparameters.h
+++ b/chromium/third_party/webrtc/api/rtpparameters.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_API_RTPPARAMETERS_H_
#define WEBRTC_API_RTPPARAMETERS_H_
+#include <string>
#include <vector>
namespace webrtc {
@@ -20,10 +21,37 @@ namespace webrtc {
struct RtpEncodingParameters {
bool active = true;
int max_bitrate_bps = -1;
+
+ bool operator==(const RtpEncodingParameters& o) const {
+ return active == o.active && max_bitrate_bps == o.max_bitrate_bps;
+ }
+ bool operator!=(const RtpEncodingParameters& o) const {
+ return !(*this == o);
+ }
+};
+
+struct RtpCodecParameters {
+ int payload_type;
+ std::string mime_type;
+ int clock_rate;
+ int channels = 1;
+ // TODO(deadbeef): Add sdpFmtpLine field.
+
+ bool operator==(const RtpCodecParameters& o) const {
+ return payload_type == o.payload_type && mime_type == o.mime_type &&
+ clock_rate == o.clock_rate && channels == o.channels;
+ }
+ bool operator!=(const RtpCodecParameters& o) const { return !(*this == o); }
};
struct RtpParameters {
std::vector<RtpEncodingParameters> encodings;
+ std::vector<RtpCodecParameters> codecs;
+
+ bool operator==(const RtpParameters& o) const {
+ return encodings == o.encodings && codecs == o.codecs;
+ }
+ bool operator!=(const RtpParameters& o) const { return !(*this == o); }
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/rtpreceiver.cc b/chromium/third_party/webrtc/api/rtpreceiver.cc
index 0150dfd546c..1b52ce27def 100644
--- a/chromium/third_party/webrtc/api/rtpreceiver.cc
+++ b/chromium/third_party/webrtc/api/rtpreceiver.cc
@@ -12,7 +12,9 @@
#include "webrtc/api/mediastreamtrackproxy.h"
#include "webrtc/api/audiotrack.h"
+#include "webrtc/api/videosourceproxy.h"
#include "webrtc/api/videotrack.h"
+#include "webrtc/base/trace_event.h"
namespace webrtc {
@@ -65,6 +67,15 @@ void AudioRtpReceiver::Stop() {
provider_ = nullptr;
}
+RtpParameters AudioRtpReceiver::GetParameters() const {
+ return provider_->GetAudioRtpReceiveParameters(ssrc_);
+}
+
+bool AudioRtpReceiver::SetParameters(const RtpParameters& parameters) {
+ TRACE_EVENT0("webrtc", "AudioRtpReceiver::SetParameters");
+ return provider_->SetAudioRtpReceiveParameters(ssrc_, parameters);
+}
+
void AudioRtpReceiver::Reconfigure() {
if (!provider_) {
return;
@@ -81,11 +92,15 @@ VideoRtpReceiver::VideoRtpReceiver(MediaStreamInterface* stream,
ssrc_(ssrc),
provider_(provider),
source_(new RefCountedObject<VideoTrackSource>(&broadcaster_,
- worker_thread,
true /* remote */)),
track_(VideoTrackProxy::Create(
rtc::Thread::Current(),
- VideoTrack::Create(track_id, source_.get()))) {
+ worker_thread,
+ VideoTrack::Create(
+ track_id,
+ VideoTrackSourceProxy::Create(rtc::Thread::Current(),
+ worker_thread,
+ source_)))) {
source_->SetState(MediaSourceInterface::kLive);
provider_->SetVideoPlayout(ssrc_, true, &broadcaster_);
stream->AddTrack(track_);
@@ -108,4 +123,13 @@ void VideoRtpReceiver::Stop() {
provider_ = nullptr;
}
+RtpParameters VideoRtpReceiver::GetParameters() const {
+ return provider_->GetVideoRtpReceiveParameters(ssrc_);
+}
+
+bool VideoRtpReceiver::SetParameters(const RtpParameters& parameters) {
+ TRACE_EVENT0("webrtc", "VideoRtpReceiver::SetParameters");
+ return provider_->SetVideoRtpReceiveParameters(ssrc_, parameters);
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/rtpreceiver.h b/chromium/third_party/webrtc/api/rtpreceiver.h
index b5818573da1..2e7339d4da4 100644
--- a/chromium/third_party/webrtc/api/rtpreceiver.h
+++ b/chromium/third_party/webrtc/api/rtpreceiver.h
@@ -56,6 +56,9 @@ class AudioRtpReceiver : public ObserverInterface,
void Stop() override;
+ RtpParameters GetParameters() const override;
+ bool SetParameters(const RtpParameters& parameters) override;
+
private:
void Reconfigure();
@@ -89,6 +92,9 @@ class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInterface> {
void Stop() override;
+ RtpParameters GetParameters() const override;
+ bool SetParameters(const RtpParameters& parameters) override;
+
private:
std::string id_;
uint32_t ssrc_;
diff --git a/chromium/third_party/webrtc/api/rtpreceiverinterface.h b/chromium/third_party/webrtc/api/rtpreceiverinterface.h
index 5c7790db607..ef4f0e113f3 100644
--- a/chromium/third_party/webrtc/api/rtpreceiverinterface.h
+++ b/chromium/third_party/webrtc/api/rtpreceiverinterface.h
@@ -33,16 +33,24 @@ class RtpReceiverInterface : public rtc::RefCountInterface {
virtual void Stop() = 0;
+ // The WebRTC specification only defines RTCRtpParameters in terms of senders,
+ // but this API also applies them to receivers, similar to ORTC:
+ // http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*.
+ virtual RtpParameters GetParameters() const = 0;
+ virtual bool SetParameters(const RtpParameters& parameters) = 0;
+
protected:
virtual ~RtpReceiverInterface() {}
};
// Define proxy for RtpReceiverInterface.
-BEGIN_PROXY_MAP(RtpReceiver)
+BEGIN_SIGNALING_PROXY_MAP(RtpReceiver)
PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
PROXY_CONSTMETHOD0(std::string, id)
PROXY_METHOD0(void, Stop)
-END_PROXY()
+PROXY_CONSTMETHOD0(RtpParameters, GetParameters);
+PROXY_METHOD1(bool, SetParameters, const RtpParameters&)
+END_SIGNALING_PROXY()
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/rtpsender.cc b/chromium/third_party/webrtc/api/rtpsender.cc
index 94cea6c2c99..5577b9c7500 100644
--- a/chromium/third_party/webrtc/api/rtpsender.cc
+++ b/chromium/third_party/webrtc/api/rtpsender.cc
@@ -13,6 +13,7 @@
#include "webrtc/api/localaudiosource.h"
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/base/helpers.h"
+#include "webrtc/base/trace_event.h"
namespace webrtc {
@@ -86,6 +87,7 @@ AudioRtpSender::~AudioRtpSender() {
}
void AudioRtpSender::OnChanged() {
+ TRACE_EVENT0("webrtc", "AudioRtpSender::OnChanged");
RTC_DCHECK(!stopped_);
if (cached_track_enabled_ != track_->enabled()) {
cached_track_enabled_ = track_->enabled();
@@ -96,6 +98,7 @@ void AudioRtpSender::OnChanged() {
}
bool AudioRtpSender::SetTrack(MediaStreamTrackInterface* track) {
+ TRACE_EVENT0("webrtc", "AudioRtpSender::SetTrack");
if (stopped_) {
LOG(LS_ERROR) << "SetTrack can't be called on a stopped RtpSender.";
return false;
@@ -119,6 +122,9 @@ bool AudioRtpSender::SetTrack(MediaStreamTrackInterface* track) {
// Attach to new track.
bool prev_can_send_track = can_send_track();
+ // Keep a reference to the old track to keep it alive until we call
+ // SetAudioSend.
+ rtc::scoped_refptr<AudioTrackInterface> old_track = track_;
track_ = audio_track;
if (track_) {
cached_track_enabled_ = track_->enabled();
@@ -140,6 +146,7 @@ bool AudioRtpSender::SetTrack(MediaStreamTrackInterface* track) {
}
void AudioRtpSender::SetSsrc(uint32_t ssrc) {
+ TRACE_EVENT0("webrtc", "AudioRtpSender::SetSsrc");
if (stopped_ || ssrc == ssrc_) {
return;
}
@@ -161,6 +168,7 @@ void AudioRtpSender::SetSsrc(uint32_t ssrc) {
}
void AudioRtpSender::Stop() {
+ TRACE_EVENT0("webrtc", "AudioRtpSender::Stop");
// TODO(deadbeef): Need to do more here to fully stop sending packets.
if (stopped_) {
return;
@@ -200,11 +208,12 @@ void AudioRtpSender::SetAudioSend() {
}
RtpParameters AudioRtpSender::GetParameters() const {
- return provider_->GetAudioRtpParameters(ssrc_);
+ return provider_->GetAudioRtpSendParameters(ssrc_);
}
bool AudioRtpSender::SetParameters(const RtpParameters& parameters) {
- return provider_->SetAudioRtpParameters(ssrc_, parameters);
+ TRACE_EVENT0("webrtc", "AudioRtpSender::SetParameters");
+ return provider_->SetAudioRtpSendParameters(ssrc_, parameters);
}
VideoRtpSender::VideoRtpSender(VideoTrackInterface* track,
@@ -240,6 +249,7 @@ VideoRtpSender::~VideoRtpSender() {
}
void VideoRtpSender::OnChanged() {
+ TRACE_EVENT0("webrtc", "VideoRtpSender::OnChanged");
RTC_DCHECK(!stopped_);
if (cached_track_enabled_ != track_->enabled()) {
cached_track_enabled_ = track_->enabled();
@@ -250,6 +260,7 @@ void VideoRtpSender::OnChanged() {
}
bool VideoRtpSender::SetTrack(MediaStreamTrackInterface* track) {
+ TRACE_EVENT0("webrtc", "VideoRtpSender::SetTrack");
if (stopped_) {
LOG(LS_ERROR) << "SetTrack can't be called on a stopped RtpSender.";
return false;
@@ -268,6 +279,9 @@ bool VideoRtpSender::SetTrack(MediaStreamTrackInterface* track) {
// Attach to new track.
bool prev_can_send_track = can_send_track();
+ // Keep a reference to the old track to keep it alive until we call
+ // SetSource.
+ rtc::scoped_refptr<VideoTrackInterface> old_track = track_;
track_ = video_track;
if (track_) {
cached_track_enabled_ = track_->enabled();
@@ -276,42 +290,40 @@ bool VideoRtpSender::SetTrack(MediaStreamTrackInterface* track) {
// Update video provider.
if (can_send_track()) {
- VideoTrackSourceInterface* source = track_->GetSource();
// TODO(deadbeef): If SetTrack is called with a disabled track, and the
// previous track was enabled, this could cause a frame from the new track
- // to slip out. Really, what we need is for SetCaptureDevice and
- // SetVideoSend
+ // to slip out. Really, what we need is for SetSource and SetVideoSend
// to be combined into one atomic operation, all the way down to
// WebRtcVideoSendStream.
- provider_->SetCaptureDevice(ssrc_,
- source ? source->GetVideoCapturer() : nullptr);
+
+ provider_->SetSource(ssrc_, track_);
SetVideoSend();
} else if (prev_can_send_track) {
- provider_->SetCaptureDevice(ssrc_, nullptr);
+ provider_->SetSource(ssrc_, nullptr);
provider_->SetVideoSend(ssrc_, false, nullptr);
}
return true;
}
void VideoRtpSender::SetSsrc(uint32_t ssrc) {
+ TRACE_EVENT0("webrtc", "VideoRtpSender::SetSsrc");
if (stopped_ || ssrc == ssrc_) {
return;
}
// If we are already sending with a particular SSRC, stop sending.
if (can_send_track()) {
- provider_->SetCaptureDevice(ssrc_, nullptr);
+ provider_->SetSource(ssrc_, nullptr);
provider_->SetVideoSend(ssrc_, false, nullptr);
}
ssrc_ = ssrc;
if (can_send_track()) {
- VideoTrackSourceInterface* source = track_->GetSource();
- provider_->SetCaptureDevice(ssrc_,
- source ? source->GetVideoCapturer() : nullptr);
+ provider_->SetSource(ssrc_, track_);
SetVideoSend();
}
}
void VideoRtpSender::Stop() {
+ TRACE_EVENT0("webrtc", "VideoRtpSender::Stop");
// TODO(deadbeef): Need to do more here to fully stop sending packets.
if (stopped_) {
return;
@@ -320,7 +332,7 @@ void VideoRtpSender::Stop() {
track_->UnregisterObserver(this);
}
if (can_send_track()) {
- provider_->SetCaptureDevice(ssrc_, nullptr);
+ provider_->SetSource(ssrc_, nullptr);
provider_->SetVideoSend(ssrc_, false, nullptr);
}
stopped_ = true;
@@ -338,11 +350,12 @@ void VideoRtpSender::SetVideoSend() {
}
RtpParameters VideoRtpSender::GetParameters() const {
- return provider_->GetVideoRtpParameters(ssrc_);
+ return provider_->GetVideoRtpSendParameters(ssrc_);
}
bool VideoRtpSender::SetParameters(const RtpParameters& parameters) {
- return provider_->SetVideoRtpParameters(ssrc_, parameters);
+ TRACE_EVENT0("webrtc", "VideoRtpSender::SetParameters");
+ return provider_->SetVideoRtpSendParameters(ssrc_, parameters);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/rtpsender.h b/chromium/third_party/webrtc/api/rtpsender.h
index 3919e070dc5..ffe5daeb010 100644
--- a/chromium/third_party/webrtc/api/rtpsender.h
+++ b/chromium/third_party/webrtc/api/rtpsender.h
@@ -15,6 +15,7 @@
#ifndef WEBRTC_API_RTPSENDER_H_
#define WEBRTC_API_RTPSENDER_H_
+#include <memory>
#include <string>
#include "webrtc/api/mediastreamprovider.h"
@@ -22,7 +23,6 @@
#include "webrtc/api/statscollector.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/media/base/audiosource.h"
namespace webrtc {
@@ -97,8 +97,8 @@ class AudioRtpSender : public ObserverInterface,
void Stop() override;
- RtpParameters GetParameters() const;
- bool SetParameters(const RtpParameters& parameters);
+ RtpParameters GetParameters() const override;
+ bool SetParameters(const RtpParameters& parameters) override;
private:
// TODO(nisse): Since SSRC == 0 is technically valid, figure out
@@ -119,7 +119,7 @@ class AudioRtpSender : public ObserverInterface,
// Used to pass the data callback from the |track_| to the other end of
// cricket::AudioSource.
- rtc::scoped_ptr<LocalAudioSinkAdapter> sink_adapter_;
+ std::unique_ptr<LocalAudioSinkAdapter> sink_adapter_;
};
class VideoRtpSender : public ObserverInterface,
@@ -163,8 +163,8 @@ class VideoRtpSender : public ObserverInterface,
void Stop() override;
- RtpParameters GetParameters() const;
- bool SetParameters(const RtpParameters& parameters);
+ RtpParameters GetParameters() const override;
+ bool SetParameters(const RtpParameters& parameters) override;
private:
bool can_send_track() const { return track_ && ssrc_; }
diff --git a/chromium/third_party/webrtc/api/rtpsenderinterface.h b/chromium/third_party/webrtc/api/rtpsenderinterface.h
index 776d01ace6b..2291bb4e214 100644
--- a/chromium/third_party/webrtc/api/rtpsenderinterface.h
+++ b/chromium/third_party/webrtc/api/rtpsenderinterface.h
@@ -60,7 +60,7 @@ class RtpSenderInterface : public rtc::RefCountInterface {
};
// Define proxy for RtpSenderInterface.
-BEGIN_PROXY_MAP(RtpSender)
+BEGIN_SIGNALING_PROXY_MAP(RtpSender)
PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*)
PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
PROXY_METHOD1(void, SetSsrc, uint32_t)
@@ -72,7 +72,7 @@ PROXY_CONSTMETHOD0(std::string, stream_id)
PROXY_METHOD0(void, Stop)
PROXY_CONSTMETHOD0(RtpParameters, GetParameters);
PROXY_METHOD1(bool, SetParameters, const RtpParameters&)
-END_PROXY()
+END_SIGNALING_PROXY()
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/rtpsenderreceiver_unittest.cc b/chromium/third_party/webrtc/api/rtpsenderreceiver_unittest.cc
index 7f32f29bc4b..3db9d5e00ee 100644
--- a/chromium/third_party/webrtc/api/rtpsenderreceiver_unittest.cc
+++ b/chromium/third_party/webrtc/api/rtpsenderreceiver_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include <utility>
@@ -27,6 +28,7 @@
using ::testing::_;
using ::testing::Exactly;
+using ::testing::InvokeWithoutArgs;
using ::testing::Return;
static const char kStreamLabel1[] = "local_stream_1";
@@ -42,7 +44,11 @@ namespace webrtc {
// Helper class to test RtpSender/RtpReceiver.
class MockAudioProvider : public AudioProviderInterface {
public:
- ~MockAudioProvider() override {}
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
+ ~MockAudioProvider() /* override */ {}
MOCK_METHOD2(SetAudioPlayout,
void(uint32_t ssrc,
@@ -53,25 +59,30 @@ class MockAudioProvider : public AudioProviderInterface {
const cricket::AudioOptions& options,
cricket::AudioSource* source));
MOCK_METHOD2(SetAudioPlayoutVolume, void(uint32_t ssrc, double volume));
- MOCK_CONST_METHOD1(GetAudioRtpParameters, RtpParameters(uint32_t ssrc));
- MOCK_METHOD2(SetAudioRtpParameters,
+ MOCK_CONST_METHOD1(GetAudioRtpSendParameters, RtpParameters(uint32_t ssrc));
+ MOCK_METHOD2(SetAudioRtpSendParameters,
+ bool(uint32_t ssrc, const RtpParameters&));
+ MOCK_CONST_METHOD1(GetAudioRtpReceiveParameters,
+ RtpParameters(uint32_t ssrc));
+ MOCK_METHOD2(SetAudioRtpReceiveParameters,
bool(uint32_t ssrc, const RtpParameters&));
- void SetRawAudioSink(uint32_t,
- rtc::scoped_ptr<AudioSinkInterface> sink) override {
+ void SetRawAudioSink(
+ uint32_t, std::unique_ptr<AudioSinkInterface> sink) /* override */ {
sink_ = std::move(sink);
}
private:
- rtc::scoped_ptr<AudioSinkInterface> sink_;
+ std::unique_ptr<AudioSinkInterface> sink_;
};
// Helper class to test RtpSender/RtpReceiver.
class MockVideoProvider : public VideoProviderInterface {
public:
virtual ~MockVideoProvider() {}
- MOCK_METHOD2(SetCaptureDevice,
- bool(uint32_t ssrc, cricket::VideoCapturer* camera));
+ MOCK_METHOD2(SetSource,
+ bool(uint32_t ssrc,
+ rtc::VideoSourceInterface<cricket::VideoFrame>* source));
MOCK_METHOD3(SetVideoPlayout,
void(uint32_t ssrc,
bool enable,
@@ -81,8 +92,12 @@ class MockVideoProvider : public VideoProviderInterface {
bool enable,
const cricket::VideoOptions* options));
- MOCK_CONST_METHOD1(GetVideoRtpParameters, RtpParameters(uint32_t ssrc));
- MOCK_METHOD2(SetVideoRtpParameters,
+ MOCK_CONST_METHOD1(GetVideoRtpSendParameters, RtpParameters(uint32_t ssrc));
+ MOCK_METHOD2(SetVideoRtpSendParameters,
+ bool(uint32_t ssrc, const RtpParameters&));
+ MOCK_CONST_METHOD1(GetVideoRtpReceiveParameters,
+ RtpParameters(uint32_t ssrc));
+ MOCK_METHOD2(SetVideoRtpReceiveParameters,
bool(uint32_t ssrc, const RtpParameters&));
};
@@ -111,9 +126,7 @@ class RtpSenderReceiverTest : public testing::Test {
void CreateVideoRtpSender() {
AddVideoTrack();
- EXPECT_CALL(video_provider_,
- SetCaptureDevice(
- kVideoSsrc, video_track_->GetSource()->GetVideoCapturer()));
+ EXPECT_CALL(video_provider_, SetSource(kVideoSsrc, video_track_.get()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
video_rtp_sender_ = new VideoRtpSender(stream_->GetVideoTracks()[0],
stream_->label(), &video_provider_);
@@ -127,7 +140,7 @@ class RtpSenderReceiverTest : public testing::Test {
}
void DestroyVideoRtpSender() {
- EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, NULL)).Times(1);
+ EXPECT_CALL(video_provider_, SetSource(kVideoSsrc, NULL)).Times(1);
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
video_rtp_sender_ = nullptr;
}
@@ -345,14 +358,12 @@ TEST_F(RtpSenderReceiverTest, VideoSenderEarlyWarmupSsrcThenTrack) {
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(&video_provider_);
sender->SetSsrc(kVideoSsrc);
- EXPECT_CALL(video_provider_,
- SetCaptureDevice(kVideoSsrc,
- video_track_->GetSource()->GetVideoCapturer()));
+ EXPECT_CALL(video_provider_, SetSource(kVideoSsrc, video_track_.get()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
sender->SetTrack(video_track_);
// Calls expected from destructor.
- EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+ EXPECT_CALL(video_provider_, SetSource(kVideoSsrc, nullptr)).Times(1);
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
}
@@ -363,14 +374,12 @@ TEST_F(RtpSenderReceiverTest, VideoSenderEarlyWarmupTrackThenSsrc) {
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(&video_provider_);
sender->SetTrack(video_track_);
- EXPECT_CALL(video_provider_,
- SetCaptureDevice(kVideoSsrc,
- video_track_->GetSource()->GetVideoCapturer()));
+ EXPECT_CALL(video_provider_, SetSource(kVideoSsrc, video_track_.get()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
sender->SetSsrc(kVideoSsrc);
// Calls expected from destructor.
- EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+ EXPECT_CALL(video_provider_, SetSource(kVideoSsrc, nullptr)).Times(1);
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
}
@@ -396,21 +405,19 @@ TEST_F(RtpSenderReceiverTest, AudioSenderSsrcSetToZero) {
// set to 0.
TEST_F(RtpSenderReceiverTest, VideoSenderSsrcSetToZero) {
AddVideoTrack();
- EXPECT_CALL(video_provider_,
- SetCaptureDevice(kVideoSsrc,
- video_track_->GetSource()->GetVideoCapturer()));
+ EXPECT_CALL(video_provider_, SetSource(kVideoSsrc, video_track_.get()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(video_track_, kStreamLabel1, &video_provider_);
sender->SetSsrc(kVideoSsrc);
- EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+ EXPECT_CALL(video_provider_, SetSource(kVideoSsrc, nullptr)).Times(1);
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
sender->SetSsrc(0);
// Make sure it's SetSsrc that called methods on the provider, and not the
// destructor.
- EXPECT_CALL(video_provider_, SetCaptureDevice(_, _)).Times(0);
+ EXPECT_CALL(video_provider_, SetSource(_, _)).Times(0);
EXPECT_CALL(video_provider_, SetVideoSend(_, _, _)).Times(0);
}
@@ -422,7 +429,14 @@ TEST_F(RtpSenderReceiverTest, AudioSenderTrackSetToNull) {
new AudioRtpSender(track, kStreamLabel1, &audio_provider_, nullptr);
sender->SetSsrc(kAudioSsrc);
- EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
+ // Expect that SetAudioSend will be called before the reference to the track
+ // is released.
+ EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, nullptr))
+ .Times(1)
+ .WillOnce(InvokeWithoutArgs([&track] {
+ EXPECT_LT(2, track->AddRef());
+ track->Release();
+ }));
EXPECT_TRUE(sender->SetTrack(nullptr));
// Make sure it's SetTrack that called methods on the provider, and not the
@@ -431,22 +445,31 @@ TEST_F(RtpSenderReceiverTest, AudioSenderTrackSetToNull) {
}
TEST_F(RtpSenderReceiverTest, VideoSenderTrackSetToNull) {
- AddVideoTrack();
- EXPECT_CALL(video_provider_,
- SetCaptureDevice(kVideoSsrc,
- video_track_->GetSource()->GetVideoCapturer()));
+ rtc::scoped_refptr<VideoTrackSourceInterface> source(
+ FakeVideoTrackSource::Create());
+ rtc::scoped_refptr<VideoTrackInterface> track =
+ VideoTrack::Create(kVideoTrackId, source);
+ EXPECT_CALL(video_provider_, SetSource(kVideoSsrc, track.get()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
rtc::scoped_refptr<VideoRtpSender> sender =
- new VideoRtpSender(video_track_, kStreamLabel1, &video_provider_);
+ new VideoRtpSender(track, kStreamLabel1, &video_provider_);
sender->SetSsrc(kVideoSsrc);
- EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+ // Expect that SetSource will be called before the reference to the track
+ // is released.
+ EXPECT_CALL(video_provider_, SetSource(kVideoSsrc, nullptr))
+ .Times(1)
+ .WillOnce(InvokeWithoutArgs([&track] {
+ EXPECT_LT(2, track->AddRef());
+ track->Release();
+ return true;
+ }));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
EXPECT_TRUE(sender->SetTrack(nullptr));
// Make sure it's SetTrack that called methods on the provider, and not the
// destructor.
- EXPECT_CALL(video_provider_, SetCaptureDevice(_, _)).Times(0);
+ EXPECT_CALL(video_provider_, SetSource(_, _)).Times(0);
EXPECT_CALL(video_provider_, SetVideoSend(_, _, _)).Times(0);
}
@@ -469,33 +492,29 @@ TEST_F(RtpSenderReceiverTest, AudioSenderSsrcChanged) {
TEST_F(RtpSenderReceiverTest, VideoSenderSsrcChanged) {
AddVideoTrack();
- EXPECT_CALL(video_provider_,
- SetCaptureDevice(kVideoSsrc,
- video_track_->GetSource()->GetVideoCapturer()));
+ EXPECT_CALL(video_provider_, SetSource(kVideoSsrc, video_track_.get()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(video_track_, kStreamLabel1, &video_provider_);
sender->SetSsrc(kVideoSsrc);
- EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
+ EXPECT_CALL(video_provider_, SetSource(kVideoSsrc, nullptr)).Times(1);
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
- EXPECT_CALL(video_provider_,
- SetCaptureDevice(kVideoSsrc2,
- video_track_->GetSource()->GetVideoCapturer()));
+ EXPECT_CALL(video_provider_, SetSource(kVideoSsrc2, video_track_.get()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc2, true, _));
sender->SetSsrc(kVideoSsrc2);
// Calls expected from destructor.
- EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc2, nullptr)).Times(1);
+ EXPECT_CALL(video_provider_, SetSource(kVideoSsrc2, nullptr)).Times(1);
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc2, false, _)).Times(1);
}
TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParameters) {
CreateAudioRtpSender();
- EXPECT_CALL(audio_provider_, GetAudioRtpParameters(kAudioSsrc))
+ EXPECT_CALL(audio_provider_, GetAudioRtpSendParameters(kAudioSsrc))
.WillOnce(Return(RtpParameters()));
- EXPECT_CALL(audio_provider_, SetAudioRtpParameters(kAudioSsrc, _))
+ EXPECT_CALL(audio_provider_, SetAudioRtpSendParameters(kAudioSsrc, _))
.WillOnce(Return(true));
RtpParameters params = audio_rtp_sender_->GetParameters();
EXPECT_TRUE(audio_rtp_sender_->SetParameters(params));
@@ -506,9 +525,9 @@ TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParameters) {
TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParameters) {
CreateVideoRtpSender();
- EXPECT_CALL(video_provider_, GetVideoRtpParameters(kVideoSsrc))
+ EXPECT_CALL(video_provider_, GetVideoRtpSendParameters(kVideoSsrc))
.WillOnce(Return(RtpParameters()));
- EXPECT_CALL(video_provider_, SetVideoRtpParameters(kVideoSsrc, _))
+ EXPECT_CALL(video_provider_, SetVideoRtpSendParameters(kVideoSsrc, _))
.WillOnce(Return(true));
RtpParameters params = video_rtp_sender_->GetParameters();
EXPECT_TRUE(video_rtp_sender_->SetParameters(params));
@@ -516,4 +535,30 @@ TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParameters) {
DestroyVideoRtpSender();
}
+TEST_F(RtpSenderReceiverTest, AudioReceiverCanSetParameters) {
+ CreateAudioRtpReceiver();
+
+ EXPECT_CALL(audio_provider_, GetAudioRtpReceiveParameters(kAudioSsrc))
+ .WillOnce(Return(RtpParameters()));
+ EXPECT_CALL(audio_provider_, SetAudioRtpReceiveParameters(kAudioSsrc, _))
+ .WillOnce(Return(true));
+ RtpParameters params = audio_rtp_receiver_->GetParameters();
+ EXPECT_TRUE(audio_rtp_receiver_->SetParameters(params));
+
+ DestroyAudioRtpReceiver();
+}
+
+TEST_F(RtpSenderReceiverTest, VideoReceiverCanSetParameters) {
+ CreateVideoRtpReceiver();
+
+ EXPECT_CALL(video_provider_, GetVideoRtpReceiveParameters(kVideoSsrc))
+ .WillOnce(Return(RtpParameters()));
+ EXPECT_CALL(video_provider_, SetVideoRtpReceiveParameters(kVideoSsrc, _))
+ .WillOnce(Return(true));
+ RtpParameters params = video_rtp_receiver_->GetParameters();
+ EXPECT_TRUE(video_rtp_receiver_->SetParameters(params));
+
+ DestroyVideoRtpReceiver();
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/statscollector.cc b/chromium/third_party/webrtc/api/statscollector.cc
index 0901fc61b1e..df1d0aa0d32 100644
--- a/chromium/third_party/webrtc/api/statscollector.cc
+++ b/chromium/third_party/webrtc/api/statscollector.cc
@@ -10,18 +10,16 @@
#include "webrtc/api/statscollector.h"
+#include <memory>
#include <utility>
#include <vector>
#include "webrtc/api/peerconnection.h"
#include "webrtc/base/base64.h"
#include "webrtc/base/checks.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/timing.h"
#include "webrtc/pc/channel.h"
-using rtc::scoped_ptr;
-
namespace webrtc {
namespace {
@@ -90,7 +88,9 @@ void ExtractCommonSendProperties(const cricket::MediaSenderInfo& info,
StatsReport* report) {
report->AddString(StatsReport::kStatsValueNameCodecName, info.codec_name);
report->AddInt64(StatsReport::kStatsValueNameBytesSent, info.bytes_sent);
- report->AddInt64(StatsReport::kStatsValueNameRtt, info.rtt_ms);
+ if (info.rtt_ms >= 0) {
+ report->AddInt64(StatsReport::kStatsValueNameRtt, info.rtt_ms);
+ }
}
void ExtractCommonReceiveProperties(const cricket::MediaReceiverInfo& info,
@@ -107,17 +107,23 @@ void SetAudioProcessingStats(StatsReport* report,
int echo_delay_std_ms) {
report->AddBoolean(StatsReport::kStatsValueNameTypingNoiseState,
typing_noise_detected);
- report->AddFloat(StatsReport::kStatsValueNameEchoCancellationQualityMin,
- aec_quality_min);
+ if (aec_quality_min >= 0.0f) {
+ report->AddFloat(StatsReport::kStatsValueNameEchoCancellationQualityMin,
+ aec_quality_min);
+ }
const IntForAdd ints[] = {
- { StatsReport::kStatsValueNameEchoReturnLoss, echo_return_loss },
- { StatsReport::kStatsValueNameEchoReturnLossEnhancement,
- echo_return_loss_enhancement },
{ StatsReport::kStatsValueNameEchoDelayMedian, echo_delay_median_ms },
{ StatsReport::kStatsValueNameEchoDelayStdDev, echo_delay_std_ms },
};
- for (const auto& i : ints)
- report->AddInt(i.name, i.value);
+ for (const auto& i : ints) {
+ if (i.value >= 0) {
+ report->AddInt(i.name, i.value);
+ }
+ }
+ // These can take on valid negative values.
+ report->AddInt(StatsReport::kStatsValueNameEchoReturnLoss, echo_return_loss);
+ report->AddInt(StatsReport::kStatsValueNameEchoReturnLossEnhancement,
+ echo_return_loss_enhancement);
}
void ExtractStats(const cricket::VoiceReceiverInfo& info, StatsReport* report) {
@@ -133,7 +139,6 @@ void ExtractStats(const cricket::VoiceReceiverInfo& info, StatsReport* report) {
};
const IntForAdd ints[] = {
- { StatsReport::kStatsValueNameAudioOutputLevel, info.audio_level },
{ StatsReport::kStatsValueNameCurrentDelayMs, info.delay_estimate_ms },
{ StatsReport::kStatsValueNameDecodingCNG, info.decoding_cng },
{ StatsReport::kStatsValueNameDecodingCTN, info.decoding_calls_to_neteq },
@@ -155,11 +160,17 @@ void ExtractStats(const cricket::VoiceReceiverInfo& info, StatsReport* report) {
for (const auto& i : ints)
report->AddInt(i.name, i.value);
+ if (info.audio_level >= 0) {
+ report->AddInt(StatsReport::kStatsValueNameAudioOutputLevel,
+ info.audio_level);
+ }
report->AddInt64(StatsReport::kStatsValueNameBytesReceived,
info.bytes_rcvd);
- report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
- info.capture_start_ntp_time_ms);
+ if (info.capture_start_ntp_time_ms >= 0) {
+ report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
+ info.capture_start_ntp_time_ms);
+ }
report->AddString(StatsReport::kStatsValueNameMediaType, "audio");
}
@@ -179,8 +190,11 @@ void ExtractStats(const cricket::VoiceSenderInfo& info, StatsReport* report) {
{ StatsReport::kStatsValueNamePacketsSent, info.packets_sent },
};
- for (const auto& i : ints)
- report->AddInt(i.name, i.value);
+ for (const auto& i : ints) {
+ if (i.value >= 0) {
+ report->AddInt(i.name, i.value);
+ }
+ }
report->AddString(StatsReport::kStatsValueNameMediaType, "audio");
}
@@ -190,8 +204,10 @@ void ExtractStats(const cricket::VideoReceiverInfo& info, StatsReport* report) {
info.decoder_implementation_name);
report->AddInt64(StatsReport::kStatsValueNameBytesReceived,
info.bytes_rcvd);
- report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
- info.capture_start_ntp_time_ms);
+ if (info.capture_start_ntp_time_ms >= 0) {
+ report->AddInt64(StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
+ info.capture_start_ntp_time_ms);
+ }
const IntForAdd ints[] = {
{ StatsReport::kStatsValueNameCurrentDelayMs, info.current_delay_ms },
{ StatsReport::kStatsValueNameDecodeMs, info.decode_ms },
@@ -532,7 +548,7 @@ StatsReport* StatsCollector::AddOneCertificateReport(
if (!cert->GetSignatureDigestAlgorithm(&digest_algorithm))
return nullptr;
- rtc::scoped_ptr<rtc::SSLFingerprint> ssl_fingerprint(
+ std::unique_ptr<rtc::SSLFingerprint> ssl_fingerprint(
rtc::SSLFingerprint::Create(digest_algorithm, cert));
// SSLFingerprint::Create can fail if the algorithm returned by
@@ -574,7 +590,7 @@ StatsReport* StatsCollector::AddCertificateReports(
RTC_DCHECK(cert != NULL);
StatsReport* issuer = nullptr;
- rtc::scoped_ptr<rtc::SSLCertChain> chain = cert->GetChain();
+ std::unique_ptr<rtc::SSLCertChain> chain = cert->GetChain();
if (chain) {
// This loop runs in reverse, i.e. from root to leaf, so that each
// certificate's issuer's report ID is known before the child certificate's
@@ -702,7 +718,7 @@ void StatsCollector::ExtractSessionInfo() {
local_cert_report_id = r->id();
}
- rtc::scoped_ptr<rtc::SSLCertificate> cert =
+ std::unique_ptr<rtc::SSLCertificate> cert =
pc_->session()->GetRemoteSSLCertificate(
transport_iter.second.transport_name);
if (cert) {
@@ -872,7 +888,10 @@ void StatsCollector::ExtractDataInfo() {
StatsReport* report = reports_.ReplaceOrAddNew(id);
report->set_timestamp(stats_gathering_started_);
report->AddString(StatsReport::kStatsValueNameLabel, dc->label());
- report->AddInt(StatsReport::kStatsValueNameDataChannelId, dc->id());
+ // Filter out the initial id (-1).
+ if (dc->id() >= 0) {
+ report->AddInt(StatsReport::kStatsValueNameDataChannelId, dc->id());
+ }
report->AddString(StatsReport::kStatsValueNameProtocol, dc->protocol());
report->AddString(StatsReport::kStatsValueNameState,
DataChannelInterface::DataStateString(dc->state()));
@@ -937,6 +956,9 @@ void StatsCollector::UpdateReportFromAudioTrack(AudioTrackInterface* track,
report, stats.typing_noise_detected, stats.echo_return_loss,
stats.echo_return_loss_enhancement, stats.echo_delay_median_ms,
stats.aec_quality_min, stats.echo_delay_std_ms);
+
+ report->AddFloat(StatsReport::kStatsValueNameAecDivergentFilterFraction,
+ stats.aec_divergent_filter_fraction);
}
}
diff --git a/chromium/third_party/webrtc/api/statscollector_unittest.cc b/chromium/third_party/webrtc/api/statscollector_unittest.cc
index 5873e7382e8..7953515072b 100644
--- a/chromium/third_party/webrtc/api/statscollector_unittest.cc
+++ b/chromium/third_party/webrtc/api/statscollector_unittest.cc
@@ -11,6 +11,7 @@
#include <stdio.h>
#include <algorithm>
+#include <memory>
#include "webrtc/api/statscollector.h"
@@ -32,7 +33,6 @@
#include "webrtc/p2p/base/faketransportcontroller.h"
#include "webrtc/pc/channelmanager.h"
-using rtc::scoped_ptr;
using testing::_;
using testing::DoAll;
using testing::Field;
@@ -67,10 +67,15 @@ const uint32_t kSsrcOfTrack = 1234;
class MockWebRtcSession : public webrtc::WebRtcSession {
public:
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
explicit MockWebRtcSession(webrtc::MediaControllerInterface* media_controller)
: WebRtcSession(media_controller,
rtc::Thread::Current(),
rtc::Thread::Current(),
+ rtc::Thread::Current(),
nullptr) {}
MOCK_METHOD0(voice_channel, cricket::VoiceChannel*());
MOCK_METHOD0(video_channel, cricket::VideoChannel*());
@@ -84,9 +89,9 @@ class MockWebRtcSession : public webrtc::WebRtcSession {
rtc::scoped_refptr<rtc::RTCCertificate>* certificate));
// Workaround for gmock's inability to cope with move-only return values.
- rtc::scoped_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate(
- const std::string& transport_name) override {
- return rtc::scoped_ptr<rtc::SSLCertificate>(
+ std::unique_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate(
+ const std::string& transport_name) /* override */ {
+ return std::unique_ptr<rtc::SSLCertificate>(
GetRemoteSSLCertificate_ReturnsRawPointer(transport_name));
}
MOCK_METHOD1(GetRemoteSSLCertificate_ReturnsRawPointer,
@@ -161,6 +166,48 @@ class FakeAudioTrack
rtc::scoped_refptr<FakeAudioProcessor> processor_;
};
+// This fake audio processor is used to verify that the undesired initial values
+// (-1) will be filtered out.
+class FakeAudioProcessorWithInitValue : public webrtc::AudioProcessorInterface {
+ public:
+ FakeAudioProcessorWithInitValue() {}
+ ~FakeAudioProcessorWithInitValue() {}
+
+ private:
+ void GetStats(AudioProcessorInterface::AudioProcessorStats* stats) override {
+ stats->typing_noise_detected = false;
+ stats->echo_return_loss = -100;
+ stats->echo_return_loss_enhancement = -100;
+ stats->echo_delay_median_ms = -1;
+ stats->aec_quality_min = -1.0f;
+ stats->echo_delay_std_ms = -1;
+ }
+};
+
+class FakeAudioTrackWithInitValue
+ : public webrtc::MediaStreamTrack<webrtc::AudioTrackInterface> {
+ public:
+ explicit FakeAudioTrackWithInitValue(const std::string& id)
+ : webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>(id),
+ processor_(
+ new rtc::RefCountedObject<FakeAudioProcessorWithInitValue>()) {}
+ std::string kind() const override { return "audio"; }
+ webrtc::AudioSourceInterface* GetSource() const override { return NULL; }
+ void AddSink(webrtc::AudioTrackSinkInterface* sink) override {}
+ void RemoveSink(webrtc::AudioTrackSinkInterface* sink) override {}
+ bool GetSignalLevel(int* level) override {
+ *level = 1;
+ return true;
+ }
+ rtc::scoped_refptr<webrtc::AudioProcessorInterface> GetAudioProcessor()
+ override {
+ return processor_;
+ }
+
+ private:
+ rtc::scoped_refptr<FakeAudioProcessorWithInitValue> processor_;
+};
+
bool GetValue(const StatsReport* report,
StatsReport::StatsValueName name,
std::string* value) {
@@ -440,7 +487,8 @@ void InitVoiceSenderInfo(cricket::VoiceSenderInfo* voice_sender_info) {
}
void UpdateVoiceSenderInfoFromAudioTrack(
- FakeAudioTrack* audio_track, cricket::VoiceSenderInfo* voice_sender_info) {
+ AudioTrackInterface* audio_track,
+ cricket::VoiceSenderInfo* voice_sender_info) {
audio_track->GetSignalLevel(&voice_sender_info->audio_level);
webrtc::AudioProcessorInterface::AudioProcessorStats audio_processor_stats;
audio_track->GetAudioProcessor()->GetStats(&audio_processor_stats);
@@ -492,12 +540,15 @@ class StatsCollectorForTest : public webrtc::StatsCollector {
class StatsCollectorTest : public testing::Test {
protected:
StatsCollectorTest()
- : media_engine_(new cricket::FakeMediaEngine()),
- channel_manager_(
- new cricket::ChannelManager(media_engine_, rtc::Thread::Current())),
+ : worker_thread_(rtc::Thread::Current()),
+ network_thread_(rtc::Thread::Current()),
+ media_engine_(new cricket::FakeMediaEngine()),
+ channel_manager_(new cricket::ChannelManager(media_engine_,
+ worker_thread_,
+ network_thread_)),
media_controller_(
webrtc::MediaControllerInterface::Create(cricket::MediaConfig(),
- rtc::Thread::Current(),
+ worker_thread_,
channel_manager_.get())),
session_(media_controller_.get()) {
// By default, we ignore session GetStats calls.
@@ -671,7 +722,7 @@ class StatsCollectorTest : public testing::Test {
void TestCertificateReports(
const rtc::FakeSSLCertificate& local_cert,
const std::vector<std::string>& local_ders,
- rtc::scoped_ptr<rtc::FakeSSLCertificate> remote_cert,
+ std::unique_ptr<rtc::FakeSSLCertificate> remote_cert,
const std::vector<std::string>& remote_ders) {
StatsCollectorForTest stats(&pc_);
@@ -694,7 +745,7 @@ class StatsCollectorTest : public testing::Test {
// Fake certificate to report
rtc::scoped_refptr<rtc::RTCCertificate> local_certificate(
- rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::FakeSSLIdentity>(
+ rtc::RTCCertificate::Create(std::unique_ptr<rtc::FakeSSLIdentity>(
new rtc::FakeSSLIdentity(local_cert))));
// Configure MockWebRtcSession
@@ -756,9 +807,11 @@ class StatsCollectorTest : public testing::Test {
srtp_crypto_suite);
}
+ rtc::Thread* const worker_thread_;
+ rtc::Thread* const network_thread_;
cricket::FakeMediaEngine* media_engine_;
- rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
- rtc::scoped_ptr<webrtc::MediaControllerInterface> media_controller_;
+ std::unique_ptr<cricket::ChannelManager> channel_manager_;
+ std::unique_ptr<webrtc::MediaControllerInterface> media_controller_;
MockWebRtcSession session_;
MockPeerConnection pc_;
FakeDataChannelProvider data_channel_provider_;
@@ -769,6 +822,29 @@ class StatsCollectorTest : public testing::Test {
std::vector<rtc::scoped_refptr<DataChannel>> data_channels_;
};
+TEST_F(StatsCollectorTest, FilterOutNegativeDataChannelId) {
+ const std::string label = "hacks";
+ // The data channel id is from the Config which is -1 initially.
+ const int id = -1;
+ const std::string state = DataChannelInterface::DataStateString(
+ DataChannelInterface::DataState::kConnecting);
+
+ AddDataChannel(cricket::DCT_SCTP, label, id);
+ StatsCollectorForTest stats(&pc_);
+
+ stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+
+ StatsReports reports;
+ stats.GetStats(NULL, &reports);
+
+ const StatsReport* report =
+ FindNthReportByType(reports, StatsReport::kStatsReportTypeDataChannel, 1);
+
+ std::string value_in_report;
+ EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameDataChannelId,
+ &value_in_report));
+}
+
// Verify that ExtractDataInfo populates reports.
TEST_F(StatsCollectorTest, ExtractDataInfo) {
const std::string label = "hacks";
@@ -824,8 +900,9 @@ TEST_F(StatsCollectorTest, BytesCounterHandles64Bits) {
Return(true)));
MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
- cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
- nullptr, kVideoChannelName, false);
+ cricket::VideoChannel video_channel(worker_thread_, network_thread_,
+ media_channel, nullptr, kVideoChannelName,
+ false);
StatsReports reports; // returned values.
cricket::VideoSenderInfo video_sender_info;
cricket::VideoMediaInfo stats_read;
@@ -870,8 +947,9 @@ TEST_F(StatsCollectorTest, BandwidthEstimationInfoIsReported) {
Return(true)));
MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
- cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
- nullptr, kVideoChannelName, false);
+ cricket::VideoChannel video_channel(worker_thread_, network_thread_,
+ media_channel, nullptr, kVideoChannelName,
+ false);
StatsReports reports; // returned values.
cricket::VideoSenderInfo video_sender_info;
@@ -945,8 +1023,8 @@ TEST_F(StatsCollectorTest, TrackObjectExistsWithoutUpdateStats) {
StatsCollectorForTest stats(&pc_);
MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
- cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
- nullptr, "video", false);
+ cricket::VideoChannel video_channel(worker_thread_, network_thread_,
+ media_channel, nullptr, "video", false);
AddOutgoingVideoTrackStats();
stats.AddStream(stream_);
@@ -981,8 +1059,9 @@ TEST_F(StatsCollectorTest, TrackAndSsrcObjectExistAfterUpdateSsrcStats) {
Return(true)));
MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
- cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
- nullptr, kVideoChannelName, false);
+ cricket::VideoChannel video_channel(worker_thread_, network_thread_,
+ media_channel, nullptr, kVideoChannelName,
+ false);
AddOutgoingVideoTrackStats();
stats.AddStream(stream_);
@@ -1049,8 +1128,8 @@ TEST_F(StatsCollectorTest, TransportObjectLinkedFromSsrcObject) {
MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
// The transport_name known by the video channel.
const std::string kVcName("vcname");
- cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
- nullptr, kVcName, false);
+ cricket::VideoChannel video_channel(worker_thread_, network_thread_,
+ media_channel, nullptr, kVcName, false);
AddOutgoingVideoTrackStats();
stats.AddStream(stream_);
@@ -1107,8 +1186,8 @@ TEST_F(StatsCollectorTest, RemoteSsrcInfoIsAbsent) {
MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
// The transport_name known by the video channel.
const std::string kVcName("vcname");
- cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
- nullptr, kVcName, false);
+ cricket::VideoChannel video_channel(worker_thread_, network_thread_,
+ media_channel, nullptr, kVcName, false);
AddOutgoingVideoTrackStats();
stats.AddStream(stream_);
@@ -1133,8 +1212,8 @@ TEST_F(StatsCollectorTest, RemoteSsrcInfoIsPresent) {
MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
// The transport_name known by the video channel.
const std::string kVcName("vcname");
- cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
- nullptr, kVcName, false);
+ cricket::VideoChannel video_channel(worker_thread_, network_thread_,
+ media_channel, nullptr, kVcName, false);
AddOutgoingVideoTrackStats();
stats.AddStream(stream_);
@@ -1188,8 +1267,9 @@ TEST_F(StatsCollectorTest, ReportsFromRemoteTrack) {
Return(true)));
MockVideoMediaChannel* media_channel = new MockVideoMediaChannel();
- cricket::VideoChannel video_channel(rtc::Thread::Current(), media_channel,
- nullptr, kVideoChannelName, false);
+ cricket::VideoChannel video_channel(worker_thread_, network_thread_,
+ media_channel, nullptr, kVideoChannelName,
+ false);
AddIncomingVideoTrackStats();
stats.AddStream(stream_);
@@ -1339,7 +1419,7 @@ TEST_F(StatsCollectorTest, ChainedCertificateReportsCreated) {
remote_ders[1] = "non-";
remote_ders[2] = "intersecting";
remote_ders[3] = "set";
- rtc::scoped_ptr<rtc::FakeSSLCertificate> remote_cert(
+ std::unique_ptr<rtc::FakeSSLCertificate> remote_cert(
new rtc::FakeSSLCertificate(DersToPems(remote_ders)));
TestCertificateReports(local_cert, local_ders, std::move(remote_cert),
@@ -1355,7 +1435,7 @@ TEST_F(StatsCollectorTest, ChainlessCertificateReportsCreated) {
// Build remote certificate.
std::string remote_der = "This is somebody else's der.";
- rtc::scoped_ptr<rtc::FakeSSLCertificate> remote_cert(
+ std::unique_ptr<rtc::FakeSSLCertificate> remote_cert(
new rtc::FakeSSLCertificate(DerToPem(remote_der)));
TestCertificateReports(local_cert, std::vector<std::string>(1, local_der),
@@ -1445,7 +1525,7 @@ TEST_F(StatsCollectorTest, NoCertificates) {
transport_stats;
// Fake transport object.
- rtc::scoped_ptr<cricket::FakeTransport> transport(
+ std::unique_ptr<cricket::FakeTransport> transport(
new cricket::FakeTransport(transport_stats.transport_name));
// Configure MockWebRtcSession
@@ -1479,7 +1559,7 @@ TEST_F(StatsCollectorTest, UnsupportedDigestIgnored) {
// Build a remote certificate with an unsupported digest algorithm.
std::string remote_der = "This is somebody else's der.";
- rtc::scoped_ptr<rtc::FakeSSLCertificate> remote_cert(
+ std::unique_ptr<rtc::FakeSSLCertificate> remote_cert(
new rtc::FakeSSLCertificate(DerToPem(remote_der)));
remote_cert->set_digest_algorithm("foobar");
@@ -1487,6 +1567,113 @@ TEST_F(StatsCollectorTest, UnsupportedDigestIgnored) {
std::move(remote_cert), std::vector<std::string>());
}
+// This test verifies that the audio/video related stats which are -1 initially
+// will be filtered out.
+TEST_F(StatsCollectorTest, FilterOutNegativeInitialValues) {
+ StatsCollectorForTest stats(&pc_);
+
+ EXPECT_CALL(session_, GetLocalCertificate(_, _))
+ .WillRepeatedly(Return(false));
+ EXPECT_CALL(session_, GetRemoteSSLCertificate_ReturnsRawPointer(_))
+ .WillRepeatedly(Return(nullptr));
+
+ MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
+ // The transport_name known by the voice channel.
+ const std::string kVcName("vcname");
+ cricket::VoiceChannel voice_channel(worker_thread_, network_thread_,
+ media_engine_, media_channel, nullptr,
+ kVcName, false);
+
+ // Create a local stream with a local audio track and adds it to the stats.
+ if (stream_ == NULL)
+ stream_ = webrtc::MediaStream::Create("streamlabel");
+
+ rtc::scoped_refptr<FakeAudioTrackWithInitValue> local_track(
+ new rtc::RefCountedObject<FakeAudioTrackWithInitValue>(kLocalTrackId));
+ stream_->AddTrack(local_track);
+ EXPECT_CALL(session_, GetLocalTrackIdBySsrc(kSsrcOfTrack, _))
+ .WillOnce(DoAll(SetArgPointee<1>(kLocalTrackId), Return(true)));
+ stats.AddStream(stream_);
+ stats.AddLocalAudioTrack(local_track.get(), kSsrcOfTrack);
+
+ // Create a remote stream with a remote audio track and adds it to the stats.
+ rtc::scoped_refptr<webrtc::MediaStream> remote_stream(
+ webrtc::MediaStream::Create("remotestreamlabel"));
+ rtc::scoped_refptr<FakeAudioTrackWithInitValue> remote_track(
+ new rtc::RefCountedObject<FakeAudioTrackWithInitValue>(kRemoteTrackId));
+ EXPECT_CALL(session_, GetRemoteTrackIdBySsrc(kSsrcOfTrack, _))
+ .WillOnce(DoAll(SetArgPointee<1>(kRemoteTrackId), Return(true)));
+ remote_stream->AddTrack(remote_track);
+ stats.AddStream(remote_stream);
+
+ // Instruct the session to return stats containing the transport channel.
+ InitSessionStats(kVcName);
+ EXPECT_CALL(session_, GetTransportStats(_))
+ .WillRepeatedly(DoAll(SetArgPointee<0>(session_stats_), Return(true)));
+
+ cricket::VoiceSenderInfo voice_sender_info;
+ voice_sender_info.add_ssrc(kSsrcOfTrack);
+ // These values are set to -1 initially in audio_send_stream.
+ // The voice_sender_info will read the values from audio_send_stream.
+ voice_sender_info.rtt_ms = -1;
+ voice_sender_info.packets_lost = -1;
+ voice_sender_info.jitter_ms = -1;
+
+ // Some of the contents in |voice_sender_info| needs to be updated from the
+ // |audio_track_|.
+ UpdateVoiceSenderInfoFromAudioTrack(local_track.get(), &voice_sender_info);
+
+ cricket::VoiceReceiverInfo voice_receiver_info;
+ voice_receiver_info.add_ssrc(kSsrcOfTrack);
+ voice_receiver_info.capture_start_ntp_time_ms = -1;
+ voice_receiver_info.audio_level = -1;
+
+ // Constructs an ssrc stats update.
+ cricket::VoiceMediaInfo stats_read;
+ stats_read.senders.push_back(voice_sender_info);
+ stats_read.receivers.push_back(voice_receiver_info);
+
+ EXPECT_CALL(session_, voice_channel()).WillRepeatedly(Return(&voice_channel));
+ EXPECT_CALL(session_, video_channel()).WillRepeatedly(ReturnNull());
+ EXPECT_CALL(*media_channel, GetStats(_))
+ .WillRepeatedly(DoAll(SetArgPointee<0>(stats_read), Return(true)));
+
+ StatsReports reports;
+ stats.UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard);
+
+ // Get stats for the local track.
+ stats.GetStats(local_track.get(), &reports);
+ const StatsReport* report =
+ FindNthReportByType(reports, StatsReport::kStatsReportTypeSsrc, 1);
+ EXPECT_TRUE(report);
+ // The -1 will not be added to the stats report.
+ std::string value_in_report;
+ EXPECT_FALSE(
+ GetValue(report, StatsReport::kStatsValueNameRtt, &value_in_report));
+ EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNamePacketsLost,
+ &value_in_report));
+ EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameJitterReceived,
+ &value_in_report));
+ EXPECT_FALSE(GetValue(report,
+ StatsReport::kStatsValueNameEchoCancellationQualityMin,
+ &value_in_report));
+ EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameEchoDelayMedian,
+ &value_in_report));
+ EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameEchoDelayStdDev,
+ &value_in_report));
+
+ // Get stats for the remote track.
+ reports.clear();
+ stats.GetStats(remote_track.get(), &reports);
+ report = FindNthReportByType(reports, StatsReport::kStatsReportTypeSsrc, 1);
+ EXPECT_TRUE(report);
+ EXPECT_FALSE(GetValue(report,
+ StatsReport::kStatsValueNameCaptureStartNtpTimeMs,
+ &value_in_report));
+ EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameAudioInputLevel,
+ &value_in_report));
+}
+
// This test verifies that a local stats object can get statistics via
// AudioTrackInterface::GetStats() method.
TEST_F(StatsCollectorTest, GetStatsFromLocalAudioTrack) {
@@ -1500,8 +1687,9 @@ TEST_F(StatsCollectorTest, GetStatsFromLocalAudioTrack) {
MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
// The transport_name known by the voice channel.
const std::string kVcName("vcname");
- cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
- media_channel, nullptr, kVcName, false);
+ cricket::VoiceChannel voice_channel(worker_thread_, network_thread_,
+ media_engine_, media_channel, nullptr,
+ kVcName, false);
AddOutgoingAudioTrackStats();
stats.AddStream(stream_);
stats.AddLocalAudioTrack(audio_track_, kSsrcOfTrack);
@@ -1535,8 +1723,9 @@ TEST_F(StatsCollectorTest, GetStatsFromRemoteStream) {
MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
// The transport_name known by the voice channel.
const std::string kVcName("vcname");
- cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
- media_channel, nullptr, kVcName, false);
+ cricket::VoiceChannel voice_channel(worker_thread_, network_thread_,
+ media_engine_, media_channel, nullptr,
+ kVcName, false);
AddIncomingAudioTrackStats();
stats.AddStream(stream_);
@@ -1564,8 +1753,9 @@ TEST_F(StatsCollectorTest, GetStatsAfterRemoveAudioStream) {
MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
// The transport_name known by the voice channel.
const std::string kVcName("vcname");
- cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
- media_channel, nullptr, kVcName, false);
+ cricket::VoiceChannel voice_channel(worker_thread_, network_thread_,
+ media_engine_, media_channel, nullptr,
+ kVcName, false);
AddOutgoingAudioTrackStats();
stats.AddStream(stream_);
stats.AddLocalAudioTrack(audio_track_.get(), kSsrcOfTrack);
@@ -1625,8 +1815,9 @@ TEST_F(StatsCollectorTest, LocalAndRemoteTracksWithSameSsrc) {
MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
// The transport_name known by the voice channel.
const std::string kVcName("vcname");
- cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
- media_channel, nullptr, kVcName, false);
+ cricket::VoiceChannel voice_channel(worker_thread_, network_thread_,
+ media_engine_, media_channel, nullptr,
+ kVcName, false);
// Create a local stream with a local audio track and adds it to the stats.
AddOutgoingAudioTrackStats();
@@ -1712,8 +1903,9 @@ TEST_F(StatsCollectorTest, TwoLocalTracksWithSameSsrc) {
MockVoiceMediaChannel* media_channel = new MockVoiceMediaChannel();
// The transport_name known by the voice channel.
const std::string kVcName("vcname");
- cricket::VoiceChannel voice_channel(rtc::Thread::Current(), media_engine_,
- media_channel, nullptr, kVcName, false);
+ cricket::VoiceChannel voice_channel(worker_thread_, network_thread_,
+ media_engine_, media_channel, nullptr,
+ kVcName, false);
// Create a local stream with a local audio track and adds it to the stats.
AddOutgoingAudioTrackStats();
diff --git a/chromium/third_party/webrtc/api/statstypes.cc b/chromium/third_party/webrtc/api/statstypes.cc
index feeead43745..61af82467a7 100644
--- a/chromium/third_party/webrtc/api/statstypes.cc
+++ b/chromium/third_party/webrtc/api/statstypes.cc
@@ -363,6 +363,8 @@ bool StatsReport::Value::bool_val() const {
const char* StatsReport::Value::display_name() const {
switch (name) {
+ case kStatsValueNameAecDivergentFilterFraction:
+ return "aecDivergentFilterFraction";
case kStatsValueNameAudioOutputLevel:
return "audioOutputLevel";
case kStatsValueNameAudioInputLevel:
diff --git a/chromium/third_party/webrtc/api/statstypes.h b/chromium/third_party/webrtc/api/statstypes.h
index 9a44724e8fd..4f58b975403 100644
--- a/chromium/third_party/webrtc/api/statstypes.h
+++ b/chromium/third_party/webrtc/api/statstypes.h
@@ -21,9 +21,9 @@
#include "webrtc/base/basictypes.h"
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/linked_ptr.h"
#include "webrtc/base/refcount.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/stringencode.h"
#include "webrtc/base/thread_checker.h"
@@ -99,6 +99,7 @@ class StatsReport {
enum StatsValueName {
kStatsValueNameActiveConnection,
+ kStatsValueNameAecDivergentFilterFraction,
kStatsValueNameAudioInputLevel,
kStatsValueNameAudioOutputLevel,
kStatsValueNameBytesReceived,
diff --git a/chromium/third_party/webrtc/api/test/fakeaudiocapturemodule.cc b/chromium/third_party/webrtc/api/test/fakeaudiocapturemodule.cc
index 0b6478ed2f6..a32ef64d037 100644
--- a/chromium/third_party/webrtc/api/test/fakeaudiocapturemodule.cc
+++ b/chromium/third_party/webrtc/api/test/fakeaudiocapturemodule.cc
@@ -23,7 +23,7 @@ static const int kHighSampleValue = 10000;
// Same value as src/modules/audio_device/main/source/audio_device_config.h in
// https://code.google.com/p/webrtc/
-static const uint32_t kAdmMaxIdleTimeProcess = 1000;
+static const int kAdmMaxIdleTimeProcess = 1000;
// Constants here are derived by running VoE using a real ADM.
// The constants correspond to 10ms of mono audio at 44kHz.
@@ -73,12 +73,12 @@ int FakeAudioCaptureModule::frames_received() const {
}
int64_t FakeAudioCaptureModule::TimeUntilNextProcess() {
- const uint32_t current_time = rtc::Time();
+ const int64_t current_time = rtc::TimeMillis();
if (current_time < last_process_time_ms_) {
// TODO: wraparound could be handled more gracefully.
return 0;
}
- const uint32_t elapsed_time = current_time - last_process_time_ms_;
+ const int64_t elapsed_time = current_time - last_process_time_ms_;
if (kAdmMaxIdleTimeProcess < elapsed_time) {
return 0;
}
@@ -86,7 +86,7 @@ int64_t FakeAudioCaptureModule::TimeUntilNextProcess() {
}
void FakeAudioCaptureModule::Process() {
- last_process_time_ms_ = rtc::Time();
+ last_process_time_ms_ = rtc::TimeMillis();
}
int32_t FakeAudioCaptureModule::ActiveAudioLayer(
@@ -205,8 +205,7 @@ int32_t FakeAudioCaptureModule::InitRecording() {
}
bool FakeAudioCaptureModule::RecordingIsInitialized() const {
- ASSERT(false);
- return 0;
+ return rec_is_initialized_;
}
int32_t FakeAudioCaptureModule::StartPlayout() {
@@ -591,7 +590,7 @@ bool FakeAudioCaptureModule::Initialize() {
// sent to it. Note that the audio processing pipeline will likely distort the
// original signal.
SetSendBuffer(kHighSampleValue);
- last_process_time_ms_ = rtc::Time();
+ last_process_time_ms_ = rtc::TimeMillis();
return true;
}
@@ -650,7 +649,7 @@ void FakeAudioCaptureModule::StartProcessP() {
void FakeAudioCaptureModule::ProcessFrameP() {
ASSERT(process_thread_->IsCurrent());
if (!started_) {
- next_frame_time_ = rtc::Time();
+ next_frame_time_ = rtc::TimeMillis();
started_ = true;
}
@@ -666,8 +665,8 @@ void FakeAudioCaptureModule::ProcessFrameP() {
}
next_frame_time_ += kTimePerFrameMs;
- const uint32_t current_time = rtc::Time();
- const uint32_t wait_time =
+ const int64_t current_time = rtc::TimeMillis();
+ const int64_t wait_time =
(next_frame_time_ > current_time) ? next_frame_time_ - current_time : 0;
process_thread_->PostDelayed(wait_time, this, MSG_RUN_PROCESS);
}
diff --git a/chromium/third_party/webrtc/api/test/fakeaudiocapturemodule.h b/chromium/third_party/webrtc/api/test/fakeaudiocapturemodule.h
index 9200bdf6f31..f89249ad2f9 100644
--- a/chromium/third_party/webrtc/api/test/fakeaudiocapturemodule.h
+++ b/chromium/third_party/webrtc/api/test/fakeaudiocapturemodule.h
@@ -20,10 +20,11 @@
#ifndef WEBRTC_API_TEST_FAKEAUDIOCAPTUREMODULE_H_
#define WEBRTC_API_TEST_FAKEAUDIOCAPTUREMODULE_H_
+#include <memory>
+
#include "webrtc/base/basictypes.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/messagehandler.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
@@ -172,12 +173,12 @@ class FakeAudioCaptureModule
int32_t ResetAudioDevice() override;
int32_t SetLoudspeakerStatus(bool enable) override;
int32_t GetLoudspeakerStatus(bool* enabled) const override;
- virtual bool BuiltInAECIsAvailable() const { return false; }
- virtual int32_t EnableBuiltInAEC(bool enable) { return -1; }
- virtual bool BuiltInAGCIsAvailable() const { return false; }
- virtual int32_t EnableBuiltInAGC(bool enable) { return -1; }
- virtual bool BuiltInNSIsAvailable() const { return false; }
- virtual int32_t EnableBuiltInNS(bool enable) { return -1; }
+ bool BuiltInAECIsAvailable() const override { return false; }
+ int32_t EnableBuiltInAEC(bool enable) override { return -1; }
+ bool BuiltInAGCIsAvailable() const override { return false; }
+ int32_t EnableBuiltInAGC(bool enable) override { return -1; }
+ bool BuiltInNSIsAvailable() const override { return false; }
+ int32_t EnableBuiltInNS(bool enable) override { return -1; }
// End of functions inherited from webrtc::AudioDeviceModule.
// The following function is inherited from rtc::MessageHandler.
@@ -225,7 +226,7 @@ class FakeAudioCaptureModule
// The time in milliseconds when Process() was last called or 0 if no call
// has been made.
- uint32_t last_process_time_ms_;
+ int64_t last_process_time_ms_;
// Callback for playout and recording.
webrtc::AudioTransport* audio_callback_;
@@ -245,9 +246,9 @@ class FakeAudioCaptureModule
// wall clock time the next frame should be generated and received. started_
// ensures that next_frame_time_ can be initialized properly on first call.
bool started_;
- uint32_t next_frame_time_;
+ int64_t next_frame_time_;
- rtc::scoped_ptr<rtc::Thread> process_thread_;
+ std::unique_ptr<rtc::Thread> process_thread_;
// Buffer for storing samples received from the webrtc::AudioTransport.
char rec_buffer_[kNumberSamples * kNumberBytesPerSample];
diff --git a/chromium/third_party/webrtc/api/test/fakeaudiocapturemodule_unittest.cc b/chromium/third_party/webrtc/api/test/fakeaudiocapturemodule_unittest.cc
index 8ac1acce2b5..d0dcd85012b 100644
--- a/chromium/third_party/webrtc/api/test/fakeaudiocapturemodule_unittest.cc
+++ b/chromium/third_party/webrtc/api/test/fakeaudiocapturemodule_unittest.cc
@@ -31,7 +31,7 @@ class FakeAdmTest : public testing::Test,
memset(rec_buffer_, 0, sizeof(rec_buffer_));
}
- virtual void SetUp() {
+ void SetUp() override {
fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
EXPECT_TRUE(fake_audio_capture_module_.get() != NULL);
}
diff --git a/chromium/third_party/webrtc/api/test/fakedtlsidentitystore.h b/chromium/third_party/webrtc/api/test/fakedtlsidentitystore.h
index 8bbffbf93e9..58de38fb326 100644
--- a/chromium/third_party/webrtc/api/test/fakedtlsidentitystore.h
+++ b/chromium/third_party/webrtc/api/test/fakedtlsidentitystore.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_API_TEST_FAKEDTLSIDENTITYSERVICE_H_
#define WEBRTC_API_TEST_FAKEDTLSIDENTITYSERVICE_H_
+#include <memory>
#include <string>
#include <utility>
@@ -18,67 +19,104 @@
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/base/rtccertificate.h"
-static const struct {
- const char* rsa_private_key_pem;
- const char* cert_pem;
-} kKeysAndCerts[] = {
- {"-----BEGIN RSA PRIVATE KEY-----\n"
- "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMYRkbhmI7kVA/rM\n"
- "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
- "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
- "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAECgYAvgOs4FJcgvp+TuREx7YtiYVsH\n"
- "mwQPTum2z/8VzWGwR8BBHBvIpVe1MbD/Y4seyI2aco/7UaisatSgJhsU46/9Y4fq\n"
- "2TwXH9QANf4at4d9n/R6rzwpAJOpgwZgKvdQjkfrKTtgLV+/dawvpxUYkRH4JZM1\n"
- "CVGukMfKNrSVH4Ap4QJBAOJmGV1ASPnB4r4nc99at7JuIJmd7fmuVUwUgYi4XgaR\n"
- "WhScBsgYwZ/JoywdyZJgnbcrTDuVcWG56B3vXbhdpMsCQQDf9zeJrjnPZ3Cqm79y\n"
- "kdqANep0uwZciiNiWxsQrCHztywOvbFhdp8iYVFG9EK8DMY41Y5TxUwsHD+67zao\n"
- "ZNqJAkEA1suLUP/GvL8IwuRneQd2tWDqqRQ/Td3qq03hP7e77XtF/buya3Ghclo5\n"
- "54czUR89QyVfJEC6278nzA7n2h1uVQJAcG6mztNL6ja/dKZjYZye2CY44QjSlLo0\n"
- "MTgTSjdfg/28fFn2Jjtqf9Pi/X+50LWI/RcYMC2no606wRk9kyOuIQJBAK6VSAim\n"
- "1pOEjsYQn0X5KEIrz1G3bfCbB848Ime3U2/FWlCHMr6ch8kCZ5d1WUeJD3LbwMNG\n"
- "UCXiYxSsu20QNVw=\n"
- "-----END RSA PRIVATE KEY-----\n",
- "-----BEGIN CERTIFICATE-----\n"
- "MIIBmTCCAQKgAwIBAgIEbzBSAjANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDEwZX\n"
- "ZWJSVEMwHhcNMTQwMTAyMTgyNDQ3WhcNMTQwMjAxMTgyNDQ3WjARMQ8wDQYDVQQD\n"
- "EwZXZWJSVEMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMYRkbhmI7kVA/rM\n"
- "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
- "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
- "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAEwDQYJKoZIhvcNAQELBQADgYEAUflI\n"
- "VUe5Krqf5RVa5C3u/UTAOAUJBiDS3VANTCLBxjuMsvqOG0WvaYWP3HYPgrz0jXK2\n"
- "LJE/mGw3MyFHEqi81jh95J+ypl6xKW6Rm8jKLR87gUvCaVYn/Z4/P3AqcQTB7wOv\n"
- "UD0A8qfhfDM+LK6rPAnCsVN0NRDY3jvd6rzix9M=\n"
- "-----END CERTIFICATE-----\n"},
- {"-----BEGIN RSA PRIVATE KEY-----\n"
- "MIICXQIBAAKBgQDeYqlyJ1wuiMsi905e3X81/WA/G3ym50PIDZBVtSwZi7JVQPgj\n"
- "Bl8CPZMvDh9EwB4Ji9ytA8dZZbQ4WbJWPr73zPpJSCvQqz6sOXSlenBRi72acNaQ\n"
- "sOR/qPvviJx5I6Hqo4qemfnjZhAW85a5BpgrAwKgMLIQTHCTLWwVSyrDrwIDAQAB\n"
- "AoGARni9eY8/hv+SX+I+05EdXt6MQXNUbQ+cSykBNCfVccLzIFEWUQMT2IHqwl6X\n"
- "ShIXcq7/n1QzOAEiuzixauM3YHg4xZ1Um2Ha9a7ig5Xg4v6b43bmMkNE6LkoAtYs\n"
- "qnQdfMh442b1liDud6IMb1Qk0amt3fSrgRMc547TZQVx4QECQQDxUeDm94r3p4ng\n"
- "5rCLLC1K5/6HSTZsh7jatKPlz7GfP/IZlYV7iE5784/n0wRiCjZOS7hQRy/8m2Gp\n"
- "pf4aZq+DAkEA6+np4d36FYikydvUrupLT3FkdRHGn/v83qOll/VmeNh+L1xMZlIP\n"
- "tM26hAXCcQb7O5+J9y3cx2CAQsBS11ZXZQJAfGgTo76WG9p5UEJdXUInD2jOZPwv\n"
- "XIATolxh6kXKcijLLLlSmT7KB0inNYIpzkkpee+7U1d/u6B3FriGaSHq9QJBAM/J\n"
- "ICnDdLCgwNvWVraVQC3BpwSB2pswvCFwq7py94V60XFvbw80Ogc6qIv98qvQxVlX\n"
- "hJIEgA/PjEi+0ng94Q0CQQDm8XSDby35gmjO+6eRmJtAjtB7nguLvrPXM6CPXRmD\n"
- "sRoBocpHw6j9UdzZ6qYG0FkdXZghezXFY58ro2BYYRR3\n"
- "-----END RSA PRIVATE KEY-----\n",
- "-----BEGIN CERTIFICATE-----\n"
- "MIICWDCCAcGgAwIBAgIJALgDjxMbBOhbMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\n"
- "BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\n"
- "aWRnaXRzIFB0eSBMdGQwHhcNMTUxMTEzMjIzMjEzWhcNMTYxMTEyMjIzMjEzWjBF\n"
- "MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\n"
- "ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB\n"
- "gQDeYqlyJ1wuiMsi905e3X81/WA/G3ym50PIDZBVtSwZi7JVQPgjBl8CPZMvDh9E\n"
- "wB4Ji9ytA8dZZbQ4WbJWPr73zPpJSCvQqz6sOXSlenBRi72acNaQsOR/qPvviJx5\n"
- "I6Hqo4qemfnjZhAW85a5BpgrAwKgMLIQTHCTLWwVSyrDrwIDAQABo1AwTjAdBgNV\n"
- "HQ4EFgQUx2tbJdlcSTCepn09UdYORXKuSTAwHwYDVR0jBBgwFoAUx2tbJdlcSTCe\n"
- "pn09UdYORXKuSTAwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOBgQAmp9Id\n"
- "E716gHMqeBG4S2FCgVFCr0a0ugkaneQAN/c2L9CbMemEN9W6jvucUIVOtYd90dDW\n"
- "lXuowWmT/JctPe3D2qt4yvYW3puECHk2tVQmrJOZiZiTRtWm6HxkmoUYHYp/DtaS\n"
- "1Xe29gSTnZtI5sQCrGMzk3SGRSSs7ejLKiVDBQ==\n"
- "-----END CERTIFICATE-----\n"}};
+// RSA with mod size 1024, pub exp 0x10001.
+static const rtc::RTCCertificatePEM kRsaPems[] = {
+ rtc::RTCCertificatePEM(
+ "-----BEGIN RSA PRIVATE KEY-----\n"
+ "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMYRkbhmI7kVA/rM\n"
+ "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+ "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+ "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAECgYAvgOs4FJcgvp+TuREx7YtiYVsH\n"
+ "mwQPTum2z/8VzWGwR8BBHBvIpVe1MbD/Y4seyI2aco/7UaisatSgJhsU46/9Y4fq\n"
+ "2TwXH9QANf4at4d9n/R6rzwpAJOpgwZgKvdQjkfrKTtgLV+/dawvpxUYkRH4JZM1\n"
+ "CVGukMfKNrSVH4Ap4QJBAOJmGV1ASPnB4r4nc99at7JuIJmd7fmuVUwUgYi4XgaR\n"
+ "WhScBsgYwZ/JoywdyZJgnbcrTDuVcWG56B3vXbhdpMsCQQDf9zeJrjnPZ3Cqm79y\n"
+ "kdqANep0uwZciiNiWxsQrCHztywOvbFhdp8iYVFG9EK8DMY41Y5TxUwsHD+67zao\n"
+ "ZNqJAkEA1suLUP/GvL8IwuRneQd2tWDqqRQ/Td3qq03hP7e77XtF/buya3Ghclo5\n"
+ "54czUR89QyVfJEC6278nzA7n2h1uVQJAcG6mztNL6ja/dKZjYZye2CY44QjSlLo0\n"
+ "MTgTSjdfg/28fFn2Jjtqf9Pi/X+50LWI/RcYMC2no606wRk9kyOuIQJBAK6VSAim\n"
+ "1pOEjsYQn0X5KEIrz1G3bfCbB848Ime3U2/FWlCHMr6ch8kCZ5d1WUeJD3LbwMNG\n"
+ "UCXiYxSsu20QNVw=\n"
+ "-----END RSA PRIVATE KEY-----\n",
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIIBmTCCAQKgAwIBAgIEbzBSAjANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDEwZX\n"
+ "ZWJSVEMwHhcNMTQwMTAyMTgyNDQ3WhcNMTQwMjAxMTgyNDQ3WjARMQ8wDQYDVQQD\n"
+ "EwZXZWJSVEMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMYRkbhmI7kVA/rM\n"
+ "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
+ "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
+ "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAEwDQYJKoZIhvcNAQELBQADgYEAUflI\n"
+ "VUe5Krqf5RVa5C3u/UTAOAUJBiDS3VANTCLBxjuMsvqOG0WvaYWP3HYPgrz0jXK2\n"
+ "LJE/mGw3MyFHEqi81jh95J+ypl6xKW6Rm8jKLR87gUvCaVYn/Z4/P3AqcQTB7wOv\n"
+ "UD0A8qfhfDM+LK6rPAnCsVN0NRDY3jvd6rzix9M=\n"
+ "-----END CERTIFICATE-----\n"),
+ rtc::RTCCertificatePEM(
+ "-----BEGIN RSA PRIVATE KEY-----\n"
+ "MIICXQIBAAKBgQDeYqlyJ1wuiMsi905e3X81/WA/G3ym50PIDZBVtSwZi7JVQPgj\n"
+ "Bl8CPZMvDh9EwB4Ji9ytA8dZZbQ4WbJWPr73zPpJSCvQqz6sOXSlenBRi72acNaQ\n"
+ "sOR/qPvviJx5I6Hqo4qemfnjZhAW85a5BpgrAwKgMLIQTHCTLWwVSyrDrwIDAQAB\n"
+ "AoGARni9eY8/hv+SX+I+05EdXt6MQXNUbQ+cSykBNCfVccLzIFEWUQMT2IHqwl6X\n"
+ "ShIXcq7/n1QzOAEiuzixauM3YHg4xZ1Um2Ha9a7ig5Xg4v6b43bmMkNE6LkoAtYs\n"
+ "qnQdfMh442b1liDud6IMb1Qk0amt3fSrgRMc547TZQVx4QECQQDxUeDm94r3p4ng\n"
+ "5rCLLC1K5/6HSTZsh7jatKPlz7GfP/IZlYV7iE5784/n0wRiCjZOS7hQRy/8m2Gp\n"
+ "pf4aZq+DAkEA6+np4d36FYikydvUrupLT3FkdRHGn/v83qOll/VmeNh+L1xMZlIP\n"
+ "tM26hAXCcQb7O5+J9y3cx2CAQsBS11ZXZQJAfGgTo76WG9p5UEJdXUInD2jOZPwv\n"
+ "XIATolxh6kXKcijLLLlSmT7KB0inNYIpzkkpee+7U1d/u6B3FriGaSHq9QJBAM/J\n"
+ "ICnDdLCgwNvWVraVQC3BpwSB2pswvCFwq7py94V60XFvbw80Ogc6qIv98qvQxVlX\n"
+ "hJIEgA/PjEi+0ng94Q0CQQDm8XSDby35gmjO+6eRmJtAjtB7nguLvrPXM6CPXRmD\n"
+ "sRoBocpHw6j9UdzZ6qYG0FkdXZghezXFY58ro2BYYRR3\n"
+ "-----END RSA PRIVATE KEY-----\n",
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIICWDCCAcGgAwIBAgIJALgDjxMbBOhbMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\n"
+ "BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\n"
+ "aWRnaXRzIFB0eSBMdGQwHhcNMTUxMTEzMjIzMjEzWhcNMTYxMTEyMjIzMjEzWjBF\n"
+ "MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\n"
+ "ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB\n"
+ "gQDeYqlyJ1wuiMsi905e3X81/WA/G3ym50PIDZBVtSwZi7JVQPgjBl8CPZMvDh9E\n"
+ "wB4Ji9ytA8dZZbQ4WbJWPr73zPpJSCvQqz6sOXSlenBRi72acNaQsOR/qPvviJx5\n"
+ "I6Hqo4qemfnjZhAW85a5BpgrAwKgMLIQTHCTLWwVSyrDrwIDAQABo1AwTjAdBgNV\n"
+ "HQ4EFgQUx2tbJdlcSTCepn09UdYORXKuSTAwHwYDVR0jBBgwFoAUx2tbJdlcSTCe\n"
+ "pn09UdYORXKuSTAwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOBgQAmp9Id\n"
+ "E716gHMqeBG4S2FCgVFCr0a0ugkaneQAN/c2L9CbMemEN9W6jvucUIVOtYd90dDW\n"
+ "lXuowWmT/JctPe3D2qt4yvYW3puECHk2tVQmrJOZiZiTRtWm6HxkmoUYHYp/DtaS\n"
+ "1Xe29gSTnZtI5sQCrGMzk3SGRSSs7ejLKiVDBQ==\n"
+ "-----END CERTIFICATE-----\n")
+};
+
+// ECDSA with EC_NIST_P256.
+// These PEM strings were created by generating an identity with
+// |SSLIdentity::Generate| and invoking |identity->PrivateKeyToPEMString()|,
+// |identity->PublicKeyToPEMString()| and
+// |identity->certificate().ToPEMString()|.
+static const rtc::RTCCertificatePEM kEcdsaPems[] = {
+ rtc::RTCCertificatePEM(
+ "-----BEGIN PRIVATE KEY-----\n"
+ "MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg+qaRsR5uHtqG689M\n"
+ "A3PHSJNeVpyi5wUKCft62h0UWy+hRANCAAS5Mjc85q9fVq4ln+zOPlaEC/Rzj5Pb\n"
+ "MVZtf1x/8k2KsbmyZoAMDX2yer/atEuXmItMe3yd6/DXnvboU//D3Lyt\n"
+ "-----END PRIVATE KEY-----\n",
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIIBFTCBu6ADAgECAgkA30tGY5XG7oowCgYIKoZIzj0EAwIwEDEOMAwGA1UEAwwF\n"
+ "dGVzdDMwHhcNMTYwNTA5MDkxODA4WhcNMTYwNjA5MDkxODA4WjAQMQ4wDAYDVQQD\n"
+ "DAV0ZXN0MzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABLkyNzzmr19WriWf7M4+\n"
+ "VoQL9HOPk9sxVm1/XH/yTYqxubJmgAwNfbJ6v9q0S5eYi0x7fJ3r8Nee9uhT/8Pc\n"
+ "vK0wCgYIKoZIzj0EAwIDSQAwRgIhAIIc3+CqfkZ9lLwTj1PvUtt3KhnqF2kD0War\n"
+ "cCoTBbCxAiEAyp9Cn4vo2ZBhRIVDKyoxmwak8Z0PAVhJAQaWCgoY2D4=\n"
+ "-----END CERTIFICATE-----\n"),
+ rtc::RTCCertificatePEM(
+ "-----BEGIN PRIVATE KEY-----\n"
+ "MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQghL/G4JRYnuDNbQuh\n"
+ "LqkytcE39Alsq6FItDVFgOesfCmhRANCAATd53FjPLyVUcwYguEPbSJM03fP6Rx5\n"
+ "GY1dEZ00+ZykjJI83VfDAyvmpRuGahNtBH0hc+7xkDCbeo6TM0tN35xr\n"
+ "-----END PRIVATE KEY-----\n",
+ "-----BEGIN CERTIFICATE-----\n"
+ "MIIBFDCBu6ADAgECAgkArZYdXMyJ5rswCgYIKoZIzj0EAwIwEDEOMAwGA1UEAwwF\n"
+ "dGVzdDQwHhcNMTYwNTA5MDkxODA4WhcNMTYwNjA5MDkxODA4WjAQMQ4wDAYDVQQD\n"
+ "DAV0ZXN0NDBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABN3ncWM8vJVRzBiC4Q9t\n"
+ "IkzTd8/pHHkZjV0RnTT5nKSMkjzdV8MDK+alG4ZqE20EfSFz7vGQMJt6jpMzS03f\n"
+ "nGswCgYIKoZIzj0EAwIDSAAwRQIgb/LBc8OtsC5lEDyjCP6M9xt5mwzUNrQBOFWZ\n"
+ "1fE/g68CIQD7uoFfbiq6dTp8ZwzbwQ8jJf08KjriamqA9OW/4268Dw==\n"
+ "-----END CERTIFICATE-----\n")
+};
class FakeDtlsIdentityStore : public webrtc::DtlsIdentityStoreInterface,
public rtc::MessageHandler {
@@ -100,62 +138,85 @@ class FakeDtlsIdentityStore : public webrtc::DtlsIdentityStoreInterface,
const rtc::Optional<uint64_t>& expires_ms,
const rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver>&
observer) override {
- // TODO(hbos): Should be able to generate KT_ECDSA too.
- RTC_DCHECK((key_params.type() == rtc::KT_RSA &&
- key_params.rsa_params().mod_size == 1024 &&
- key_params.rsa_params().pub_exp == 0x10001 &&
- !expires_ms) ||
- should_fail_);
+ // The certificates are created from constant PEM strings and use its coded
+ // expiration time, we do not support modifying it.
+ RTC_DCHECK(!expires_ms);
MessageData* msg = new MessageData(
rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver>(observer));
- rtc::Thread::Current()->Post(
- this, should_fail_ ? MSG_FAILURE : MSG_SUCCESS, msg);
+ uint32_t msg_id;
+ // Only supports RSA-1024-0x10001 and ECDSA-P256.
+ if (should_fail_) {
+ msg_id = MSG_FAILURE;
+ } else if (key_params.type() == rtc::KT_RSA) {
+ RTC_DCHECK(key_params.rsa_params().mod_size == 1024 &&
+ key_params.rsa_params().pub_exp == 0x10001);
+ msg_id = MSG_SUCCESS_RSA;
+ } else {
+ RTC_DCHECK(key_params.type() == rtc::KT_ECDSA &&
+ key_params.ec_curve() == rtc::EC_NIST_P256);
+ msg_id = MSG_SUCCESS_ECDSA;
+ }
+ rtc::Thread::Current()->Post(this, msg_id, msg);
}
static rtc::scoped_refptr<rtc::RTCCertificate> GenerateCertificate() {
- std::string cert;
- std::string key;
- rtc::SSLIdentity::PemToDer("CERTIFICATE", kKeysAndCerts[0].cert_pem, &cert);
- rtc::SSLIdentity::PemToDer("RSA PRIVATE KEY",
- kKeysAndCerts[0].rsa_private_key_pem, &key);
-
- std::string pem_cert = rtc::SSLIdentity::DerToPem(
- rtc::kPemTypeCertificate,
- reinterpret_cast<const unsigned char*>(cert.data()),
- cert.length());
- std::string pem_key = rtc::SSLIdentity::DerToPem(
- rtc::kPemTypeRsaPrivateKey,
- reinterpret_cast<const unsigned char*>(key.data()),
- key.length());
- rtc::scoped_ptr<rtc::SSLIdentity> identity(
- rtc::SSLIdentity::FromPEMStrings(pem_key, pem_cert));
-
+ std::unique_ptr<rtc::SSLIdentity> identity;
+ switch (rtc::KT_DEFAULT) {
+ case rtc::KT_RSA:
+ identity.reset(
+ rtc::SSLIdentity::FromPEMStrings(kRsaPems[0].private_key(),
+ kRsaPems[0].certificate()));
+ break;
+ case rtc::KT_ECDSA:
+ identity.reset(
+ rtc::SSLIdentity::FromPEMStrings(kEcdsaPems[0].private_key(),
+ kEcdsaPems[0].certificate()));
+ break;
+ default:
+ RTC_NOTREACHED();
+ }
return rtc::RTCCertificate::Create(std::move(identity));
}
private:
enum {
- MSG_SUCCESS,
+ MSG_SUCCESS_RSA,
+ MSG_SUCCESS_ECDSA,
MSG_FAILURE,
};
- const char* get_key() {
- return kKeysAndCerts[key_index_].rsa_private_key_pem;
+ const rtc::RTCCertificatePEM& get_pem(const rtc::KeyType& key_type) const {
+ switch (key_type) {
+ case rtc::KT_RSA:
+ return kRsaPems[key_index_];
+ case rtc::KT_ECDSA:
+ return kEcdsaPems[key_index_];
+ default:
+ RTC_NOTREACHED();
+ return kEcdsaPems[key_index_];
+ }
+ }
+ const std::string& get_key(const rtc::KeyType& key_type) const {
+ return get_pem(key_type).private_key();
+ }
+ const std::string& get_cert(const rtc::KeyType& key_type) const {
+ return get_pem(key_type).certificate();
}
- const char* get_cert() { return kKeysAndCerts[key_index_].cert_pem; }
// rtc::MessageHandler implementation.
- void OnMessage(rtc::Message* msg) {
+ void OnMessage(rtc::Message* msg) override {
MessageData* message_data = static_cast<MessageData*>(msg->pdata);
rtc::scoped_refptr<webrtc::DtlsIdentityRequestObserver> observer =
message_data->data();
switch (msg->message_id) {
- case MSG_SUCCESS: {
- std::string cert;
- std::string key;
- rtc::SSLIdentity::PemToDer("CERTIFICATE", get_cert(), &cert);
- rtc::SSLIdentity::PemToDer("RSA PRIVATE KEY", get_key(), &key);
- observer->OnSuccess(cert, key);
+ case MSG_SUCCESS_RSA:
+ case MSG_SUCCESS_ECDSA: {
+ rtc::KeyType key_type =
+ msg->message_id == MSG_SUCCESS_RSA ? rtc::KT_RSA : rtc::KT_ECDSA;
+ std::unique_ptr<rtc::SSLIdentity> identity(
+ rtc::SSLIdentity::FromPEMStrings(get_key(key_type),
+ get_cert(key_type)));
+ observer->OnSuccess(std::move(identity));
break;
}
case MSG_FAILURE:
diff --git a/chromium/third_party/webrtc/api/test/fakevideotrackrenderer.h b/chromium/third_party/webrtc/api/test/fakevideotrackrenderer.h
index 3bd3d0b0005..0d9b2488ab5 100644
--- a/chromium/third_party/webrtc/api/test/fakevideotrackrenderer.h
+++ b/chromium/third_party/webrtc/api/test/fakevideotrackrenderer.h
@@ -16,8 +16,7 @@
namespace webrtc {
-class FakeVideoTrackRenderer
- : public rtc::VideoSinkInterface<cricket::VideoFrame> {
+class FakeVideoTrackRenderer : public cricket::FakeVideoRenderer {
public:
FakeVideoTrackRenderer(VideoTrackInterface* video_track)
: video_track_(video_track) {
@@ -25,22 +24,7 @@ class FakeVideoTrackRenderer
}
~FakeVideoTrackRenderer() { video_track_->RemoveSink(this); }
- virtual void OnFrame(const cricket::VideoFrame& video_frame) override {
- fake_renderer_.OnFrame(video_frame);
- }
-
- int errors() const { return fake_renderer_.errors(); }
- int width() const { return fake_renderer_.width(); }
- int height() const { return fake_renderer_.height(); }
- webrtc::VideoRotation rotation() const { return fake_renderer_.rotation(); }
- bool black_frame() const { return fake_renderer_.black_frame(); }
-
- int num_rendered_frames() const {
- return fake_renderer_.num_rendered_frames();
- }
-
private:
- cricket::FakeVideoRenderer fake_renderer_;
rtc::scoped_refptr<VideoTrackInterface> video_track_;
};
diff --git a/chromium/third_party/webrtc/api/test/fakevideotracksource.h b/chromium/third_party/webrtc/api/test/fakevideotracksource.h
index 0b70a56b4e4..1cb264b736d 100644
--- a/chromium/third_party/webrtc/api/test/fakevideotracksource.h
+++ b/chromium/third_party/webrtc/api/test/fakevideotracksource.h
@@ -30,7 +30,6 @@ class FakeVideoTrackSource : public VideoTrackSource {
protected:
FakeVideoTrackSource()
: VideoTrackSource(&fake_video_capturer_,
- rtc::Thread::Current(),
false /* remote */) {}
virtual ~FakeVideoTrackSource() {}
diff --git a/chromium/third_party/webrtc/api/test/mockpeerconnectionobservers.h b/chromium/third_party/webrtc/api/test/mockpeerconnectionobservers.h
index bd593c2ab52..39a8f0134d6 100644
--- a/chromium/third_party/webrtc/api/test/mockpeerconnectionobservers.h
+++ b/chromium/third_party/webrtc/api/test/mockpeerconnectionobservers.h
@@ -13,6 +13,7 @@
#ifndef WEBRTC_API_TEST_MOCKPEERCONNECTIONOBSERVERS_H_
#define WEBRTC_API_TEST_MOCKPEERCONNECTIONOBSERVERS_H_
+#include <memory>
#include <string>
#include "webrtc/api/datachannelinterface.h"
@@ -44,7 +45,7 @@ class MockCreateSessionDescriptionObserver
private:
bool called_;
bool result_;
- rtc::scoped_ptr<SessionDescriptionInterface> desc_;
+ std::unique_ptr<SessionDescriptionInterface> desc_;
};
class MockSetSessionDescriptionObserver
diff --git a/chromium/third_party/webrtc/api/test/peerconnectiontestwrapper.cc b/chromium/third_party/webrtc/api/test/peerconnectiontestwrapper.cc
index 2ff9b5d0092..ed8e031afc3 100644
--- a/chromium/third_party/webrtc/api/test/peerconnectiontestwrapper.cc
+++ b/chromium/third_party/webrtc/api/test/peerconnectiontestwrapper.cc
@@ -15,7 +15,7 @@
#include "webrtc/api/test/mockpeerconnectionobservers.h"
#include "webrtc/api/test/peerconnectiontestwrapper.h"
#include "webrtc/base/gunit.h"
-#include "webrtc/p2p/client/fakeportallocator.h"
+#include "webrtc/p2p/base/fakeportallocator.h"
static const char kStreamLabelBase[] = "stream_label";
static const char kVideoTrackLabelBase[] = "video_track";
@@ -47,15 +47,20 @@ void PeerConnectionTestWrapper::Connect(PeerConnectionTestWrapper* caller,
caller, &PeerConnectionTestWrapper::ReceiveAnswerSdp);
}
-PeerConnectionTestWrapper::PeerConnectionTestWrapper(const std::string& name)
- : name_(name) {}
+PeerConnectionTestWrapper::PeerConnectionTestWrapper(
+ const std::string& name,
+ rtc::Thread* network_thread,
+ rtc::Thread* worker_thread)
+ : name_(name),
+ network_thread_(network_thread),
+ worker_thread_(worker_thread) {}
PeerConnectionTestWrapper::~PeerConnectionTestWrapper() {}
bool PeerConnectionTestWrapper::CreatePc(
const MediaConstraintsInterface* constraints) {
- rtc::scoped_ptr<cricket::PortAllocator> port_allocator(
- new cricket::FakePortAllocator(rtc::Thread::Current(), nullptr));
+ std::unique_ptr<cricket::PortAllocator> port_allocator(
+ new cricket::FakePortAllocator(network_thread_, nullptr));
fake_audio_capture_module_ = FakeAudioCaptureModule::Create();
if (fake_audio_capture_module_ == NULL) {
@@ -63,7 +68,7 @@ bool PeerConnectionTestWrapper::CreatePc(
}
peer_connection_factory_ = webrtc::CreatePeerConnectionFactory(
- rtc::Thread::Current(), rtc::Thread::Current(),
+ network_thread_, worker_thread_, rtc::Thread::Current(),
fake_audio_capture_module_, NULL, NULL);
if (!peer_connection_factory_) {
return false;
@@ -74,9 +79,9 @@ bool PeerConnectionTestWrapper::CreatePc(
webrtc::PeerConnectionInterface::IceServer ice_server;
ice_server.uri = "stun:stun.l.google.com:19302";
config.servers.push_back(ice_server);
- rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store(
- rtc::SSLStreamAdapter::HaveDtlsSrtp() ?
- new FakeDtlsIdentityStore() : nullptr);
+ std::unique_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store(
+ rtc::SSLStreamAdapter::HaveDtlsSrtp() ? new FakeDtlsIdentityStore()
+ : nullptr);
peer_connection_ = peer_connection_factory_->CreatePeerConnection(
config, constraints, std::move(port_allocator),
std::move(dtls_identity_store), this);
@@ -117,7 +122,7 @@ void PeerConnectionTestWrapper::OnDataChannel(
void PeerConnectionTestWrapper::OnSuccess(SessionDescriptionInterface* desc) {
// This callback should take the ownership of |desc|.
- rtc::scoped_ptr<SessionDescriptionInterface> owned_desc(desc);
+ std::unique_ptr<SessionDescriptionInterface> owned_desc(desc);
std::string sdp;
EXPECT_TRUE(desc->ToString(&sdp));
@@ -182,7 +187,7 @@ void PeerConnectionTestWrapper::SetRemoteDescription(const std::string& type,
void PeerConnectionTestWrapper::AddIceCandidate(const std::string& sdp_mid,
int sdp_mline_index,
const std::string& candidate) {
- rtc::scoped_ptr<webrtc::IceCandidateInterface> owned_candidate(
+ std::unique_ptr<webrtc::IceCandidateInterface> owned_candidate(
webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, candidate, NULL));
EXPECT_TRUE(peer_connection_->AddIceCandidate(owned_candidate.get()));
}
diff --git a/chromium/third_party/webrtc/api/test/peerconnectiontestwrapper.h b/chromium/third_party/webrtc/api/test/peerconnectiontestwrapper.h
index 25510b2de07..3272366c762 100644
--- a/chromium/third_party/webrtc/api/test/peerconnectiontestwrapper.h
+++ b/chromium/third_party/webrtc/api/test/peerconnectiontestwrapper.h
@@ -11,6 +11,8 @@
#ifndef WEBRTC_API_TEST_PEERCONNECTIONTESTWRAPPER_H_
#define WEBRTC_API_TEST_PEERCONNECTIONTESTWRAPPER_H_
+#include <memory>
+
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/api/test/fakeaudiocapturemodule.h"
#include "webrtc/api/test/fakeconstraints.h"
@@ -25,7 +27,9 @@ class PeerConnectionTestWrapper
static void Connect(PeerConnectionTestWrapper* caller,
PeerConnectionTestWrapper* callee);
- explicit PeerConnectionTestWrapper(const std::string& name);
+ PeerConnectionTestWrapper(const std::string& name,
+ rtc::Thread* network_thread,
+ rtc::Thread* worker_thread);
virtual ~PeerConnectionTestWrapper();
bool CreatePc(const webrtc::MediaConstraintsInterface* constraints);
@@ -88,11 +92,13 @@ class PeerConnectionTestWrapper
bool video, const webrtc::FakeConstraints& video_constraints);
std::string name_;
+ rtc::Thread* const network_thread_;
+ rtc::Thread* const worker_thread_;
rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
peer_connection_factory_;
rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module_;
- rtc::scoped_ptr<webrtc::FakeVideoTrackRenderer> renderer_;
+ std::unique_ptr<webrtc::FakeVideoTrackRenderer> renderer_;
};
#endif // WEBRTC_API_TEST_PEERCONNECTIONTESTWRAPPER_H_
diff --git a/chromium/third_party/webrtc/api/videocapturertracksource.cc b/chromium/third_party/webrtc/api/videocapturertracksource.cc
index cb539614be7..b99a2d1edaf 100644
--- a/chromium/third_party/webrtc/api/videocapturertracksource.cc
+++ b/chromium/third_party/webrtc/api/videocapturertracksource.cc
@@ -286,8 +286,9 @@ VideoCapturerTrackSource::VideoCapturerTrackSource(
rtc::Thread* worker_thread,
cricket::VideoCapturer* capturer,
bool remote)
- : VideoTrackSource(capturer, worker_thread, remote),
+ : VideoTrackSource(capturer, remote),
signaling_thread_(rtc::Thread::Current()),
+ worker_thread_(worker_thread),
video_capturer_(capturer),
started_(false) {
video_capturer_->SignalStateChange.connect(
@@ -350,7 +351,7 @@ void VideoCapturerTrackSource::Initialize(
format_ = GetBestCaptureFormat(formats);
// Start the camera with our best guess.
- if (!worker_thread()->Invoke<bool>(
+ if (!worker_thread_->Invoke<bool>(
rtc::Bind(&cricket::VideoCapturer::StartCapturing,
video_capturer_.get(), format_))) {
SetState(kEnded);
@@ -370,7 +371,7 @@ void VideoCapturerTrackSource::Stop() {
return;
}
started_ = false;
- worker_thread()->Invoke<void>(
+ worker_thread_->Invoke<void>(
rtc::Bind(&cricket::VideoCapturer::Stop, video_capturer_.get()));
}
@@ -378,7 +379,7 @@ void VideoCapturerTrackSource::Restart() {
if (started_) {
return;
}
- if (!worker_thread()->Invoke<bool>(
+ if (!worker_thread_->Invoke<bool>(
rtc::Bind(&cricket::VideoCapturer::StartCapturing,
video_capturer_.get(), format_))) {
SetState(kEnded);
diff --git a/chromium/third_party/webrtc/api/videocapturertracksource.h b/chromium/third_party/webrtc/api/videocapturertracksource.h
index 0d1142debad..92f00dc4e5a 100644
--- a/chromium/third_party/webrtc/api/videocapturertracksource.h
+++ b/chromium/third_party/webrtc/api/videocapturertracksource.h
@@ -11,10 +11,11 @@
#ifndef WEBRTC_API_VIDEOCAPTURERTRACKSOURCE_H_
#define WEBRTC_API_VIDEOCAPTURERTRACKSOURCE_H_
+#include <memory>
+
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/videotracksource.h"
#include "webrtc/base/asyncinvoker.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/media/base/videocapturer.h"
#include "webrtc/media/base/videocommon.h"
@@ -47,10 +48,6 @@ class VideoCapturerTrackSource : public VideoTrackSource,
cricket::VideoCapturer* capturer,
bool remote);
- cricket::VideoCapturer* GetVideoCapturer() override {
- return video_capturer_.get();
- }
-
bool is_screencast() const override {
return video_capturer_->IsScreencast();
}
@@ -75,8 +72,9 @@ class VideoCapturerTrackSource : public VideoTrackSource,
cricket::CaptureState capture_state);
rtc::Thread* signaling_thread_;
+ rtc::Thread* worker_thread_;
rtc::AsyncInvoker invoker_;
- rtc::scoped_ptr<cricket::VideoCapturer> video_capturer_;
+ std::unique_ptr<cricket::VideoCapturer> video_capturer_;
bool started_;
cricket::VideoFormat format_;
rtc::Optional<bool> needs_denoising_;
diff --git a/chromium/third_party/webrtc/api/videocapturertracksource_unittest.cc b/chromium/third_party/webrtc/api/videocapturertracksource_unittest.cc
index 39252934ebf..90d2cd2563d 100644
--- a/chromium/third_party/webrtc/api/videocapturertracksource_unittest.cc
+++ b/chromium/third_party/webrtc/api/videocapturertracksource_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include <vector>
@@ -109,7 +110,7 @@ class VideoCapturerTrackSourceTest : public testing::Test {
protected:
VideoCapturerTrackSourceTest() { InitCapturer(false); }
void InitCapturer(bool is_screencast) {
- capturer_cleanup_ = rtc::scoped_ptr<TestVideoCapturer>(
+ capturer_cleanup_ = std::unique_ptr<TestVideoCapturer>(
new TestVideoCapturer(is_screencast));
capturer_ = capturer_cleanup_.get();
}
@@ -126,17 +127,16 @@ class VideoCapturerTrackSourceTest : public testing::Test {
constraints, false);
ASSERT_TRUE(source_.get() != NULL);
- EXPECT_EQ(capturer_, source_->GetVideoCapturer());
state_observer_.reset(new StateObserver(source_));
source_->RegisterObserver(state_observer_.get());
source_->AddOrUpdateSink(&renderer_, rtc::VideoSinkWants());
}
- rtc::scoped_ptr<TestVideoCapturer> capturer_cleanup_;
+ std::unique_ptr<TestVideoCapturer> capturer_cleanup_;
TestVideoCapturer* capturer_;
cricket::FakeVideoRenderer renderer_;
- rtc::scoped_ptr<StateObserver> state_observer_;
+ std::unique_ptr<StateObserver> state_observer_;
rtc::scoped_refptr<VideoTrackSourceInterface> source_;
};
diff --git a/chromium/third_party/webrtc/api/videosourceproxy.h b/chromium/third_party/webrtc/api/videosourceproxy.h
index f43c0db69f2..6d4dfcb8053 100644
--- a/chromium/third_party/webrtc/api/videosourceproxy.h
+++ b/chromium/third_party/webrtc/api/videosourceproxy.h
@@ -21,21 +21,22 @@ namespace webrtc {
// destroyed on the signaling thread and marshals all method calls to the
// signaling thread.
BEGIN_PROXY_MAP(VideoTrackSource)
-PROXY_CONSTMETHOD0(SourceState, state)
-PROXY_CONSTMETHOD0(bool, remote)
-PROXY_METHOD0(cricket::VideoCapturer*, GetVideoCapturer)
-PROXY_METHOD0(void, Stop)
-PROXY_METHOD0(void, Restart)
-PROXY_CONSTMETHOD0(bool, is_screencast)
-PROXY_CONSTMETHOD0(rtc::Optional<bool>, needs_denoising)
-PROXY_METHOD1(bool, GetStats, Stats*)
-PROXY_METHOD2(void,
- AddOrUpdateSink,
- rtc::VideoSinkInterface<cricket::VideoFrame>*,
- const rtc::VideoSinkWants&)
-PROXY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface<cricket::VideoFrame>*)
-PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
-PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
+ PROXY_CONSTMETHOD0(SourceState, state)
+ PROXY_CONSTMETHOD0(bool, remote)
+ PROXY_METHOD0(void, Stop)
+ PROXY_METHOD0(void, Restart)
+ PROXY_CONSTMETHOD0(bool, is_screencast)
+ PROXY_CONSTMETHOD0(rtc::Optional<bool>, needs_denoising)
+ PROXY_METHOD1(bool, GetStats, Stats*)
+ PROXY_WORKER_METHOD2(void,
+ AddOrUpdateSink,
+ rtc::VideoSinkInterface<cricket::VideoFrame>*,
+ const rtc::VideoSinkWants&)
+ PROXY_WORKER_METHOD1(void,
+ RemoveSink,
+ rtc::VideoSinkInterface<cricket::VideoFrame>*)
+ PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
+ PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
END_PROXY()
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/videotrack.cc b/chromium/third_party/webrtc/api/videotrack.cc
index bd380254b2d..234b4cf7ae9 100644
--- a/chromium/third_party/webrtc/api/videotrack.cc
+++ b/chromium/third_party/webrtc/api/videotrack.cc
@@ -20,6 +20,7 @@ VideoTrack::VideoTrack(const std::string& label,
VideoTrackSourceInterface* video_source)
: MediaStreamTrack<VideoTrackInterface>(label),
video_source_(video_source) {
+ worker_thread_checker_.DetachFromThread();
video_source_->RegisterObserver(this);
}
@@ -31,10 +32,12 @@ std::string VideoTrack::kind() const {
return kVideoKind;
}
+// AddOrUpdateSink and RemoveSink should be called on the worker
+// thread.
void VideoTrack::AddOrUpdateSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
VideoSourceBase::AddOrUpdateSink(sink, wants);
rtc::VideoSinkWants modified_wants = wants;
modified_wants.black_frames = !enabled();
@@ -43,23 +46,25 @@ void VideoTrack::AddOrUpdateSink(
void VideoTrack::RemoveSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
VideoSourceBase::RemoveSink(sink);
video_source_->RemoveSink(sink);
}
bool VideoTrack::set_enabled(bool enable) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(signaling_thread_checker_.CalledOnValidThread());
for (auto& sink_pair : sink_pairs()) {
rtc::VideoSinkWants modified_wants = sink_pair.wants;
modified_wants.black_frames = !enable;
+ // video_source_ is a proxy object, marshalling the call to the
+ // worker thread.
video_source_->AddOrUpdateSink(sink_pair.sink, modified_wants);
}
return MediaStreamTrack<VideoTrackInterface>::set_enabled(enable);
}
void VideoTrack::OnChanged() {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(signaling_thread_checker_.CalledOnValidThread());
if (video_source_->state() == MediaSourceInterface::kEnded) {
set_state(kEnded);
} else {
diff --git a/chromium/third_party/webrtc/api/videotrack.h b/chromium/third_party/webrtc/api/videotrack.h
index 3835d2c936e..60a0a64aec7 100644
--- a/chromium/third_party/webrtc/api/videotrack.h
+++ b/chromium/third_party/webrtc/api/videotrack.h
@@ -33,11 +33,11 @@ class VideoTrack : public MediaStreamTrack<VideoTrackInterface>,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override;
- virtual VideoTrackSourceInterface* GetSource() const {
+ VideoTrackSourceInterface* GetSource() const override {
return video_source_.get();
}
- virtual bool set_enabled(bool enable);
- virtual std::string kind() const;
+ bool set_enabled(bool enable) override;
+ std::string kind() const override;
protected:
VideoTrack(const std::string& id, VideoTrackSourceInterface* video_source);
@@ -47,7 +47,8 @@ class VideoTrack : public MediaStreamTrack<VideoTrackInterface>,
// Implements ObserverInterface. Observes |video_source_| state.
void OnChanged() override;
- rtc::ThreadChecker thread_checker_;
+ rtc::ThreadChecker signaling_thread_checker_;
+ rtc::ThreadChecker worker_thread_checker_;
rtc::scoped_refptr<VideoTrackSourceInterface> video_source_;
};
diff --git a/chromium/third_party/webrtc/api/videotrack_unittest.cc b/chromium/third_party/webrtc/api/videotrack_unittest.cc
index d35bcdb2a03..0b67c77829c 100644
--- a/chromium/third_party/webrtc/api/videotrack_unittest.cc
+++ b/chromium/third_party/webrtc/api/videotrack_unittest.cc
@@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "webrtc/api/test/fakevideotrackrenderer.h"
#include "webrtc/api/videocapturertracksource.h"
#include "webrtc/api/videotrack.h"
#include "webrtc/base/gunit.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/media/base/fakevideocapturer.h"
#include "webrtc/media/base/fakemediaengine.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
@@ -31,7 +31,7 @@ class VideoTrackTest : public testing::Test {
VideoTrackTest() {
static const char kVideoTrackId[] = "track_id";
video_track_source_ = new rtc::RefCountedObject<VideoTrackSource>(
- &capturer_, rtc::Thread::Current(), true /* remote */);
+ &capturer_, true /* remote */);
video_track_ = VideoTrack::Create(kVideoTrackId, video_track_source_);
capturer_.Start(
cricket::VideoFormat(640, 480, cricket::VideoFormat::FpsToInterval(30),
@@ -55,14 +55,14 @@ TEST_F(VideoTrackTest, SourceStateChangeTrackState) {
// frames to the source.
TEST_F(VideoTrackTest, RenderVideo) {
// FakeVideoTrackRenderer register itself to |video_track_|
- rtc::scoped_ptr<FakeVideoTrackRenderer> renderer_1(
+ std::unique_ptr<FakeVideoTrackRenderer> renderer_1(
new FakeVideoTrackRenderer(video_track_.get()));
capturer_.CaptureFrame();
EXPECT_EQ(1, renderer_1->num_rendered_frames());
// FakeVideoTrackRenderer register itself to |video_track_|
- rtc::scoped_ptr<FakeVideoTrackRenderer> renderer_2(
+ std::unique_ptr<FakeVideoTrackRenderer> renderer_2(
new FakeVideoTrackRenderer(video_track_.get()));
capturer_.CaptureFrame();
EXPECT_EQ(2, renderer_1->num_rendered_frames());
@@ -75,7 +75,7 @@ TEST_F(VideoTrackTest, RenderVideo) {
// Test that disabling the track results in blacked out frames.
TEST_F(VideoTrackTest, DisableTrackBlackout) {
- rtc::scoped_ptr<FakeVideoTrackRenderer> renderer(
+ std::unique_ptr<FakeVideoTrackRenderer> renderer(
new FakeVideoTrackRenderer(video_track_.get()));
capturer_.CaptureFrame();
diff --git a/chromium/third_party/webrtc/api/videotracksource.cc b/chromium/third_party/webrtc/api/videotracksource.cc
index f8212d7a70c..17d32fbc710 100644
--- a/chromium/third_party/webrtc/api/videotracksource.cc
+++ b/chromium/third_party/webrtc/api/videotracksource.cc
@@ -12,18 +12,14 @@
#include <string>
-#include "webrtc/base/bind.h"
-
namespace webrtc {
VideoTrackSource::VideoTrackSource(
rtc::VideoSourceInterface<cricket::VideoFrame>* source,
- rtc::Thread* worker_thread,
bool remote)
- : source_(source),
- worker_thread_(worker_thread),
- state_(kInitializing),
- remote_(remote) {}
+ : source_(source), state_(kInitializing), remote_(remote) {
+ worker_thread_checker_.DetachFromThread();
+}
void VideoTrackSource::SetState(SourceState new_state) {
if (state_ != new_state) {
@@ -39,22 +35,20 @@ void VideoTrackSource::OnSourceDestroyed() {
void VideoTrackSource::AddOrUpdateSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
if (!source_) {
return;
}
- worker_thread_->Invoke<void>(rtc::Bind(
- &rtc::VideoSourceInterface<cricket::VideoFrame>::AddOrUpdateSink, source_,
- sink, wants));
+ source_->AddOrUpdateSink(sink, wants);
}
void VideoTrackSource::RemoveSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
if (!source_) {
return;
}
- worker_thread_->Invoke<void>(
- rtc::Bind(&rtc::VideoSourceInterface<cricket::VideoFrame>::RemoveSink,
- source_, sink));
+ source_->RemoveSink(sink);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/api/videotracksource.h b/chromium/third_party/webrtc/api/videotracksource.h
index 108209dc2c1..10e24ab27ed 100644
--- a/chromium/third_party/webrtc/api/videotracksource.h
+++ b/chromium/third_party/webrtc/api/videotracksource.h
@@ -13,6 +13,7 @@
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/notifier.h"
+#include "webrtc/base/thread_checker.h"
#include "webrtc/media/base/mediachannel.h"
#include "webrtc/media/base/videosinkinterface.h"
@@ -22,7 +23,6 @@ namespace webrtc {
class VideoTrackSource : public Notifier<VideoTrackSourceInterface> {
public:
VideoTrackSource(rtc::VideoSourceInterface<cricket::VideoFrame>* source,
- rtc::Thread* worker_thread,
bool remote);
void SetState(SourceState new_state);
// OnSourceDestroyed clears this instance pointer to |source_|. It is useful
@@ -36,8 +36,8 @@ class VideoTrackSource : public Notifier<VideoTrackSourceInterface> {
void Stop() override{};
void Restart() override{};
- virtual bool is_screencast() const { return false; }
- virtual rtc::Optional<bool> needs_denoising() const {
+ bool is_screencast() const override { return false; }
+ rtc::Optional<bool> needs_denoising() const override {
return rtc::Optional<bool>(); }
bool GetStats(Stats* stats) override { return false; }
@@ -46,14 +46,9 @@ class VideoTrackSource : public Notifier<VideoTrackSourceInterface> {
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override;
- cricket::VideoCapturer* GetVideoCapturer() override { return nullptr; }
-
- protected:
- rtc::Thread* worker_thread() { return worker_thread_; }
-
private:
+ rtc::ThreadChecker worker_thread_checker_;
rtc::VideoSourceInterface<cricket::VideoFrame>* source_;
- rtc::Thread* worker_thread_;
cricket::VideoOptions options_;
SourceState state_;
const bool remote_;
diff --git a/chromium/third_party/webrtc/api/webrtcsdp.cc b/chromium/third_party/webrtc/api/webrtcsdp.cc
index 85fecf8bd8f..b86d9038ddc 100644
--- a/chromium/third_party/webrtc/api/webrtcsdp.cc
+++ b/chromium/third_party/webrtc/api/webrtcsdp.cc
@@ -13,8 +13,11 @@
#include <ctype.h>
#include <limits.h>
#include <stdio.h>
+
#include <algorithm>
+#include <memory>
#include <string>
+#include <unordered_map>
#include <vector>
#include "webrtc/api/jsepicecandidate.h"
@@ -269,7 +272,7 @@ static bool ParseContent(const std::string& message,
const MediaType media_type,
int mline_index,
const std::string& protocol,
- const std::vector<int>& codec_preference,
+ const std::vector<int>& payload_types,
size_t* pos,
std::string* content_name,
MediaContentDescription* media_desc,
@@ -287,7 +290,7 @@ static bool ParseCryptoAttribute(const std::string& line,
SdpParseError* error);
static bool ParseRtpmapAttribute(const std::string& line,
const MediaType media_type,
- const std::vector<int>& codec_preference,
+ const std::vector<int>& payload_types,
MediaContentDescription* media_desc,
SdpParseError* error);
static bool ParseFmtpAttributes(const std::string& line,
@@ -1653,9 +1656,8 @@ bool AddSctpDataCodec(DataContentDescription* media_desc,
NULL);
}
// Add the SCTP Port number as a pseudo-codec "port" parameter
- cricket::DataCodec codec_port(
- cricket::kGoogleSctpDataCodecId, cricket::kGoogleSctpDataCodecName,
- 0);
+ cricket::DataCodec codec_port(cricket::kGoogleSctpDataCodecId,
+ cricket::kGoogleSctpDataCodecName);
codec_port.SetParam(cricket::kCodecParamPort, sctp_port);
LOG(INFO) << "AddSctpDataCodec: Got SCTP Port Number "
<< sctp_port;
@@ -2181,9 +2183,8 @@ void MaybeCreateStaticPayloadAudioCodecs(
if (!media_desc) {
return;
}
- int preference = static_cast<int>(fmts.size());
+ RTC_DCHECK(media_desc->codecs().empty());
std::vector<int>::const_iterator it = fmts.begin();
- bool add_new_codec = false;
for (; it != fmts.end(); ++it) {
int payload_type = *it;
if (!media_desc->HasCodec(payload_type) &&
@@ -2193,14 +2194,8 @@ void MaybeCreateStaticPayloadAudioCodecs(
int clock_rate = kStaticPayloadAudioCodecs[payload_type].clockrate;
size_t channels = kStaticPayloadAudioCodecs[payload_type].channels;
media_desc->AddCodec(cricket::AudioCodec(payload_type, encoding_name,
- clock_rate, 0, channels,
- preference));
- add_new_codec = true;
+ clock_rate, 0, channels));
}
- --preference;
- }
- if (add_new_codec) {
- media_desc->SortCodecs();
}
}
@@ -2209,7 +2204,7 @@ static C* ParseContentDescription(const std::string& message,
const MediaType media_type,
int mline_index,
const std::string& protocol,
- const std::vector<int>& codec_preference,
+ const std::vector<int>& payload_types,
size_t* pos,
std::string* content_name,
TransportDescription* transport,
@@ -2230,14 +2225,28 @@ static C* ParseContentDescription(const std::string& message,
ASSERT(false);
break;
}
- if (!ParseContent(message, media_type, mline_index, protocol,
- codec_preference, pos, content_name,
- media_desc, transport, candidates, error)) {
+ if (!ParseContent(message, media_type, mline_index, protocol, payload_types,
+ pos, content_name, media_desc, transport, candidates,
+ error)) {
delete media_desc;
return NULL;
}
// Sort the codecs according to the m-line fmt list.
- media_desc->SortCodecs();
+ std::unordered_map<int, int> payload_type_preferences;
+ // "size + 1" so that the lowest preference payload type has a preference of
+ // 1, which is greater than the default (0) for payload types not in the fmt
+ // list.
+ int preference = static_cast<int>(payload_types.size() + 1);
+ for (int pt : payload_types) {
+ payload_type_preferences[pt] = preference--;
+ }
+ std::vector<typename C::CodecType> codecs = media_desc->codecs();
+ std::sort(codecs.begin(), codecs.end(), [&payload_type_preferences](
+ const typename C::CodecType& a,
+ const typename C::CodecType& b) {
+ return payload_type_preferences[a.id] > payload_type_preferences[b.id];
+ });
+ media_desc->set_codecs(codecs);
return media_desc;
}
@@ -2276,7 +2285,7 @@ bool ParseMediaDescription(const std::string& message,
std::string protocol = fields[2];
// <fmt>
- std::vector<int> codec_preference;
+ std::vector<int> payload_types;
if (IsRtp(protocol)) {
for (size_t j = 3 ; j < fields.size(); ++j) {
// TODO(wu): Remove when below bug is fixed.
@@ -2289,7 +2298,7 @@ bool ParseMediaDescription(const std::string& message,
if (!GetPayloadTypeFromString(line, fields[j], &pl, error)) {
return false;
}
- codec_preference.push_back(pl);
+ payload_types.push_back(pl);
}
}
@@ -2300,24 +2309,21 @@ bool ParseMediaDescription(const std::string& message,
session_td.ice_mode, session_td.connection_role,
session_td.identity_fingerprint.get());
- rtc::scoped_ptr<MediaContentDescription> content;
+ std::unique_ptr<MediaContentDescription> content;
std::string content_name;
if (HasAttribute(line, kMediaTypeVideo)) {
content.reset(ParseContentDescription<VideoContentDescription>(
- message, cricket::MEDIA_TYPE_VIDEO, mline_index, protocol,
- codec_preference, pos, &content_name,
- &transport, candidates, error));
+ message, cricket::MEDIA_TYPE_VIDEO, mline_index, protocol,
+ payload_types, pos, &content_name, &transport, candidates, error));
} else if (HasAttribute(line, kMediaTypeAudio)) {
content.reset(ParseContentDescription<AudioContentDescription>(
- message, cricket::MEDIA_TYPE_AUDIO, mline_index, protocol,
- codec_preference, pos, &content_name,
- &transport, candidates, error));
+ message, cricket::MEDIA_TYPE_AUDIO, mline_index, protocol,
+ payload_types, pos, &content_name, &transport, candidates, error));
} else if (HasAttribute(line, kMediaTypeData)) {
DataContentDescription* data_desc =
ParseContentDescription<DataContentDescription>(
- message, cricket::MEDIA_TYPE_DATA, mline_index, protocol,
- codec_preference, pos, &content_name,
- &transport, candidates, error);
+ message, cricket::MEDIA_TYPE_DATA, mline_index, protocol,
+ payload_types, pos, &content_name, &transport, candidates, error);
content.reset(data_desc);
int p;
@@ -2524,7 +2530,7 @@ bool ParseContent(const std::string& message,
const MediaType media_type,
int mline_index,
const std::string& protocol,
- const std::vector<int>& codec_preference,
+ const std::vector<int>& payload_types,
size_t* pos,
std::string* content_name,
MediaContentDescription* media_desc,
@@ -2537,7 +2543,7 @@ bool ParseContent(const std::string& message,
if (media_type == cricket::MEDIA_TYPE_AUDIO) {
MaybeCreateStaticPayloadAudioCodecs(
- codec_preference, static_cast<AudioContentDescription*>(media_desc));
+ payload_types, static_cast<AudioContentDescription*>(media_desc));
}
// The media level "ice-ufrag" and "ice-pwd".
@@ -2672,8 +2678,8 @@ bool ParseContent(const std::string& message,
return false;
}
} else if (HasAttribute(line, kAttributeRtpmap)) {
- if (!ParseRtpmapAttribute(line, media_type, codec_preference,
- media_desc, error)) {
+ if (!ParseRtpmapAttribute(line, media_type, payload_types, media_desc,
+ error)) {
return false;
}
} else if (HasAttribute(line, kCodecParamMaxPTime)) {
@@ -2929,9 +2935,12 @@ bool ParseCryptoAttribute(const std::string& line,
}
// Updates or creates a new codec entry in the audio description with according
-// to |name|, |clockrate|, |bitrate|, |channels| and |preference|.
-void UpdateCodec(int payload_type, const std::string& name, int clockrate,
- int bitrate, size_t channels, int preference,
+// to |name|, |clockrate|, |bitrate|, and |channels|.
+void UpdateCodec(int payload_type,
+ const std::string& name,
+ int clockrate,
+ int bitrate,
+ size_t channels,
AudioContentDescription* audio_desc) {
// Codec may already be populated with (only) optional parameters
// (from an fmtp).
@@ -2941,15 +2950,17 @@ void UpdateCodec(int payload_type, const std::string& name, int clockrate,
codec.clockrate = clockrate;
codec.bitrate = bitrate;
codec.channels = channels;
- codec.preference = preference;
AddOrReplaceCodec<AudioContentDescription, cricket::AudioCodec>(audio_desc,
codec);
}
// Updates or creates a new codec entry in the video description according to
-// |name|, |width|, |height|, |framerate| and |preference|.
-void UpdateCodec(int payload_type, const std::string& name, int width,
- int height, int framerate, int preference,
+// |name|, |width|, |height|, and |framerate|.
+void UpdateCodec(int payload_type,
+ const std::string& name,
+ int width,
+ int height,
+ int framerate,
VideoContentDescription* video_desc) {
// Codec may already be populated with (only) optional parameters
// (from an fmtp).
@@ -2959,14 +2970,13 @@ void UpdateCodec(int payload_type, const std::string& name, int width,
codec.width = width;
codec.height = height;
codec.framerate = framerate;
- codec.preference = preference;
AddOrReplaceCodec<VideoContentDescription, cricket::VideoCodec>(video_desc,
codec);
}
bool ParseRtpmapAttribute(const std::string& line,
const MediaType media_type,
- const std::vector<int>& codec_preference,
+ const std::vector<int>& payload_types,
MediaContentDescription* media_desc,
SdpParseError* error) {
std::vector<std::string> fields;
@@ -2988,12 +2998,8 @@ bool ParseRtpmapAttribute(const std::string& line,
return false;
}
- // Set the preference order depending on the order of the pl type in the
- // <fmt> of the m-line.
- const int preference = codec_preference.end() -
- std::find(codec_preference.begin(), codec_preference.end(),
- payload_type);
- if (preference == 0) {
+ if (std::find(payload_types.begin(), payload_types.end(), payload_type) ==
+ payload_types.end()) {
LOG(LS_WARNING) << "Ignore rtpmap line that did not appear in the "
<< "<fmt> of the m-line: " << line;
return true;
@@ -3023,7 +3029,7 @@ bool ParseRtpmapAttribute(const std::string& line,
JsepSessionDescription::kMaxVideoCodecWidth,
JsepSessionDescription::kMaxVideoCodecHeight,
JsepSessionDescription::kDefaultVideoCodecFramerate,
- preference, video_desc);
+ video_desc);
} else if (media_type == cricket::MEDIA_TYPE_AUDIO) {
// RFC 4566
// For audio streams, <encoding parameters> indicates the number
@@ -3051,12 +3057,11 @@ bool ParseRtpmapAttribute(const std::string& line,
AudioContentDescription* audio_desc =
static_cast<AudioContentDescription*>(media_desc);
UpdateCodec(payload_type, encoding_name, clock_rate, bitrate, channels,
- preference, audio_desc);
+ audio_desc);
} else if (media_type == cricket::MEDIA_TYPE_DATA) {
DataContentDescription* data_desc =
static_cast<DataContentDescription*>(media_desc);
- data_desc->AddCodec(cricket::DataCodec(payload_type, encoding_name,
- preference));
+ data_desc->AddCodec(cricket::DataCodec(payload_type, encoding_name));
}
return true;
}
diff --git a/chromium/third_party/webrtc/api/webrtcsdp_unittest.cc b/chromium/third_party/webrtc/api/webrtcsdp_unittest.cc
index 099c9c04c0d..4bc84fdc12d 100644
--- a/chromium/third_party/webrtc/api/webrtcsdp_unittest.cc
+++ b/chromium/third_party/webrtc/api/webrtcsdp_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <set>
#include <string>
#include <vector>
@@ -20,7 +21,6 @@
#include "webrtc/base/gunit.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/messagedigest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sslfingerprint.h"
#include "webrtc/base/stringencode.h"
#include "webrtc/base/stringutils.h"
@@ -1030,10 +1030,10 @@ class WebRtcSdpTest : public testing::Test {
"inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32",
"dummy_session_params"));
audio->set_protocol(cricket::kMediaProtocolSavpf);
- AudioCodec opus(111, "opus", 48000, 0, 2, 3);
+ AudioCodec opus(111, "opus", 48000, 0, 2);
audio->AddCodec(opus);
- audio->AddCodec(AudioCodec(103, "ISAC", 16000, 32000, 1, 2));
- audio->AddCodec(AudioCodec(104, "ISAC", 32000, 56000, 1, 1));
+ audio->AddCodec(AudioCodec(103, "ISAC", 16000, 32000, 1));
+ audio->AddCodec(AudioCodec(104, "ISAC", 32000, 56000, 1));
return audio;
}
@@ -1049,8 +1049,7 @@ class WebRtcSdpTest : public testing::Test {
VideoCodec(120, JsepSessionDescription::kDefaultVideoCodecName,
JsepSessionDescription::kMaxVideoCodecWidth,
JsepSessionDescription::kMaxVideoCodecHeight,
- JsepSessionDescription::kDefaultVideoCodecFramerate,
- JsepSessionDescription::kDefaultVideoCodecPreference));
+ JsepSessionDescription::kDefaultVideoCodecFramerate));
return video;
}
@@ -1395,12 +1394,11 @@ class WebRtcSdpTest : public testing::Test {
}
void AddSctpDataChannel() {
- rtc::scoped_ptr<DataContentDescription> data(
- new DataContentDescription());
+ std::unique_ptr<DataContentDescription> data(new DataContentDescription());
data_desc_ = data.get();
data_desc_->set_protocol(cricket::kMediaProtocolDtlsSctp);
DataCodec codec(cricket::kGoogleSctpDataCodecId,
- cricket::kGoogleSctpDataCodecName, 0);
+ cricket::kGoogleSctpDataCodecName);
codec.SetParam(cricket::kCodecParamPort, kDefaultSctpPort);
data_desc_->AddCodec(codec);
desc_.AddContent(kDataContentName, NS_JINGLE_DRAFT_SCTP, data.release());
@@ -1409,11 +1407,10 @@ class WebRtcSdpTest : public testing::Test {
}
void AddRtpDataChannel() {
- rtc::scoped_ptr<DataContentDescription> data(
- new DataContentDescription());
+ std::unique_ptr<DataContentDescription> data(new DataContentDescription());
data_desc_ = data.get();
- data_desc_->AddCodec(DataCodec(101, "google-data", 1));
+ data_desc_->AddCodec(DataCodec(101, "google-data"));
StreamParams data_stream;
data_stream.id = kDataChannelMsid;
data_stream.cname = kDataChannelCname;
@@ -1680,7 +1677,7 @@ class WebRtcSdpTest : public testing::Test {
VideoContentDescription* video_desc_;
DataContentDescription* data_desc_;
Candidates candidates_;
- rtc::scoped_ptr<IceCandidateInterface> jcandidate_;
+ std::unique_ptr<IceCandidateInterface> jcandidate_;
JsepSessionDescription jdesc_;
};
@@ -1995,8 +1992,8 @@ TEST_F(WebRtcSdpTest, SerializeWithSctpDataChannelAndNewPort) {
jsep_desc.description()->GetContentDescriptionByName(kDataContentName));
const int kNewPort = 1234;
- cricket::DataCodec codec(
- cricket::kGoogleSctpDataCodecId, cricket::kGoogleSctpDataCodecName, 0);
+ cricket::DataCodec codec(cricket::kGoogleSctpDataCodecId,
+ cricket::kGoogleSctpDataCodecName);
codec.SetParam(cricket::kCodecParamPort, kNewPort);
dcdesc->AddOrReplaceCodec(codec);
@@ -2083,8 +2080,8 @@ TEST_F(WebRtcSdpTest, SerializeTcpCandidates) {
"", "", LOCAL_PORT_TYPE, kCandidateGeneration,
kCandidateFoundation1);
candidate.set_tcptype(cricket::TCPTYPE_ACTIVE_STR);
- rtc::scoped_ptr<IceCandidateInterface> jcandidate(
- new JsepIceCandidate(std::string("audio_content_name"), 0, candidate));
+ std::unique_ptr<IceCandidateInterface> jcandidate(
+ new JsepIceCandidate(std::string("audio_content_name"), 0, candidate));
std::string message = webrtc::SdpSerializeCandidate(*jcandidate);
EXPECT_EQ(std::string(kSdpTcpActiveCandidate), message);
@@ -2180,10 +2177,11 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutRtpmap) {
static_cast<AudioContentDescription*>(
jdesc.description()->GetContentDescriptionByName(cricket::CN_AUDIO));
AudioCodecs ref_codecs;
- // The codecs in the AudioContentDescription will be sorted by preference.
- ref_codecs.push_back(AudioCodec(0, "PCMU", 8000, 0, 1, 3));
- ref_codecs.push_back(AudioCodec(18, "G729", 16000, 0, 1, 2));
- ref_codecs.push_back(AudioCodec(103, "ISAC", 16000, 32000, 1, 1));
+ // The codecs in the AudioContentDescription should be in the same order as
+ // the payload types (<fmt>s) on the m= line.
+ ref_codecs.push_back(AudioCodec(0, "PCMU", 8000, 0, 1));
+ ref_codecs.push_back(AudioCodec(18, "G729", 16000, 0, 1));
+ ref_codecs.push_back(AudioCodec(103, "ISAC", 16000, 32000, 1));
EXPECT_EQ(ref_codecs, audio->codecs());
}
@@ -2405,8 +2403,8 @@ TEST_F(WebRtcSdpTest, DeserializeCandidate) {
rtc::SocketAddress("192.168.1.5", 9), kCandidatePriority,
"", "", LOCAL_PORT_TYPE, kCandidateGeneration,
kCandidateFoundation1);
- rtc::scoped_ptr<IceCandidateInterface> jcandidate_template(
- new JsepIceCandidate(std::string("audio_content_name"), 0, candidate));
+ std::unique_ptr<IceCandidateInterface> jcandidate_template(
+ new JsepIceCandidate(std::string("audio_content_name"), 0, candidate));
EXPECT_TRUE(jcandidate.candidate().IsEquivalent(
jcandidate_template->candidate()));
sdp = kSdpTcpPassiveCandidate;
diff --git a/chromium/third_party/webrtc/api/webrtcsession.cc b/chromium/third_party/webrtc/api/webrtcsession.cc
index f5462855741..80352cf8324 100644
--- a/chromium/third_party/webrtc/api/webrtcsession.cc
+++ b/chromium/third_party/webrtc/api/webrtcsession.cc
@@ -421,22 +421,6 @@ static std::string MakeTdErrorString(const std::string& desc) {
return MakeErrorString(kPushDownTDFailed, desc);
}
-uint32_t ConvertIceTransportTypeToCandidateFilter(
- PeerConnectionInterface::IceTransportsType type) {
- switch (type) {
- case PeerConnectionInterface::kNone:
- return cricket::CF_NONE;
- case PeerConnectionInterface::kRelay:
- return cricket::CF_RELAY;
- case PeerConnectionInterface::kNoHost:
- return (cricket::CF_ALL & ~cricket::CF_HOST);
- case PeerConnectionInterface::kAll:
- return cricket::CF_ALL;
- default: ASSERT(false);
- }
- return cricket::CF_NONE;
-}
-
// Returns true if |new_desc| requests an ICE restart (i.e., new ufrag/pwd).
bool CheckForRemoteIceRestart(const SessionDescriptionInterface* old_desc,
const SessionDescriptionInterface* new_desc,
@@ -470,18 +454,18 @@ bool CheckForRemoteIceRestart(const SessionDescriptionInterface* old_desc,
}
WebRtcSession::WebRtcSession(webrtc::MediaControllerInterface* media_controller,
- rtc::Thread* signaling_thread,
+ rtc::Thread* network_thread,
rtc::Thread* worker_thread,
+ rtc::Thread* signaling_thread,
cricket::PortAllocator* port_allocator)
- : signaling_thread_(signaling_thread),
- worker_thread_(worker_thread),
- port_allocator_(port_allocator),
+ : worker_thread_(worker_thread),
+ signaling_thread_(signaling_thread),
// RFC 3264: The numeric value of the session id and version in the
// o line MUST be representable with a "64 bit signed integer".
// Due to this constraint session id |sid_| is max limited to LLONG_MAX.
sid_(rtc::ToString(rtc::CreateRandomId64() & LLONG_MAX)),
transport_controller_(new cricket::TransportController(signaling_thread,
- worker_thread,
+ network_thread,
port_allocator)),
media_controller_(media_controller),
channel_manager_(media_controller_->channel_manager()),
@@ -528,7 +512,7 @@ WebRtcSession::~WebRtcSession() {
bool WebRtcSession::Initialize(
const PeerConnectionFactoryInterface::Options& options,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
const PeerConnectionInterface::RTCConfiguration& rtc_configuration) {
bundle_policy_ = rtc_configuration.bundle_policy;
rtcp_mux_policy_ = rtc_configuration.rtcp_mux_policy;
@@ -604,8 +588,6 @@ bool WebRtcSession::Initialize(
if (options.disable_encryption) {
webrtc_session_desc_factory_->SetSdesPolicy(cricket::SEC_DISABLED);
}
- port_allocator()->set_candidate_filter(
- ConvertIceTransportTypeToCandidateFilter(rtc_configuration.type));
return true;
}
@@ -675,7 +657,7 @@ bool WebRtcSession::SetLocalDescription(SessionDescriptionInterface* desc,
ASSERT(signaling_thread()->IsCurrent());
// Takes the ownership of |desc| regardless of the result.
- rtc::scoped_ptr<SessionDescriptionInterface> desc_temp(desc);
+ std::unique_ptr<SessionDescriptionInterface> desc_temp(desc);
// Validate SDP.
if (!ValidateSessionDescription(desc, cricket::CS_LOCAL, err_desc)) {
@@ -731,14 +713,14 @@ bool WebRtcSession::SetRemoteDescription(SessionDescriptionInterface* desc,
ASSERT(signaling_thread()->IsCurrent());
// Takes the ownership of |desc| regardless of the result.
- rtc::scoped_ptr<SessionDescriptionInterface> desc_temp(desc);
+ std::unique_ptr<SessionDescriptionInterface> desc_temp(desc);
// Validate SDP.
if (!ValidateSessionDescription(desc, cricket::CS_REMOTE, err_desc)) {
return false;
}
- rtc::scoped_ptr<SessionDescriptionInterface> old_remote_desc(
+ std::unique_ptr<SessionDescriptionInterface> old_remote_desc(
remote_desc_.release());
remote_desc_.reset(desc_temp.release());
@@ -1039,7 +1021,7 @@ bool WebRtcSession::GetLocalCertificate(
certificate);
}
-rtc::scoped_ptr<rtc::SSLCertificate> WebRtcSession::GetRemoteSSLCertificate(
+std::unique_ptr<rtc::SSLCertificate> WebRtcSession::GetRemoteSSLCertificate(
const std::string& transport_name) {
ASSERT(signaling_thread()->IsCurrent());
return transport_controller_->GetRemoteSSLCertificate(transport_name);
@@ -1145,12 +1127,6 @@ bool WebRtcSession::RemoveRemoteIceCandidates(
return true;
}
-bool WebRtcSession::SetIceTransports(
- PeerConnectionInterface::IceTransportsType type) {
- return port_allocator()->set_candidate_filter(
- ConvertIceTransportTypeToCandidateFilter(type));
-}
-
cricket::IceConfig WebRtcSession::ParseIceConfig(
const PeerConnectionInterface::RTCConfiguration& config) const {
cricket::IceConfig ice_config;
@@ -1236,33 +1212,52 @@ void WebRtcSession::SetAudioPlayoutVolume(uint32_t ssrc, double volume) {
}
void WebRtcSession::SetRawAudioSink(uint32_t ssrc,
- rtc::scoped_ptr<AudioSinkInterface> sink) {
+ std::unique_ptr<AudioSinkInterface> sink) {
ASSERT(signaling_thread()->IsCurrent());
if (!voice_channel_)
return;
- voice_channel_->SetRawAudioSink(ssrc, rtc::ScopedToUnique(std::move(sink)));
+ voice_channel_->SetRawAudioSink(ssrc, std::move(sink));
}
-RtpParameters WebRtcSession::GetAudioRtpParameters(uint32_t ssrc) const {
+RtpParameters WebRtcSession::GetAudioRtpSendParameters(uint32_t ssrc) const {
ASSERT(signaling_thread()->IsCurrent());
if (voice_channel_) {
- return voice_channel_->GetRtpParameters(ssrc);
+ return voice_channel_->GetRtpSendParameters(ssrc);
}
return RtpParameters();
}
-bool WebRtcSession::SetAudioRtpParameters(uint32_t ssrc,
- const RtpParameters& parameters) {
+bool WebRtcSession::SetAudioRtpSendParameters(uint32_t ssrc,
+ const RtpParameters& parameters) {
ASSERT(signaling_thread()->IsCurrent());
if (!voice_channel_) {
return false;
}
- return voice_channel_->SetRtpParameters(ssrc, parameters);
+ return voice_channel_->SetRtpSendParameters(ssrc, parameters);
+}
+
+RtpParameters WebRtcSession::GetAudioRtpReceiveParameters(uint32_t ssrc) const {
+ ASSERT(signaling_thread()->IsCurrent());
+ if (voice_channel_) {
+ return voice_channel_->GetRtpReceiveParameters(ssrc);
+ }
+ return RtpParameters();
}
-bool WebRtcSession::SetCaptureDevice(uint32_t ssrc,
- cricket::VideoCapturer* camera) {
+bool WebRtcSession::SetAudioRtpReceiveParameters(
+ uint32_t ssrc,
+ const RtpParameters& parameters) {
+ ASSERT(signaling_thread()->IsCurrent());
+ if (!voice_channel_) {
+ return false;
+ }
+ return voice_channel_->SetRtpReceiveParameters(ssrc, parameters);
+}
+
+bool WebRtcSession::SetSource(
+ uint32_t ssrc,
+ rtc::VideoSourceInterface<cricket::VideoFrame>* source) {
ASSERT(signaling_thread()->IsCurrent());
if (!video_channel_) {
@@ -1271,13 +1266,7 @@ bool WebRtcSession::SetCaptureDevice(uint32_t ssrc,
LOG(LS_WARNING) << "Video not used in this call.";
return false;
}
- if (!video_channel_->SetCapturer(ssrc, camera)) {
- // Allow that SetCapturer fail if |camera| is NULL but assert otherwise.
- // This in the normal case when the underlying media channel has already
- // been deleted.
- ASSERT(camera == NULL);
- return false;
- }
+ video_channel_->SetSource(ssrc, source);
return true;
}
@@ -1314,21 +1303,39 @@ void WebRtcSession::SetVideoSend(uint32_t ssrc,
}
}
-RtpParameters WebRtcSession::GetVideoRtpParameters(uint32_t ssrc) const {
+RtpParameters WebRtcSession::GetVideoRtpSendParameters(uint32_t ssrc) const {
ASSERT(signaling_thread()->IsCurrent());
if (video_channel_) {
- return video_channel_->GetRtpParameters(ssrc);
+ return video_channel_->GetRtpSendParameters(ssrc);
}
return RtpParameters();
}
-bool WebRtcSession::SetVideoRtpParameters(uint32_t ssrc,
- const RtpParameters& parameters) {
+bool WebRtcSession::SetVideoRtpSendParameters(uint32_t ssrc,
+ const RtpParameters& parameters) {
ASSERT(signaling_thread()->IsCurrent());
if (!video_channel_) {
return false;
}
- return video_channel_->SetRtpParameters(ssrc, parameters);
+ return video_channel_->SetRtpSendParameters(ssrc, parameters);
+}
+
+RtpParameters WebRtcSession::GetVideoRtpReceiveParameters(uint32_t ssrc) const {
+ ASSERT(signaling_thread()->IsCurrent());
+ if (video_channel_) {
+ return video_channel_->GetRtpReceiveParameters(ssrc);
+ }
+ return RtpParameters();
+}
+
+bool WebRtcSession::SetVideoRtpReceiveParameters(
+ uint32_t ssrc,
+ const RtpParameters& parameters) {
+ ASSERT(signaling_thread()->IsCurrent());
+ if (!video_channel_) {
+ return false;
+ }
+ return video_channel_->SetRtpReceiveParameters(ssrc, parameters);
}
bool WebRtcSession::CanInsertDtmf(const std::string& track_id) {
@@ -1732,13 +1739,41 @@ void WebRtcSession::RemoveUnusedChannels(const SessionDescription* desc) {
}
}
-// TODO(mallinath) - Add a correct error code if the channels are not created
-// due to BUNDLE is enabled but rtcp-mux is disabled.
+// Returns the name of the transport channel when BUNDLE is enabled, or nullptr
+// if the channel is not part of any bundle.
+const std::string* WebRtcSession::GetBundleTransportName(
+ const cricket::ContentInfo* content,
+ const cricket::ContentGroup* bundle) {
+ if (!bundle) {
+ return nullptr;
+ }
+ const std::string* first_content_name = bundle->FirstContentName();
+ if (!first_content_name) {
+ LOG(LS_WARNING) << "Tried to BUNDLE with no contents.";
+ return nullptr;
+ }
+ if (!bundle->HasContentName(content->name)) {
+ LOG(LS_WARNING) << content->name << " is not part of any bundle group";
+ return nullptr;
+ }
+ LOG(LS_INFO) << "Bundling " << content->name << " on " << *first_content_name;
+ return first_content_name;
+}
+
bool WebRtcSession::CreateChannels(const SessionDescription* desc) {
+ const cricket::ContentGroup* bundle_group = nullptr;
+ if (bundle_policy_ == PeerConnectionInterface::kBundlePolicyMaxBundle) {
+ bundle_group = desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+ if (!bundle_group) {
+ LOG(LS_WARNING) << "max-bundle specified without BUNDLE specified";
+ return false;
+ }
+ }
// Creating the media channels and transport proxies.
const cricket::ContentInfo* voice = cricket::GetFirstAudioContent(desc);
if (voice && !voice->rejected && !voice_channel_) {
- if (!CreateVoiceChannel(voice)) {
+ if (!CreateVoiceChannel(voice,
+ GetBundleTransportName(voice, bundle_group))) {
LOG(LS_ERROR) << "Failed to create voice channel.";
return false;
}
@@ -1746,7 +1781,8 @@ bool WebRtcSession::CreateChannels(const SessionDescription* desc) {
const cricket::ContentInfo* video = cricket::GetFirstVideoContent(desc);
if (video && !video->rejected && !video_channel_) {
- if (!CreateVideoChannel(video)) {
+ if (!CreateVideoChannel(video,
+ GetBundleTransportName(video, bundle_group))) {
LOG(LS_ERROR) << "Failed to create video channel.";
return false;
}
@@ -1755,82 +1791,77 @@ bool WebRtcSession::CreateChannels(const SessionDescription* desc) {
const cricket::ContentInfo* data = cricket::GetFirstDataContent(desc);
if (data_channel_type_ != cricket::DCT_NONE &&
data && !data->rejected && !data_channel_) {
- if (!CreateDataChannel(data)) {
+ if (!CreateDataChannel(data, GetBundleTransportName(data, bundle_group))) {
LOG(LS_ERROR) << "Failed to create data channel.";
return false;
}
}
- if (rtcp_mux_policy_ == PeerConnectionInterface::kRtcpMuxPolicyRequire) {
- if (voice_channel()) {
- voice_channel()->ActivateRtcpMux();
- }
- if (video_channel()) {
- video_channel()->ActivateRtcpMux();
- }
- if (data_channel()) {
- data_channel()->ActivateRtcpMux();
- }
- }
-
- // Enable BUNDLE immediately when kBundlePolicyMaxBundle is in effect.
- if (bundle_policy_ == PeerConnectionInterface::kBundlePolicyMaxBundle) {
- const cricket::ContentGroup* bundle_group = desc->GetGroupByName(
- cricket::GROUP_TYPE_BUNDLE);
- if (!bundle_group) {
- LOG(LS_WARNING) << "max-bundle specified without BUNDLE specified";
- return false;
- }
- if (!EnableBundle(*bundle_group)) {
- LOG(LS_WARNING) << "max-bundle failed to enable bundling.";
- return false;
- }
- }
-
return true;
}
-bool WebRtcSession::CreateVoiceChannel(const cricket::ContentInfo* content) {
+bool WebRtcSession::CreateVoiceChannel(const cricket::ContentInfo* content,
+ const std::string* bundle_transport) {
+ bool require_rtcp_mux =
+ rtcp_mux_policy_ == PeerConnectionInterface::kRtcpMuxPolicyRequire;
+ bool create_rtcp_transport_channel = !require_rtcp_mux;
voice_channel_.reset(channel_manager_->CreateVoiceChannel(
- media_controller_, transport_controller_.get(), content->name, true,
- audio_options_));
+ media_controller_, transport_controller_.get(), content->name,
+ bundle_transport, create_rtcp_transport_channel, audio_options_));
if (!voice_channel_) {
return false;
}
+ if (require_rtcp_mux) {
+ voice_channel_->ActivateRtcpMux();
+ }
voice_channel_->SignalDtlsSetupFailure.connect(
this, &WebRtcSession::OnDtlsSetupFailure);
SignalVoiceChannelCreated();
- voice_channel_->transport_channel()->SignalSentPacket.connect(
- this, &WebRtcSession::OnSentPacket_w);
+ voice_channel_->SignalSentPacket.connect(this,
+ &WebRtcSession::OnSentPacket_w);
return true;
}
-bool WebRtcSession::CreateVideoChannel(const cricket::ContentInfo* content) {
+bool WebRtcSession::CreateVideoChannel(const cricket::ContentInfo* content,
+ const std::string* bundle_transport) {
+ bool require_rtcp_mux =
+ rtcp_mux_policy_ == PeerConnectionInterface::kRtcpMuxPolicyRequire;
+ bool create_rtcp_transport_channel = !require_rtcp_mux;
video_channel_.reset(channel_manager_->CreateVideoChannel(
- media_controller_, transport_controller_.get(), content->name, true,
- video_options_));
+ media_controller_, transport_controller_.get(), content->name,
+ bundle_transport, create_rtcp_transport_channel, video_options_));
if (!video_channel_) {
return false;
}
-
+ if (require_rtcp_mux) {
+ video_channel_->ActivateRtcpMux();
+ }
video_channel_->SignalDtlsSetupFailure.connect(
this, &WebRtcSession::OnDtlsSetupFailure);
SignalVideoChannelCreated();
- video_channel_->transport_channel()->SignalSentPacket.connect(
- this, &WebRtcSession::OnSentPacket_w);
+ video_channel_->SignalSentPacket.connect(this,
+ &WebRtcSession::OnSentPacket_w);
return true;
}
-bool WebRtcSession::CreateDataChannel(const cricket::ContentInfo* content) {
+bool WebRtcSession::CreateDataChannel(const cricket::ContentInfo* content,
+ const std::string* bundle_transport) {
bool sctp = (data_channel_type_ == cricket::DCT_SCTP);
+ bool require_rtcp_mux =
+ rtcp_mux_policy_ == PeerConnectionInterface::kRtcpMuxPolicyRequire;
+ bool create_rtcp_transport_channel = !sctp && !require_rtcp_mux;
data_channel_.reset(channel_manager_->CreateDataChannel(
- transport_controller_.get(), content->name, !sctp, data_channel_type_));
+ transport_controller_.get(), content->name, bundle_transport,
+ create_rtcp_transport_channel, data_channel_type_));
if (!data_channel_) {
return false;
}
+ if (require_rtcp_mux) {
+ data_channel_->ActivateRtcpMux();
+ }
if (sctp) {
data_channel_->SignalDataReceived.connect(
@@ -1841,8 +1872,7 @@ bool WebRtcSession::CreateDataChannel(const cricket::ContentInfo* content) {
this, &WebRtcSession::OnDtlsSetupFailure);
SignalDataChannelCreated();
- data_channel_->transport_channel()->SignalSentPacket.connect(
- this, &WebRtcSession::OnSentPacket_w);
+ data_channel_->SignalSentPacket.connect(this, &WebRtcSession::OnSentPacket_w);
return true;
}
@@ -1944,6 +1974,9 @@ bool WebRtcSession::ValidateSessionDescription(
return BadSdp(source, type, kBundleWithoutRtcpMux, err_desc);
}
+ // TODO(skvlad): When the local rtcp-mux policy is Require, reject any
+ // m-lines that do not rtcp-mux enabled.
+
// Verify m-lines in Answer when compared against Offer.
if (action == kAnswer) {
const cricket::SessionDescription* offer_desc =
@@ -2160,8 +2193,7 @@ void WebRtcSession::ReportNegotiatedCiphers(
}
}
-void WebRtcSession::OnSentPacket_w(cricket::TransportChannel* channel,
- const rtc::SentPacket& sent_packet) {
+void WebRtcSession::OnSentPacket_w(const rtc::SentPacket& sent_packet) {
RTC_DCHECK(worker_thread()->IsCurrent());
media_controller_->call_w()->OnSentPacket(sent_packet);
}
diff --git a/chromium/third_party/webrtc/api/webrtcsession.h b/chromium/third_party/webrtc/api/webrtcsession.h
index 01ec526501f..98217bff266 100644
--- a/chromium/third_party/webrtc/api/webrtcsession.h
+++ b/chromium/third_party/webrtc/api/webrtcsession.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_API_WEBRTCSESSION_H_
#define WEBRTC_API_WEBRTCSESSION_H_
+#include <memory>
#include <set>
#include <string>
#include <vector>
@@ -21,6 +22,7 @@
#include "webrtc/api/mediastreamprovider.h"
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/api/statstypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/sslidentity.h"
#include "webrtc/base/thread.h"
@@ -34,7 +36,6 @@ namespace cricket {
class ChannelManager;
class DataChannel;
class StatsReport;
-class VideoCapturer;
class VideoChannel;
class VoiceChannel;
@@ -137,22 +138,22 @@ class WebRtcSession : public AudioProviderInterface,
};
WebRtcSession(webrtc::MediaControllerInterface* media_controller,
- rtc::Thread* signaling_thread,
+ rtc::Thread* network_thread,
rtc::Thread* worker_thread,
+ rtc::Thread* signaling_thread,
cricket::PortAllocator* port_allocator);
virtual ~WebRtcSession();
// These are const to allow them to be called from const methods.
- rtc::Thread* signaling_thread() const { return signaling_thread_; }
rtc::Thread* worker_thread() const { return worker_thread_; }
- cricket::PortAllocator* port_allocator() const { return port_allocator_; }
+ rtc::Thread* signaling_thread() const { return signaling_thread_; }
// The ID of this session.
const std::string& id() const { return sid_; }
bool Initialize(
const PeerConnectionFactoryInterface::Options& options,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
const PeerConnectionInterface::RTCConfiguration& rtc_configuration);
// Deletes the voice, video and data channel and changes the session state
// to STATE_CLOSED.
@@ -213,8 +214,6 @@ class WebRtcSession : public AudioProviderInterface,
bool RemoveRemoteIceCandidates(
const std::vector<cricket::Candidate>& candidates);
- bool SetIceTransports(PeerConnectionInterface::IceTransportsType type);
-
cricket::IceConfig ParseIceConfig(
const PeerConnectionInterface::RTCConfiguration& config) const;
@@ -243,14 +242,19 @@ class WebRtcSession : public AudioProviderInterface,
cricket::AudioSource* source) override;
void SetAudioPlayoutVolume(uint32_t ssrc, double volume) override;
void SetRawAudioSink(uint32_t ssrc,
- rtc::scoped_ptr<AudioSinkInterface> sink) override;
+ std::unique_ptr<AudioSinkInterface> sink) override;
- RtpParameters GetAudioRtpParameters(uint32_t ssrc) const override;
- bool SetAudioRtpParameters(uint32_t ssrc,
- const RtpParameters& parameters) override;
+ RtpParameters GetAudioRtpSendParameters(uint32_t ssrc) const override;
+ bool SetAudioRtpSendParameters(uint32_t ssrc,
+ const RtpParameters& parameters) override;
+ RtpParameters GetAudioRtpReceiveParameters(uint32_t ssrc) const override;
+ bool SetAudioRtpReceiveParameters(uint32_t ssrc,
+ const RtpParameters& parameters) override;
// Implements VideoMediaProviderInterface.
- bool SetCaptureDevice(uint32_t ssrc, cricket::VideoCapturer* camera) override;
+ bool SetSource(
+ uint32_t ssrc,
+ rtc::VideoSourceInterface<cricket::VideoFrame>* source) override;
void SetVideoPlayout(
uint32_t ssrc,
bool enable,
@@ -259,15 +263,18 @@ class WebRtcSession : public AudioProviderInterface,
bool enable,
const cricket::VideoOptions* options) override;
- RtpParameters GetVideoRtpParameters(uint32_t ssrc) const override;
- bool SetVideoRtpParameters(uint32_t ssrc,
- const RtpParameters& parameters) override;
+ RtpParameters GetVideoRtpSendParameters(uint32_t ssrc) const override;
+ bool SetVideoRtpSendParameters(uint32_t ssrc,
+ const RtpParameters& parameters) override;
+ RtpParameters GetVideoRtpReceiveParameters(uint32_t ssrc) const override;
+ bool SetVideoRtpReceiveParameters(uint32_t ssrc,
+ const RtpParameters& parameters) override;
// Implements DtmfProviderInterface.
- virtual bool CanInsertDtmf(const std::string& track_id);
- virtual bool InsertDtmf(const std::string& track_id,
- int code, int duration);
- virtual sigslot::signal0<>* GetOnDestroyedSignal();
+ bool CanInsertDtmf(const std::string& track_id) override;
+ bool InsertDtmf(const std::string& track_id,
+ int code, int duration) override;
+ sigslot::signal0<>* GetOnDestroyedSignal() override;
// Implements DataChannelProviderInterface.
bool SendData(const cricket::SendDataParams& params,
@@ -292,7 +299,7 @@ class WebRtcSession : public AudioProviderInterface,
rtc::scoped_refptr<rtc::RTCCertificate>* certificate);
// Caller owns returned certificate
- virtual rtc::scoped_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate(
+ virtual std::unique_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate(
const std::string& transport_name);
cricket::DataChannelType data_channel_type() const;
@@ -379,6 +386,12 @@ class WebRtcSession : public AudioProviderInterface,
const std::string& content_name,
cricket::TransportDescription* info);
+ // Returns the name of the transport channel when BUNDLE is enabled, or
+ // nullptr if the channel is not part of any bundle.
+ const std::string* GetBundleTransportName(
+ const cricket::ContentInfo* content,
+ const cricket::ContentGroup* bundle);
+
// Cause all the BaseChannels in the bundle group to have the same
// transport channel.
bool EnableBundle(const cricket::ContentGroup& bundle);
@@ -405,9 +418,12 @@ class WebRtcSession : public AudioProviderInterface,
bool CreateChannels(const cricket::SessionDescription* desc);
// Helper methods to create media channels.
- bool CreateVoiceChannel(const cricket::ContentInfo* content);
- bool CreateVideoChannel(const cricket::ContentInfo* content);
- bool CreateDataChannel(const cricket::ContentInfo* content);
+ bool CreateVoiceChannel(const cricket::ContentInfo* content,
+ const std::string* bundle_transport);
+ bool CreateVideoChannel(const cricket::ContentInfo* content,
+ const std::string* bundle_transport);
+ bool CreateDataChannel(const cricket::ContentInfo* content,
+ const std::string* bundle_transport);
// Listens to SCTP CONTROL messages on unused SIDs and process them as OPEN
// messages.
@@ -462,12 +478,10 @@ class WebRtcSession : public AudioProviderInterface,
void ReportNegotiatedCiphers(const cricket::TransportStats& stats);
- void OnSentPacket_w(cricket::TransportChannel* channel,
- const rtc::SentPacket& sent_packet);
+ void OnSentPacket_w(const rtc::SentPacket& sent_packet);
- rtc::Thread* const signaling_thread_;
rtc::Thread* const worker_thread_;
- cricket::PortAllocator* const port_allocator_;
+ rtc::Thread* const signaling_thread_;
State state_ = STATE_INIT;
Error error_ = ERROR_NONE;
@@ -476,17 +490,17 @@ class WebRtcSession : public AudioProviderInterface,
const std::string sid_;
bool initial_offerer_ = false;
- rtc::scoped_ptr<cricket::TransportController> transport_controller_;
+ std::unique_ptr<cricket::TransportController> transport_controller_;
MediaControllerInterface* media_controller_;
- rtc::scoped_ptr<cricket::VoiceChannel> voice_channel_;
- rtc::scoped_ptr<cricket::VideoChannel> video_channel_;
- rtc::scoped_ptr<cricket::DataChannel> data_channel_;
+ std::unique_ptr<cricket::VoiceChannel> voice_channel_;
+ std::unique_ptr<cricket::VideoChannel> video_channel_;
+ std::unique_ptr<cricket::DataChannel> data_channel_;
cricket::ChannelManager* channel_manager_;
IceObserver* ice_observer_;
PeerConnectionInterface::IceConnectionState ice_connection_state_;
bool ice_connection_receiving_;
- rtc::scoped_ptr<SessionDescriptionInterface> local_desc_;
- rtc::scoped_ptr<SessionDescriptionInterface> remote_desc_;
+ std::unique_ptr<SessionDescriptionInterface> local_desc_;
+ std::unique_ptr<SessionDescriptionInterface> remote_desc_;
// If the remote peer is using a older version of implementation.
bool older_version_remote_peer_;
bool dtls_enabled_;
@@ -501,8 +515,7 @@ class WebRtcSession : public AudioProviderInterface,
// List of content names for which the remote side triggered an ICE restart.
std::set<std::string> pending_ice_restarts_;
- rtc::scoped_ptr<WebRtcSessionDescriptionFactory>
- webrtc_session_desc_factory_;
+ std::unique_ptr<WebRtcSessionDescriptionFactory> webrtc_session_desc_factory_;
// Member variables for caching global options.
cricket::AudioOptions audio_options_;
diff --git a/chromium/third_party/webrtc/api/webrtcsession_unittest.cc b/chromium/third_party/webrtc/api/webrtcsession_unittest.cc
index 18c1a95116e..4207c24234c 100644
--- a/chromium/third_party/webrtc/api/webrtcsession_unittest.cc
+++ b/chromium/third_party/webrtc/api/webrtcsession_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <utility>
#include <vector>
@@ -55,7 +56,6 @@
using cricket::FakeVoiceMediaChannel;
using cricket::TransportInfo;
using rtc::SocketAddress;
-using rtc::scoped_ptr;
using rtc::Thread;
using webrtc::CreateSessionDescription;
using webrtc::CreateSessionDescriptionObserver;
@@ -158,9 +158,12 @@ class MockIceObserver : public webrtc::IceObserver {
ice_gathering_state_(PeerConnectionInterface::kIceGatheringNew) {
}
+ virtual ~MockIceObserver() = default;
+
void OnIceConnectionChange(
PeerConnectionInterface::IceConnectionState new_state) override {
ice_connection_state_ = new_state;
+ ice_connection_state_history_.push_back(new_state);
}
void OnIceGatheringChange(
PeerConnectionInterface::IceGatheringState new_state) override {
@@ -191,7 +194,7 @@ class MockIceObserver : public webrtc::IceObserver {
// Some local candidates are removed.
void OnIceCandidatesRemoved(
- const std::vector<cricket::Candidate>& candidates) {
+ const std::vector<cricket::Candidate>& candidates) override {
num_candidates_removed_ += candidates.size();
}
@@ -200,19 +203,23 @@ class MockIceObserver : public webrtc::IceObserver {
std::vector<cricket::Candidate> mline_1_candidates_;
PeerConnectionInterface::IceConnectionState ice_connection_state_;
PeerConnectionInterface::IceGatheringState ice_gathering_state_;
+ std::vector<PeerConnectionInterface::IceConnectionState>
+ ice_connection_state_history_;
size_t num_candidates_removed_ = 0;
};
class WebRtcSessionForTest : public webrtc::WebRtcSession {
public:
WebRtcSessionForTest(webrtc::MediaControllerInterface* media_controller,
- rtc::Thread* signaling_thread,
+ rtc::Thread* network_thread,
rtc::Thread* worker_thread,
+ rtc::Thread* signaling_thread,
cricket::PortAllocator* port_allocator,
webrtc::IceObserver* ice_observer)
: WebRtcSession(media_controller,
- signaling_thread,
+ network_thread,
worker_thread,
+ signaling_thread,
port_allocator) {
RegisterIceObserver(ice_observer);
}
@@ -246,7 +253,7 @@ class WebRtcSessionForTest : public webrtc::WebRtcSession {
using webrtc::WebRtcSession::SetAudioPlayout;
using webrtc::WebRtcSession::SetAudioSend;
- using webrtc::WebRtcSession::SetCaptureDevice;
+ using webrtc::WebRtcSession::SetSource;
using webrtc::WebRtcSession::SetVideoPlayout;
using webrtc::WebRtcSession::SetVideoSend;
@@ -297,7 +304,7 @@ class WebRtcSessionCreateSDPObserverForTest
~WebRtcSessionCreateSDPObserverForTest() {}
private:
- rtc::scoped_ptr<SessionDescriptionInterface> description_;
+ std::unique_ptr<SessionDescriptionInterface> description_;
State state_;
};
@@ -375,11 +382,11 @@ class WebRtcSessionTest
// used if provided, otherwise one will be generated using the
// |dtls_identity_store|.
void Init(
- rtc::scoped_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store) {
+ std::unique_ptr<webrtc::DtlsIdentityStoreInterface> dtls_identity_store) {
ASSERT_TRUE(session_.get() == NULL);
session_.reset(new WebRtcSessionForTest(
media_controller_.get(), rtc::Thread::Current(), rtc::Thread::Current(),
- allocator_.get(), &observer_));
+ rtc::Thread::Current(), allocator_.get(), &observer_));
session_->SignalDataChannelOpenMessage.connect(
this, &WebRtcSessionTest::OnDataChannelOpenMessage);
session_->GetOnDestroyedSignal()->connect(
@@ -405,12 +412,6 @@ class WebRtcSessionTest
void Init() { Init(nullptr); }
- void InitWithIceTransport(
- PeerConnectionInterface::IceTransportsType ice_transport_type) {
- configuration_.type = ice_transport_type;
- Init();
- }
-
void InitWithBundlePolicy(
PeerConnectionInterface::BundlePolicy bundle_policy) {
configuration_.bundle_policy = bundle_policy;
@@ -427,7 +428,7 @@ class WebRtcSessionTest
// Successfully init with DTLS; with a certificate generated and supplied or
// with a store that generates it for us.
void InitWithDtls(RTCCertificateGenerationMethod cert_gen_method) {
- rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store;
+ std::unique_ptr<FakeDtlsIdentityStore> dtls_identity_store;
if (cert_gen_method == ALREADY_GENERATED) {
configuration_.certificates.push_back(
FakeDtlsIdentityStore::GenerateCertificate());
@@ -442,7 +443,7 @@ class WebRtcSessionTest
// Init with DTLS with a store that will fail to generate a certificate.
void InitWithDtlsIdentityGenFail() {
- rtc::scoped_ptr<FakeDtlsIdentityStore> dtls_identity_store(
+ std::unique_ptr<FakeDtlsIdentityStore> dtls_identity_store(
new FakeDtlsIdentityStore());
dtls_identity_store->set_should_fail(true);
Init(std::move(dtls_identity_store));
@@ -450,8 +451,8 @@ class WebRtcSessionTest
void InitWithDtmfCodec() {
// Add kTelephoneEventCodec for dtmf test.
- const cricket::AudioCodec kTelephoneEventCodec(
- 106, "telephone-event", 8000, 0, 1, 0);
+ const cricket::AudioCodec kTelephoneEventCodec(106, "telephone-event", 8000,
+ 0, 1);
std::vector<cricket::AudioCodec> codecs;
codecs.push_back(kTelephoneEventCodec);
media_engine_->SetAudioCodecs(codecs);
@@ -692,7 +693,7 @@ class WebRtcSessionTest
rtc::ToString(rtc::CreateRandomId());
// Confirmed to work with KT_RSA and KT_ECDSA.
tdesc_factory_->set_certificate(
- rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::SSLIdentity>(
+ rtc::RTCCertificate::Create(std::unique_ptr<rtc::SSLIdentity>(
rtc::SSLIdentity::Generate(identity_name, rtc::KT_DEFAULT))));
tdesc_factory_->set_secure(cricket::SEC_REQUIRED);
}
@@ -726,12 +727,12 @@ class WebRtcSessionTest
cricket::MediaSessionOptions options;
options.recv_video = true;
options.bundle_enabled = true;
- scoped_ptr<JsepSessionDescription> offer(
+ std::unique_ptr<JsepSessionDescription> offer(
CreateRemoteOffer(options, cricket::SEC_REQUIRED));
ASSERT_TRUE(offer.get() != NULL);
VerifyCryptoParams(offer->description());
SetRemoteDescriptionWithoutError(offer.release());
- scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> answer(CreateAnswer());
ASSERT_TRUE(answer.get() != NULL);
VerifyCryptoParams(answer->description());
}
@@ -930,7 +931,7 @@ class WebRtcSessionTest
options.recv_video = true;
options.bundle_enabled = true;
- rtc::scoped_ptr<SessionDescriptionInterface> temp_offer(
+ std::unique_ptr<SessionDescriptionInterface> temp_offer(
CreateRemoteOffer(options, cricket::SEC_ENABLED));
*nodtls_answer =
@@ -1069,7 +1070,7 @@ class WebRtcSessionTest
// and answer.
SetLocalDescriptionWithoutError(offer);
- rtc::scoped_ptr<SessionDescriptionInterface> answer(
+ std::unique_ptr<SessionDescriptionInterface> answer(
CreateRemoteAnswer(session_->local_description()));
std::string sdp;
EXPECT_TRUE(answer->ToString(&sdp));
@@ -1113,6 +1114,22 @@ class WebRtcSessionTest
EXPECT_EQ(can, session_->CanInsertDtmf(kAudioTrack1));
}
+ bool ContainsVideoCodecWithName(const SessionDescriptionInterface* desc,
+ const std::string& codec_name) {
+ for (const auto& content : desc->description()->contents()) {
+ if (static_cast<cricket::MediaContentDescription*>(content.description)
+ ->type() == cricket::MEDIA_TYPE_VIDEO) {
+ const auto* mdesc =
+ static_cast<cricket::VideoContentDescription*>(content.description);
+ for (const auto& codec : mdesc->codecs()) {
+ if (codec.name == codec_name) {
+ return true;
+ }
+ }
+ }
+ }
+ return false;
+ }
// Helper class to configure loopback network and verify Best
// Connection using right IP protocol for TestLoopbackCall
// method. LoopbackNetworkManager applies firewall rules to block
@@ -1300,6 +1317,8 @@ class WebRtcSessionTest
SetupLoopbackCall();
+ // Wait for channel to be ready for sending.
+ EXPECT_TRUE_WAIT(media_engine_->GetVideoChannel(0)->sending(), 100);
uint8_t test_packet[15] = {0};
rtc::PacketOptions options;
options.packet_id = 10;
@@ -1313,8 +1332,8 @@ class WebRtcSessionTest
// Adds CN codecs to FakeMediaEngine and MediaDescriptionFactory.
void AddCNCodecs() {
- const cricket::AudioCodec kCNCodec1(102, "CN", 8000, 0, 1, 0);
- const cricket::AudioCodec kCNCodec2(103, "CN", 16000, 0, 1, 0);
+ const cricket::AudioCodec kCNCodec1(102, "CN", 8000, 0, 1);
+ const cricket::AudioCodec kCNCodec2(103, "CN", 16000, 0, 1);
// Add kCNCodec for dtmf test.
std::vector<cricket::AudioCodec> codecs = media_engine_->audio_codecs();;
@@ -1370,8 +1389,8 @@ class WebRtcSessionTest
SetFactoryDtlsSrtp();
if (type == CreateSessionDescriptionRequest::kAnswer) {
cricket::MediaSessionOptions options;
- scoped_ptr<JsepSessionDescription> offer(
- CreateRemoteOffer(options, cricket::SEC_DISABLED));
+ std::unique_ptr<JsepSessionDescription> offer(
+ CreateRemoteOffer(options, cricket::SEC_DISABLED));
ASSERT_TRUE(offer.get() != NULL);
SetRemoteDescriptionWithoutError(offer.release());
}
@@ -1417,23 +1436,23 @@ class WebRtcSessionTest
cricket::FakeMediaEngine* media_engine_;
cricket::FakeDataEngine* data_engine_;
- rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
+ std::unique_ptr<cricket::ChannelManager> channel_manager_;
cricket::FakeCall fake_call_;
- rtc::scoped_ptr<webrtc::MediaControllerInterface> media_controller_;
- rtc::scoped_ptr<cricket::TransportDescriptionFactory> tdesc_factory_;
- rtc::scoped_ptr<cricket::MediaSessionDescriptionFactory> desc_factory_;
- rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
- rtc::scoped_ptr<rtc::VirtualSocketServer> vss_;
- rtc::scoped_ptr<rtc::FirewallSocketServer> fss_;
+ std::unique_ptr<webrtc::MediaControllerInterface> media_controller_;
+ std::unique_ptr<cricket::TransportDescriptionFactory> tdesc_factory_;
+ std::unique_ptr<cricket::MediaSessionDescriptionFactory> desc_factory_;
+ std::unique_ptr<rtc::PhysicalSocketServer> pss_;
+ std::unique_ptr<rtc::VirtualSocketServer> vss_;
+ std::unique_ptr<rtc::FirewallSocketServer> fss_;
rtc::SocketServerScope ss_scope_;
rtc::SocketAddress stun_socket_addr_;
- rtc::scoped_ptr<cricket::TestStunServer> stun_server_;
+ std::unique_ptr<cricket::TestStunServer> stun_server_;
cricket::TestTurnServer turn_server_;
rtc::FakeNetworkManager network_manager_;
- rtc::scoped_ptr<cricket::BasicPortAllocator> allocator_;
+ std::unique_ptr<cricket::BasicPortAllocator> allocator_;
PeerConnectionFactoryInterface::Options options_;
PeerConnectionInterface::RTCConfiguration configuration_;
- rtc::scoped_ptr<WebRtcSessionForTest> session_;
+ std::unique_ptr<WebRtcSessionForTest> session_;
MockIceObserver observer_;
cricket::FakeVideoMediaChannel* video_channel_;
cricket::FakeVoiceMediaChannel* voice_channel_;
@@ -1509,50 +1528,6 @@ TEST_F(WebRtcSessionTest, MAYBE_TestStunError) {
EXPECT_EQ(6u, observer_.mline_1_candidates_.size());
}
-// Test session delivers no candidates gathered when constraint set to "none".
-TEST_F(WebRtcSessionTest, TestIceTransportsNone) {
- AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
- InitWithIceTransport(PeerConnectionInterface::kNone);
- SendAudioVideoStream1();
- InitiateCall();
- EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
- EXPECT_EQ(0u, observer_.mline_0_candidates_.size());
- EXPECT_EQ(0u, observer_.mline_1_candidates_.size());
-}
-
-// Test session delivers only relay candidates gathered when constaint set to
-// "relay".
-TEST_F(WebRtcSessionTest, TestIceTransportsRelay) {
- AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
- ConfigureAllocatorWithTurn();
- InitWithIceTransport(PeerConnectionInterface::kRelay);
- SendAudioVideoStream1();
- InitiateCall();
- EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
- EXPECT_EQ(2u, observer_.mline_0_candidates_.size());
- EXPECT_EQ(2u, observer_.mline_1_candidates_.size());
- for (size_t i = 0; i < observer_.mline_0_candidates_.size(); ++i) {
- EXPECT_EQ(cricket::RELAY_PORT_TYPE,
- observer_.mline_0_candidates_[i].type());
- }
- for (size_t i = 0; i < observer_.mline_1_candidates_.size(); ++i) {
- EXPECT_EQ(cricket::RELAY_PORT_TYPE,
- observer_.mline_1_candidates_[i].type());
- }
-}
-
-// Test session delivers all candidates gathered when constaint set to "all".
-TEST_F(WebRtcSessionTest, TestIceTransportsAll) {
- AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
- InitWithIceTransport(PeerConnectionInterface::kAll);
- SendAudioVideoStream1();
- InitiateCall();
- EXPECT_TRUE_WAIT(observer_.oncandidatesready_, kIceCandidatesTimeout);
- // Host + STUN. By default allocator is disabled to gather relay candidates.
- EXPECT_EQ(4u, observer_.mline_0_candidates_.size());
- EXPECT_EQ(4u, observer_.mline_1_candidates_.size());
-}
-
TEST_F(WebRtcSessionTest, SetSdpFailedOnInvalidSdp) {
Init();
SessionDescriptionInterface* offer = NULL;
@@ -1871,7 +1846,7 @@ TEST_P(WebRtcSessionTest, TestSetRemoteNonDtlsAnswerWhenDtlsOn) {
SessionDescriptionInterface* offer = CreateOffer();
cricket::MediaSessionOptions options;
options.recv_video = true;
- rtc::scoped_ptr<SessionDescriptionInterface> temp_offer(
+ std::unique_ptr<SessionDescriptionInterface> temp_offer(
CreateRemoteOffer(options, cricket::SEC_ENABLED));
JsepSessionDescription* answer =
CreateRemoteAnswer(temp_offer.get(), options, cricket::SEC_ENABLED);
@@ -2097,7 +2072,7 @@ TEST_F(WebRtcSessionTest, TestSetRemotePrAnswer) {
TEST_F(WebRtcSessionTest, TestSetLocalAnswerWithoutOffer) {
Init();
SendNothing();
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
SessionDescriptionInterface* answer =
CreateRemoteAnswer(offer.get());
@@ -2108,7 +2083,7 @@ TEST_F(WebRtcSessionTest, TestSetLocalAnswerWithoutOffer) {
TEST_F(WebRtcSessionTest, TestSetRemoteAnswerWithoutOffer) {
Init();
SendNothing();
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
SessionDescriptionInterface* answer =
CreateRemoteAnswer(offer.get());
@@ -2347,7 +2322,7 @@ TEST_F(WebRtcSessionTest, TestSetLocalAndRemoteDescriptionWithCandidates) {
EXPECT_TRUE_WAIT(0u < observer_.mline_0_candidates_.size(),
kIceCandidatesTimeout);
- rtc::scoped_ptr<SessionDescriptionInterface> local_offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> local_offer(CreateOffer());
ASSERT_TRUE(local_offer->candidates(kMediaContentIndex0) != NULL);
EXPECT_LT(0u, local_offer->candidates(kMediaContentIndex0)->count());
@@ -2365,7 +2340,7 @@ TEST_F(WebRtcSessionTest, TestSetLocalAndRemoteDescriptionWithCandidates) {
TEST_F(WebRtcSessionTest, TestChannelCreationsWithContentNames) {
Init();
SendAudioVideoStream1();
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
// CreateOffer creates session description with the content names "audio" and
// "video". Goal is to modify these content names and verify transport
@@ -2414,7 +2389,7 @@ TEST_F(WebRtcSessionTest, TestChannelCreationsWithContentNames) {
// the send streams when no constraints have been set.
TEST_F(WebRtcSessionTest, CreateOfferWithoutConstraintsOrStreams) {
Init();
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
ASSERT_TRUE(offer != NULL);
const cricket::ContentInfo* content =
@@ -2430,7 +2405,7 @@ TEST_F(WebRtcSessionTest, CreateOfferWithoutConstraints) {
Init();
// Test Audio only offer.
SendAudioOnlyStream2();
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
const cricket::ContentInfo* content =
cricket::GetFirstAudioContent(offer->description());
@@ -2455,8 +2430,7 @@ TEST_F(WebRtcSessionTest, CreateOfferWithConstraintsWithoutStreams) {
options.offer_to_receive_audio = 0;
options.offer_to_receive_video = 0;
- rtc::scoped_ptr<SessionDescriptionInterface> offer(
- CreateOffer(options));
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer(options));
ASSERT_TRUE(offer != NULL);
const cricket::ContentInfo* content =
@@ -2474,8 +2448,7 @@ TEST_F(WebRtcSessionTest, CreateAudioOnlyOfferWithConstraints) {
options.offer_to_receive_audio =
RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
- rtc::scoped_ptr<SessionDescriptionInterface> offer(
- CreateOffer(options));
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer(options));
const cricket::ContentInfo* content =
cricket::GetFirstAudioContent(offer->description());
@@ -2495,8 +2468,7 @@ TEST_F(WebRtcSessionTest, CreateOfferWithConstraints) {
options.offer_to_receive_video =
RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
- rtc::scoped_ptr<SessionDescriptionInterface> offer(
- CreateOffer(options));
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer(options));
const cricket::ContentInfo* content =
cricket::GetFirstAudioContent(offer->description());
@@ -2533,9 +2505,9 @@ TEST_F(WebRtcSessionTest, CreateAnswerWithoutAnOffer) {
TEST_F(WebRtcSessionTest, CreateAnswerWithoutConstraintsOrStreams) {
Init();
// Create a remote offer with audio and video content.
- rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+ std::unique_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
SetRemoteDescriptionWithoutError(offer.release());
- rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> answer(CreateAnswer());
const cricket::ContentInfo* content =
cricket::GetFirstAudioContent(answer->description());
ASSERT_TRUE(content != NULL);
@@ -2553,13 +2525,12 @@ TEST_F(WebRtcSessionTest, CreateAudioAnswerWithoutConstraintsOrStreams) {
// Create a remote offer with audio only.
cricket::MediaSessionOptions options;
- rtc::scoped_ptr<JsepSessionDescription> offer(
- CreateRemoteOffer(options));
+ std::unique_ptr<JsepSessionDescription> offer(CreateRemoteOffer(options));
ASSERT_TRUE(cricket::GetFirstVideoContent(offer->description()) == NULL);
ASSERT_TRUE(cricket::GetFirstAudioContent(offer->description()) != NULL);
SetRemoteDescriptionWithoutError(offer.release());
- rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> answer(CreateAnswer());
const cricket::ContentInfo* content =
cricket::GetFirstAudioContent(answer->description());
ASSERT_TRUE(content != NULL);
@@ -2573,11 +2544,11 @@ TEST_F(WebRtcSessionTest, CreateAudioAnswerWithoutConstraintsOrStreams) {
TEST_F(WebRtcSessionTest, CreateAnswerWithoutConstraints) {
Init();
// Create a remote offer with audio and video content.
- rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+ std::unique_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
SetRemoteDescriptionWithoutError(offer.release());
// Test with a stream with tracks.
SendAudioVideoStream1();
- rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> answer(CreateAnswer());
const cricket::ContentInfo* content =
cricket::GetFirstAudioContent(answer->description());
ASSERT_TRUE(content != NULL);
@@ -2593,13 +2564,13 @@ TEST_F(WebRtcSessionTest, CreateAnswerWithoutConstraints) {
TEST_F(WebRtcSessionTest, CreateAnswerWithConstraintsWithoutStreams) {
Init();
// Create a remote offer with audio and video content.
- rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+ std::unique_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
SetRemoteDescriptionWithoutError(offer.release());
cricket::MediaSessionOptions session_options;
session_options.recv_audio = false;
session_options.recv_video = false;
- rtc::scoped_ptr<SessionDescriptionInterface> answer(
+ std::unique_ptr<SessionDescriptionInterface> answer(
CreateAnswer(session_options));
const cricket::ContentInfo* content =
@@ -2617,7 +2588,7 @@ TEST_F(WebRtcSessionTest, CreateAnswerWithConstraintsWithoutStreams) {
TEST_F(WebRtcSessionTest, CreateAnswerWithConstraints) {
Init();
// Create a remote offer with audio and video content.
- rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+ std::unique_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
SetRemoteDescriptionWithoutError(offer.release());
cricket::MediaSessionOptions options;
@@ -2626,7 +2597,7 @@ TEST_F(WebRtcSessionTest, CreateAnswerWithConstraints) {
// Test with a stream with tracks.
SendAudioVideoStream1();
- rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer(options));
+ std::unique_ptr<SessionDescriptionInterface> answer(CreateAnswer(options));
// TODO(perkj): Should the direction be set to SEND_ONLY?
const cricket::ContentInfo* content =
@@ -2648,8 +2619,7 @@ TEST_F(WebRtcSessionTest, CreateOfferWithoutCNCodecs) {
RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
options.voice_activity_detection = false;
- rtc::scoped_ptr<SessionDescriptionInterface> offer(
- CreateOffer(options));
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer(options));
const cricket::ContentInfo* content =
cricket::GetFirstAudioContent(offer->description());
@@ -2661,12 +2631,12 @@ TEST_F(WebRtcSessionTest, CreateAnswerWithoutCNCodecs) {
AddCNCodecs();
Init();
// Create a remote offer with audio and video content.
- rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
+ std::unique_ptr<JsepSessionDescription> offer(CreateRemoteOffer());
SetRemoteDescriptionWithoutError(offer.release());
cricket::MediaSessionOptions options;
options.vad_enabled = false;
- rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer(options));
+ std::unique_ptr<SessionDescriptionInterface> answer(CreateAnswer(options));
const cricket::ContentInfo* content =
cricket::GetFirstAudioContent(answer->description());
ASSERT_TRUE(content != NULL);
@@ -2788,10 +2758,10 @@ TEST_F(WebRtcSessionTest, TestAVOfferWithVideoOnlyAnswer) {
TEST_F(WebRtcSessionTest, VerifyCryptoParamsInSDP) {
Init();
SendAudioVideoStream1();
- scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
VerifyCryptoParams(offer->description());
SetRemoteDescriptionWithoutError(offer.release());
- scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> answer(CreateAnswer());
VerifyCryptoParams(answer->description());
}
@@ -2799,7 +2769,7 @@ TEST_F(WebRtcSessionTest, VerifyNoCryptoParamsInSDP) {
options_.disable_encryption = true;
Init();
SendAudioVideoStream1();
- scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
VerifyNoCryptoParams(offer->description(), false);
}
@@ -2818,7 +2788,7 @@ TEST_F(WebRtcSessionTest, VerifyAnswerFromCryptoOffer) {
TEST_F(WebRtcSessionTest, TestSetLocalDescriptionWithoutIce) {
Init();
SendAudioVideoStream1();
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
std::string sdp;
RemoveIceUfragPwdLines(offer.get(), &sdp);
@@ -2831,7 +2801,7 @@ TEST_F(WebRtcSessionTest, TestSetLocalDescriptionWithoutIce) {
// no a=ice-ufrag and a=ice-pwd lines are present in the SDP.
TEST_F(WebRtcSessionTest, TestSetRemoteDescriptionWithoutIce) {
Init();
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
std::string sdp;
RemoveIceUfragPwdLines(offer.get(), &sdp);
SessionDescriptionInterface* modified_offer =
@@ -2844,7 +2814,7 @@ TEST_F(WebRtcSessionTest, TestSetRemoteDescriptionWithoutIce) {
TEST_F(WebRtcSessionTest, TestSetLocalDescriptionInvalidIceCredentials) {
Init();
SendAudioVideoStream1();
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
// Modifying ice ufrag and pwd in local offer with strings smaller than the
// recommended values of 4 and 22 bytes respectively.
SetIceUfragPwd(offer.get(), "ice", "icepwd");
@@ -2861,7 +2831,7 @@ TEST_F(WebRtcSessionTest, TestSetLocalDescriptionInvalidIceCredentials) {
// too short ice ufrag and pwd strings.
TEST_F(WebRtcSessionTest, TestSetRemoteDescriptionInvalidIceCredentials) {
Init();
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
// Modifying ice ufrag and pwd in remote offer with strings smaller than the
// recommended values of 4 and 22 bytes respectively.
SetIceUfragPwd(offer.get(), "ice", "icepwd");
@@ -2879,7 +2849,7 @@ TEST_F(WebRtcSessionTest, TestSetRemoteOfferWithIceRestart) {
Init();
// Create the first offer.
- scoped_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
SetIceUfragPwd(offer.get(), "0123456789012345", "abcdefghijklmnopqrstuvwx");
cricket::Candidate candidate1(1, "udp", rtc::SocketAddress("1.1.1.1", 5000),
0, "", "", "relay", 0, "");
@@ -2924,7 +2894,7 @@ TEST_F(WebRtcSessionTest, TestSetRemoteAnswerWithIceRestart) {
SetLocalDescriptionWithoutError(offer);
// Create the first answer.
- scoped_ptr<JsepSessionDescription> answer(CreateRemoteAnswer(offer));
+ std::unique_ptr<JsepSessionDescription> answer(CreateRemoteAnswer(offer));
answer->set_type(JsepSessionDescription::kPrAnswer);
SetIceUfragPwd(answer.get(), "0123456789012345", "abcdefghijklmnopqrstuvwx");
cricket::Candidate candidate1(1, "udp", rtc::SocketAddress("1.1.1.1", 5000),
@@ -3091,7 +3061,7 @@ TEST_F(WebRtcSessionTest, TestBalancedNoBundleInAnswer) {
SendAudioVideoStream2();
// Remove BUNDLE from the answer.
- rtc::scoped_ptr<SessionDescriptionInterface> answer(
+ std::unique_ptr<SessionDescriptionInterface> answer(
CreateRemoteAnswer(session_->local_description()));
cricket::SessionDescription* answer_copy = answer->description()->Copy();
answer_copy->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
@@ -3177,7 +3147,7 @@ TEST_F(WebRtcSessionTest, TestMaxBundleNoBundleInAnswer) {
SendAudioVideoStream2();
// Remove BUNDLE from the answer.
- rtc::scoped_ptr<SessionDescriptionInterface> answer(
+ std::unique_ptr<SessionDescriptionInterface> answer(
CreateRemoteAnswer(session_->local_description()));
cricket::SessionDescription* answer_copy = answer->description()->Copy();
answer_copy->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
@@ -3215,7 +3185,7 @@ TEST_F(WebRtcSessionTest, TestMaxBundleNoBundleInRemoteOffer) {
SendAudioVideoStream1();
// Remove BUNDLE from the offer.
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateRemoteOffer());
cricket::SessionDescription* offer_copy = offer->description()->Copy();
offer_copy->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
JsepSessionDescription* modified_offer =
@@ -3268,7 +3238,7 @@ TEST_F(WebRtcSessionTest, TestMaxCompatNoBundleInAnswer) {
SendAudioVideoStream2();
// Remove BUNDLE from the answer.
- rtc::scoped_ptr<SessionDescriptionInterface> answer(
+ std::unique_ptr<SessionDescriptionInterface> answer(
CreateRemoteAnswer(session_->local_description()));
cricket::SessionDescription* answer_copy = answer->description()->Copy();
answer_copy->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
@@ -3296,6 +3266,60 @@ TEST_F(WebRtcSessionTest, TestMaxBundleWithSetRemoteDescriptionFirst) {
session_->video_rtp_transport_channel());
}
+// Adding a new channel to a BUNDLE which is already connected should directly
+// assign the bundle transport to the channel, without first setting a
+// disconnected non-bundle transport and then replacing it. The application
+// should not receive any changes in the ICE state.
+TEST_F(WebRtcSessionTest, TestAddChannelToConnectedBundle) {
+ LoopbackNetworkConfiguration config;
+ LoopbackNetworkManager loopback_network_manager(this, config);
+ // Both BUNDLE and RTCP-mux need to be enabled for the ICE state to remain
+ // connected. Disabling either of these two means that we need to wait for the
+ // answer to find out if more transports are needed.
+ configuration_.bundle_policy =
+ PeerConnectionInterface::kBundlePolicyMaxBundle;
+ configuration_.rtcp_mux_policy =
+ PeerConnectionInterface::kRtcpMuxPolicyRequire;
+ options_.disable_encryption = true;
+ Init();
+
+ // Negotiate an audio channel with MAX_BUNDLE enabled.
+ SendAudioOnlyStream2();
+ SessionDescriptionInterface* offer = CreateOffer();
+ SetLocalDescriptionWithoutError(offer);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::kIceGatheringComplete,
+ observer_.ice_gathering_state_, kIceCandidatesTimeout);
+ std::string sdp;
+ offer->ToString(&sdp);
+ SessionDescriptionInterface* answer = webrtc::CreateSessionDescription(
+ JsepSessionDescription::kAnswer, sdp, nullptr);
+ ASSERT_TRUE(answer != NULL);
+ SetRemoteDescriptionWithoutError(answer);
+
+ // Wait for the ICE state to stabilize.
+ EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionCompleted,
+ observer_.ice_connection_state_, kIceCandidatesTimeout);
+ observer_.ice_connection_state_history_.clear();
+
+ // Now add a video channel which should be using the same bundle transport.
+ SendAudioVideoStream2();
+ offer = CreateOffer();
+ offer->ToString(&sdp);
+ SetLocalDescriptionWithoutError(offer);
+ answer = webrtc::CreateSessionDescription(JsepSessionDescription::kAnswer,
+ sdp, nullptr);
+ ASSERT_TRUE(answer != NULL);
+ SetRemoteDescriptionWithoutError(answer);
+
+ // Wait for ICE state to stabilize
+ rtc::Thread::Current()->ProcessMessages(0);
+ EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionCompleted,
+ observer_.ice_connection_state_, kIceCandidatesTimeout);
+
+ // No ICE state changes are expected to happen.
+ EXPECT_EQ(0, observer_.ice_connection_state_history_.size());
+}
+
TEST_F(WebRtcSessionTest, TestRequireRtcpMux) {
InitWithRtcpMuxPolicy(PeerConnectionInterface::kRtcpMuxPolicyRequire);
SendAudioVideoStream1();
@@ -3385,23 +3409,32 @@ TEST_F(WebRtcSessionTest, SetAudioPlayout) {
EXPECT_EQ(1, volume);
}
-TEST_F(WebRtcSessionTest, AudioMaxSendBitrateNotImplemented) {
- // This test verifies that RtpParameters for audio RtpSenders cannot be
- // changed.
- // TODO(skvlad): Update the test after adding support for bitrate limiting in
- // WebRtcAudioSendStream.
-
+TEST_F(WebRtcSessionTest, SetAudioMaxSendBitrate) {
Init();
SendAudioVideoStream1();
CreateAndSetRemoteOfferAndLocalAnswer();
cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
ASSERT_TRUE(channel != NULL);
uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
- webrtc::RtpParameters params = session_->GetAudioRtpParameters(send_ssrc);
+ EXPECT_EQ(-1, channel->max_bps());
+ webrtc::RtpParameters params = session_->GetAudioRtpSendParameters(send_ssrc);
+ EXPECT_EQ(1, params.encodings.size());
+ EXPECT_EQ(-1, params.encodings[0].max_bitrate_bps);
+ params.encodings[0].max_bitrate_bps = 1000;
+ EXPECT_TRUE(session_->SetAudioRtpSendParameters(send_ssrc, params));
+
+ // Read back the parameters and verify they have been changed.
+ params = session_->GetAudioRtpSendParameters(send_ssrc);
+ EXPECT_EQ(1, params.encodings.size());
+ EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
- EXPECT_EQ(0, params.encodings.size());
- params.encodings.push_back(webrtc::RtpEncodingParameters());
- EXPECT_FALSE(session_->SetAudioRtpParameters(send_ssrc, params));
+ // Verify that the audio channel received the new parameters.
+ params = channel->GetRtpSendParameters(send_ssrc);
+ EXPECT_EQ(1, params.encodings.size());
+ EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
+
+ // Verify that the global bitrate limit has not been changed.
+ EXPECT_EQ(-1, channel->max_bps());
}
TEST_F(WebRtcSessionTest, SetAudioSend) {
@@ -3417,7 +3450,7 @@ TEST_F(WebRtcSessionTest, SetAudioSend) {
cricket::AudioOptions options;
options.echo_cancellation = rtc::Optional<bool>(true);
- rtc::scoped_ptr<FakeAudioSource> source(new FakeAudioSource());
+ std::unique_ptr<FakeAudioSource> source(new FakeAudioSource());
session_->SetAudioSend(send_ssrc, false, options, source.get());
EXPECT_TRUE(channel->IsStreamMuted(send_ssrc));
EXPECT_EQ(rtc::Optional<bool>(), channel->options().echo_cancellation);
@@ -3439,7 +3472,7 @@ TEST_F(WebRtcSessionTest, AudioSourceForLocalStream) {
ASSERT_EQ(1u, channel->send_streams().size());
uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
- rtc::scoped_ptr<FakeAudioSource> source(new FakeAudioSource());
+ std::unique_ptr<FakeAudioSource> source(new FakeAudioSource());
cricket::AudioOptions options;
session_->SetAudioSend(send_ssrc, true, options, source.get());
EXPECT_TRUE(source->sink() != nullptr);
@@ -3478,19 +3511,19 @@ TEST_F(WebRtcSessionTest, SetVideoMaxSendBitrate) {
ASSERT_TRUE(channel != NULL);
uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
EXPECT_EQ(-1, channel->max_bps());
- webrtc::RtpParameters params = session_->GetVideoRtpParameters(send_ssrc);
+ webrtc::RtpParameters params = session_->GetVideoRtpSendParameters(send_ssrc);
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(-1, params.encodings[0].max_bitrate_bps);
params.encodings[0].max_bitrate_bps = 1000;
- EXPECT_TRUE(session_->SetVideoRtpParameters(send_ssrc, params));
+ EXPECT_TRUE(session_->SetVideoRtpSendParameters(send_ssrc, params));
// Read back the parameters and verify they have been changed.
- params = session_->GetVideoRtpParameters(send_ssrc);
+ params = session_->GetVideoRtpSendParameters(send_ssrc);
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
// Verify that the video channel received the new parameters.
- params = channel->GetRtpParameters(send_ssrc);
+ params = channel->GetRtpSendParameters(send_ssrc);
EXPECT_EQ(1, params.encodings.size());
EXPECT_EQ(1000, params.encodings[0].max_bitrate_bps);
@@ -3579,7 +3612,7 @@ TEST_F(WebRtcSessionTest, TestIncorrectMLinesInRemoteAnswer) {
SendAudioVideoStream1();
SessionDescriptionInterface* offer = CreateOffer();
SetLocalDescriptionWithoutError(offer);
- rtc::scoped_ptr<SessionDescriptionInterface> answer(
+ std::unique_ptr<SessionDescriptionInterface> answer(
CreateRemoteAnswer(session_->local_description()));
cricket::SessionDescription* answer_copy = answer->description()->Copy();
@@ -3676,7 +3709,7 @@ TEST_F(WebRtcSessionTest, TestIceStartAfterSetLocalDescriptionOnly) {
TEST_F(WebRtcSessionTest, TestCryptoAfterSetLocalDescription) {
Init();
SendAudioVideoStream1();
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
// Making sure SetLocalDescription correctly sets crypto value in
// SessionDescription object after de-serialization of sdp string. The value
@@ -3695,7 +3728,7 @@ TEST_F(WebRtcSessionTest, TestCryptoAfterSetLocalDescriptionWithDisabled) {
options_.disable_encryption = true;
Init();
SendAudioVideoStream1();
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
// Making sure SetLocalDescription correctly sets crypto value in
// SessionDescription object after de-serialization of sdp string. The value
@@ -3715,12 +3748,11 @@ TEST_F(WebRtcSessionTest, TestCreateAnswerWithNewUfragAndPassword) {
Init();
cricket::MediaSessionOptions options;
options.recv_video = true;
- rtc::scoped_ptr<JsepSessionDescription> offer(
- CreateRemoteOffer(options));
+ std::unique_ptr<JsepSessionDescription> offer(CreateRemoteOffer(options));
SetRemoteDescriptionWithoutError(offer.release());
SendAudioVideoStream1();
- rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> answer(CreateAnswer());
SetLocalDescriptionWithoutError(answer.release());
// Receive an offer with new ufrag and password.
@@ -3728,18 +3760,18 @@ TEST_F(WebRtcSessionTest, TestCreateAnswerWithNewUfragAndPassword) {
session_->local_description()->description()->contents()) {
options.transport_options[content.name].ice_restart = true;
}
- rtc::scoped_ptr<JsepSessionDescription> updated_offer1(
+ std::unique_ptr<JsepSessionDescription> updated_offer1(
CreateRemoteOffer(options, session_->remote_description()));
SetRemoteDescriptionWithoutError(updated_offer1.release());
- rtc::scoped_ptr<SessionDescriptionInterface> updated_answer1(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> updated_answer1(CreateAnswer());
EXPECT_FALSE(IceUfragPwdEqual(updated_answer1->description(),
session_->local_description()->description()));
// Even a second answer (created before the description is set) should have
// a new ufrag/password.
- rtc::scoped_ptr<SessionDescriptionInterface> updated_answer2(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> updated_answer2(CreateAnswer());
EXPECT_FALSE(IceUfragPwdEqual(updated_answer2->description(),
session_->local_description()->description()));
@@ -3758,34 +3790,34 @@ TEST_F(WebRtcSessionTest, TestOfferChangingOnlyUfragOrPassword) {
options.recv_audio = true;
options.recv_video = true;
// Create an offer with audio and video.
- rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer(options));
+ std::unique_ptr<JsepSessionDescription> offer(CreateRemoteOffer(options));
SetIceUfragPwd(offer.get(), "original_ufrag", "original_password12345");
SetRemoteDescriptionWithoutError(offer.release());
SendAudioVideoStream1();
- rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> answer(CreateAnswer());
SetLocalDescriptionWithoutError(answer.release());
// Receive an offer with a new ufrag but stale password.
- rtc::scoped_ptr<JsepSessionDescription> ufrag_changed_offer(
+ std::unique_ptr<JsepSessionDescription> ufrag_changed_offer(
CreateRemoteOffer(options, session_->remote_description()));
SetIceUfragPwd(ufrag_changed_offer.get(), "modified_ufrag",
"original_password12345");
SetRemoteDescriptionWithoutError(ufrag_changed_offer.release());
- rtc::scoped_ptr<SessionDescriptionInterface> updated_answer1(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> updated_answer1(CreateAnswer());
EXPECT_FALSE(IceUfragPwdEqual(updated_answer1->description(),
session_->local_description()->description()));
SetLocalDescriptionWithoutError(updated_answer1.release());
// Receive an offer with a new password but stale ufrag.
- rtc::scoped_ptr<JsepSessionDescription> password_changed_offer(
+ std::unique_ptr<JsepSessionDescription> password_changed_offer(
CreateRemoteOffer(options, session_->remote_description()));
SetIceUfragPwd(password_changed_offer.get(), "modified_ufrag",
"modified_password12345");
SetRemoteDescriptionWithoutError(password_changed_offer.release());
- rtc::scoped_ptr<SessionDescriptionInterface> updated_answer2(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> updated_answer2(CreateAnswer());
EXPECT_FALSE(IceUfragPwdEqual(updated_answer2->description(),
session_->local_description()->description()));
SetLocalDescriptionWithoutError(updated_answer2.release());
@@ -3797,20 +3829,19 @@ TEST_F(WebRtcSessionTest, TestCreateAnswerWithOldUfragAndPassword) {
Init();
cricket::MediaSessionOptions options;
options.recv_video = true;
- rtc::scoped_ptr<JsepSessionDescription> offer(
- CreateRemoteOffer(options));
+ std::unique_ptr<JsepSessionDescription> offer(CreateRemoteOffer(options));
SetRemoteDescriptionWithoutError(offer.release());
SendAudioVideoStream1();
- rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> answer(CreateAnswer());
SetLocalDescriptionWithoutError(answer.release());
// Receive an offer without changed ufrag or password.
- rtc::scoped_ptr<JsepSessionDescription> updated_offer2(
+ std::unique_ptr<JsepSessionDescription> updated_offer2(
CreateRemoteOffer(options, session_->remote_description()));
SetRemoteDescriptionWithoutError(updated_offer2.release());
- rtc::scoped_ptr<SessionDescriptionInterface> updated_answer2(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> updated_answer2(CreateAnswer());
EXPECT_TRUE(IceUfragPwdEqual(updated_answer2->description(),
session_->local_description()->description()));
@@ -3827,7 +3858,7 @@ TEST_F(WebRtcSessionTest, TestCreateAnswerWithNewAndOldUfragAndPassword) {
options.recv_video = true;
options.recv_audio = true;
options.bundle_enabled = false;
- rtc::scoped_ptr<JsepSessionDescription> offer(CreateRemoteOffer(options));
+ std::unique_ptr<JsepSessionDescription> offer(CreateRemoteOffer(options));
SetIceUfragPwd(offer.get(), cricket::MEDIA_TYPE_AUDIO, "aaaa",
"aaaaaaaaaaaaaaaaaaaaaa");
@@ -3836,18 +3867,18 @@ TEST_F(WebRtcSessionTest, TestCreateAnswerWithNewAndOldUfragAndPassword) {
SetRemoteDescriptionWithoutError(offer.release());
SendAudioVideoStream1();
- rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> answer(CreateAnswer());
SetLocalDescriptionWithoutError(answer.release());
// Receive an offer with new ufrag and password, but only for the video media
// section.
- rtc::scoped_ptr<JsepSessionDescription> updated_offer(
+ std::unique_ptr<JsepSessionDescription> updated_offer(
CreateRemoteOffer(options, session_->remote_description()));
SetIceUfragPwd(updated_offer.get(), cricket::MEDIA_TYPE_VIDEO, "cccc",
"cccccccccccccccccccccc");
SetRemoteDescriptionWithoutError(updated_offer.release());
- rtc::scoped_ptr<SessionDescriptionInterface> updated_answer(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> updated_answer(CreateAnswer());
EXPECT_TRUE(IceUfragPwdEqual(updated_answer->description(),
session_->local_description()->description(),
@@ -3940,7 +3971,7 @@ TEST_P(WebRtcSessionTest, TestCreateOfferWithSctpEnabledWithoutStreams) {
InitWithDtls(GetParam());
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
EXPECT_TRUE(offer->description()->GetContentByName("data") == NULL);
EXPECT_TRUE(offer->description()->GetTransportInfoByName("data") == NULL);
}
@@ -3958,7 +3989,7 @@ TEST_P(WebRtcSessionTest, TestCreateAnswerWithSctpInOfferAndNoStreams) {
SetRemoteDescriptionWithoutError(offer);
// Verifies the answer contains SCTP.
- rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> answer(CreateAnswer());
EXPECT_TRUE(answer != NULL);
EXPECT_TRUE(answer->description()->GetContentByName("data") != NULL);
EXPECT_TRUE(answer->description()->GetTransportInfoByName("data") != NULL);
@@ -4090,7 +4121,7 @@ TEST_P(WebRtcSessionTest, TestCreateOfferBeforeIdentityRequestReturnSuccess) {
EXPECT_TRUE(session_->waiting_for_certificate_for_testing());
SendAudioVideoStream1();
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
EXPECT_TRUE(offer != NULL);
VerifyNoCryptoParams(offer->description(), true);
@@ -4107,12 +4138,12 @@ TEST_P(WebRtcSessionTest, TestCreateAnswerBeforeIdentityRequestReturnSuccess) {
cricket::MediaSessionOptions options;
options.recv_video = true;
- scoped_ptr<JsepSessionDescription> offer(
- CreateRemoteOffer(options, cricket::SEC_DISABLED));
+ std::unique_ptr<JsepSessionDescription> offer(
+ CreateRemoteOffer(options, cricket::SEC_DISABLED));
ASSERT_TRUE(offer.get() != NULL);
SetRemoteDescriptionWithoutError(offer.release());
- rtc::scoped_ptr<SessionDescriptionInterface> answer(CreateAnswer());
+ std::unique_ptr<SessionDescriptionInterface> answer(CreateAnswer());
EXPECT_TRUE(answer != NULL);
VerifyNoCryptoParams(answer->description(), true);
VerifyFingerprintStatus(answer->description(), true);
@@ -4127,7 +4158,7 @@ TEST_P(WebRtcSessionTest, TestCreateOfferAfterIdentityRequestReturnSuccess) {
EXPECT_TRUE_WAIT(!session_->waiting_for_certificate_for_testing(), 1000);
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
EXPECT_TRUE(offer != NULL);
}
@@ -4139,7 +4170,7 @@ TEST_F(WebRtcSessionTest, TestCreateOfferAfterIdentityRequestReturnFailure) {
EXPECT_TRUE_WAIT(!session_->waiting_for_certificate_for_testing(), 1000);
- rtc::scoped_ptr<SessionDescriptionInterface> offer(CreateOffer());
+ std::unique_ptr<SessionDescriptionInterface> offer(CreateOffer());
EXPECT_TRUE(offer == NULL);
}
@@ -4275,15 +4306,9 @@ TEST_P(WebRtcSessionTest, TestRenegotiateNewMediaWithCandidatesSeparated) {
SetLocalDescriptionWithoutError(answer);
}
-// Flaky on Win and Mac only. See webrtc:4943
-#if defined(WEBRTC_WIN) || defined(WEBRTC_MAC)
-#define MAYBE_TestRtxRemovedByCreateAnswer DISABLED_TestRtxRemovedByCreateAnswer
-#else
-#define MAYBE_TestRtxRemovedByCreateAnswer TestRtxRemovedByCreateAnswer
-#endif
// Tests that RTX codec is removed from the answer when it isn't supported
// by local side.
-TEST_F(WebRtcSessionTest, MAYBE_TestRtxRemovedByCreateAnswer) {
+TEST_F(WebRtcSessionTest, TestRtxRemovedByCreateAnswer) {
Init();
SendAudioVideoStream1();
std::string offer_sdp(kSdpWithRtx);
@@ -4293,14 +4318,12 @@ TEST_F(WebRtcSessionTest, MAYBE_TestRtxRemovedByCreateAnswer) {
EXPECT_TRUE(offer->ToString(&offer_sdp));
// Offer SDP contains the RTX codec.
- EXPECT_TRUE(offer_sdp.find("rtx") != std::string::npos);
+ EXPECT_TRUE(ContainsVideoCodecWithName(offer, "rtx"));
SetRemoteDescriptionWithoutError(offer);
SessionDescriptionInterface* answer = CreateAnswer();
- std::string answer_sdp;
- answer->ToString(&answer_sdp);
- // Answer SDP removes the unsupported RTX codec.
- EXPECT_TRUE(answer_sdp.find("rtx") == std::string::npos);
+ // Answer SDP does not contain the RTX codec.
+ EXPECT_FALSE(ContainsVideoCodecWithName(answer, "rtx"));
SetLocalDescriptionWithoutError(answer);
}
diff --git a/chromium/third_party/webrtc/api/webrtcsessiondescriptionfactory.cc b/chromium/third_party/webrtc/api/webrtcsessiondescriptionfactory.cc
index 78840e2fac3..e88262fbdc5 100644
--- a/chromium/third_party/webrtc/api/webrtcsessiondescriptionfactory.cc
+++ b/chromium/third_party/webrtc/api/webrtcsessiondescriptionfactory.cc
@@ -64,7 +64,7 @@ struct CreateSessionDescriptionMsg : public rtc::MessageData {
rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserver> observer;
std::string error;
- rtc::scoped_ptr<webrtc::SessionDescriptionInterface> description;
+ std::unique_ptr<webrtc::SessionDescriptionInterface> description;
};
} // namespace
@@ -82,13 +82,13 @@ void WebRtcIdentityRequestObserver::OnSuccess(
rtc::kPemTypeRsaPrivateKey,
reinterpret_cast<const unsigned char*>(der_private_key.data()),
der_private_key.length());
- rtc::scoped_ptr<rtc::SSLIdentity> identity(
+ std::unique_ptr<rtc::SSLIdentity> identity(
rtc::SSLIdentity::FromPEMStrings(pem_key, pem_cert));
SignalCertificateReady(rtc::RTCCertificate::Create(std::move(identity)));
}
void WebRtcIdentityRequestObserver::OnSuccess(
- rtc::scoped_ptr<rtc::SSLIdentity> identity) {
+ std::unique_ptr<rtc::SSLIdentity> identity) {
SignalCertificateReady(rtc::RTCCertificate::Create(std::move(identity)));
}
@@ -127,7 +127,7 @@ void WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription(
WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
rtc::Thread* signaling_thread,
cricket::ChannelManager* channel_manager,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
const rtc::scoped_refptr<WebRtcIdentityRequestObserver>&
identity_request_observer,
WebRtcSession* session,
@@ -168,7 +168,7 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
rtc::Thread* signaling_thread,
cricket::ChannelManager* channel_manager,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
WebRtcSession* session,
const std::string& session_id)
: WebRtcSessionDescriptionFactory(
diff --git a/chromium/third_party/webrtc/api/webrtcsessiondescriptionfactory.h b/chromium/third_party/webrtc/api/webrtcsessiondescriptionfactory.h
index 71d083b20d9..17e2ddd3b06 100644
--- a/chromium/third_party/webrtc/api/webrtcsessiondescriptionfactory.h
+++ b/chromium/third_party/webrtc/api/webrtcsessiondescriptionfactory.h
@@ -11,8 +11,11 @@
#ifndef WEBRTC_API_WEBRTCSESSIONDESCRIPTIONFACTORY_H_
#define WEBRTC_API_WEBRTCSESSIONDESCRIPTIONFACTORY_H_
+#include <memory>
+
#include "webrtc/api/dtlsidentitystore.h"
#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/messagehandler.h"
#include "webrtc/base/rtccertificate.h"
#include "webrtc/p2p/base/transportdescriptionfactory.h"
@@ -37,7 +40,7 @@ class WebRtcIdentityRequestObserver : public DtlsIdentityRequestObserver,
void OnFailure(int error) override;
void OnSuccess(const std::string& der_cert,
const std::string& der_private_key) override;
- void OnSuccess(rtc::scoped_ptr<rtc::SSLIdentity> identity) override;
+ void OnSuccess(std::unique_ptr<rtc::SSLIdentity> identity) override;
sigslot::signal1<int> SignalRequestFailed;
sigslot::signal1<const rtc::scoped_refptr<rtc::RTCCertificate>&>
@@ -82,7 +85,7 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler,
WebRtcSessionDescriptionFactory(
rtc::Thread* signaling_thread,
cricket::ChannelManager* channel_manager,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
WebRtcSession* session,
const std::string& session_id);
@@ -130,7 +133,7 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler,
WebRtcSessionDescriptionFactory(
rtc::Thread* signaling_thread,
cricket::ChannelManager* channel_manager,
- rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
+ std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store,
const rtc::scoped_refptr<WebRtcIdentityRequestObserver>&
identity_request_observer,
WebRtcSession* session,
@@ -161,7 +164,7 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler,
cricket::TransportDescriptionFactory transport_desc_factory_;
cricket::MediaSessionDescriptionFactory session_desc_factory_;
uint64_t session_version_;
- const rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store_;
+ const std::unique_ptr<DtlsIdentityStoreInterface> dtls_identity_store_;
const rtc::scoped_refptr<WebRtcIdentityRequestObserver>
identity_request_observer_;
// TODO(jiayl): remove the dependency on session once bug 2264 is fixed.
diff --git a/chromium/third_party/webrtc/audio/audio_receive_stream.cc b/chromium/third_party/webrtc/audio/audio_receive_stream.cc
index 9c253894719..44b72d8a0ba 100644
--- a/chromium/third_party/webrtc/audio/audio_receive_stream.cc
+++ b/chromium/third_party/webrtc/audio/audio_receive_stream.cc
@@ -18,9 +18,9 @@
#include "webrtc/audio/conversion.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/congestion_controller/include/congestion_controller.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/voice_engine/channel_proxy.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
@@ -66,8 +66,6 @@ std::string AudioReceiveStream::Config::Rtp::ToString() const {
std::string AudioReceiveStream::Config::ToString() const {
std::stringstream ss;
ss << "{rtp: " << rtp.ToString();
- ss << ", receive_transport: "
- << (receive_transport ? "(Transport)" : "nullptr");
ss << ", rtcp_send_transport: "
<< (rtcp_send_transport ? "(Transport)" : "nullptr");
ss << ", voe_channel_id: " << voe_channel_id;
@@ -95,6 +93,9 @@ AudioReceiveStream::AudioReceiveStream(
VoiceEngineImpl* voe_impl = static_cast<VoiceEngineImpl*>(voice_engine());
channel_proxy_ = voe_impl->GetChannelProxy(config_.voe_channel_id);
channel_proxy_->SetLocalSSRC(config.rtp.local_ssrc);
+
+ channel_proxy_->RegisterExternalTransport(config.rtcp_send_transport);
+
for (const auto& extension : config.rtp.extensions) {
if (extension.name == RtpExtension::kAudioLevel) {
channel_proxy_->SetReceiveAudioLevelIndicationStatus(true, extension.id);
@@ -127,6 +128,7 @@ AudioReceiveStream::AudioReceiveStream(
AudioReceiveStream::~AudioReceiveStream() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
LOG(LS_INFO) << "~AudioReceiveStream: " << config_.ToString();
+ channel_proxy_->DeRegisterExternalTransport();
channel_proxy_->ResetCongestionControlObjects();
if (remote_bitrate_estimator_) {
remote_bitrate_estimator_->RemoveStream(config_.rtp.remote_ssrc);
@@ -141,45 +143,6 @@ void AudioReceiveStream::Stop() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
}
-void AudioReceiveStream::SignalNetworkState(NetworkState state) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
-}
-
-bool AudioReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) {
- // TODO(solenberg): Tests call this function on a network thread, libjingle
- // calls on the worker thread. We should move towards always using a network
- // thread. Then this check can be enabled.
- // RTC_DCHECK(!thread_checker_.CalledOnValidThread());
- return false;
-}
-
-bool AudioReceiveStream::DeliverRtp(const uint8_t* packet,
- size_t length,
- const PacketTime& packet_time) {
- // TODO(solenberg): Tests call this function on a network thread, libjingle
- // calls on the worker thread. We should move towards always using a network
- // thread. Then this check can be enabled.
- // RTC_DCHECK(!thread_checker_.CalledOnValidThread());
- RTPHeader header;
- if (!rtp_header_parser_->Parse(packet, length, &header)) {
- return false;
- }
-
- // Only forward if the parsed header has one of the headers necessary for
- // bandwidth estimation. RTP timestamps has different rates for audio and
- // video and shouldn't be mixed.
- if (remote_bitrate_estimator_ &&
- header.extension.hasTransportSequenceNumber) {
- int64_t arrival_time_ms = TickTime::MillisecondTimestamp();
- if (packet_time.timestamp >= 0)
- arrival_time_ms = (packet_time.timestamp + 500) / 1000;
- size_t payload_size = length - header.headerLength;
- remote_bitrate_estimator_->IncomingPacket(arrival_time_ms, payload_size,
- header, false);
- }
- return true;
-}
-
webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats() const {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
webrtc::AudioReceiveStream::Stats stats;
@@ -238,6 +201,46 @@ const webrtc::AudioReceiveStream::Config& AudioReceiveStream::config() const {
return config_;
}
+void AudioReceiveStream::SignalNetworkState(NetworkState state) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+}
+
+bool AudioReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) {
+ // TODO(solenberg): Tests call this function on a network thread, libjingle
+ // calls on the worker thread. We should move towards always using a network
+ // thread. Then this check can be enabled.
+ // RTC_DCHECK(!thread_checker_.CalledOnValidThread());
+ return channel_proxy_->ReceivedRTCPPacket(packet, length);
+}
+
+bool AudioReceiveStream::DeliverRtp(const uint8_t* packet,
+ size_t length,
+ const PacketTime& packet_time) {
+ // TODO(solenberg): Tests call this function on a network thread, libjingle
+ // calls on the worker thread. We should move towards always using a network
+ // thread. Then this check can be enabled.
+ // RTC_DCHECK(!thread_checker_.CalledOnValidThread());
+ RTPHeader header;
+ if (!rtp_header_parser_->Parse(packet, length, &header)) {
+ return false;
+ }
+
+ // Only forward if the parsed header has one of the headers necessary for
+ // bandwidth estimation. RTP timestamps has different rates for audio and
+ // video and shouldn't be mixed.
+ if (remote_bitrate_estimator_ &&
+ header.extension.hasTransportSequenceNumber) {
+ int64_t arrival_time_ms = rtc::TimeMillis();
+ if (packet_time.timestamp >= 0)
+ arrival_time_ms = (packet_time.timestamp + 500) / 1000;
+ size_t payload_size = length - header.headerLength;
+ remote_bitrate_estimator_->IncomingPacket(arrival_time_ms, payload_size,
+ header);
+ }
+
+ return channel_proxy_->ReceivedRTPPacket(packet, length, packet_time);
+}
+
VoiceEngine* AudioReceiveStream::voice_engine() const {
internal::AudioState* audio_state =
static_cast<internal::AudioState*>(audio_state_.get());
diff --git a/chromium/third_party/webrtc/audio/audio_receive_stream.h b/chromium/third_party/webrtc/audio/audio_receive_stream.h
index c9754afbf51..d99956c2ddd 100644
--- a/chromium/third_party/webrtc/audio/audio_receive_stream.h
+++ b/chromium/third_party/webrtc/audio/audio_receive_stream.h
@@ -15,6 +15,7 @@
#include "webrtc/audio_receive_stream.h"
#include "webrtc/audio_state.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
@@ -35,20 +36,17 @@ class AudioReceiveStream final : public webrtc::AudioReceiveStream {
const rtc::scoped_refptr<webrtc::AudioState>& audio_state);
~AudioReceiveStream() override;
- // webrtc::ReceiveStream implementation.
+ // webrtc::AudioReceiveStream implementation.
void Start() override;
void Stop() override;
- void SignalNetworkState(NetworkState state) override;
- bool DeliverRtcp(const uint8_t* packet, size_t length) override;
- bool DeliverRtp(const uint8_t* packet,
- size_t length,
- const PacketTime& packet_time) override;
-
- // webrtc::AudioReceiveStream implementation.
webrtc::AudioReceiveStream::Stats GetStats() const override;
-
void SetSink(std::unique_ptr<AudioSinkInterface> sink) override;
+ void SignalNetworkState(NetworkState state);
+ bool DeliverRtcp(const uint8_t* packet, size_t length);
+ bool DeliverRtp(const uint8_t* packet,
+ size_t length,
+ const PacketTime& packet_time);
const webrtc::AudioReceiveStream::Config& config() const;
private:
diff --git a/chromium/third_party/webrtc/audio/audio_receive_stream_unittest.cc b/chromium/third_party/webrtc/audio/audio_receive_stream_unittest.cc
index 8703d6ed324..f6fe85850be 100644
--- a/chromium/third_party/webrtc/audio/audio_receive_stream_unittest.cc
+++ b/chromium/third_party/webrtc/audio/audio_receive_stream_unittest.cc
@@ -98,6 +98,10 @@ struct ConfigHelper {
.WillOnce(Return(&packet_router_));
EXPECT_CALL(*channel_proxy_, ResetCongestionControlObjects())
.Times(1);
+ EXPECT_CALL(*channel_proxy_, RegisterExternalTransport(nullptr))
+ .Times(1);
+ EXPECT_CALL(*channel_proxy_, DeRegisterExternalTransport())
+ .Times(1);
return channel_proxy_;
}));
stream_config_.voe_channel_id = kChannelId;
@@ -120,6 +124,7 @@ struct ConfigHelper {
AudioReceiveStream::Config& config() { return stream_config_; }
rtc::scoped_refptr<AudioState> audio_state() { return audio_state_; }
MockVoiceEngine& voice_engine() { return voice_engine_; }
+ MockVoEChannelProxy* channel_proxy() { return channel_proxy_; }
void SetupMockForBweFeedback(bool send_side_bwe) {
EXPECT_CALL(congestion_controller_,
@@ -152,7 +157,7 @@ struct ConfigHelper {
private:
SimulatedClock simulated_clock_;
PacketRouter packet_router_;
- testing::NiceMock<MockBitrateObserver> bitrate_observer_;
+ testing::NiceMock<MockCongestionObserver> bitrate_observer_;
testing::NiceMock<MockRemoteBitrateObserver> remote_bitrate_observer_;
MockCongestionController congestion_controller_;
MockRemoteBitrateEstimator remote_bitrate_estimator_;
@@ -181,7 +186,7 @@ void BuildOneByteExtension(std::vector<uint8_t>::iterator it,
shifted_value);
}
-std::vector<uint8_t> CreateRtpHeaderWithOneByteExtension(
+const std::vector<uint8_t> CreateRtpHeaderWithOneByteExtension(
int extension_id,
uint32_t extension_value,
size_t value_length) {
@@ -200,6 +205,18 @@ std::vector<uint8_t> CreateRtpHeaderWithOneByteExtension(
extension_value, value_length);
return header;
}
+
+const std::vector<uint8_t> CreateRtcpSenderReport() {
+ std::vector<uint8_t> packet;
+ const size_t kRtcpSrLength = 28; // In bytes.
+ packet.resize(kRtcpSrLength);
+ packet[0] = 0x80; // Version 2.
+ packet[1] = 0xc8; // PT = 200, SR.
+ // Length in number of 32-bit words - 1.
+ ByteWriter<uint16_t>::WriteBigEndian(&packet[2], 6);
+ ByteWriter<uint32_t>::WriteBigEndian(&packet[4], kLocalSsrc);
+ return packet;
+}
} // namespace
TEST(AudioReceiveStreamTest, ConfigToString) {
@@ -213,7 +230,7 @@ TEST(AudioReceiveStreamTest, ConfigToString) {
"{rtp: {remote_ssrc: 1234, local_ssrc: 5678, extensions: [{name: "
"http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time, id: 2}], "
"transport_cc: off}, "
- "receive_transport: nullptr, rtcp_send_transport: nullptr, "
+ "rtcp_send_transport: nullptr, "
"voe_channel_id: 2}",
config.ToString());
}
@@ -235,7 +252,7 @@ MATCHER_P(VerifyHeaderExtension, expected_extension, "") {
expected_extension.transportSequenceNumber;
}
-TEST(AudioReceiveStreamTest, AudioPacketUpdatesBweFeedback) {
+TEST(AudioReceiveStreamTest, ReceiveRtpPacket) {
ConfigHelper helper;
helper.config().rtp.transport_cc = true;
helper.SetupMockForBweFeedback(true);
@@ -252,12 +269,32 @@ TEST(AudioReceiveStreamTest, AudioPacketUpdatesBweFeedback) {
EXPECT_CALL(*helper.remote_bitrate_estimator(),
IncomingPacket(packet_time.timestamp / 1000,
rtp_packet.size() - kExpectedHeaderLength,
- VerifyHeaderExtension(expected_extension), false))
+ VerifyHeaderExtension(expected_extension)))
.Times(1);
+ EXPECT_CALL(*helper.channel_proxy(),
+ ReceivedRTPPacket(&rtp_packet[0],
+ rtp_packet.size(),
+ _))
+ .WillOnce(Return(true));
EXPECT_TRUE(
recv_stream.DeliverRtp(&rtp_packet[0], rtp_packet.size(), packet_time));
}
+TEST(AudioReceiveStreamTest, ReceiveRtcpPacket) {
+ ConfigHelper helper;
+ helper.config().rtp.transport_cc = true;
+ helper.SetupMockForBweFeedback(true);
+ internal::AudioReceiveStream recv_stream(
+ helper.congestion_controller(), helper.config(), helper.audio_state());
+
+ std::vector<uint8_t> rtcp_packet = CreateRtcpSenderReport();
+ EXPECT_CALL(*helper.channel_proxy(),
+ ReceivedRTCPPacket(&rtcp_packet[0], rtcp_packet.size()))
+ .WillOnce(Return(true));
+ EXPECT_TRUE(recv_stream.DeliverRtcp(&rtcp_packet[0], rtcp_packet.size()));
+}
+
+
TEST(AudioReceiveStreamTest, GetStats) {
ConfigHelper helper;
internal::AudioReceiveStream recv_stream(
diff --git a/chromium/third_party/webrtc/audio/audio_send_stream.cc b/chromium/third_party/webrtc/audio/audio_send_stream.cc
index 24afcbcf58e..c0a709e3c62 100644
--- a/chromium/third_party/webrtc/audio/audio_send_stream.cc
+++ b/chromium/third_party/webrtc/audio/audio_send_stream.cc
@@ -76,6 +76,8 @@ AudioSendStream::AudioSendStream(
channel_proxy_->SetLocalSSRC(config.rtp.ssrc);
channel_proxy_->SetRTCP_CNAME(config.rtp.c_name);
+ channel_proxy_->RegisterExternalTransport(config.send_transport);
+
for (const auto& extension : config.rtp.extensions) {
if (extension.name == RtpExtension::kAbsSendTime) {
channel_proxy_->SetSendAbsoluteSenderTimeStatus(true, extension.id);
@@ -92,6 +94,7 @@ AudioSendStream::AudioSendStream(
AudioSendStream::~AudioSendStream() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
LOG(LS_INFO) << "~AudioSendStream: " << config_.ToString();
+ channel_proxy_->DeRegisterExternalTransport();
channel_proxy_->ResetCongestionControlObjects();
}
@@ -113,18 +116,6 @@ void AudioSendStream::Stop() {
}
}
-void AudioSendStream::SignalNetworkState(NetworkState state) {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
-}
-
-bool AudioSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
- // TODO(solenberg): Tests call this function on a network thread, libjingle
- // calls on the worker thread. We should move towards always using a network
- // thread. Then this check can be enabled.
- // RTC_DCHECK(!thread_checker_.CalledOnValidThread());
- return false;
-}
-
bool AudioSendStream::SendTelephoneEvent(int payload_type, int event,
int duration_ms) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
@@ -215,6 +206,18 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats() const {
return stats;
}
+void AudioSendStream::SignalNetworkState(NetworkState state) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+}
+
+bool AudioSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
+ // TODO(solenberg): Tests call this function on a network thread, libjingle
+ // calls on the worker thread. We should move towards always using a network
+ // thread. Then this check can be enabled.
+ // RTC_DCHECK(!thread_checker_.CalledOnValidThread());
+ return channel_proxy_->ReceivedRTCPPacket(packet, length);
+}
+
const webrtc::AudioSendStream::Config& AudioSendStream::config() const {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
return config_;
diff --git a/chromium/third_party/webrtc/audio/audio_send_stream.h b/chromium/third_party/webrtc/audio/audio_send_stream.h
index d463b3da30f..61dd7f24b45 100644
--- a/chromium/third_party/webrtc/audio/audio_send_stream.h
+++ b/chromium/third_party/webrtc/audio/audio_send_stream.h
@@ -15,6 +15,7 @@
#include "webrtc/audio_send_stream.h"
#include "webrtc/audio_state.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/thread_checker.h"
namespace webrtc {
@@ -33,17 +34,15 @@ class AudioSendStream final : public webrtc::AudioSendStream {
CongestionController* congestion_controller);
~AudioSendStream() override;
- // webrtc::SendStream implementation.
+ // webrtc::AudioSendStream implementation.
void Start() override;
void Stop() override;
- void SignalNetworkState(NetworkState state) override;
- bool DeliverRtcp(const uint8_t* packet, size_t length) override;
-
- // webrtc::AudioSendStream implementation.
bool SendTelephoneEvent(int payload_type, int event,
int duration_ms) override;
webrtc::AudioSendStream::Stats GetStats() const override;
+ void SignalNetworkState(NetworkState state);
+ bool DeliverRtcp(const uint8_t* packet, size_t length);
const webrtc::AudioSendStream::Config& config() const;
private:
diff --git a/chromium/third_party/webrtc/audio/audio_send_stream_unittest.cc b/chromium/third_party/webrtc/audio/audio_send_stream_unittest.cc
index c04a3de77c2..a94034c6496 100644
--- a/chromium/third_party/webrtc/audio/audio_send_stream_unittest.cc
+++ b/chromium/third_party/webrtc/audio/audio_send_stream_unittest.cc
@@ -16,7 +16,7 @@
#include "webrtc/audio/audio_send_stream.h"
#include "webrtc/audio/audio_state.h"
#include "webrtc/audio/conversion.h"
-#include "webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h"
+#include "webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h"
#include "webrtc/modules/congestion_controller/include/congestion_controller.h"
#include "webrtc/modules/pacing/paced_sender.h"
#include "webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h"
@@ -89,6 +89,10 @@ struct ConfigHelper {
.Times(1);
EXPECT_CALL(*channel_proxy_, ResetCongestionControlObjects())
.Times(1);
+ EXPECT_CALL(*channel_proxy_, RegisterExternalTransport(nullptr))
+ .Times(1);
+ EXPECT_CALL(*channel_proxy_, DeRegisterExternalTransport())
+ .Times(1);
return channel_proxy_;
}));
stream_config_.voe_channel_id = kChannelId;
@@ -157,7 +161,7 @@ struct ConfigHelper {
rtc::scoped_refptr<AudioState> audio_state_;
AudioSendStream::Config stream_config_;
testing::StrictMock<MockVoEChannelProxy>* channel_proxy_ = nullptr;
- testing::NiceMock<MockBitrateObserver> bitrate_observer_;
+ testing::NiceMock<MockCongestionObserver> bitrate_observer_;
testing::NiceMock<MockRemoteBitrateObserver> remote_bitrate_observer_;
CongestionController congestion_controller_;
};
diff --git a/chromium/third_party/webrtc/audio_receive_stream.h b/chromium/third_party/webrtc/audio_receive_stream.h
index 5254c41780a..6d72b4d3185 100644
--- a/chromium/third_party/webrtc/audio_receive_stream.h
+++ b/chromium/third_party/webrtc/audio_receive_stream.h
@@ -16,8 +16,8 @@
#include <string>
#include <vector>
+#include "webrtc/common_types.h"
#include "webrtc/config.h"
-#include "webrtc/stream.h"
#include "webrtc/transport.h"
#include "webrtc/typedefs.h"
@@ -31,7 +31,7 @@ class AudioSinkInterface;
// of WebRtc/Libjingle. Please use the VoiceEngine API instead.
// See: https://bugs.chromium.org/p/webrtc/issues/detail?id=4690
-class AudioReceiveStream : public ReceiveStream {
+class AudioReceiveStream {
public:
struct Stats {
uint32_t remote_ssrc = 0;
@@ -83,7 +83,6 @@ class AudioReceiveStream : public ReceiveStream {
std::vector<RtpExtension> extensions;
} rtp;
- Transport* receive_transport = nullptr;
Transport* rtcp_send_transport = nullptr;
// Underlying VoiceEngine handle, used to map AudioReceiveStream to lower-
@@ -104,6 +103,13 @@ class AudioReceiveStream : public ReceiveStream {
std::map<uint8_t, AudioDecoder*> decoder_map;
};
+ // Starts stream activity.
+ // When a stream is active, it can receive, process and deliver packets.
+ virtual void Start() = 0;
+ // Stops stream activity.
+ // When a stream is stopped, it can't receive, process or deliver packets.
+ virtual void Stop() = 0;
+
virtual Stats GetStats() const = 0;
// Sets an audio sink that receives unmixed audio from the receive stream.
@@ -115,6 +121,9 @@ class AudioReceiveStream : public ReceiveStream {
// is being pulled+rendered and/or if audio is being pulled for the purposes
// of feeding to the AEC.
virtual void SetSink(std::unique_ptr<AudioSinkInterface> sink) = 0;
+
+ protected:
+ virtual ~AudioReceiveStream() {}
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/audio_send_stream.h b/chromium/third_party/webrtc/audio_send_stream.h
index 24c3d77ab27..d8e98bb0ec9 100644
--- a/chromium/third_party/webrtc/audio_send_stream.h
+++ b/chromium/third_party/webrtc/audio_send_stream.h
@@ -11,13 +11,12 @@
#ifndef WEBRTC_AUDIO_SEND_STREAM_H_
#define WEBRTC_AUDIO_SEND_STREAM_H_
+#include <memory>
#include <string>
#include <vector>
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/config.h"
#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
-#include "webrtc/stream.h"
#include "webrtc/transport.h"
#include "webrtc/typedefs.h"
@@ -28,7 +27,7 @@ namespace webrtc {
// of WebRtc/Libjingle. Please use the VoiceEngine API instead.
// See: https://bugs.chromium.org/p/webrtc/issues/detail?id=4690
-class AudioSendStream : public SendStream {
+class AudioSendStream {
public:
struct Stats {
// TODO(solenberg): Harmonize naming and defaults with receive stream stats.
@@ -84,15 +83,25 @@ class AudioSendStream : public SendStream {
// Ownership of the encoder object is transferred to Call when the config is
// passed to Call::CreateAudioSendStream().
// TODO(solenberg): Implement, once we configure codecs through the new API.
- // rtc::scoped_ptr<AudioEncoder> encoder;
+ // std::unique_ptr<AudioEncoder> encoder;
int cng_payload_type = -1; // pt, or -1 to disable Comfort Noise Generator.
int red_payload_type = -1; // pt, or -1 to disable REDundant coding.
};
+ // Starts stream activity.
+ // When a stream is active, it can receive, process and deliver packets.
+ virtual void Start() = 0;
+ // Stops stream activity.
+ // When a stream is stopped, it can't receive, process or deliver packets.
+ virtual void Stop() = 0;
+
// TODO(solenberg): Make payload_type a config property instead.
virtual bool SendTelephoneEvent(int payload_type, int event,
int duration_ms) = 0;
virtual Stats GetStats() const = 0;
+
+ protected:
+ virtual ~AudioSendStream() {}
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/base/BUILD.gn b/chromium/third_party/webrtc/base/BUILD.gn
index bc6cb38023a..1e868b504e5 100644
--- a/chromium/third_party/webrtc/base/BUILD.gn
+++ b/chromium/third_party/webrtc/base/BUILD.gn
@@ -10,11 +10,12 @@ import("//build/config/crypto.gni")
import("//build/config/ui.gni")
import("../build/webrtc.gni")
-# Enable OpenSSL (BoringSSL) for iOS. This is covered in webrtc/supplement.gypi
-# for the GYP build.
import("//build_overrides/webrtc.gni")
-if (is_ios && !build_with_chromium) {
- use_openssl = true
+
+config("rtc_base_approved_all_dependent_config") {
+ if (is_mac && !build_with_chromium) {
+ libs = [ "Foundation.framework" ] # needed for logging_mac.mm
+ }
}
config("rtc_base_config") {
@@ -27,12 +28,6 @@ config("rtc_base_config") {
"FEATURE_ENABLE_SSL",
"LOGGING=1",
]
-
- if (is_posix) {
- # TODO(henrike): issue 3307, make rtc_base build without disabling
- # these flags.
- cflags_cc = [ "-Wno-non-virtual-dtor" ]
- }
}
config("rtc_base_chromium_config") {
@@ -46,45 +41,38 @@ config("openssl_config") {
]
}
-config("ios_config") {
- libs = [
- "AVFoundation.framework",
- "CFNetwork.framework",
-
- #"Foundation.framework", # Already included in //build/config:default_libs.
- "Security.framework",
- "SystemConfiguration.framework",
-
- #"UIKit.framework", # Already included in //build/config:default_libs.
- ]
-}
-
-config("mac_config") {
- libs = [
- "Cocoa.framework",
+config("rtc_base_all_dependent_config") {
+ if (is_ios) {
+ libs = [
+ "CFNetwork.framework",
- #"Foundation.framework", # Already included in //build/config:default_libs.
- #"IOKit.framework", # Already included in //build/config:default_libs.
- #"Security.framework", # Already included in //build/config:default_libs.
- "SystemConfiguration.framework",
- ]
-}
-
-config("mac_x86_config") {
- libs = [
- #"Carbon.framework", # Already included in //build/config:default_libs.
- ]
+ #"Foundation.framework", # Already in //build/config:default_libs.
+ "Security.framework",
+ "SystemConfiguration.framework",
+ "UIKit.framework", # Already in //build/config:default_libs.
+ ]
+ }
+ if (is_mac) {
+ libs = [
+ "Cocoa.framework",
+ "Foundation.framework",
+ "IOKit.framework",
+ "Security.framework",
+ "SystemConfiguration.framework",
+ ]
+ if (current_cpu == "x86") {
+ libs = [ "Carbon.framework" ] # Already in //build/config:default_libs.
+ }
+ }
}
if (is_linux && !build_with_chromium) {
# Provides the same functionality as the //crypto:platform target, which
# WebRTC cannot use as we don't sync src/crypto from Chromium.
group("linux_system_ssl") {
- if (use_openssl) {
- deps = [
- "//third_party/boringssl",
- ]
- }
+ deps = [
+ "//third_party/boringssl",
+ ]
}
}
@@ -98,16 +86,18 @@ if (rtc_build_ssl == 0) {
# The subset of rtc_base approved for use outside of libjingle.
static_library("rtc_base_approved") {
+ defines = []
deps = []
configs += [ "..:common_config" ]
public_configs = [ "..:common_inherited_config" ]
+ all_dependent_configs = [ ":rtc_base_approved_all_dependent_config" ]
sources = [
"array_view.h",
"atomicops.h",
+ "bind.h",
"bitbuffer.cc",
"bitbuffer.h",
- "buffer.cc",
"buffer.h",
"bufferqueue.cc",
"bufferqueue.h",
@@ -133,6 +123,7 @@ static_library("rtc_base_approved") {
"md5digest.cc",
"md5digest.h",
"mod_ops.h",
+ "onetimeevent.h",
"optional.h",
"platform_file.cc",
"platform_file.h",
@@ -148,7 +139,6 @@ static_library("rtc_base_approved") {
"refcount.h",
"safe_conversions.h",
"safe_conversions_impl.h",
- "scoped_ptr.h",
"scoped_ref_ptr.h",
"stringencode.cc",
"stringencode.h",
@@ -157,6 +147,8 @@ static_library("rtc_base_approved") {
"swap_queue.h",
"systeminfo.cc",
"systeminfo.h",
+ "task_queue.h",
+ "task_queue_posix.h",
"template_util.h",
"thread_annotations.h",
"thread_checker.h",
@@ -179,8 +171,30 @@ static_library("rtc_base_approved") {
sources += [
"logging.cc",
"logging.h",
+ "logging_mac.mm",
]
}
+
+ if (rtc_build_libevent) {
+ deps += [ "//base/third_party/libevent" ]
+ }
+ if (rtc_enable_libevent) {
+ sources += [
+ "task_queue_libevent.cc",
+ "task_queue_posix.cc",
+ ]
+ } else {
+ # If not libevent, fall back to the other task queues.
+ if (is_mac || is_ios) {
+ sources += [
+ "task_queue_gcd.cc",
+ "task_queue_posix.cc",
+ ]
+ }
+ if (is_win) {
+ sources += [ "task_queue_win.cc" ]
+ }
+ }
}
static_library("rtc_base") {
@@ -206,6 +220,8 @@ static_library("rtc_base") {
":rtc_base_config",
]
+ all_dependent_configs = [ ":rtc_base_all_dependent_config" ]
+
defines = [ "LOGGING=1" ]
sources = [
@@ -274,7 +290,17 @@ static_library("rtc_base") {
"network.h",
"networkmonitor.cc",
"networkmonitor.h",
+ "nullsocketserver.cc",
"nullsocketserver.h",
+ "openssl.h",
+ "openssladapter.cc",
+ "openssladapter.h",
+ "openssldigest.cc",
+ "openssldigest.h",
+ "opensslidentity.cc",
+ "opensslidentity.h",
+ "opensslstreamadapter.cc",
+ "opensslstreamadapter.h",
"pathutils.cc",
"pathutils.h",
"physicalsocketserver.cc",
@@ -287,6 +313,8 @@ static_library("rtc_base") {
"ratelimiter.h",
"rtccertificate.cc",
"rtccertificate.h",
+ "rtccertificategenerator.cc",
+ "rtccertificategenerator.h",
"sha1.cc",
"sha1.h",
"sha1digest.cc",
@@ -354,7 +382,6 @@ static_library("rtc_base") {
sources += [
"bandwidthsmoother.cc",
"bandwidthsmoother.h",
- "bind.h",
"callback.h",
"fileutils_mock.h",
"httpserver.cc",
@@ -460,29 +487,12 @@ static_library("rtc_base") {
configs += [ "//build/config/compiler:no_chromium_code" ]
if (!is_win) {
cflags += [ "-Wno-uninitialized" ]
- cflags_cc += [ "-Wno-non-virtual-dtor" ]
}
- # TODO(kjellander): The use_openssl block should really go away in order for
- # the GN build to be similar to the GYP build. See http://crbug.com/601042 for
- # more details.
- if (use_openssl) {
- if (rtc_build_ssl) {
- deps += [ "//third_party/boringssl" ]
- } else {
- configs += [ "external_ssl_library" ]
- }
- sources += [
- "openssl.h",
- "openssladapter.cc",
- "openssladapter.h",
- "openssldigest.cc",
- "openssldigest.h",
- "opensslidentity.cc",
- "opensslidentity.h",
- "opensslstreamadapter.cc",
- "opensslstreamadapter.h",
- ]
+ if (rtc_build_ssl) {
+ deps += [ "//third_party/boringssl" ]
+ } else {
+ configs += [ "external_ssl_library" ]
}
if (is_android) {
@@ -498,9 +508,6 @@ static_library("rtc_base") {
}
if (is_ios || is_mac) {
- if (is_ios) {
- all_dependent_configs = [ ":ios_config" ]
- }
sources += [
"maccocoathreadhelper.h",
"maccocoathreadhelper.mm",
@@ -539,12 +546,6 @@ static_library("rtc_base") {
"macutils.cc",
"macutils.h",
]
-
- all_dependent_configs = [ ":mac_config" ]
-
- if (current_cpu == "x86") {
- all_dependent_configs += [ ":mac_x86_config" ]
- }
}
if (is_win) {
@@ -612,39 +613,6 @@ static_library("rtc_base") {
}
}
-if (is_ios) {
- source_set("rtc_base_objc") {
- deps = [
- ":rtc_base",
- ]
- cflags = [ "-fobjc-arc" ]
- configs += [ "..:common_config" ]
- public_configs = [ "..:common_inherited_config" ]
-
- sources = [
- "objc/NSString+StdString.h",
- "objc/NSString+StdString.mm",
- "objc/RTCCameraPreviewView.h",
- "objc/RTCCameraPreviewView.m",
- "objc/RTCDispatcher.h",
- "objc/RTCDispatcher.m",
- "objc/RTCFieldTrials.h",
- "objc/RTCFieldTrials.mm",
- "objc/RTCFileLogger.h",
- "objc/RTCFileLogger.mm",
- "objc/RTCLogging.h",
- "objc/RTCLogging.mm",
- "objc/RTCMacros.h",
- "objc/RTCSSLAdapter.h",
- "objc/RTCSSLAdapter.mm",
- "objc/RTCTracing.h",
- "objc/RTCTracing.mm",
- "objc/RTCUIApplication.h",
- "objc/RTCUIApplication.mm",
- ]
- }
-}
-
source_set("gtest_prod") {
sources = [
"gtest_prod_util.h",
diff --git a/chromium/third_party/webrtc/base/DEPS b/chromium/third_party/webrtc/base/DEPS
index add7f38be50..bb76adfe31c 100644
--- a/chromium/third_party/webrtc/base/DEPS
+++ b/chromium/third_party/webrtc/base/DEPS
@@ -1,4 +1,5 @@
include_rules = [
+ "+base/third_party/libevent",
"+json",
"+third_party/jsoncpp",
"+webrtc/system_wrappers",
diff --git a/chromium/third_party/webrtc/base/OWNERS b/chromium/third_party/webrtc/base/OWNERS
index 2f400904c6c..b5d02af6b8c 100644
--- a/chromium/third_party/webrtc/base/OWNERS
+++ b/chromium/third_party/webrtc/base/OWNERS
@@ -15,4 +15,6 @@ per-file *.gyp=*
per-file *.gypi=*
per-file BUILD.gn=kjellander@webrtc.org
+per-file rate_statistics*=sprang@webrtc.org
+per-file rate_statistics*=stefan@webrtc.org
diff --git a/chromium/third_party/webrtc/base/array_view.h b/chromium/third_party/webrtc/base/array_view.h
index a7ca66cc95d..868009631f8 100644
--- a/chromium/third_party/webrtc/base/array_view.h
+++ b/chromium/third_party/webrtc/base/array_view.h
@@ -56,6 +56,7 @@ namespace rtc {
// Contains17(arr); // C array
// Contains17(arr); // std::vector
// Contains17(rtc::ArrayView<int>(arr, size)); // pointer + size
+// Contains17(nullptr); // nullptr -> empty ArrayView
// ...
//
// One important point is that ArrayView<T> and ArrayView<const T> are
@@ -73,6 +74,7 @@ class ArrayView final {
public:
// Construct an empty ArrayView.
ArrayView() : ArrayView(static_cast<T*>(nullptr), 0) {}
+ ArrayView(std::nullptr_t) : ArrayView() {}
// Construct an ArrayView for a (pointer,size) pair.
template <typename U>
diff --git a/chromium/third_party/webrtc/base/asyncinvoker.h b/chromium/third_party/webrtc/base/asyncinvoker.h
index a35133706a0..76e5d922e60 100644
--- a/chromium/third_party/webrtc/base/asyncinvoker.h
+++ b/chromium/third_party/webrtc/base/asyncinvoker.h
@@ -13,6 +13,7 @@
#include "webrtc/base/asyncinvoker-inl.h"
#include "webrtc/base/bind.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/scopedptrcollection.h"
#include "webrtc/base/thread.h"
diff --git a/chromium/third_party/webrtc/base/asyncpacketsocket.h b/chromium/third_party/webrtc/base/asyncpacketsocket.h
index 949ec67c839..f18839ed373 100644
--- a/chromium/third_party/webrtc/base/asyncpacketsocket.h
+++ b/chromium/third_party/webrtc/base/asyncpacketsocket.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_BASE_ASYNCPACKETSOCKET_H_
#define WEBRTC_BASE_ASYNCPACKETSOCKET_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/dscp.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/socket.h"
diff --git a/chromium/third_party/webrtc/base/asynctcpsocket.cc b/chromium/third_party/webrtc/base/asynctcpsocket.cc
index 65ec0f287bb..9ba46d7abc9 100644
--- a/chromium/third_party/webrtc/base/asynctcpsocket.cc
+++ b/chromium/third_party/webrtc/base/asynctcpsocket.cc
@@ -13,6 +13,7 @@
#include <string.h>
#include <algorithm>
+#include <memory>
#include "webrtc/base/byteorder.h"
#include "webrtc/base/checks.h"
@@ -44,7 +45,7 @@ AsyncSocket* AsyncTCPSocketBase::ConnectSocket(
rtc::AsyncSocket* socket,
const rtc::SocketAddress& bind_address,
const rtc::SocketAddress& remote_address) {
- rtc::scoped_ptr<rtc::AsyncSocket> owned_socket(socket);
+ std::unique_ptr<rtc::AsyncSocket> owned_socket(socket);
if (socket->Bind(bind_address) < 0) {
LOG(LS_ERROR) << "Bind() failed with error " << socket->GetError();
return NULL;
@@ -295,7 +296,7 @@ int AsyncTCPSocket::Send(const void *pv, size_t cb,
return res;
}
- rtc::SentPacket sent_packet(options.packet_id, rtc::Time());
+ rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis());
SignalSentPacket(this, sent_packet);
// We claim to have sent the whole thing, even if we only sent partial
diff --git a/chromium/third_party/webrtc/base/asynctcpsocket.h b/chromium/third_party/webrtc/base/asynctcpsocket.h
index ea314931d65..cd5c104d88e 100644
--- a/chromium/third_party/webrtc/base/asynctcpsocket.h
+++ b/chromium/third_party/webrtc/base/asynctcpsocket.h
@@ -11,9 +11,11 @@
#ifndef WEBRTC_BASE_ASYNCTCPSOCKET_H_
#define WEBRTC_BASE_ASYNCTCPSOCKET_H_
+#include <memory>
+
#include "webrtc/base/asyncpacketsocket.h"
#include "webrtc/base/buffer.h"
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/socketfactory.h"
namespace rtc {
@@ -70,7 +72,7 @@ class AsyncTCPSocketBase : public AsyncPacketSocket {
void OnWriteEvent(AsyncSocket* socket);
void OnCloseEvent(AsyncSocket* socket, int error);
- scoped_ptr<AsyncSocket> socket_;
+ std::unique_ptr<AsyncSocket> socket_;
bool listen_;
Buffer inbuf_;
Buffer outbuf_;
diff --git a/chromium/third_party/webrtc/base/asynctcpsocket_unittest.cc b/chromium/third_party/webrtc/base/asynctcpsocket_unittest.cc
index b9317586406..592b61d536e 100644
--- a/chromium/third_party/webrtc/base/asynctcpsocket_unittest.cc
+++ b/chromium/third_party/webrtc/base/asynctcpsocket_unittest.cc
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "webrtc/base/asynctcpsocket.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/physicalsocketserver.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/virtualsocketserver.h"
namespace rtc {
@@ -37,10 +37,10 @@ class AsyncTCPSocketTest
}
protected:
- scoped_ptr<PhysicalSocketServer> pss_;
- scoped_ptr<VirtualSocketServer> vss_;
+ std::unique_ptr<PhysicalSocketServer> pss_;
+ std::unique_ptr<VirtualSocketServer> vss_;
AsyncSocket* socket_;
- scoped_ptr<AsyncTCPSocket> tcp_socket_;
+ std::unique_ptr<AsyncTCPSocket> tcp_socket_;
bool ready_to_send_;
};
diff --git a/chromium/third_party/webrtc/base/asyncudpsocket.cc b/chromium/third_party/webrtc/base/asyncudpsocket.cc
index 4e807330843..fc7d88712cc 100644
--- a/chromium/third_party/webrtc/base/asyncudpsocket.cc
+++ b/chromium/third_party/webrtc/base/asyncudpsocket.cc
@@ -18,7 +18,7 @@ static const int BUF_SIZE = 64 * 1024;
AsyncUDPSocket* AsyncUDPSocket::Create(
AsyncSocket* socket,
const SocketAddress& bind_address) {
- scoped_ptr<AsyncSocket> owned_socket(socket);
+ std::unique_ptr<AsyncSocket> owned_socket(socket);
if (socket->Bind(bind_address) < 0) {
LOG(LS_ERROR) << "Bind() failed with error " << socket->GetError();
return NULL;
@@ -59,7 +59,7 @@ SocketAddress AsyncUDPSocket::GetRemoteAddress() const {
int AsyncUDPSocket::Send(const void *pv, size_t cb,
const rtc::PacketOptions& options) {
- rtc::SentPacket sent_packet(options.packet_id, rtc::Time());
+ rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis());
int ret = socket_->Send(pv, cb);
SignalSentPacket(this, sent_packet);
return ret;
@@ -68,7 +68,7 @@ int AsyncUDPSocket::Send(const void *pv, size_t cb,
int AsyncUDPSocket::SendTo(const void *pv, size_t cb,
const SocketAddress& addr,
const rtc::PacketOptions& options) {
- rtc::SentPacket sent_packet(options.packet_id, rtc::Time());
+ rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis());
int ret = socket_->SendTo(pv, cb, addr);
SignalSentPacket(this, sent_packet);
return ret;
diff --git a/chromium/third_party/webrtc/base/asyncudpsocket.h b/chromium/third_party/webrtc/base/asyncudpsocket.h
index 4b47007ed79..aa6a9042b47 100644
--- a/chromium/third_party/webrtc/base/asyncudpsocket.h
+++ b/chromium/third_party/webrtc/base/asyncudpsocket.h
@@ -11,8 +11,9 @@
#ifndef WEBRTC_BASE_ASYNCUDPSOCKET_H_
#define WEBRTC_BASE_ASYNCUDPSOCKET_H_
+#include <memory>
+
#include "webrtc/base/asyncpacketsocket.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socketfactory.h"
namespace rtc {
@@ -56,7 +57,7 @@ class AsyncUDPSocket : public AsyncPacketSocket {
// Called when the underlying socket is ready to send.
void OnWriteEvent(AsyncSocket* socket);
- scoped_ptr<AsyncSocket> socket_;
+ std::unique_ptr<AsyncSocket> socket_;
char* buf_;
size_t size_;
};
diff --git a/chromium/third_party/webrtc/base/asyncudpsocket_unittest.cc b/chromium/third_party/webrtc/base/asyncudpsocket_unittest.cc
index bd65940fcb8..99220056b47 100644
--- a/chromium/third_party/webrtc/base/asyncudpsocket_unittest.cc
+++ b/chromium/third_party/webrtc/base/asyncudpsocket_unittest.cc
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "webrtc/base/asyncudpsocket.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/physicalsocketserver.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/virtualsocketserver.h"
namespace rtc {
@@ -37,10 +37,10 @@ class AsyncUdpSocketTest
}
protected:
- scoped_ptr<PhysicalSocketServer> pss_;
- scoped_ptr<VirtualSocketServer> vss_;
+ std::unique_ptr<PhysicalSocketServer> pss_;
+ std::unique_ptr<VirtualSocketServer> vss_;
AsyncSocket* socket_;
- scoped_ptr<AsyncUDPSocket> udp_socket_;
+ std::unique_ptr<AsyncUDPSocket> udp_socket_;
bool ready_to_send_;
};
diff --git a/chromium/third_party/webrtc/base/base.gyp b/chromium/third_party/webrtc/base/base.gyp
index a5f0f7b44d4..9e55efc8073 100644
--- a/chromium/third_party/webrtc/base/base.gyp
+++ b/chromium/third_party/webrtc/base/base.gyp
@@ -22,52 +22,6 @@
}],
],
}],
- ['OS=="ios" or (OS=="mac" and mac_deployment_target=="10.7")', {
- 'targets': [
- {
- 'target_name': 'rtc_base_objc',
- 'type': 'static_library',
- 'includes': [ '../build/objc_common.gypi' ],
- 'dependencies': [
- 'rtc_base',
- ],
- 'sources': [
- 'objc/NSString+StdString.h',
- 'objc/NSString+StdString.mm',
- 'objc/RTCDispatcher.h',
- 'objc/RTCDispatcher.m',
- 'objc/RTCFieldTrials.h',
- 'objc/RTCFieldTrials.mm',
- 'objc/RTCFileLogger.h',
- 'objc/RTCFileLogger.mm',
- 'objc/RTCLogging.h',
- 'objc/RTCLogging.mm',
- 'objc/RTCMacros.h',
- 'objc/RTCSSLAdapter.h',
- 'objc/RTCSSLAdapter.mm',
- 'objc/RTCTracing.h',
- 'objc/RTCTracing.mm',
- ],
- 'conditions': [
- ['OS=="ios"', {
- 'sources': [
- 'objc/RTCCameraPreviewView.h',
- 'objc/RTCCameraPreviewView.m',
- 'objc/RTCUIApplication.h',
- 'objc/RTCUIApplication.mm',
- ],
- 'all_dependent_settings': {
- 'xcode_settings': {
- 'OTHER_LDFLAGS': [
- '-framework AVFoundation',
- ],
- },
- },
- }],
- ],
- }
- ],
- }], # OS=="ios"
],
'targets': [
{
@@ -77,9 +31,9 @@
'sources': [
'array_view.h',
'atomicops.h',
+ 'bind.h',
'bitbuffer.cc',
'bitbuffer.h',
- 'buffer.cc',
'buffer.h',
'bufferqueue.cc',
'bufferqueue.h',
@@ -105,6 +59,7 @@
'md5digest.cc',
'md5digest.h',
'mod_ops.h',
+ 'onetimeevent.h',
'optional.h',
'platform_file.cc',
'platform_file.h',
@@ -120,7 +75,6 @@
'refcount.h',
'safe_conversions.h',
'safe_conversions_impl.h',
- 'scoped_ptr.h',
'scoped_ref_ptr.h',
'stringencode.cc',
'stringencode.h',
@@ -129,6 +83,8 @@
'swap_queue.h',
'systeminfo.cc',
'systeminfo.h',
+ 'task_queue.h',
+ 'task_queue_posix.h',
'template_util.h',
'thread_annotations.h',
'thread_checker.h',
@@ -154,8 +110,43 @@
'sources': [
'logging.cc',
'logging.h',
+ 'logging_mac.mm',
+ ],
+ }],
+ ['build_libevent==1', {
+ 'dependencies': [
+ '<(DEPTH)/base/third_party/libevent/libevent.gyp:libevent',
],
}],
+ ['enable_libevent==1', {
+ 'sources': [
+ 'task_queue_libevent.cc',
+ 'task_queue_posix.cc',
+ ],
+ }, {
+ # If not libevent, fall back to the other task queues.
+ 'conditions': [
+ ['OS=="mac" or OS=="ios"', {
+ 'sources': [
+ 'task_queue_gcd.cc',
+ 'task_queue_posix.cc',
+ ],
+ }],
+ ['OS=="win"', {
+ 'sources': [ 'task_queue_win.cc' ],
+ }]
+ ],
+ }],
+ ['OS=="mac" and build_with_chromium==0', {
+ 'all_dependent_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ # needed for logging_mac.mm
+ '-framework Foundation',
+ ],
+ },
+ },
+ }], # OS=="mac" and build_with_chromium==0
],
},
{
@@ -240,6 +231,7 @@
'network.h',
'networkmonitor.cc',
'networkmonitor.h',
+ 'nullsocketserver.cc',
'nullsocketserver.h',
'openssl.h',
'openssladapter.cc',
@@ -262,6 +254,8 @@
'ratelimiter.h',
'rtccertificate.cc',
'rtccertificate.h',
+ 'rtccertificategenerator.cc',
+ 'rtccertificategenerator.h',
'sha1.cc',
'sha1.h',
'sha1digest.cc',
@@ -319,13 +313,7 @@
'-Wextra',
'-Wall',
],
- 'cflags_cc!': [
- '-Wnon-virtual-dtor',
- ],
'direct_dependent_settings': {
- 'cflags_cc!': [
- '-Wnon-virtual-dtor',
- ],
'defines': [
'FEATURE_ENABLE_SSL',
'SSL_USE_OPENSSL',
@@ -361,7 +349,6 @@
'sources': [
'bandwidthsmoother.cc',
'bandwidthsmoother.h',
- 'bind.h',
'callback.h',
'fileutils_mock.h',
'httpserver.cc',
diff --git a/chromium/third_party/webrtc/base/base_tests.gyp b/chromium/third_party/webrtc/base/base_tests.gyp
index caef35c385a..063e8e164f4 100644
--- a/chromium/third_party/webrtc/base/base_tests.gyp
+++ b/chromium/third_party/webrtc/base/base_tests.gyp
@@ -79,6 +79,7 @@
'multipart_unittest.cc',
'nat_unittest.cc',
'network_unittest.cc',
+ 'onetimeevent_unittest.cc',
'optional_unittest.cc',
'optionsfile_unittest.cc',
'pathutils_unittest.cc',
@@ -92,7 +93,8 @@
'ratetracker_unittest.cc',
'referencecountedsingletonfactory_unittest.cc',
'rollingaccumulator_unittest.cc',
- 'rtccertificate_unittests.cc',
+ 'rtccertificate_unittest.cc',
+ 'rtccertificategenerator_unittest.cc',
'scopedptrcollection_unittest.cc',
'sha1digest_unittest.cc',
'sharedexclusivelock_unittest.cc',
@@ -106,6 +108,7 @@
'swap_queue_unittest.cc',
# TODO(ronghuawu): Reenable this test.
# 'systeminfo_unittest.cc',
+ 'task_queue_unittest.cc',
'task_unittest.cc',
'testclient_unittest.cc',
'thread_checker_unittest.cc',
diff --git a/chromium/third_party/webrtc/base/buffer.cc b/chromium/third_party/webrtc/base/buffer.cc
index 6051e6db959..79e48bd7a74 100644
--- a/chromium/third_party/webrtc/base/buffer.cc
+++ b/chromium/third_party/webrtc/base/buffer.cc
@@ -1,5 +1,5 @@
/*
- * Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ * Copyright 2016 The WebRTC project authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -8,36 +8,5 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/base/buffer.h"
-
-#include <algorithm>
-#include <utility>
-
-namespace rtc {
-
-Buffer::Buffer() : size_(0), capacity_(0), data_(nullptr) {
- RTC_DCHECK(IsConsistent());
-}
-
-Buffer::Buffer(Buffer&& buf)
- : size_(buf.size()),
- capacity_(buf.capacity()),
- data_(std::move(buf.data_)) {
- RTC_DCHECK(IsConsistent());
- buf.OnMovedFrom();
-}
-
-Buffer::Buffer(size_t size) : Buffer(size, size) {
-}
-
-Buffer::Buffer(size_t size, size_t capacity)
- : size_(size),
- capacity_(std::max(size, capacity)),
- data_(new uint8_t[capacity_]) {
- RTC_DCHECK(IsConsistent());
-}
-
-// Note: The destructor works even if the buffer has been moved from.
-Buffer::~Buffer() = default;
-
-}; // namespace rtc
+// This is a dummy file, added because something in the Chromium build claims
+// to need it. We should fix that, and then remove it (bug 5845).
diff --git a/chromium/third_party/webrtc/base/buffer.h b/chromium/third_party/webrtc/base/buffer.h
index f007929a294..b8b8fc0e169 100644
--- a/chromium/third_party/webrtc/base/buffer.h
+++ b/chromium/third_party/webrtc/base/buffer.h
@@ -13,79 +13,114 @@
#include <cstring>
#include <memory>
+#include <type_traits>
#include <utility>
#include "webrtc/base/array_view.h"
#include "webrtc/base/checks.h"
-#include "webrtc/base/constructormagic.h"
namespace rtc {
namespace internal {
-// (Internal; please don't use outside this file.) ByteType<T>::t is int if T
-// is uint8_t, int8_t, or char; otherwise, it's a compilation error. Use like
-// this:
-//
-// template <typename T, typename ByteType<T>::t = 0>
-// void foo(T* x);
-//
-// to let foo<T> be defined only for byte-sized integers.
-template <typename T>
-struct ByteType {
- private:
- static int F(uint8_t*);
- static int F(int8_t*);
- static int F(char*);
-
- public:
- using t = decltype(F(static_cast<T*>(nullptr)));
+// (Internal; please don't use outside this file.) Determines if elements of
+// type U are compatible with a BufferT<T>. For most types, we just ignore
+// top-level const and forbid top-level volatile and require T and U to be
+// otherwise equal, but all byte-sized integers (notably char, int8_t, and
+// uint8_t) are compatible with each other. (Note: We aim to get rid of this
+// behavior, and treat all types the same.)
+template <typename T, typename U>
+struct BufferCompat {
+ static constexpr bool value =
+ !std::is_volatile<U>::value &&
+ ((std::is_integral<T>::value && sizeof(T) == 1)
+ ? (std::is_integral<U>::value && sizeof(U) == 1)
+ : (std::is_same<T, typename std::remove_const<U>::type>::value));
};
} // namespace internal
// Basic buffer class, can be grown and shrunk dynamically.
-// Unlike std::string/vector, does not initialize data when expanding capacity.
-class Buffer {
+// Unlike std::string/vector, does not initialize data when increasing size.
+template <typename T>
+class BufferT {
+ // We want T's destructor and default constructor to be trivial, i.e. perform
+ // no action, so that we don't have to touch the memory we allocate and
+ // deallocate. And we want T to be trivially copyable, so that we can copy T
+ // instances with std::memcpy. This is precisely the definition of a trivial
+ // type.
+ static_assert(std::is_trivial<T>::value, "T must be a trivial type.");
+
+ // This class relies heavily on being able to mutate its data.
+ static_assert(!std::is_const<T>::value, "T may not be const");
+
public:
- Buffer(); // An empty buffer.
- Buffer(Buffer&& buf); // Move contents from an existing buffer.
-
- // Construct a buffer with the specified number of uninitialized bytes.
- explicit Buffer(size_t size);
- Buffer(size_t size, size_t capacity);
-
- // Construct a buffer and copy the specified number of bytes into it. The
- // source array may be (const) uint8_t*, int8_t*, or char*.
- template <typename T, typename internal::ByteType<T>::t = 0>
- Buffer(const T* data, size_t size)
- : Buffer(data, size, size) {}
-
- template <typename T, typename internal::ByteType<T>::t = 0>
- Buffer(const T* data, size_t size, size_t capacity)
- : Buffer(size, capacity) {
- std::memcpy(data_.get(), data, size);
+ // An empty BufferT.
+ BufferT() : size_(0), capacity_(0), data_(nullptr) {
+ RTC_DCHECK(IsConsistent());
}
- // Construct a buffer from the contents of an array.
- template <typename T, size_t N, typename internal::ByteType<T>::t = 0>
- Buffer(const T(&array)[N])
- : Buffer(array, N) {}
+ // Disable copy construction and copy assignment, since copying a buffer is
+ // expensive enough that we want to force the user to be explicit about it.
+ BufferT(const BufferT&) = delete;
+ BufferT& operator=(const BufferT&) = delete;
- ~Buffer();
+ BufferT(BufferT&& buf)
+ : size_(buf.size()),
+ capacity_(buf.capacity()),
+ data_(std::move(buf.data_)) {
+ RTC_DCHECK(IsConsistent());
+ buf.OnMovedFrom();
+ }
+
+ // Construct a buffer with the specified number of uninitialized elements.
+ explicit BufferT(size_t size) : BufferT(size, size) {}
+
+ BufferT(size_t size, size_t capacity)
+ : size_(size),
+ capacity_(std::max(size, capacity)),
+ data_(new T[capacity_]) {
+ RTC_DCHECK(IsConsistent());
+ }
- // Get a pointer to the data. Just .data() will give you a (const) uint8_t*,
- // but you may also use .data<int8_t>() and .data<char>().
- template <typename T = uint8_t, typename internal::ByteType<T>::t = 0>
- const T* data() const {
+ // Construct a buffer and copy the specified number of elements into it.
+ template <typename U,
+ typename std::enable_if<
+ internal::BufferCompat<T, U>::value>::type* = nullptr>
+ BufferT(const U* data, size_t size) : BufferT(data, size, size) {}
+
+ template <typename U,
+ typename std::enable_if<
+ internal::BufferCompat<T, U>::value>::type* = nullptr>
+ BufferT(U* data, size_t size, size_t capacity) : BufferT(size, capacity) {
+ static_assert(sizeof(T) == sizeof(U), "");
+ std::memcpy(data_.get(), data, size * sizeof(U));
+ }
+
+ // Construct a buffer from the contents of an array.
+ template <typename U,
+ size_t N,
+ typename std::enable_if<
+ internal::BufferCompat<T, U>::value>::type* = nullptr>
+ BufferT(U (&array)[N]) : BufferT(array, N) {}
+
+ // Get a pointer to the data. Just .data() will give you a (const) T*, but if
+ // T is a byte-sized integer, you may also use .data<U>() for any other
+ // byte-sized integer U.
+ template <typename U = T,
+ typename std::enable_if<
+ internal::BufferCompat<T, U>::value>::type* = nullptr>
+ const U* data() const {
RTC_DCHECK(IsConsistent());
- return reinterpret_cast<T*>(data_.get());
+ return reinterpret_cast<U*>(data_.get());
}
- template <typename T = uint8_t, typename internal::ByteType<T>::t = 0>
- T* data() {
+ template <typename U = T,
+ typename std::enable_if<
+ internal::BufferCompat<T, U>::value>::type* = nullptr>
+ U* data() {
RTC_DCHECK(IsConsistent());
- return reinterpret_cast<T*>(data_.get());
+ return reinterpret_cast<U*>(data_.get());
}
size_t size() const {
@@ -98,7 +133,7 @@ class Buffer {
return capacity_;
}
- Buffer& operator=(Buffer&& buf) {
+ BufferT& operator=(BufferT&& buf) {
RTC_DCHECK(IsConsistent());
RTC_DCHECK(buf.IsConsistent());
size_ = buf.size_;
@@ -108,94 +143,120 @@ class Buffer {
return *this;
}
- bool operator==(const Buffer& buf) const {
+ bool operator==(const BufferT& buf) const {
RTC_DCHECK(IsConsistent());
- return size_ == buf.size() && memcmp(data_.get(), buf.data(), size_) == 0;
+ if (size_ != buf.size_) {
+ return false;
+ }
+ if (std::is_integral<T>::value) {
+ // Optimization.
+ return std::memcmp(data_.get(), buf.data_.get(), size_ * sizeof(T)) == 0;
+ }
+ for (size_t i = 0; i < size_; ++i) {
+ if (data_[i] != buf.data_[i]) {
+ return false;
+ }
+ }
+ return true;
}
- bool operator!=(const Buffer& buf) const { return !(*this == buf); }
+ bool operator!=(const BufferT& buf) const { return !(*this == buf); }
- uint8_t& operator[](size_t index) {
+ T& operator[](size_t index) {
RTC_DCHECK_LT(index, size_);
return data()[index];
}
- uint8_t operator[](size_t index) const {
+ T operator[](size_t index) const {
RTC_DCHECK_LT(index, size_);
return data()[index];
}
// The SetData functions replace the contents of the buffer. They accept the
// same input types as the constructors.
- template <typename T, typename internal::ByteType<T>::t = 0>
- void SetData(const T* data, size_t size) {
+ template <typename U,
+ typename std::enable_if<
+ internal::BufferCompat<T, U>::value>::type* = nullptr>
+ void SetData(const U* data, size_t size) {
RTC_DCHECK(IsConsistent());
size_ = 0;
AppendData(data, size);
}
- template <typename T, size_t N, typename internal::ByteType<T>::t = 0>
- void SetData(const T(&array)[N]) {
+ template <typename U,
+ size_t N,
+ typename std::enable_if<
+ internal::BufferCompat<T, U>::value>::type* = nullptr>
+ void SetData(const U (&array)[N]) {
SetData(array, N);
}
- void SetData(const Buffer& buf) { SetData(buf.data(), buf.size()); }
+ void SetData(const BufferT& buf) { SetData(buf.data(), buf.size()); }
- // Replace the data in the buffer with at most |max_bytes| of data, using the
- // function |setter|, which should have the following signature:
- // size_t setter(ArrayView<T> view)
+ // Replace the data in the buffer with at most |max_elements| of data, using
+ // the function |setter|, which should have the following signature:
+ // size_t setter(ArrayView<U> view)
// |setter| is given an appropriately typed ArrayView of the area in which to
// write the data (i.e. starting at the beginning of the buffer) and should
- // return the number of bytes actually written. This number must be <=
- // |max_bytes|.
- template <typename T = uint8_t, typename F,
- typename internal::ByteType<T>::t = 0>
- size_t SetData(size_t max_bytes, F&& setter) {
+ // return the number of elements actually written. This number must be <=
+ // |max_elements|.
+ template <typename U = T,
+ typename F,
+ typename std::enable_if<
+ internal::BufferCompat<T, U>::value>::type* = nullptr>
+ size_t SetData(size_t max_elements, F&& setter) {
RTC_DCHECK(IsConsistent());
size_ = 0;
- return AppendData<T>(max_bytes, std::forward<F>(setter));
+ return AppendData<U>(max_elements, std::forward<F>(setter));
}
- // The AppendData functions adds data to the end of the buffer. They accept
+ // The AppendData functions add data to the end of the buffer. They accept
// the same input types as the constructors.
- template <typename T, typename internal::ByteType<T>::t = 0>
- void AppendData(const T* data, size_t size) {
+ template <typename U,
+ typename std::enable_if<
+ internal::BufferCompat<T, U>::value>::type* = nullptr>
+ void AppendData(const U* data, size_t size) {
RTC_DCHECK(IsConsistent());
const size_t new_size = size_ + size;
EnsureCapacity(new_size);
- std::memcpy(data_.get() + size_, data, size);
+ static_assert(sizeof(T) == sizeof(U), "");
+ std::memcpy(data_.get() + size_, data, size * sizeof(U));
size_ = new_size;
RTC_DCHECK(IsConsistent());
}
- template <typename T, size_t N, typename internal::ByteType<T>::t = 0>
- void AppendData(const T(&array)[N]) {
+ template <typename U,
+ size_t N,
+ typename std::enable_if<
+ internal::BufferCompat<T, U>::value>::type* = nullptr>
+ void AppendData(const U (&array)[N]) {
AppendData(array, N);
}
- void AppendData(const Buffer& buf) { AppendData(buf.data(), buf.size()); }
+ void AppendData(const BufferT& buf) { AppendData(buf.data(), buf.size()); }
- // Append at most |max_bytes| of data to the end of the buffer, using the
- // function |setter|, which should have the following signature:
- // size_t setter(ArrayView<T> view)
+ // Append at most |max_elements| to the end of the buffer, using the function
+ // |setter|, which should have the following signature:
+ // size_t setter(ArrayView<U> view)
// |setter| is given an appropriately typed ArrayView of the area in which to
// write the data (i.e. starting at the former end of the buffer) and should
- // return the number of bytes actually written. This number must be <=
- // |max_bytes|.
- template <typename T = uint8_t, typename F,
- typename internal::ByteType<T>::t = 0>
- size_t AppendData(size_t max_bytes, F&& setter) {
+ // return the number of elements actually written. This number must be <=
+ // |max_elements|.
+ template <typename U = T,
+ typename F,
+ typename std::enable_if<
+ internal::BufferCompat<T, U>::value>::type* = nullptr>
+ size_t AppendData(size_t max_elements, F&& setter) {
RTC_DCHECK(IsConsistent());
const size_t old_size = size_;
- SetSize(old_size + max_bytes);
- T *base_ptr = data<T>() + old_size;
- size_t written_bytes =
- setter(rtc::ArrayView<T>(base_ptr, max_bytes));
+ SetSize(old_size + max_elements);
+ U* base_ptr = data<U>() + old_size;
+ size_t written_elements = setter(rtc::ArrayView<U>(base_ptr, max_elements));
- RTC_CHECK_LE(written_bytes, max_bytes);
- size_ = old_size + written_bytes;
+ RTC_CHECK_LE(written_elements, max_elements);
+ size_ = old_size + written_elements;
RTC_DCHECK(IsConsistent());
- return written_bytes;
+ return written_elements;
}
// Sets the size of the buffer. If the new size is smaller than the old, the
@@ -214,8 +275,8 @@ class Buffer {
RTC_DCHECK(IsConsistent());
if (capacity <= capacity_)
return;
- std::unique_ptr<uint8_t[]> new_data(new uint8_t[capacity]);
- std::memcpy(new_data.get(), data_.get(), size_);
+ std::unique_ptr<T[]> new_data(new T[capacity]);
+ std::memcpy(new_data.get(), data_.get(), size_ * sizeof(T));
data_ = std::move(new_data);
capacity_ = capacity;
RTC_DCHECK(IsConsistent());
@@ -229,7 +290,7 @@ class Buffer {
}
// Swaps two buffers. Also works for buffers that have been moved from.
- friend void swap(Buffer& a, Buffer& b) {
+ friend void swap(BufferT& a, BufferT& b) {
using std::swap;
swap(a.size_, b.size_);
swap(a.capacity_, b.capacity_);
@@ -262,11 +323,12 @@ class Buffer {
size_t size_;
size_t capacity_;
- std::unique_ptr<uint8_t[]> data_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(Buffer);
+ std::unique_ptr<T[]> data_;
};
+// By far the most common sort of buffer.
+using Buffer = BufferT<uint8_t>;
+
} // namespace rtc
#endif // WEBRTC_BASE_BUFFER_H_
diff --git a/chromium/third_party/webrtc/base/buffer_unittest.cc b/chromium/third_party/webrtc/base/buffer_unittest.cc
index 2f3bcfd6060..e9a853c6153 100644
--- a/chromium/third_party/webrtc/base/buffer_unittest.cc
+++ b/chromium/third_party/webrtc/base/buffer_unittest.cc
@@ -11,8 +11,8 @@
#include "webrtc/base/buffer.h"
#include "webrtc/base/gunit.h"
-#include <algorithm> // std::swap (pre-C++11)
-#include <utility> // std::swap (C++11 and later)
+#include <type_traits>
+#include <utility>
namespace rtc {
@@ -301,4 +301,60 @@ TEST(BufferTest, TestBracketWrite) {
}
}
+TEST(BufferTest, TestInt16) {
+ static constexpr int16_t test_data[] = {14, 15, 16, 17, 18};
+ BufferT<int16_t> buf(test_data);
+ EXPECT_EQ(buf.size(), 5u);
+ EXPECT_EQ(buf.capacity(), 5u);
+ EXPECT_NE(buf.data(), nullptr);
+ for (size_t i = 0; i != buf.size(); ++i) {
+ EXPECT_EQ(test_data[i], buf[i]);
+ }
+ BufferT<int16_t> buf2(test_data);
+ EXPECT_EQ(buf, buf2);
+ buf2[0] = 9;
+ EXPECT_NE(buf, buf2);
+}
+
+TEST(BufferTest, TestFloat) {
+ static constexpr float test_data[] = {14, 15, 16, 17, 18};
+ BufferT<float> buf;
+ EXPECT_EQ(buf.size(), 0u);
+ EXPECT_EQ(buf.capacity(), 0u);
+ EXPECT_EQ(buf.data(), nullptr);
+ buf.SetData(test_data);
+ EXPECT_EQ(buf.size(), 5u);
+ EXPECT_EQ(buf.capacity(), 5u);
+ EXPECT_NE(buf.data(), nullptr);
+ float* p1 = buf.data();
+ while (buf.data() == p1) {
+ buf.AppendData(test_data);
+ }
+ EXPECT_EQ(buf.size(), buf.capacity());
+ EXPECT_GT(buf.size(), 5u);
+ EXPECT_EQ(buf.size() % 5, 0u);
+ EXPECT_NE(buf.data(), nullptr);
+ for (size_t i = 0; i != buf.size(); ++i) {
+ EXPECT_EQ(test_data[i % 5], buf[i]);
+ }
+}
+
+TEST(BufferTest, TestStruct) {
+ struct BloodStone {
+ bool blood;
+ const char* stone;
+ };
+ BufferT<BloodStone> buf(4);
+ EXPECT_EQ(buf.size(), 4u);
+ EXPECT_EQ(buf.capacity(), 4u);
+ EXPECT_NE(buf.data(), nullptr);
+ BufferT<BloodStone*> buf2(4);
+ for (size_t i = 0; i < buf2.size(); ++i) {
+ buf2[i] = &buf[i];
+ }
+ static const char kObsidian[] = "obsidian";
+ buf2[2]->stone = kObsidian;
+ EXPECT_EQ(kObsidian, buf[2].stone);
+}
+
} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/bufferqueue.h b/chromium/third_party/webrtc/base/bufferqueue.h
index 623d85a3847..bc9fc842dbd 100644
--- a/chromium/third_party/webrtc/base/bufferqueue.h
+++ b/chromium/third_party/webrtc/base/bufferqueue.h
@@ -15,6 +15,7 @@
#include <vector>
#include "webrtc/base/buffer.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
namespace rtc {
diff --git a/chromium/third_party/webrtc/base/bytebuffer.cc b/chromium/third_party/webrtc/base/bytebuffer.cc
index cf4ce42574e..9730ff23d67 100644
--- a/chromium/third_party/webrtc/base/bytebuffer.cc
+++ b/chromium/third_party/webrtc/base/bytebuffer.cc
@@ -88,6 +88,20 @@ void ByteBufferWriter::WriteUInt64(uint64_t val) {
WriteBytes(reinterpret_cast<const char*>(&v), 8);
}
+// Serializes an unsigned varint in the format described by
+// https://developers.google.com/protocol-buffers/docs/encoding#varints
+// with the caveat that integers are 64-bit, not 128-bit.
+void ByteBufferWriter::WriteUVarint(uint64_t val) {
+ while (val >= 0x80) {
+ // Write 7 bits at a time, then set the msb to a continuation byte (msb=1).
+ char byte = static_cast<char>(val) | 0x80;
+ WriteBytes(&byte, 1);
+ val >>= 7;
+ }
+ char last_byte = static_cast<char>(val);
+ WriteBytes(&last_byte, 1);
+}
+
void ByteBufferWriter::WriteString(const std::string& val) {
WriteBytes(val.c_str(), val.size());
}
@@ -220,6 +234,29 @@ bool ByteBufferReader::ReadUInt64(uint64_t* val) {
}
}
+bool ByteBufferReader::ReadUVarint(uint64_t* val) {
+ if (!val) {
+ return false;
+ }
+ // Integers are deserialized 7 bits at a time, with each byte having a
+ // continuation byte (msb=1) if there are more bytes to be read.
+ uint64_t v = 0;
+ for (int i = 0; i < 64; i += 7) {
+ char byte;
+ if (!ReadBytes(&byte, 1)) {
+ return false;
+ }
+ // Read the first 7 bits of the byte, then offset by bits read so far.
+ v |= (static_cast<uint64_t>(byte) & 0x7F) << i;
+ // True if the msb is not a continuation byte.
+ if (static_cast<uint64_t>(byte) < 0x80) {
+ *val = v;
+ return true;
+ }
+ }
+ return false;
+}
+
bool ByteBufferReader::ReadString(std::string* val, size_t len) {
if (!val) return false;
diff --git a/chromium/third_party/webrtc/base/bytebuffer.h b/chromium/third_party/webrtc/base/bytebuffer.h
index 8fd086367db..cd7b2c6cea8 100644
--- a/chromium/third_party/webrtc/base/bytebuffer.h
+++ b/chromium/third_party/webrtc/base/bytebuffer.h
@@ -57,6 +57,7 @@ class ByteBufferWriter : public ByteBuffer {
void WriteUInt24(uint32_t val);
void WriteUInt32(uint32_t val);
void WriteUInt64(uint64_t val);
+ void WriteUVarint(uint64_t val);
void WriteString(const std::string& val);
void WriteBytes(const char* val, size_t len);
@@ -110,6 +111,7 @@ class ByteBufferReader : public ByteBuffer {
bool ReadUInt24(uint32_t* val);
bool ReadUInt32(uint32_t* val);
bool ReadUInt64(uint64_t* val);
+ bool ReadUVarint(uint64_t* val);
bool ReadBytes(char* val, size_t len);
// Appends next |len| bytes from the buffer to |val|. Returns false
diff --git a/chromium/third_party/webrtc/base/bytebuffer_unittest.cc b/chromium/third_party/webrtc/base/bytebuffer_unittest.cc
index 723641811f2..bdbb159b665 100644
--- a/chromium/third_party/webrtc/base/bytebuffer_unittest.cc
+++ b/chromium/third_party/webrtc/base/bytebuffer_unittest.cc
@@ -196,4 +196,64 @@ TEST(ByteBufferTest, TestReadWriteBuffer) {
}
}
+TEST(ByteBufferTest, TestReadWriteUVarint) {
+ ByteBufferWriter::ByteOrder orders[2] = {ByteBufferWriter::ORDER_HOST,
+ ByteBufferWriter::ORDER_NETWORK};
+ for (ByteBufferWriter::ByteOrder& order : orders) {
+ ByteBufferWriter write_buffer(order);
+ size_t size = 0;
+ EXPECT_EQ(size, write_buffer.Length());
+
+ write_buffer.WriteUVarint(1u);
+ ++size;
+ EXPECT_EQ(size, write_buffer.Length());
+
+ write_buffer.WriteUVarint(2u);
+ ++size;
+ EXPECT_EQ(size, write_buffer.Length());
+
+ write_buffer.WriteUVarint(27u);
+ ++size;
+ EXPECT_EQ(size, write_buffer.Length());
+
+ write_buffer.WriteUVarint(149u);
+ size += 2;
+ EXPECT_EQ(size, write_buffer.Length());
+
+ write_buffer.WriteUVarint(68719476736u);
+ size += 6;
+ EXPECT_EQ(size, write_buffer.Length());
+
+ ByteBufferReader read_buffer(write_buffer.Data(), write_buffer.Length(),
+ order);
+ EXPECT_EQ(size, read_buffer.Length());
+ uint64_t val1, val2, val3, val4, val5;
+
+ ASSERT_TRUE(read_buffer.ReadUVarint(&val1));
+ EXPECT_EQ(1u, val1);
+ --size;
+ EXPECT_EQ(size, read_buffer.Length());
+
+ ASSERT_TRUE(read_buffer.ReadUVarint(&val2));
+ EXPECT_EQ(2u, val2);
+ --size;
+ EXPECT_EQ(size, read_buffer.Length());
+
+ ASSERT_TRUE(read_buffer.ReadUVarint(&val3));
+ EXPECT_EQ(27u, val3);
+ --size;
+ EXPECT_EQ(size, read_buffer.Length());
+
+ ASSERT_TRUE(read_buffer.ReadUVarint(&val4));
+ EXPECT_EQ(149u, val4);
+ size -= 2;
+ EXPECT_EQ(size, read_buffer.Length());
+
+ ASSERT_TRUE(read_buffer.ReadUVarint(&val5));
+ EXPECT_EQ(68719476736u, val5);
+ size -= 6;
+ EXPECT_EQ(size, read_buffer.Length());
+ }
+}
+
} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/callback_unittest.cc b/chromium/third_party/webrtc/base/callback_unittest.cc
index db294cd96eb..aba1e0ce28f 100644
--- a/chromium/third_party/webrtc/base/callback_unittest.cc
+++ b/chromium/third_party/webrtc/base/callback_unittest.cc
@@ -34,7 +34,7 @@ class RefCountedBindTester : public RefCountInterface {
int AddRef() const override {
return ++count_;
}
- int Release() const {
+ int Release() const override {
return --count_;
}
int RefCount() const { return count_; }
diff --git a/chromium/third_party/webrtc/base/checks.cc b/chromium/third_party/webrtc/base/checks.cc
index 49a31f29b9d..d9dc8f2d63b 100644
--- a/chromium/third_party/webrtc/base/checks.cc
+++ b/chromium/third_party/webrtc/base/checks.cc
@@ -22,11 +22,16 @@
#endif
#if defined(WEBRTC_ANDROID)
-#define LOG_TAG "rtc"
+#define RTC_LOG_TAG "rtc"
#include <android/log.h> // NOLINT
#endif
+#if defined(WEBRTC_WIN)
+#include <windows.h>
+#endif
+
#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
#if defined(_MSC_VER)
// Warning C4722: destructor never returns, potential memory leak.
@@ -38,7 +43,7 @@ namespace rtc {
void VPrintError(const char* format, va_list args) {
#if defined(WEBRTC_ANDROID)
- __android_log_vprint(ANDROID_LOG_ERROR, LOG_TAG, format, args);
+ __android_log_vprint(ANDROID_LOG_ERROR, RTC_LOG_TAG, format, args);
#else
vfprintf(stderr, format, args);
#endif
@@ -105,8 +110,11 @@ NO_RETURN FatalMessage::~FatalMessage() {
}
void FatalMessage::Init(const char* file, int line) {
- stream_ << std::endl << std::endl << "#" << std::endl << "# Fatal error in "
- << file << ", line " << line << std::endl << "# ";
+ stream_ << std::endl << std::endl
+ << "#" << std::endl
+ << "# Fatal error in " << file << ", line " << line << std::endl
+ << "# last system error: " << LAST_SYSTEM_ERROR << std::endl
+ << "# ";
}
// MSVC doesn't like complex extern templates and DLLs.
diff --git a/chromium/third_party/webrtc/base/copyonwritebuffer.h b/chromium/third_party/webrtc/base/copyonwritebuffer.h
index 87f24bf51d6..a7e52beea53 100644
--- a/chromium/third_party/webrtc/base/copyonwritebuffer.h
+++ b/chromium/third_party/webrtc/base/copyonwritebuffer.h
@@ -36,10 +36,14 @@ class CopyOnWriteBuffer {
// Construct a buffer and copy the specified number of bytes into it. The
// source array may be (const) uint8_t*, int8_t*, or char*.
- template <typename T, typename internal::ByteType<T>::t = 0>
+ template <typename T,
+ typename std::enable_if<
+ internal::BufferCompat<uint8_t, T>::value>::type* = nullptr>
CopyOnWriteBuffer(const T* data, size_t size)
: CopyOnWriteBuffer(data, size, size) {}
- template <typename T, typename internal::ByteType<T>::t = 0>
+ template <typename T,
+ typename std::enable_if<
+ internal::BufferCompat<uint8_t, T>::value>::type* = nullptr>
CopyOnWriteBuffer(const T* data, size_t size, size_t capacity)
: CopyOnWriteBuffer(size, capacity) {
if (buffer_) {
@@ -48,22 +52,29 @@ class CopyOnWriteBuffer {
}
// Construct a buffer from the contents of an array.
- template <typename T, size_t N, typename internal::ByteType<T>::t = 0>
- CopyOnWriteBuffer(const T(&array)[N]) // NOLINT: runtime/explicit
+ template <typename T,
+ size_t N,
+ typename std::enable_if<
+ internal::BufferCompat<uint8_t, T>::value>::type* = nullptr>
+ CopyOnWriteBuffer(const T (&array)[N]) // NOLINT: runtime/explicit
: CopyOnWriteBuffer(array, N) {}
~CopyOnWriteBuffer();
// Get a pointer to the data. Just .data() will give you a (const) uint8_t*,
// but you may also use .data<int8_t>() and .data<char>().
- template <typename T = uint8_t, typename internal::ByteType<T>::t = 0>
+ template <typename T = uint8_t,
+ typename std::enable_if<
+ internal::BufferCompat<uint8_t, T>::value>::type* = nullptr>
const T* data() const {
return cdata<T>();
}
// Get writable pointer to the data. This will create a copy of the underlying
// data if it is shared with other buffers.
- template <typename T = uint8_t, typename internal::ByteType<T>::t = 0>
+ template <typename T = uint8_t,
+ typename std::enable_if<
+ internal::BufferCompat<uint8_t, T>::value>::type* = nullptr>
T* data() {
RTC_DCHECK(IsConsistent());
if (!buffer_) {
@@ -75,7 +86,9 @@ class CopyOnWriteBuffer {
// Get const pointer to the data. This will not create a copy of the
// underlying data if it is shared with other buffers.
- template <typename T = uint8_t, typename internal::ByteType<T>::t = 0>
+ template <typename T = uint8_t,
+ typename std::enable_if<
+ internal::BufferCompat<uint8_t, T>::value>::type* = nullptr>
T* cdata() const {
RTC_DCHECK(IsConsistent());
if (!buffer_) {
@@ -137,7 +150,9 @@ class CopyOnWriteBuffer {
// Replace the contents of the buffer. Accepts the same types as the
// constructors.
- template <typename T, typename internal::ByteType<T>::t = 0>
+ template <typename T,
+ typename std::enable_if<
+ internal::BufferCompat<uint8_t, T>::value>::type* = nullptr>
void SetData(const T* data, size_t size) {
RTC_DCHECK(IsConsistent());
if (!buffer_ || !buffer_->HasOneRef()) {
@@ -149,8 +164,11 @@ class CopyOnWriteBuffer {
RTC_DCHECK(IsConsistent());
}
- template <typename T, size_t N, typename internal::ByteType<T>::t = 0>
- void SetData(const T(&array)[N]) {
+ template <typename T,
+ size_t N,
+ typename std::enable_if<
+ internal::BufferCompat<uint8_t, T>::value>::type* = nullptr>
+ void SetData(const T (&array)[N]) {
SetData(array, N);
}
@@ -163,7 +181,9 @@ class CopyOnWriteBuffer {
}
// Append data to the buffer. Accepts the same types as the constructors.
- template <typename T, typename internal::ByteType<T>::t = 0>
+ template <typename T,
+ typename std::enable_if<
+ internal::BufferCompat<uint8_t, T>::value>::type* = nullptr>
void AppendData(const T* data, size_t size) {
RTC_DCHECK(IsConsistent());
if (!buffer_) {
@@ -178,8 +198,11 @@ class CopyOnWriteBuffer {
RTC_DCHECK(IsConsistent());
}
- template <typename T, size_t N, typename internal::ByteType<T>::t = 0>
- void AppendData(const T(&array)[N]) {
+ template <typename T,
+ size_t N,
+ typename std::enable_if<
+ internal::BufferCompat<uint8_t, T>::value>::type* = nullptr>
+ void AppendData(const T (&array)[N]) {
AppendData(array, N);
}
diff --git a/chromium/third_party/webrtc/base/criticalsection_unittest.cc b/chromium/third_party/webrtc/base/criticalsection_unittest.cc
index a0e10338628..d33afacd8ce 100644
--- a/chromium/third_party/webrtc/base/criticalsection_unittest.cc
+++ b/chromium/third_party/webrtc/base/criticalsection_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <set>
#include <vector>
@@ -17,7 +18,6 @@
#include "webrtc/base/event.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/platform_thread.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scopedptrcollection.h"
#include "webrtc/base/thread.h"
@@ -226,8 +226,8 @@ TEST(AtomicOpsTest, Simple) {
TEST(AtomicOpsTest, SimplePtr) {
class Foo {};
Foo* volatile foo = nullptr;
- scoped_ptr<Foo> a(new Foo());
- scoped_ptr<Foo> b(new Foo());
+ std::unique_ptr<Foo> a(new Foo());
+ std::unique_ptr<Foo> b(new Foo());
// Reading the initial value should work as expected.
EXPECT_TRUE(rtc::AtomicOps::AcquireLoadPtr(&foo) == nullptr);
// Setting using compare and swap should work.
diff --git a/chromium/third_party/webrtc/base/cryptstring.h b/chromium/third_party/webrtc/base/cryptstring.h
index a6bae51f897..adaac2f36bc 100644
--- a/chromium/third_party/webrtc/base/cryptstring.h
+++ b/chromium/third_party/webrtc/base/cryptstring.h
@@ -13,11 +13,11 @@
#include <string.h>
+#include <memory>
#include <string>
#include <vector>
#include "webrtc/base/linked_ptr.h"
-#include "webrtc/base/scoped_ptr.h"
namespace rtc {
@@ -42,8 +42,8 @@ public:
};
class CryptString {
-public:
- CryptString();
+ public:
+ CryptString();
size_t GetLength() const { return impl_->GetLength(); }
void CopyTo(char * dest, bool nullterminate) const { impl_->CopyTo(dest, nullterminate); }
CryptString(const CryptString& other);
@@ -60,9 +60,9 @@ public:
void CopyRawTo(std::vector<unsigned char> * dest) const {
return impl_->CopyRawTo(dest);
}
-
-private:
- scoped_ptr<const CryptStringImpl> impl_;
+
+ private:
+ std::unique_ptr<const CryptStringImpl> impl_;
};
diff --git a/chromium/third_party/webrtc/base/dbus_unittest.cc b/chromium/third_party/webrtc/base/dbus_unittest.cc
index 17752f143f6..38c507d2893 100644
--- a/chromium/third_party/webrtc/base/dbus_unittest.cc
+++ b/chromium/third_party/webrtc/base/dbus_unittest.cc
@@ -10,6 +10,8 @@
#ifdef HAVE_DBUS_GLIB
+#include <memory>
+
#include "webrtc/base/dbus.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/thread.h"
@@ -51,7 +53,7 @@ class DBusSigFilterTest : public DBusSigFilter {
TEST(DBusMonitorTest, StartStopStartStop) {
DBusSigFilterTest filter;
- rtc::scoped_ptr<rtc::DBusMonitor> monitor;
+ std::unique_ptr<rtc::DBusMonitor> monitor;
monitor.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
if (monitor) {
EXPECT_TRUE(monitor->AddFilter(&filter));
@@ -83,7 +85,7 @@ TEST(DBusMonitorTest, StartStopStartStop) {
// This test is to make sure that we capture the "NameAcquired" signal.
TEST(DBusMonitorTest, ReceivedNameAcquiredSignal) {
DBusSigFilterTest filter;
- rtc::scoped_ptr<rtc::DBusMonitor> monitor;
+ std::unique_ptr<rtc::DBusMonitor> monitor;
monitor.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
if (monitor) {
EXPECT_TRUE(monitor->AddFilter(&filter));
@@ -100,12 +102,12 @@ TEST(DBusMonitorTest, ReceivedNameAcquiredSignal) {
TEST(DBusMonitorTest, ConcurrentMonitors) {
DBusSigFilterTest filter1;
- rtc::scoped_ptr<rtc::DBusMonitor> monitor1;
+ std::unique_ptr<rtc::DBusMonitor> monitor1;
monitor1.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
if (monitor1) {
EXPECT_TRUE(monitor1->AddFilter(&filter1));
DBusSigFilterTest filter2;
- rtc::scoped_ptr<rtc::DBusMonitor> monitor2;
+ std::unique_ptr<rtc::DBusMonitor> monitor2;
monitor2.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
EXPECT_TRUE(monitor2->AddFilter(&filter2));
@@ -129,7 +131,7 @@ TEST(DBusMonitorTest, ConcurrentMonitors) {
TEST(DBusMonitorTest, ConcurrentFilters) {
DBusSigFilterTest filter1;
DBusSigFilterTest filter2;
- rtc::scoped_ptr<rtc::DBusMonitor> monitor;
+ std::unique_ptr<rtc::DBusMonitor> monitor;
monitor.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
if (monitor) {
EXPECT_TRUE(monitor->AddFilter(&filter1));
@@ -151,7 +153,7 @@ TEST(DBusMonitorTest, ConcurrentFilters) {
TEST(DBusMonitorTest, NoAddFilterIfRunning) {
DBusSigFilterTest filter1;
DBusSigFilterTest filter2;
- rtc::scoped_ptr<rtc::DBusMonitor> monitor;
+ std::unique_ptr<rtc::DBusMonitor> monitor;
monitor.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
if (monitor) {
EXPECT_TRUE(monitor->AddFilter(&filter1));
@@ -170,7 +172,7 @@ TEST(DBusMonitorTest, NoAddFilterIfRunning) {
TEST(DBusMonitorTest, AddFilterAfterStop) {
DBusSigFilterTest filter1;
DBusSigFilterTest filter2;
- rtc::scoped_ptr<rtc::DBusMonitor> monitor;
+ std::unique_ptr<rtc::DBusMonitor> monitor;
monitor.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
if (monitor) {
EXPECT_TRUE(monitor->AddFilter(&filter1));
@@ -194,7 +196,7 @@ TEST(DBusMonitorTest, AddFilterAfterStop) {
TEST(DBusMonitorTest, StopRightAfterStart) {
DBusSigFilterTest filter;
- rtc::scoped_ptr<rtc::DBusMonitor> monitor;
+ std::unique_ptr<rtc::DBusMonitor> monitor;
monitor.reset(rtc::DBusMonitor::Create(DBUS_BUS_SYSTEM));
if (monitor) {
EXPECT_TRUE(monitor->AddFilter(&filter));
diff --git a/chromium/third_party/webrtc/base/diskcache.cc b/chromium/third_party/webrtc/base/diskcache.cc
index a1fba6af9ad..233d2ab4309 100644
--- a/chromium/third_party/webrtc/base/diskcache.cc
+++ b/chromium/third_party/webrtc/base/diskcache.cc
@@ -15,6 +15,8 @@
#endif
#include <algorithm>
+#include <memory>
+
#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/diskcache.h"
@@ -123,7 +125,7 @@ StreamInterface* DiskCache::WriteResource(const std::string& id, size_t index) {
previous_size = entry->size;
}
- scoped_ptr<FileStream> file(new FileStream);
+ std::unique_ptr<FileStream> file(new FileStream);
if (!file->Open(filename, "wb", NULL)) {
LOG_F(LS_ERROR) << "Couldn't create cache file";
return NULL;
@@ -161,7 +163,7 @@ StreamInterface* DiskCache::ReadResource(const std::string& id,
if (index >= entry->streams)
return NULL;
- scoped_ptr<FileStream> file(new FileStream);
+ std::unique_ptr<FileStream> file(new FileStream);
if (!file->Open(IdToFilename(id, index), "rb", NULL))
return NULL;
diff --git a/chromium/third_party/webrtc/base/fakenetwork.h b/chromium/third_party/webrtc/base/fakenetwork.h
index e3996e6649c..2dd2137aa81 100644
--- a/chromium/third_party/webrtc/base/fakenetwork.h
+++ b/chromium/third_party/webrtc/base/fakenetwork.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_BASE_FAKENETWORK_H_
#define WEBRTC_BASE_FAKENETWORK_H_
+#include <memory>
#include <string>
#include <utility>
#include <vector>
@@ -99,9 +100,9 @@ class FakeNetworkManager : public NetworkManagerBase,
prefix_length = kFakeIPv6NetworkPrefixLength;
}
IPAddress prefix = TruncateIP(it->first.ipaddr(), prefix_length);
- scoped_ptr<Network> net(new Network(it->first.hostname(),
- it->first.hostname(), prefix,
- prefix_length, it->second));
+ std::unique_ptr<Network> net(new Network(it->first.hostname(),
+ it->first.hostname(), prefix,
+ prefix_length, it->second));
net->set_default_local_address_provider(this);
net->AddIP(it->first.ipaddr());
networks.push_back(net.release());
diff --git a/chromium/third_party/webrtc/base/fakesslidentity.h b/chromium/third_party/webrtc/base/fakesslidentity.h
index 47ff86d03a8..3b0df298412 100644
--- a/chromium/third_party/webrtc/base/fakesslidentity.h
+++ b/chromium/third_party/webrtc/base/fakesslidentity.h
@@ -12,6 +12,7 @@
#define WEBRTC_BASE_FAKESSLIDENTITY_H_
#include <algorithm>
+#include <memory>
#include <vector>
#include "webrtc/base/common.h"
@@ -36,13 +37,13 @@ class FakeSSLCertificate : public rtc::SSLCertificate {
certs_.push_back(FakeSSLCertificate(*it));
}
}
- virtual FakeSSLCertificate* GetReference() const {
+ FakeSSLCertificate* GetReference() const override {
return new FakeSSLCertificate(*this);
}
- virtual std::string ToPEMString() const {
+ std::string ToPEMString() const override {
return data_;
}
- virtual void ToDER(Buffer* der_buffer) const {
+ void ToDER(Buffer* der_buffer) const override {
std::string der_string;
VERIFY(SSLIdentity::PemToDer(kPemTypeCertificate, data_, &der_string));
der_buffer->SetData(der_string.c_str(), der_string.size());
@@ -56,24 +57,24 @@ class FakeSSLCertificate : public rtc::SSLCertificate {
void set_digest_algorithm(const std::string& algorithm) {
digest_algorithm_ = algorithm;
}
- virtual bool GetSignatureDigestAlgorithm(std::string* algorithm) const {
+ bool GetSignatureDigestAlgorithm(std::string* algorithm) const override {
*algorithm = digest_algorithm_;
return true;
}
- virtual bool ComputeDigest(const std::string& algorithm,
- unsigned char* digest,
- size_t size,
- size_t* length) const {
+ bool ComputeDigest(const std::string& algorithm,
+ unsigned char* digest,
+ size_t size,
+ size_t* length) const override {
*length = rtc::ComputeDigest(algorithm, data_.c_str(), data_.size(),
digest, size);
return (*length != 0);
}
- virtual rtc::scoped_ptr<SSLCertChain> GetChain() const {
+ std::unique_ptr<SSLCertChain> GetChain() const override {
if (certs_.empty())
return nullptr;
std::vector<SSLCertificate*> new_certs(certs_.size());
std::transform(certs_.begin(), certs_.end(), new_certs.begin(), DupCert);
- rtc::scoped_ptr<SSLCertChain> chain(new SSLCertChain(new_certs));
+ std::unique_ptr<SSLCertChain> chain(new SSLCertChain(new_certs));
std::for_each(new_certs.begin(), new_certs.end(), DeleteCert);
return chain;
}
@@ -98,6 +99,18 @@ class FakeSSLIdentity : public rtc::SSLIdentity {
return new FakeSSLIdentity(*this);
}
virtual const FakeSSLCertificate& certificate() const { return cert_; }
+ virtual std::string PrivateKeyToPEMString() const {
+ RTC_NOTREACHED(); // Not implemented.
+ return "";
+ }
+ virtual std::string PublicKeyToPEMString() const {
+ RTC_NOTREACHED(); // Not implemented.
+ return "";
+ }
+ virtual bool operator==(const SSLIdentity& other) const {
+ RTC_NOTREACHED(); // Not implemented.
+ return false;
+ }
private:
FakeSSLCertificate cert_;
};
diff --git a/chromium/third_party/webrtc/base/filerotatingstream.h b/chromium/third_party/webrtc/base/filerotatingstream.h
index 9e8e35ddd7f..a8522ff0c5f 100644
--- a/chromium/third_party/webrtc/base/filerotatingstream.h
+++ b/chromium/third_party/webrtc/base/filerotatingstream.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_BASE_FILEROTATINGSTREAM_H_
#define WEBRTC_BASE_FILEROTATINGSTREAM_H_
+#include <memory>
#include <string>
#include <vector>
@@ -110,7 +111,7 @@ class FileRotatingStream : public StreamInterface {
const Mode mode_;
// FileStream is used to write to the current file.
- scoped_ptr<FileStream> file_stream_;
+ std::unique_ptr<FileStream> file_stream_;
// Convenience storage for file names so we don't generate them over and over.
std::vector<std::string> file_names_;
size_t max_file_size_;
diff --git a/chromium/third_party/webrtc/base/filerotatingstream_unittest.cc b/chromium/third_party/webrtc/base/filerotatingstream_unittest.cc
index 09438f870ee..bac2a3a3e32 100644
--- a/chromium/third_party/webrtc/base/filerotatingstream_unittest.cc
+++ b/chromium/third_party/webrtc/base/filerotatingstream_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/base/arraysize.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/filerotatingstream.h"
@@ -57,13 +59,13 @@ class FileRotatingStreamTest : public ::testing::Test {
const size_t expected_length,
const std::string& dir_path,
const char* file_prefix) {
- scoped_ptr<FileRotatingStream> stream;
+ std::unique_ptr<FileRotatingStream> stream;
stream.reset(new FileRotatingStream(dir_path, file_prefix));
ASSERT_TRUE(stream->Open());
size_t read = 0;
size_t stream_size = 0;
EXPECT_TRUE(stream->GetSize(&stream_size));
- scoped_ptr<uint8_t[]> buffer(new uint8_t[expected_length]);
+ std::unique_ptr<uint8_t[]> buffer(new uint8_t[expected_length]);
EXPECT_EQ(SR_SUCCESS,
stream->ReadAll(buffer.get(), expected_length, &read, nullptr));
EXPECT_EQ(0, memcmp(expected_contents, buffer.get(), expected_length));
@@ -74,8 +76,8 @@ class FileRotatingStreamTest : public ::testing::Test {
void VerifyFileContents(const char* expected_contents,
const size_t expected_length,
const std::string& file_path) {
- scoped_ptr<uint8_t[]> buffer(new uint8_t[expected_length]);
- scoped_ptr<FileStream> stream(Filesystem::OpenFile(file_path, "r"));
+ std::unique_ptr<uint8_t[]> buffer(new uint8_t[expected_length]);
+ std::unique_ptr<FileStream> stream(Filesystem::OpenFile(file_path, "r"));
EXPECT_TRUE(stream);
if (!stream) {
return;
@@ -88,7 +90,7 @@ class FileRotatingStreamTest : public ::testing::Test {
EXPECT_EQ(file_size, expected_length);
}
- scoped_ptr<FileRotatingStream> stream_;
+ std::unique_ptr<FileRotatingStream> stream_;
std::string dir_path_;
};
@@ -114,7 +116,7 @@ TEST_F(FileRotatingStreamTest, EmptyWrite) {
WriteAndFlush("a", 0);
std::string logfile_path = stream_->GetFilePath(0);
- scoped_ptr<FileStream> stream(Filesystem::OpenFile(logfile_path, "r"));
+ std::unique_ptr<FileStream> stream(Filesystem::OpenFile(logfile_path, "r"));
size_t file_size = 0;
EXPECT_TRUE(stream->GetSize(&file_size));
EXPECT_EQ(0u, file_size);
@@ -215,13 +217,13 @@ class CallSessionFileRotatingStreamTest : public ::testing::Test {
void VerifyStreamRead(const char* expected_contents,
const size_t expected_length,
const std::string& dir_path) {
- scoped_ptr<CallSessionFileRotatingStream> stream(
+ std::unique_ptr<CallSessionFileRotatingStream> stream(
new CallSessionFileRotatingStream(dir_path));
ASSERT_TRUE(stream->Open());
size_t read = 0;
size_t stream_size = 0;
EXPECT_TRUE(stream->GetSize(&stream_size));
- scoped_ptr<uint8_t[]> buffer(new uint8_t[expected_length]);
+ std::unique_ptr<uint8_t[]> buffer(new uint8_t[expected_length]);
EXPECT_EQ(SR_SUCCESS,
stream->ReadAll(buffer.get(), expected_length, &read, nullptr));
EXPECT_EQ(0, memcmp(expected_contents, buffer.get(), expected_length));
@@ -229,7 +231,7 @@ class CallSessionFileRotatingStreamTest : public ::testing::Test {
EXPECT_EQ(stream_size, read);
}
- scoped_ptr<CallSessionFileRotatingStream> stream_;
+ std::unique_ptr<CallSessionFileRotatingStream> stream_;
std::string dir_path_;
};
@@ -266,7 +268,7 @@ TEST_F(CallSessionFileRotatingStreamTest, WriteAndReadLarge) {
ASSERT_TRUE(stream_->Open());
const size_t buffer_size = 1024 * 1024;
- scoped_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
+ std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
for (int i = 0; i < 8; i++) {
memset(buffer.get(), i, buffer_size);
EXPECT_EQ(SR_SUCCESS,
@@ -275,7 +277,7 @@ TEST_F(CallSessionFileRotatingStreamTest, WriteAndReadLarge) {
stream_.reset(new CallSessionFileRotatingStream(dir_path_));
ASSERT_TRUE(stream_->Open());
- scoped_ptr<uint8_t[]> expected_buffer(new uint8_t[buffer_size]);
+ std::unique_ptr<uint8_t[]> expected_buffer(new uint8_t[buffer_size]);
int expected_vals[] = {0, 1, 2, 6, 7};
for (size_t i = 0; i < arraysize(expected_vals); ++i) {
memset(expected_buffer.get(), expected_vals[i], buffer_size);
@@ -293,7 +295,7 @@ TEST_F(CallSessionFileRotatingStreamTest, WriteAndReadFirstHalf) {
6 * 1024 * 1024);
ASSERT_TRUE(stream_->Open());
const size_t buffer_size = 1024 * 1024;
- scoped_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
+ std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
for (int i = 0; i < 2; i++) {
memset(buffer.get(), i, buffer_size);
EXPECT_EQ(SR_SUCCESS,
@@ -302,7 +304,7 @@ TEST_F(CallSessionFileRotatingStreamTest, WriteAndReadFirstHalf) {
stream_.reset(new CallSessionFileRotatingStream(dir_path_));
ASSERT_TRUE(stream_->Open());
- scoped_ptr<uint8_t[]> expected_buffer(new uint8_t[buffer_size]);
+ std::unique_ptr<uint8_t[]> expected_buffer(new uint8_t[buffer_size]);
int expected_vals[] = {0, 1};
for (size_t i = 0; i < arraysize(expected_vals); ++i) {
memset(expected_buffer.get(), expected_vals[i], buffer_size);
diff --git a/chromium/third_party/webrtc/base/fileutils.cc b/chromium/third_party/webrtc/base/fileutils.cc
index cb23153de77..d354dd8eb56 100644
--- a/chromium/third_party/webrtc/base/fileutils.cc
+++ b/chromium/third_party/webrtc/base/fileutils.cc
@@ -133,7 +133,7 @@ bool DirectoryIterator::OlderThan(int seconds) const {
#else
file_modify_time = stat_.st_mtime;
#endif
- return TimeDiff(time(NULL), file_modify_time) >= seconds;
+ return time(NULL) - file_modify_time >= seconds;
}
FilesystemInterface* Filesystem::default_filesystem_ = NULL;
diff --git a/chromium/third_party/webrtc/base/fileutils.h b/chromium/third_party/webrtc/base/fileutils.h
index bf02571d936..23d36b15f47 100644
--- a/chromium/third_party/webrtc/base/fileutils.h
+++ b/chromium/third_party/webrtc/base/fileutils.h
@@ -23,8 +23,8 @@
#include "webrtc/base/basictypes.h"
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/platform_file.h"
-#include "webrtc/base/scoped_ptr.h"
namespace rtc {
diff --git a/chromium/third_party/webrtc/base/fileutils_unittest.cc b/chromium/third_party/webrtc/base/fileutils_unittest.cc
index 6e98e145091..51396caefbb 100644
--- a/chromium/third_party/webrtc/base/fileutils_unittest.cc
+++ b/chromium/third_party/webrtc/base/fileutils_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/base/fileutils.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/pathutils.h"
@@ -74,7 +76,7 @@ TEST(FilesystemTest, TestCreatePrivateFile) {
EXPECT_FALSE(Filesystem::CreatePrivateFile(path));
// Verify that we have permission to open the file for reading and writing.
- scoped_ptr<FileStream> fs(Filesystem::OpenFile(path, "wb"));
+ std::unique_ptr<FileStream> fs(Filesystem::OpenFile(path, "wb"));
EXPECT_TRUE(fs.get() != NULL);
// Have to close the file on Windows before it will let us delete it.
fs.reset();
diff --git a/chromium/third_party/webrtc/base/flags.h b/chromium/third_party/webrtc/base/flags.h
index 4ce857b74a7..d16f12bad63 100644
--- a/chromium/third_party/webrtc/base/flags.h
+++ b/chromium/third_party/webrtc/base/flags.h
@@ -27,6 +27,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
namespace rtc {
diff --git a/chromium/third_party/webrtc/base/gunit.h b/chromium/third_party/webrtc/base/gunit.h
index 1a6c36374e9..e705322e6f8 100644
--- a/chromium/third_party/webrtc/base/gunit.h
+++ b/chromium/third_party/webrtc/base/gunit.h
@@ -20,21 +20,22 @@
#endif
// Wait until "ex" is true, or "timeout" expires.
-#define WAIT(ex, timeout) \
- for (uint32_t start = rtc::Time(); !(ex) && rtc::Time() < start + timeout;) \
+#define WAIT(ex, timeout) \
+ for (int64_t start = rtc::TimeMillis(); \
+ !(ex) && rtc::TimeMillis() < start + timeout;) \
rtc::Thread::Current()->ProcessMessages(1);
// This returns the result of the test in res, so that we don't re-evaluate
// the expression in the XXXX_WAIT macros below, since that causes problems
// when the expression is only true the first time you check it.
-#define WAIT_(ex, timeout, res) \
- do { \
- uint32_t start = rtc::Time(); \
- res = (ex); \
- while (!res && rtc::Time() < start + timeout) { \
- rtc::Thread::Current()->ProcessMessages(1); \
- res = (ex); \
- } \
+#define WAIT_(ex, timeout, res) \
+ do { \
+ int64_t start = rtc::TimeMillis(); \
+ res = (ex); \
+ while (!res && rtc::TimeMillis() < start + timeout) { \
+ rtc::Thread::Current()->ProcessMessages(1); \
+ res = (ex); \
+ } \
} while (0)
// The typical EXPECT_XXXX and ASSERT_XXXXs, but done until true or a timeout.
diff --git a/chromium/third_party/webrtc/base/helpers.cc b/chromium/third_party/webrtc/base/helpers.cc
index 1ad5d0e12ba..0a39ee923e1 100644
--- a/chromium/third_party/webrtc/base/helpers.cc
+++ b/chromium/third_party/webrtc/base/helpers.cc
@@ -11,6 +11,7 @@
#include "webrtc/base/helpers.h"
#include <limits>
+#include <memory>
#if defined(FEATURE_ENABLE_SSL)
#include "webrtc/base/sslconfig.h"
@@ -28,7 +29,6 @@
#include "webrtc/base/base64.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/timeutils.h"
// Protect against max macro inclusion.
@@ -181,8 +181,8 @@ static const char kUuidDigit17[4] = {'8', '9', 'a', 'b'};
// This round about way of creating a global RNG is to safe-guard against
// indeterminant static initialization order.
-scoped_ptr<RandomGenerator>& GetGlobalRng() {
- RTC_DEFINE_STATIC_LOCAL(scoped_ptr<RandomGenerator>, global_rng,
+std::unique_ptr<RandomGenerator>& GetGlobalRng() {
+ RTC_DEFINE_STATIC_LOCAL(std::unique_ptr<RandomGenerator>, global_rng,
(new SecureRandomGenerator()));
return global_rng;
}
@@ -223,7 +223,7 @@ bool CreateRandomString(size_t len,
const char* table, int table_size,
std::string* str) {
str->clear();
- scoped_ptr<uint8_t[]> bytes(new uint8_t[len]);
+ std::unique_ptr<uint8_t[]> bytes(new uint8_t[len]);
if (!Rng().Generate(bytes.get(), len)) {
LOG(LS_ERROR) << "Failed to generate random string!";
return false;
@@ -250,7 +250,7 @@ bool CreateRandomString(size_t len, const std::string& table,
// Where 'x' is a hex digit, and 'y' is 8, 9, a or b.
std::string CreateRandomUuid() {
std::string str;
- scoped_ptr<uint8_t[]> bytes(new uint8_t[31]);
+ std::unique_ptr<uint8_t[]> bytes(new uint8_t[31]);
if (!Rng().Generate(bytes.get(), 31)) {
LOG(LS_ERROR) << "Failed to generate random string!";
return str;
diff --git a/chromium/third_party/webrtc/base/httpbase.cc b/chromium/third_party/webrtc/base/httpbase.cc
index 81ca4cceeb1..efdc8af8a9e 100644
--- a/chromium/third_party/webrtc/base/httpbase.cc
+++ b/chromium/third_party/webrtc/base/httpbase.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#if defined(WEBRTC_WIN)
#include "webrtc/base/win32.h"
@@ -271,9 +272,8 @@ public:
// When the method returns, we restore the old document. Ideally, we would
// pass our StreamInterface* to DoReceiveLoop, but due to the callbacks
// of HttpParser, we would still need to store the pointer temporarily.
- scoped_ptr<StreamInterface>
- stream(new BlockingMemoryStream(reinterpret_cast<char*>(buffer),
- buffer_len));
+ std::unique_ptr<StreamInterface> stream(
+ new BlockingMemoryStream(reinterpret_cast<char*>(buffer), buffer_len));
// Replace the existing document with our wrapped buffer.
base_->data_->document.swap(stream);
diff --git a/chromium/third_party/webrtc/base/httpclient.cc b/chromium/third_party/webrtc/base/httpclient.cc
index e078334094a..a458590bdb6 100644
--- a/chromium/third_party/webrtc/base/httpclient.cc
+++ b/chromium/third_party/webrtc/base/httpclient.cc
@@ -10,6 +10,7 @@
#include <time.h>
#include <algorithm>
+#include <memory>
#include "webrtc/base/asyncsocket.h"
#include "webrtc/base/common.h"
#include "webrtc/base/diskcache.h"
@@ -17,7 +18,6 @@
#include "webrtc/base/httpcommon-inl.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/pathutils.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socketstream.h"
#include "webrtc/base/stringencode.h"
#include "webrtc/base/stringutils.h"
@@ -466,7 +466,8 @@ bool HttpClient::BeginCacheFile() {
return false;
}
- scoped_ptr<StreamInterface> stream(cache_->WriteResource(id, kCacheBody));
+ std::unique_ptr<StreamInterface> stream(
+ cache_->WriteResource(id, kCacheBody));
if (!stream) {
LOG_F(LS_ERROR) << "Couldn't open body cache";
return false;
@@ -485,7 +486,8 @@ bool HttpClient::BeginCacheFile() {
}
HttpError HttpClient::WriteCacheHeaders(const std::string& id) {
- scoped_ptr<StreamInterface> stream(cache_->WriteResource(id, kCacheHeader));
+ std::unique_ptr<StreamInterface> stream(
+ cache_->WriteResource(id, kCacheHeader));
if (!stream) {
LOG_F(LS_ERROR) << "Couldn't open header cache";
return HE_CACHE;
@@ -563,7 +565,8 @@ bool HttpClient::CheckCache() {
}
HttpError HttpClient::ReadCacheHeaders(const std::string& id, bool override) {
- scoped_ptr<StreamInterface> stream(cache_->ReadResource(id, kCacheHeader));
+ std::unique_ptr<StreamInterface> stream(
+ cache_->ReadResource(id, kCacheHeader));
if (!stream) {
return HE_CACHE;
}
@@ -586,7 +589,7 @@ HttpError HttpClient::ReadCacheBody(const std::string& id) {
HttpError error = HE_NONE;
size_t data_size;
- scoped_ptr<StreamInterface> stream(cache_->ReadResource(id, kCacheBody));
+ std::unique_ptr<StreamInterface> stream(cache_->ReadResource(id, kCacheBody));
if (!stream || !stream->GetAvailable(&data_size)) {
LOG_F(LS_ERROR) << "Unavailable cache body";
error = HE_CACHE;
@@ -599,7 +602,7 @@ HttpError HttpClient::ReadCacheBody(const std::string& id) {
&& response().document) {
// Allocate on heap to not explode the stack.
const int array_size = 1024 * 64;
- scoped_ptr<char[]> buffer(new char[array_size]);
+ std::unique_ptr<char[]> buffer(new char[array_size]);
StreamResult result = Flow(stream.get(), buffer.get(), array_size,
response().document.get());
if (SR_SUCCESS != result) {
diff --git a/chromium/third_party/webrtc/base/httpclient.h b/chromium/third_party/webrtc/base/httpclient.h
index e7d2c5ce7d5..0c19d2efb43 100644
--- a/chromium/third_party/webrtc/base/httpclient.h
+++ b/chromium/third_party/webrtc/base/httpclient.h
@@ -11,11 +11,12 @@
#ifndef WEBRTC_BASE_HTTPCLIENT_H__
#define WEBRTC_BASE_HTTPCLIENT_H__
+#include <memory>
+
#include "webrtc/base/common.h"
#include "webrtc/base/httpbase.h"
#include "webrtc/base/nethelpers.h"
#include "webrtc/base/proxyinfo.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/socketaddress.h"
#include "webrtc/base/socketpool.h"
@@ -172,7 +173,7 @@ private:
size_t retries_, attempt_, redirects_;
RedirectAction redirect_action_;
UriForm uri_form_;
- scoped_ptr<HttpAuthContext> context_;
+ std::unique_ptr<HttpAuthContext> context_;
DiskCache* cache_;
CacheState cache_state_;
AsyncResolverInterface* resolver_;
diff --git a/chromium/third_party/webrtc/base/httpcommon.h b/chromium/third_party/webrtc/base/httpcommon.h
index addc1bc30d2..3450b58b568 100644
--- a/chromium/third_party/webrtc/base/httpcommon.h
+++ b/chromium/third_party/webrtc/base/httpcommon.h
@@ -12,11 +12,11 @@
#define WEBRTC_BASE_HTTPCOMMON_H__
#include <map>
+#include <memory>
#include <string>
#include <vector>
#include "webrtc/base/basictypes.h"
#include "webrtc/base/common.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/base/stream.h"
@@ -292,7 +292,7 @@ struct HttpData {
typedef HeaderMap::iterator iterator;
HttpVersion version;
- scoped_ptr<StreamInterface> document;
+ std::unique_ptr<StreamInterface> document;
HttpData();
diff --git a/chromium/third_party/webrtc/base/httpserver.h b/chromium/third_party/webrtc/base/httpserver.h
index 30c8f4c5516..c322e81790b 100644
--- a/chromium/third_party/webrtc/base/httpserver.h
+++ b/chromium/third_party/webrtc/base/httpserver.h
@@ -12,6 +12,8 @@
#define WEBRTC_BASE_HTTPSERVER_H__
#include <map>
+#include <memory>
+
#include "webrtc/base/httpbase.h"
namespace rtc {
@@ -127,7 +129,7 @@ private:
void OnConnectionClosed(HttpServer* server, int connection_id,
StreamInterface* stream);
- scoped_ptr<AsyncSocket> listener_;
+ std::unique_ptr<AsyncSocket> listener_;
};
//////////////////////////////////////////////////////////////////////
diff --git a/chromium/third_party/webrtc/base/latebindingsymboltable.h b/chromium/third_party/webrtc/base/latebindingsymboltable.h
index 636e7d07072..794a4ab5a8f 100644
--- a/chromium/third_party/webrtc/base/latebindingsymboltable.h
+++ b/chromium/third_party/webrtc/base/latebindingsymboltable.h
@@ -14,6 +14,7 @@
#include <string.h>
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
namespace rtc {
diff --git a/chromium/third_party/webrtc/base/linux.h b/chromium/third_party/webrtc/base/linux.h
index ba73b854ba8..b69de3b3701 100644
--- a/chromium/third_party/webrtc/base/linux.h
+++ b/chromium/third_party/webrtc/base/linux.h
@@ -14,9 +14,9 @@
#if defined(WEBRTC_LINUX)
#include <string>
#include <map>
+#include <memory>
#include <vector>
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/stream.h"
namespace rtc {
@@ -51,7 +51,7 @@ class ConfigParser {
virtual bool ParseLine(std::string* key, std::string* value);
private:
- scoped_ptr<StreamInterface> instream_;
+ std::unique_ptr<StreamInterface> instream_;
};
//////////////////////////////////////////////////////////////////////////////
diff --git a/chromium/third_party/webrtc/base/logging.cc b/chromium/third_party/webrtc/base/logging.cc
index 019a31623be..60603624a5e 100644
--- a/chromium/third_party/webrtc/base/logging.cc
+++ b/chromium/third_party/webrtc/base/logging.cc
@@ -42,7 +42,6 @@ static const char kLibjingle[] = "libjingle";
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/platform_thread.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/stringencode.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/base/timeutils.h"
@@ -125,7 +124,7 @@ LogMessage::LogMessage(const char* file,
const char* module)
: severity_(sev), tag_(kLibjingle) {
if (timestamp_) {
- uint32_t time = TimeSince(LogStartTime());
+ int64_t time = TimeSince(LogStartTime());
// Also ensure WallClockStartTime is initialized, so that it matches
// LogStartTime.
WallClockStartTime();
@@ -171,10 +170,8 @@ LogMessage::LogMessage(const char* file,
#endif // WEBRTC_WIN
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
case ERRCTX_OSSTATUS: {
- tmp << " " << nonnull(GetMacOSStatusErrorString(err), "Unknown error");
- if (const char* desc = GetMacOSStatusCommentString(err)) {
- tmp << ": " << desc;
- }
+ std::string desc(DescriptionFromOSStatus(err));
+ tmp << " " << (desc.empty() ? "Unknown error" : desc.c_str());
break;
}
#endif // WEBRTC_MAC && !defined(WEBRTC_IOS)
@@ -212,8 +209,8 @@ LogMessage::~LogMessage() {
}
}
-uint32_t LogMessage::LogStartTime() {
- static const uint32_t g_start = Time();
+int64_t LogMessage::LogStartTime() {
+ static const int64_t g_start = TimeMillis();
return g_start;
}
diff --git a/chromium/third_party/webrtc/base/logging.h b/chromium/third_party/webrtc/base/logging.h
index 8c7b3f70e52..631c6384d35 100644
--- a/chromium/third_party/webrtc/base/logging.h
+++ b/chromium/third_party/webrtc/base/logging.h
@@ -46,11 +46,17 @@
#ifndef WEBRTC_BASE_LOGGING_H_
#define WEBRTC_BASE_LOGGING_H_
+#include <errno.h>
+
#include <list>
#include <sstream>
#include <string>
#include <utility>
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include <CoreServices/CoreServices.h>
+#endif
+
#include "webrtc/base/basictypes.h"
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/thread_annotations.h"
@@ -80,6 +86,11 @@ struct ConstantLabel { int value; const char * label; };
const char* FindLabel(int value, const ConstantLabel entries[]);
std::string ErrorName(int err, const ConstantLabel* err_table);
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+// Returns a UTF8 description from an OS X Status error.
+std::string DescriptionFromOSStatus(OSStatus err);
+#endif
+
//////////////////////////////////////////////////////////////////////
// Note that the non-standard LoggingSeverity aliases exist because they are
@@ -147,7 +158,7 @@ class LogMessage {
// If this is not called externally, the LogMessage ctor also calls it, in
// which case the logging start time will be the time of the first LogMessage
// instance is created.
- static uint32_t LogStartTime();
+ static int64_t LogStartTime();
// Returns the wall clock equivalent of |LogStartTime|, in seconds from the
// epoch.
diff --git a/chromium/third_party/webrtc/base/logging_mac.mm b/chromium/third_party/webrtc/base/logging_mac.mm
new file mode 100644
index 00000000000..ffee3541a48
--- /dev/null
+++ b/chromium/third_party/webrtc/base/logging_mac.mm
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/logging.h"
+
+#import <Foundation/Foundation.h>
+
+
+namespace rtc {
+std::string DescriptionFromOSStatus(OSStatus err) {
+ NSError* error =
+ [NSError errorWithDomain:NSOSStatusErrorDomain code:err userInfo:nil];
+ return error.description.UTF8String;
+}
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/logging_unittest.cc b/chromium/third_party/webrtc/base/logging_unittest.cc
index 6047361bf58..d5bd9d4a005 100644
--- a/chromium/third_party/webrtc/base/logging_unittest.cc
+++ b/chromium/third_party/webrtc/base/logging_unittest.cc
@@ -138,18 +138,18 @@ TEST(LogTest, Perf) {
stream.DisableBuffering();
LogMessage::AddLogToStream(&stream, LS_SENSITIVE);
- uint32_t start = Time(), finish;
+ int64_t start = TimeMillis(), finish;
std::string message('X', 80);
for (int i = 0; i < 1000; ++i) {
LOG(LS_SENSITIVE) << message;
}
- finish = Time();
+ finish = TimeMillis();
LogMessage::RemoveLogToStream(&stream);
stream.Close();
Filesystem::DeleteFile(path);
- LOG(LS_INFO) << "Average log time: " << TimeDiff(finish, start) << " us";
+ LOG(LS_INFO) << "Average log time: " << TimeDiff(finish, start) << " ms";
}
} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/logsinks.h b/chromium/third_party/webrtc/base/logsinks.h
index eabf0563986..e75120e3f54 100644
--- a/chromium/third_party/webrtc/base/logsinks.h
+++ b/chromium/third_party/webrtc/base/logsinks.h
@@ -11,12 +11,12 @@
#ifndef WEBRTC_BASE_FILE_ROTATING_LOG_SINK_H_
#define WEBRTC_BASE_FILE_ROTATING_LOG_SINK_H_
+#include <memory>
#include <string>
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/filerotatingstream.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
namespace rtc {
@@ -46,7 +46,7 @@ class FileRotatingLogSink : public LogSink {
explicit FileRotatingLogSink(FileRotatingStream* stream);
private:
- scoped_ptr<FileRotatingStream> stream_;
+ std::unique_ptr<FileRotatingStream> stream_;
RTC_DISALLOW_COPY_AND_ASSIGN(FileRotatingLogSink);
};
diff --git a/chromium/third_party/webrtc/base/macasyncsocket.h b/chromium/third_party/webrtc/base/macasyncsocket.h
index 5861ee3276a..c0f57b948c6 100644
--- a/chromium/third_party/webrtc/base/macasyncsocket.h
+++ b/chromium/third_party/webrtc/base/macasyncsocket.h
@@ -19,6 +19,7 @@
#include <CoreFoundation/CoreFoundation.h>
#include "webrtc/base/asyncsocket.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/nethelpers.h"
namespace rtc {
diff --git a/chromium/third_party/webrtc/base/maccocoasocketserver.h b/chromium/third_party/webrtc/base/maccocoasocketserver.h
index 0acf8d757a5..3e07a452e67 100644
--- a/chromium/third_party/webrtc/base/maccocoasocketserver.h
+++ b/chromium/third_party/webrtc/base/maccocoasocketserver.h
@@ -13,6 +13,7 @@
#ifndef WEBRTC_BASE_MACCOCOASOCKETSERVER_H_
#define WEBRTC_BASE_MACCOCOASOCKETSERVER_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/macsocketserver.h"
#ifdef __OBJC__
diff --git a/chromium/third_party/webrtc/base/maccocoasocketserver_unittest.mm b/chromium/third_party/webrtc/base/maccocoasocketserver_unittest.mm
index 5401ffb329a..3d9e4da6997 100644
--- a/chromium/third_party/webrtc/base/maccocoasocketserver_unittest.mm
+++ b/chromium/third_party/webrtc/base/maccocoasocketserver_unittest.mm
@@ -9,7 +9,6 @@
*/
#include "webrtc/base/gunit.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/maccocoasocketserver.h"
diff --git a/chromium/third_party/webrtc/base/macifaddrs_converter.cc b/chromium/third_party/webrtc/base/macifaddrs_converter.cc
index 0916cb5ba27..2ad070e8eab 100644
--- a/chromium/third_party/webrtc/base/macifaddrs_converter.cc
+++ b/chromium/third_party/webrtc/base/macifaddrs_converter.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include <net/if.h>
#include <sys/ioctl.h>
#include <unistd.h>
@@ -15,7 +17,6 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/ifaddrs_converter.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
#if !defined(WEBRTC_IOS)
#include <net/if_media.h>
@@ -269,7 +270,7 @@ class MacIfAddrsConverter : public IfAddrsConverter {
}
private:
- rtc::scoped_ptr<IPv6AttributesGetter> ip_attribute_getter_;
+ std::unique_ptr<IPv6AttributesGetter> ip_attribute_getter_;
};
} // namespace
diff --git a/chromium/third_party/webrtc/base/macsocketserver_unittest.cc b/chromium/third_party/webrtc/base/macsocketserver_unittest.cc
index ecb9a706b71..87cfe07b2f1 100644
--- a/chromium/third_party/webrtc/base/macsocketserver_unittest.cc
+++ b/chromium/third_party/webrtc/base/macsocketserver_unittest.cc
@@ -8,8 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/base/gunit.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socket_unittest.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/macsocketserver.h"
@@ -98,7 +99,7 @@ class MacAsyncSocketTest : public SocketTest {
virtual MacBaseSocketServer* CreateSocketServer() {
return new MacCFSocketServer();
};
- rtc::scoped_ptr<MacBaseSocketServer> server_;
+ std::unique_ptr<MacBaseSocketServer> server_;
SocketServerScope scope_;
};
diff --git a/chromium/third_party/webrtc/base/macutils.cc b/chromium/third_party/webrtc/base/macutils.cc
index 7b1ff475368..74b49199647 100644
--- a/chromium/third_party/webrtc/base/macutils.cc
+++ b/chromium/third_party/webrtc/base/macutils.cc
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <sstream>
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/macutils.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/stringutils.h"
namespace rtc {
@@ -26,7 +26,7 @@ bool ToUtf8(const CFStringRef str16, std::string* str8) {
}
size_t maxlen = CFStringGetMaximumSizeForEncoding(CFStringGetLength(str16),
kCFStringEncodingUTF8) + 1;
- scoped_ptr<char[]> buffer(new char[maxlen]);
+ std::unique_ptr<char[]> buffer(new char[maxlen]);
if (!buffer || !CFStringGetCString(str16, buffer.get(), maxlen,
kCFStringEncodingUTF8)) {
return false;
diff --git a/chromium/third_party/webrtc/base/messagedigest.cc b/chromium/third_party/webrtc/base/messagedigest.cc
index 0c2b4a16ac9..c08cab4ea9d 100644
--- a/chromium/third_party/webrtc/base/messagedigest.cc
+++ b/chromium/third_party/webrtc/base/messagedigest.cc
@@ -10,6 +10,8 @@
#include "webrtc/base/messagedigest.h"
+#include <memory>
+
#include <string.h>
#include "webrtc/base/basictypes.h"
@@ -20,7 +22,6 @@
#include "webrtc/base/md5digest.h"
#include "webrtc/base/sha1digest.h"
#endif
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/stringencode.h"
namespace rtc {
@@ -75,14 +76,14 @@ size_t ComputeDigest(MessageDigest* digest, const void* input, size_t in_len,
size_t ComputeDigest(const std::string& alg, const void* input, size_t in_len,
void* output, size_t out_len) {
- scoped_ptr<MessageDigest> digest(MessageDigestFactory::Create(alg));
+ std::unique_ptr<MessageDigest> digest(MessageDigestFactory::Create(alg));
return (digest) ?
ComputeDigest(digest.get(), input, in_len, output, out_len) :
0;
}
std::string ComputeDigest(MessageDigest* digest, const std::string& input) {
- scoped_ptr<char[]> output(new char[digest->Size()]);
+ std::unique_ptr<char[]> output(new char[digest->Size()]);
ComputeDigest(digest, input.data(), input.size(),
output.get(), digest->Size());
return hex_encode(output.get(), digest->Size());
@@ -90,7 +91,7 @@ std::string ComputeDigest(MessageDigest* digest, const std::string& input) {
bool ComputeDigest(const std::string& alg, const std::string& input,
std::string* output) {
- scoped_ptr<MessageDigest> digest(MessageDigestFactory::Create(alg));
+ std::unique_ptr<MessageDigest> digest(MessageDigestFactory::Create(alg));
if (!digest) {
return false;
}
@@ -117,7 +118,7 @@ size_t ComputeHmac(MessageDigest* digest,
}
// Copy the key to a block-sized buffer to simplify padding.
// If the key is longer than a block, hash it and use the result instead.
- scoped_ptr<uint8_t[]> new_key(new uint8_t[block_len]);
+ std::unique_ptr<uint8_t[]> new_key(new uint8_t[block_len]);
if (key_len > block_len) {
ComputeDigest(digest, key, key_len, new_key.get(), block_len);
memset(new_key.get() + digest->Size(), 0, block_len - digest->Size());
@@ -126,14 +127,14 @@ size_t ComputeHmac(MessageDigest* digest,
memset(new_key.get() + key_len, 0, block_len - key_len);
}
// Set up the padding from the key, salting appropriately for each padding.
- scoped_ptr<uint8_t[]> o_pad(new uint8_t[block_len]);
- scoped_ptr<uint8_t[]> i_pad(new uint8_t[block_len]);
+ std::unique_ptr<uint8_t[]> o_pad(new uint8_t[block_len]);
+ std::unique_ptr<uint8_t[]> i_pad(new uint8_t[block_len]);
for (size_t i = 0; i < block_len; ++i) {
o_pad[i] = 0x5c ^ new_key[i];
i_pad[i] = 0x36 ^ new_key[i];
}
// Inner hash; hash the inner padding, and then the input buffer.
- scoped_ptr<uint8_t[]> inner(new uint8_t[digest->Size()]);
+ std::unique_ptr<uint8_t[]> inner(new uint8_t[digest->Size()]);
digest->Update(i_pad.get(), block_len);
digest->Update(input, in_len);
digest->Finish(inner.get(), digest->Size());
@@ -146,7 +147,7 @@ size_t ComputeHmac(MessageDigest* digest,
size_t ComputeHmac(const std::string& alg, const void* key, size_t key_len,
const void* input, size_t in_len,
void* output, size_t out_len) {
- scoped_ptr<MessageDigest> digest(MessageDigestFactory::Create(alg));
+ std::unique_ptr<MessageDigest> digest(MessageDigestFactory::Create(alg));
if (!digest) {
return 0;
}
@@ -156,7 +157,7 @@ size_t ComputeHmac(const std::string& alg, const void* key, size_t key_len,
std::string ComputeHmac(MessageDigest* digest, const std::string& key,
const std::string& input) {
- scoped_ptr<char[]> output(new char[digest->Size()]);
+ std::unique_ptr<char[]> output(new char[digest->Size()]);
ComputeHmac(digest, key.data(), key.size(),
input.data(), input.size(), output.get(), digest->Size());
return hex_encode(output.get(), digest->Size());
@@ -164,7 +165,7 @@ std::string ComputeHmac(MessageDigest* digest, const std::string& key,
bool ComputeHmac(const std::string& alg, const std::string& key,
const std::string& input, std::string* output) {
- scoped_ptr<MessageDigest> digest(MessageDigestFactory::Create(alg));
+ std::unique_ptr<MessageDigest> digest(MessageDigestFactory::Create(alg));
if (!digest) {
return false;
}
diff --git a/chromium/third_party/webrtc/base/messagehandler.h b/chromium/third_party/webrtc/base/messagehandler.h
index b55b229a6db..6a3c2ef7402 100644
--- a/chromium/third_party/webrtc/base/messagehandler.h
+++ b/chromium/third_party/webrtc/base/messagehandler.h
@@ -11,10 +11,10 @@
#ifndef WEBRTC_BASE_MESSAGEHANDLER_H_
#define WEBRTC_BASE_MESSAGEHANDLER_H_
+#include <memory>
#include <utility>
#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
namespace rtc {
@@ -50,18 +50,18 @@ class FunctorMessageHandler : public MessageHandler {
ReturnT result_;
};
-// Specialization for rtc::scoped_ptr<ReturnT>.
+// Specialization for std::unique_ptr<ReturnT>.
template <class ReturnT, class FunctorT>
-class FunctorMessageHandler<class rtc::scoped_ptr<ReturnT>, FunctorT>
+class FunctorMessageHandler<class std::unique_ptr<ReturnT>, FunctorT>
: public MessageHandler {
public:
explicit FunctorMessageHandler(const FunctorT& functor) : functor_(functor) {}
virtual void OnMessage(Message* msg) { result_ = std::move(functor_()); }
- rtc::scoped_ptr<ReturnT> result() { return std::move(result_); }
+ std::unique_ptr<ReturnT> result() { return std::move(result_); }
private:
FunctorT functor_;
- rtc::scoped_ptr<ReturnT> result_;
+ std::unique_ptr<ReturnT> result_;
};
// Specialization for ReturnT of void.
diff --git a/chromium/third_party/webrtc/base/messagequeue.cc b/chromium/third_party/webrtc/base/messagequeue.cc
index 61aa61192bb..e8b5bf5bc1f 100644
--- a/chromium/third_party/webrtc/base/messagequeue.cc
+++ b/chromium/third_party/webrtc/base/messagequeue.cc
@@ -7,27 +7,17 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-
-#if defined(WEBRTC_POSIX)
-#include <sys/time.h>
-#endif
-
#include <algorithm>
+#include "webrtc/base/checks.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/messagequeue.h"
-#if defined(__native_client__)
-#include "webrtc/base/nullsocketserver.h"
-typedef rtc::NullSocketServer DefaultSocketServer;
-#else
-#include "webrtc/base/physicalsocketserver.h"
-typedef rtc::PhysicalSocketServer DefaultSocketServer;
-#endif
+#include "webrtc/base/trace_event.h"
namespace rtc {
-const uint32_t kMaxMsgLatency = 150; // 150 ms
+const int kMaxMsgLatency = 150; // 150 ms
//------------------------------------------------------------------
// MessageQueueManager
@@ -115,25 +105,26 @@ void MessageQueueManager::ClearInternal(MessageHandler *handler) {
//------------------------------------------------------------------
// MessageQueue
-
MessageQueue::MessageQueue(SocketServer* ss, bool init_queue)
: fStop_(false), fPeekKeep_(false),
dmsgq_next_num_(0), fInitialized_(false), fDestroyed_(false), ss_(ss) {
- if (!ss_) {
- // Currently, MessageQueue holds a socket server, and is the base class for
- // Thread. It seems like it makes more sense for Thread to hold the socket
- // server, and provide it to the MessageQueue, since the Thread controls
- // the I/O model, and MQ is agnostic to those details. Anyway, this causes
- // messagequeue_unittest to depend on network libraries... yuck.
- default_ss_.reset(new DefaultSocketServer());
- ss_ = default_ss_.get();
- }
+ RTC_DCHECK(ss);
+ // Currently, MessageQueue holds a socket server, and is the base class for
+ // Thread. It seems like it makes more sense for Thread to hold the socket
+ // server, and provide it to the MessageQueue, since the Thread controls
+ // the I/O model, and MQ is agnostic to those details. Anyway, this causes
+ // messagequeue_unittest to depend on network libraries... yuck.
ss_->SetMessageQueue(this);
if (init_queue) {
DoInit();
}
}
+MessageQueue::MessageQueue(std::unique_ptr<SocketServer> ss, bool init_queue)
+ : MessageQueue(ss.get(), init_queue) {
+ own_ss_ = std::move(ss);
+}
+
MessageQueue::~MessageQueue() {
DoDestroy();
}
@@ -178,7 +169,7 @@ void MessageQueue::set_socketserver(SocketServer* ss) {
// Other places that only read "ss_" can use a shared lock as simultaneous
// read access is allowed.
ExclusiveScope es(&ss_lock_);
- ss_ = ss ? ss : default_ss_.get();
+ ss_ = ss ? ss : own_ss_.get();
ss_->SetMessageQueue(this);
}
@@ -224,16 +215,16 @@ bool MessageQueue::Get(Message *pmsg, int cmsWait, bool process_io) {
// Get w/wait + timer scan / dispatch + socket / event multiplexer dispatch
- int cmsTotal = cmsWait;
- int cmsElapsed = 0;
- uint32_t msStart = Time();
- uint32_t msCurrent = msStart;
+ int64_t cmsTotal = cmsWait;
+ int64_t cmsElapsed = 0;
+ int64_t msStart = TimeMillis();
+ int64_t msCurrent = msStart;
while (true) {
// Check for sent messages
ReceiveSends();
// Check for posted events
- int cmsDelayNext = kForever;
+ int64_t cmsDelayNext = kForever;
bool first_pass = true;
while (true) {
// All queue operations need to be locked, but nothing else in this loop
@@ -246,7 +237,7 @@ bool MessageQueue::Get(Message *pmsg, int cmsWait, bool process_io) {
if (first_pass) {
first_pass = false;
while (!dmsgq_.empty()) {
- if (TimeIsLater(msCurrent, dmsgq_.top().msTrigger_)) {
+ if (msCurrent < dmsgq_.top().msTrigger_) {
cmsDelayNext = TimeDiff(dmsgq_.top().msTrigger_, msCurrent);
break;
}
@@ -265,7 +256,7 @@ bool MessageQueue::Get(Message *pmsg, int cmsWait, bool process_io) {
// Log a warning for time-sensitive messages that we're late to deliver.
if (pmsg->ts_sensitive) {
- int32_t delay = TimeDiff(msCurrent, pmsg->ts_sensitive);
+ int64_t delay = TimeDiff(msCurrent, pmsg->ts_sensitive);
if (delay > 0) {
LOG_F(LS_WARNING) << "id: " << pmsg->message_id << " delay: "
<< (delay + kMaxMsgLatency) << "ms";
@@ -286,11 +277,11 @@ bool MessageQueue::Get(Message *pmsg, int cmsWait, bool process_io) {
// Which is shorter, the delay wait or the asked wait?
- int cmsNext;
+ int64_t cmsNext;
if (cmsWait == kForever) {
cmsNext = cmsDelayNext;
} else {
- cmsNext = std::max(0, cmsTotal - cmsElapsed);
+ cmsNext = std::max<int64_t>(0, cmsTotal - cmsElapsed);
if ((cmsDelayNext != kForever) && (cmsDelayNext < cmsNext))
cmsNext = cmsDelayNext;
}
@@ -298,13 +289,13 @@ bool MessageQueue::Get(Message *pmsg, int cmsWait, bool process_io) {
{
// Wait and multiplex in the meantime
SharedScope ss(&ss_lock_);
- if (!ss_->Wait(cmsNext, process_io))
+ if (!ss_->Wait(static_cast<int>(cmsNext), process_io))
return false;
}
// If the specified timeout expired, return
- msCurrent = Time();
+ msCurrent = TimeMillis();
cmsElapsed = TimeDiff(msCurrent, msStart);
if (cmsWait != kForever) {
if (cmsElapsed >= cmsWait)
@@ -335,7 +326,7 @@ void MessageQueue::Post(MessageHandler* phandler,
msg.message_id = id;
msg.pdata = pdata;
if (time_sensitive) {
- msg.ts_sensitive = Time() + kMaxMsgLatency;
+ msg.ts_sensitive = TimeMillis() + kMaxMsgLatency;
}
msgq_.push_back(msg);
}
@@ -353,11 +344,20 @@ void MessageQueue::PostAt(uint32_t tstamp,
MessageHandler* phandler,
uint32_t id,
MessageData* pdata) {
+ // This should work even if it is used (unexpectedly).
+ int delay = static_cast<uint32_t>(TimeMillis()) - tstamp;
+ return DoDelayPost(delay, tstamp, phandler, id, pdata);
+}
+
+void MessageQueue::PostAt(int64_t tstamp,
+ MessageHandler* phandler,
+ uint32_t id,
+ MessageData* pdata) {
return DoDelayPost(TimeUntil(tstamp), tstamp, phandler, id, pdata);
}
void MessageQueue::DoDelayPost(int cmsDelay,
- uint32_t tstamp,
+ int64_t tstamp,
MessageHandler* phandler,
uint32_t id,
MessageData* pdata) {
@@ -451,6 +451,7 @@ void MessageQueue::Clear(MessageHandler* phandler,
}
void MessageQueue::Dispatch(Message *pmsg) {
+ TRACE_EVENT0("webrtc", "MessageQueue::Dispatch");
pmsg->phandler->OnMessage(pmsg);
}
diff --git a/chromium/third_party/webrtc/base/messagequeue.h b/chromium/third_party/webrtc/base/messagequeue.h
index efc479cf264..4aa96eb531f 100644
--- a/chromium/third_party/webrtc/base/messagequeue.h
+++ b/chromium/third_party/webrtc/base/messagequeue.h
@@ -15,6 +15,7 @@
#include <algorithm>
#include <list>
+#include <memory>
#include <queue>
#include <vector>
@@ -22,7 +23,6 @@
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/messagehandler.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/sharedexclusivelock.h"
#include "webrtc/base/sigslot.h"
@@ -89,10 +89,11 @@ template <class T>
class ScopedMessageData : public MessageData {
public:
explicit ScopedMessageData(T* data) : data_(data) { }
- const scoped_ptr<T>& data() const { return data_; }
- scoped_ptr<T>& data() { return data_; }
+ const std::unique_ptr<T>& data() const { return data_; }
+ std::unique_ptr<T>& data() { return data_; }
+
private:
- scoped_ptr<T> data_;
+ std::unique_ptr<T> data_;
};
// Like ScopedMessageData, but for reference counted pointers.
@@ -141,7 +142,7 @@ struct Message {
MessageHandler *phandler;
uint32_t message_id;
MessageData *pdata;
- uint32_t ts_sensitive;
+ int64_t ts_sensitive;
};
typedef std::list<Message> MessageList;
@@ -151,7 +152,7 @@ typedef std::list<Message> MessageList;
class DelayedMessage {
public:
- DelayedMessage(int delay, uint32_t trigger, uint32_t num, const Message& msg)
+ DelayedMessage(int delay, int64_t trigger, uint32_t num, const Message& msg)
: cmsDelay_(delay), msTrigger_(trigger), num_(num), msg_(msg) {}
bool operator< (const DelayedMessage& dmsg) const {
@@ -160,7 +161,7 @@ class DelayedMessage {
}
int cmsDelay_; // for debugging
- uint32_t msTrigger_;
+ int64_t msTrigger_;
uint32_t num_;
Message msg_;
};
@@ -174,8 +175,8 @@ class MessageQueue {
// init_queue and call DoInit() from their constructor to prevent races
// with the MessageQueueManager using the object while the vtable is still
// being created.
- explicit MessageQueue(SocketServer* ss = NULL,
- bool init_queue = true);
+ MessageQueue(SocketServer* ss, bool init_queue);
+ MessageQueue(std::unique_ptr<SocketServer> ss, bool init_queue);
// NOTE: SUBCLASSES OF MessageQueue THAT OVERRIDE Clear MUST CALL
// DoDestroy() IN THEIR DESTRUCTORS! This is required to avoid a data race
@@ -211,6 +212,11 @@ class MessageQueue {
MessageHandler* phandler,
uint32_t id = 0,
MessageData* pdata = NULL);
+ virtual void PostAt(int64_t tstamp,
+ MessageHandler* phandler,
+ uint32_t id = 0,
+ MessageData* pdata = NULL);
+ // TODO(honghaiz): Remove this when all the dependencies are removed.
virtual void PostAt(uint32_t tstamp,
MessageHandler* phandler,
uint32_t id = 0,
@@ -249,7 +255,7 @@ class MessageQueue {
};
void DoDelayPost(int cmsDelay,
- uint32_t tstamp,
+ int64_t tstamp,
MessageHandler* phandler,
uint32_t id,
MessageData* pdata);
@@ -275,13 +281,13 @@ class MessageQueue {
bool fDestroyed_;
private:
- // The SocketServer is not owned by MessageQueue.
+ // The SocketServer might not be owned by MessageQueue.
SocketServer* ss_ GUARDED_BY(ss_lock_);
- // If a server isn't supplied in the constructor, use this one.
- scoped_ptr<SocketServer> default_ss_;
+ // Used if SocketServer ownership lies with |this|.
+ std::unique_ptr<SocketServer> own_ss_;
SharedExclusiveLock ss_lock_;
- RTC_DISALLOW_COPY_AND_ASSIGN(MessageQueue);
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(MessageQueue);
};
} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/messagequeue_unittest.cc b/chromium/third_party/webrtc/base/messagequeue_unittest.cc
index 78024e0b2d6..50c2ad0ba2e 100644
--- a/chromium/third_party/webrtc/base/messagequeue_unittest.cc
+++ b/chromium/third_party/webrtc/base/messagequeue_unittest.cc
@@ -21,6 +21,7 @@ using namespace rtc;
class MessageQueueTest: public testing::Test, public MessageQueue {
public:
+ MessageQueueTest() : MessageQueue(SocketServer::CreateDefault(), true) {}
bool IsLocked_Worker() {
if (!crit_.TryEnter()) {
return true;
@@ -53,7 +54,7 @@ struct DeletedLockChecker {
static void DelayedPostsWithIdenticalTimesAreProcessedInFifoOrder(
MessageQueue* q) {
EXPECT_TRUE(q != NULL);
- TimeStamp now = Time();
+ int64_t now = TimeMillis();
q->PostAt(now, NULL, 3);
q->PostAt(now - 2, NULL, 0);
q->PostAt(now - 1, NULL, 1);
@@ -72,10 +73,11 @@ static void DelayedPostsWithIdenticalTimesAreProcessedInFifoOrder(
TEST_F(MessageQueueTest,
DelayedPostsWithIdenticalTimesAreProcessedInFifoOrder) {
- MessageQueue q;
+ MessageQueue q(SocketServer::CreateDefault(), true);
DelayedPostsWithIdenticalTimesAreProcessedInFifoOrder(&q);
+
NullSocketServer nullss;
- MessageQueue q_nullss(&nullss);
+ MessageQueue q_nullss(&nullss, true);
DelayedPostsWithIdenticalTimesAreProcessedInFifoOrder(&q_nullss);
}
diff --git a/chromium/third_party/webrtc/base/multipart.h b/chromium/third_party/webrtc/base/multipart.h
index a099230cc50..9ae3ca2adf0 100644
--- a/chromium/third_party/webrtc/base/multipart.h
+++ b/chromium/third_party/webrtc/base/multipart.h
@@ -14,6 +14,7 @@
#include <string>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/stream.h"
diff --git a/chromium/third_party/webrtc/base/multipart_unittest.cc b/chromium/third_party/webrtc/base/multipart_unittest.cc
index 9db316b15de..627d1c620ad 100644
--- a/chromium/third_party/webrtc/base/multipart_unittest.cc
+++ b/chromium/third_party/webrtc/base/multipart_unittest.cc
@@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "webrtc/base/gunit.h"
#include "webrtc/base/helpers.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/pathutils.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/multipart.h"
namespace rtc {
@@ -61,7 +61,7 @@ TEST(MultipartTest, TestAddAndRead) {
EXPECT_TRUE(multipart.GetSize(&size));
EXPECT_EQ(part_size, size);
- rtc::scoped_ptr<rtc::MemoryStream> stream(
+ std::unique_ptr<rtc::MemoryStream> stream(
new rtc::MemoryStream(kTestStreamContent));
size_t stream_size = 0;
EXPECT_TRUE(stream->GetSize(&stream_size));
diff --git a/chromium/third_party/webrtc/base/nat_unittest.cc b/chromium/third_party/webrtc/base/nat_unittest.cc
index 8be1be9f05a..ca72c9356ae 100644
--- a/chromium/third_party/webrtc/base/nat_unittest.cc
+++ b/chromium/third_party/webrtc/base/nat_unittest.cc
@@ -9,6 +9,7 @@
*/
#include <algorithm>
+#include <memory>
#include <string>
#include "webrtc/base/gunit.h"
@@ -178,11 +179,11 @@ bool TestConnectivity(const SocketAddress& src, const IPAddress& dst) {
// The physical NAT tests require connectivity to the selected ip from the
// internal address used for the NAT. Things like firewalls can break that, so
// check to see if it's worth even trying with this ip.
- scoped_ptr<PhysicalSocketServer> pss(new PhysicalSocketServer());
- scoped_ptr<AsyncSocket> client(pss->CreateAsyncSocket(src.family(),
- SOCK_DGRAM));
- scoped_ptr<AsyncSocket> server(pss->CreateAsyncSocket(src.family(),
- SOCK_DGRAM));
+ std::unique_ptr<PhysicalSocketServer> pss(new PhysicalSocketServer());
+ std::unique_ptr<AsyncSocket> client(
+ pss->CreateAsyncSocket(src.family(), SOCK_DGRAM));
+ std::unique_ptr<AsyncSocket> server(
+ pss->CreateAsyncSocket(src.family(), SOCK_DGRAM));
if (client->Bind(SocketAddress(src.ipaddr(), 0)) != 0 ||
server->Bind(SocketAddress(dst, 0)) != 0) {
return false;
@@ -244,8 +245,8 @@ void TestPhysicalInternal(const SocketAddress& int_addr) {
SocketAddress(ext_addr2)
};
- scoped_ptr<PhysicalSocketServer> int_pss(new PhysicalSocketServer());
- scoped_ptr<PhysicalSocketServer> ext_pss(new PhysicalSocketServer());
+ std::unique_ptr<PhysicalSocketServer> int_pss(new PhysicalSocketServer());
+ std::unique_ptr<PhysicalSocketServer> ext_pss(new PhysicalSocketServer());
TestBindings(int_pss.get(), int_addr, ext_pss.get(), ext_addrs);
TestFilters(int_pss.get(), int_addr, ext_pss.get(), ext_addrs);
@@ -274,16 +275,16 @@ class TestVirtualSocketServer : public VirtualSocketServer {
IPAddress GetNextIP(int af) { return VirtualSocketServer::GetNextIP(af); }
private:
- scoped_ptr<SocketServer> ss_;
+ std::unique_ptr<SocketServer> ss_;
};
} // namespace
void TestVirtualInternal(int family) {
- scoped_ptr<TestVirtualSocketServer> int_vss(new TestVirtualSocketServer(
- new PhysicalSocketServer()));
- scoped_ptr<TestVirtualSocketServer> ext_vss(new TestVirtualSocketServer(
- new PhysicalSocketServer()));
+ std::unique_ptr<TestVirtualSocketServer> int_vss(
+ new TestVirtualSocketServer(new PhysicalSocketServer()));
+ std::unique_ptr<TestVirtualSocketServer> ext_vss(
+ new TestVirtualSocketServer(new PhysicalSocketServer()));
SocketAddress int_addr;
SocketAddress ext_addrs[4];
@@ -351,15 +352,15 @@ class NatTcpTest : public testing::Test, public sigslot::has_slots<> {
bool connected_;
PhysicalSocketServer* int_pss_;
PhysicalSocketServer* ext_pss_;
- rtc::scoped_ptr<TestVirtualSocketServer> int_vss_;
- rtc::scoped_ptr<TestVirtualSocketServer> ext_vss_;
- rtc::scoped_ptr<Thread> int_thread_;
- rtc::scoped_ptr<Thread> ext_thread_;
- rtc::scoped_ptr<NATServer> nat_;
- rtc::scoped_ptr<NATSocketFactory> natsf_;
- rtc::scoped_ptr<AsyncSocket> client_;
- rtc::scoped_ptr<AsyncSocket> server_;
- rtc::scoped_ptr<AsyncSocket> accepted_;
+ std::unique_ptr<TestVirtualSocketServer> int_vss_;
+ std::unique_ptr<TestVirtualSocketServer> ext_vss_;
+ std::unique_ptr<Thread> int_thread_;
+ std::unique_ptr<Thread> ext_thread_;
+ std::unique_ptr<NATServer> nat_;
+ std::unique_ptr<NATSocketFactory> natsf_;
+ std::unique_ptr<AsyncSocket> client_;
+ std::unique_ptr<AsyncSocket> server_;
+ std::unique_ptr<AsyncSocket> accepted_;
};
TEST_F(NatTcpTest, DISABLED_TestConnectOut) {
@@ -377,8 +378,8 @@ TEST_F(NatTcpTest, DISABLED_TestConnectOut) {
EXPECT_EQ(client_->GetRemoteAddress(), server_->GetLocalAddress());
EXPECT_EQ(accepted_->GetRemoteAddress().ipaddr(), ext_addr_.ipaddr());
- rtc::scoped_ptr<rtc::TestClient> in(CreateTCPTestClient(client_.release()));
- rtc::scoped_ptr<rtc::TestClient> out(
+ std::unique_ptr<rtc::TestClient> in(CreateTCPTestClient(client_.release()));
+ std::unique_ptr<rtc::TestClient> out(
CreateTCPTestClient(accepted_.release()));
const char* buf = "test_packet";
diff --git a/chromium/third_party/webrtc/base/natserver.cc b/chromium/third_party/webrtc/base/natserver.cc
index b071e014dbd..222d2709e27 100644
--- a/chromium/third_party/webrtc/base/natserver.cc
+++ b/chromium/third_party/webrtc/base/natserver.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/base/natsocketfactory.h"
#include "webrtc/base/natserver.h"
#include "webrtc/base/logging.h"
@@ -195,7 +197,7 @@ void NATServer::OnExternalUDPPacket(
// Forward this packet to the internal address.
// First prepend the address in a quasi-STUN format.
- scoped_ptr<char[]> real_buf(new char[size + kNATEncodedIPv6AddressSize]);
+ std::unique_ptr<char[]> real_buf(new char[size + kNATEncodedIPv6AddressSize]);
size_t addrlength = PackAddressForNAT(real_buf.get(),
size + kNATEncodedIPv6AddressSize,
remote_addr);
diff --git a/chromium/third_party/webrtc/base/natserver.h b/chromium/third_party/webrtc/base/natserver.h
index b6a02feca38..460518bd2e4 100644
--- a/chromium/third_party/webrtc/base/natserver.h
+++ b/chromium/third_party/webrtc/base/natserver.h
@@ -15,6 +15,7 @@
#include <set>
#include "webrtc/base/asyncudpsocket.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/socketaddresspair.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/socketfactory.h"
diff --git a/chromium/third_party/webrtc/base/natsocketfactory.cc b/chromium/third_party/webrtc/base/natsocketfactory.cc
index 0abd2a1b056..985748cff90 100644
--- a/chromium/third_party/webrtc/base/natsocketfactory.cc
+++ b/chromium/third_party/webrtc/base/natsocketfactory.cc
@@ -141,7 +141,7 @@ class NATSocket : public AsyncSocket, public sigslot::has_slots<> {
return socket_->SendTo(data, size, addr);
}
// This array will be too large for IPv4 packets, but only by 12 bytes.
- scoped_ptr<char[]> buf(new char[size + kNATEncodedIPv6AddressSize]);
+ std::unique_ptr<char[]> buf(new char[size + kNATEncodedIPv6AddressSize]);
size_t addrlength = PackAddressForNAT(buf.get(),
size + kNATEncodedIPv6AddressSize,
addr);
diff --git a/chromium/third_party/webrtc/base/natsocketfactory.h b/chromium/third_party/webrtc/base/natsocketfactory.h
index 9ca0739440e..6fad30c5602 100644
--- a/chromium/third_party/webrtc/base/natsocketfactory.h
+++ b/chromium/third_party/webrtc/base/natsocketfactory.h
@@ -13,8 +13,10 @@
#include <string>
#include <map>
+#include <memory>
#include <set>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/natserver.h"
#include "webrtc/base/socketaddress.h"
#include "webrtc/base/socketserver.h"
@@ -116,8 +118,8 @@ class NATSocketServer : public SocketServer, public NATInternalSocketFactory {
private:
NATSocketServer* server_;
- scoped_ptr<SocketFactory> internal_factory_;
- scoped_ptr<NATServer> nat_server_;
+ std::unique_ptr<SocketFactory> internal_factory_;
+ std::unique_ptr<NATServer> nat_server_;
TranslatorMap nats_;
std::set<SocketAddress> clients_;
};
diff --git a/chromium/third_party/webrtc/base/nethelpers.cc b/chromium/third_party/webrtc/base/nethelpers.cc
index 0c7cce6b7c3..d9015283b0b 100644
--- a/chromium/third_party/webrtc/base/nethelpers.cc
+++ b/chromium/third_party/webrtc/base/nethelpers.cc
@@ -10,6 +10,8 @@
#include "webrtc/base/nethelpers.h"
+#include <memory>
+
#if defined(WEBRTC_WIN)
#include <ws2spi.h>
#include <ws2tcpip.h>
@@ -127,7 +129,7 @@ bool HasIPv6Enabled() {
return false;
}
DWORD protbuff_size = 4096;
- scoped_ptr<char[]> protocols;
+ std::unique_ptr<char[]> protocols;
LPWSAPROTOCOL_INFOW protocol_infos = NULL;
int requested_protocols[2] = {AF_INET6, 0};
diff --git a/chromium/third_party/webrtc/base/network.cc b/chromium/third_party/webrtc/base/network.cc
index 00187794704..b6caaa8bbf0 100644
--- a/chromium/third_party/webrtc/base/network.cc
+++ b/chromium/third_party/webrtc/base/network.cc
@@ -32,10 +32,10 @@
#include <stdio.h>
#include <algorithm>
+#include <memory>
#include "webrtc/base/logging.h"
#include "webrtc/base/networkmonitor.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socket.h" // includes something that makes windows happy
#include "webrtc/base/stream.h"
#include "webrtc/base/stringencode.h"
@@ -357,12 +357,34 @@ bool NetworkManagerBase::GetDefaultLocalAddress(int family,
*ipaddr = default_local_ipv4_address_;
return true;
} else if (family == AF_INET6 && !default_local_ipv6_address_.IsNil()) {
- *ipaddr = default_local_ipv6_address_;
+ Network* ipv6_network = GetNetworkFromAddress(default_local_ipv6_address_);
+ if (ipv6_network) {
+ // If the default ipv6 network's BestIP is different than
+ // default_local_ipv6_address_, use it instead.
+ // This is to prevent potential IP address leakage. See WebRTC bug 5376.
+ *ipaddr = ipv6_network->GetBestIP();
+ } else {
+ *ipaddr = default_local_ipv6_address_;
+ }
return true;
}
return false;
}
+Network* NetworkManagerBase::GetNetworkFromAddress(
+ const rtc::IPAddress& ip) const {
+ for (Network* network : networks_) {
+ const auto& ips = network->GetIPs();
+ if (std::find_if(ips.begin(), ips.end(),
+ [ip](const InterfaceAddress& existing_ip) {
+ return ip == static_cast<rtc::IPAddress>(existing_ip);
+ }) != ips.end()) {
+ return network;
+ }
+ }
+ return nullptr;
+}
+
BasicNetworkManager::BasicNetworkManager()
: thread_(NULL), sent_first_update_(false), start_count_(0),
ignore_non_default_routes_(false) {
@@ -449,9 +471,9 @@ void BasicNetworkManager::ConvertIfAddrs(struct ifaddrs* interfaces,
}
#endif
// TODO(phoglund): Need to recognize other types as well.
- scoped_ptr<Network> network(new Network(cursor->ifa_name,
- cursor->ifa_name, prefix,
- prefix_length, adapter_type));
+ std::unique_ptr<Network> network(
+ new Network(cursor->ifa_name, cursor->ifa_name, prefix, prefix_length,
+ adapter_type));
network->set_default_local_address_provider(this);
network->set_scope_id(scope_id);
network->AddIP(ip);
@@ -475,7 +497,7 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored,
return false;
}
- rtc::scoped_ptr<IfAddrsConverter> ifaddrs_converter(CreateIfAddrsConverter());
+ std::unique_ptr<IfAddrsConverter> ifaddrs_converter(CreateIfAddrsConverter());
ConvertIfAddrs(interfaces, ifaddrs_converter.get(), include_ignored,
networks);
@@ -531,7 +553,7 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored,
NetworkMap current_networks;
// MSDN recommends a 15KB buffer for the first try at GetAdaptersAddresses.
size_t buffer_size = 16384;
- scoped_ptr<char[]> adapter_info(new char[buffer_size]);
+ std::unique_ptr<char[]> adapter_info(new char[buffer_size]);
PIP_ADAPTER_ADDRESSES adapter_addrs =
reinterpret_cast<PIP_ADAPTER_ADDRESSES>(adapter_info.get());
int adapter_flags = (GAA_FLAG_SKIP_DNS_SERVER | GAA_FLAG_SKIP_ANYCAST |
@@ -567,7 +589,7 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored,
IPAddress ip;
int scope_id = 0;
- scoped_ptr<Network> network;
+ std::unique_ptr<Network> network;
switch (address->Address.lpSockaddr->sa_family) {
case AF_INET: {
sockaddr_in* v4_addr =
@@ -606,8 +628,8 @@ bool BasicNetworkManager::CreateNetworks(bool include_ignored,
// TODO(phoglund): Need to recognize other types as well.
adapter_type = ADAPTER_TYPE_LOOPBACK;
}
- scoped_ptr<Network> network(new Network(name, description, prefix,
- prefix_length, adapter_type));
+ std::unique_ptr<Network> network(new Network(
+ name, description, prefix, prefix_length, adapter_type));
network->set_default_local_address_provider(this);
network->set_scope_id(scope_id);
network->AddIP(ip);
@@ -770,7 +792,7 @@ IPAddress BasicNetworkManager::QueryDefaultLocalAddress(int family) const {
ASSERT(thread_->socketserver() != nullptr);
ASSERT(family == AF_INET || family == AF_INET6);
- scoped_ptr<AsyncSocket> socket(
+ std::unique_ptr<AsyncSocket> socket(
thread_->socketserver()->CreateAsyncSocket(family, SOCK_DGRAM));
if (!socket) {
LOG_ERR(LERROR) << "Socket creation failed";
diff --git a/chromium/third_party/webrtc/base/network.h b/chromium/third_party/webrtc/base/network.h
index ee22d5e5730..a41da4a69a0 100644
--- a/chromium/third_party/webrtc/base/network.h
+++ b/chromium/third_party/webrtc/base/network.h
@@ -13,6 +13,7 @@
#include <deque>
#include <map>
+#include <memory>
#include <string>
#include <vector>
@@ -20,7 +21,6 @@
#include "webrtc/base/ipaddress.h"
#include "webrtc/base/networkmonitor.h"
#include "webrtc/base/messagehandler.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sigslot.h"
#if defined(WEBRTC_POSIX)
@@ -160,6 +160,8 @@ class NetworkManagerBase : public NetworkManager {
private:
friend class NetworkTest;
+ Network* GetNetworkFromAddress(const rtc::IPAddress& ip) const;
+
EnumerationPermission enumeration_permission_;
NetworkList networks_;
@@ -168,8 +170,8 @@ class NetworkManagerBase : public NetworkManager {
NetworkMap networks_map_;
bool ipv6_enabled_;
- rtc::scoped_ptr<rtc::Network> ipv4_any_address_network_;
- rtc::scoped_ptr<rtc::Network> ipv6_any_address_network_;
+ std::unique_ptr<rtc::Network> ipv4_any_address_network_;
+ std::unique_ptr<rtc::Network> ipv6_any_address_network_;
IPAddress default_local_ipv4_address_;
IPAddress default_local_ipv6_address_;
@@ -252,7 +254,7 @@ class BasicNetworkManager : public NetworkManagerBase,
int start_count_;
std::vector<std::string> network_ignore_list_;
bool ignore_non_default_routes_;
- scoped_ptr<NetworkMonitorInterface> network_monitor_;
+ std::unique_ptr<NetworkMonitorInterface> network_monitor_;
};
// Represents a Unix-type network interface, with a name and single address.
diff --git a/chromium/third_party/webrtc/base/network_unittest.cc b/chromium/third_party/webrtc/base/network_unittest.cc
index 7133d8b405d..f3193e2b0f3 100644
--- a/chromium/third_party/webrtc/base/network_unittest.cc
+++ b/chromium/third_party/webrtc/base/network_unittest.cc
@@ -12,6 +12,7 @@
#include "webrtc/base/nethelpers.h"
#include "webrtc/base/networkmonitor.h"
+#include <memory>
#include <vector>
#if defined(WEBRTC_POSIX)
#include <sys/types.h>
@@ -108,7 +109,7 @@ class NetworkTest : public testing::Test, public sigslot::has_slots<> {
bool include_ignored,
NetworkManager::NetworkList* networks) {
// Use the base IfAddrsConverter for test cases.
- rtc::scoped_ptr<IfAddrsConverter> ifaddrs_converter(new IfAddrsConverter());
+ std::unique_ptr<IfAddrsConverter> ifaddrs_converter(new IfAddrsConverter());
network_manager.ConvertIfAddrs(interfaces, ifaddrs_converter.get(),
include_ignored, networks);
}
@@ -1006,6 +1007,35 @@ TEST_F(NetworkTest, DefaultLocalAddress) {
EXPECT_EQ(ip, GetLoopbackIP(AF_INET));
EXPECT_TRUE(manager.GetDefaultLocalAddress(AF_INET6, &ip));
EXPECT_EQ(ip, GetLoopbackIP(AF_INET6));
+
+ // More tests on GetDefaultLocalAddress with ipv6 addresses where the set
+ // default address may be different from the best IP address of any network.
+ InterfaceAddress ip1;
+ EXPECT_TRUE(IPFromString("abcd::1234:5678:abcd:1111",
+ IPV6_ADDRESS_FLAG_TEMPORARY, &ip1));
+ // Create a network with a prefix of ip1.
+ Network ipv6_network("test_eth0", "Test NetworkAdapter", TruncateIP(ip1, 64),
+ 64);
+ IPAddress ip2;
+ EXPECT_TRUE(IPFromString("abcd::1234:5678:abcd:2222", &ip2));
+ ipv6_network.AddIP(ip1);
+ ipv6_network.AddIP(ip2);
+ BasicNetworkManager::NetworkList list(1, new Network(ipv6_network));
+ bool changed;
+ MergeNetworkList(manager, list, &changed);
+ // If the set default address is not in any network, GetDefaultLocalAddress
+ // should return it.
+ IPAddress ip3;
+ EXPECT_TRUE(IPFromString("abcd::1234:5678:abcd:3333", &ip3));
+ manager.set_default_local_addresses(GetLoopbackIP(AF_INET), ip3);
+ EXPECT_TRUE(manager.GetDefaultLocalAddress(AF_INET6, &ip));
+ EXPECT_EQ(ip3, ip);
+ // If the set default address is in a network, GetDefaultLocalAddress will
+ // return the best IP in that network.
+ manager.set_default_local_addresses(GetLoopbackIP(AF_INET), ip2);
+ EXPECT_TRUE(manager.GetDefaultLocalAddress(AF_INET6, &ip));
+ EXPECT_EQ(static_cast<IPAddress>(ip1), ip);
+
manager.StopUpdating();
}
diff --git a/chromium/third_party/webrtc/base/networkmonitor.h b/chromium/third_party/webrtc/base/networkmonitor.h
index 35ab2b120e3..5459cd63e95 100644
--- a/chromium/third_party/webrtc/base/networkmonitor.h
+++ b/chromium/third_party/webrtc/base/networkmonitor.h
@@ -12,7 +12,6 @@
#define WEBRTC_BASE_NETWORKMONITOR_H_
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/thread.h"
diff --git a/chromium/third_party/webrtc/base/networkroute.h b/chromium/third_party/webrtc/base/networkroute.h
index 95c1c088ac1..52fa6d612f4 100644
--- a/chromium/third_party/webrtc/base/networkroute.h
+++ b/chromium/third_party/webrtc/base/networkroute.h
@@ -15,7 +15,7 @@
// the media code can rely on and the network code can implement, and both can
// depend on that, but not depend on each other. Then, move this file to that
// directory.
-namespace cricket {
+namespace rtc {
struct NetworkRoute {
bool connected;
@@ -47,6 +47,6 @@ struct NetworkRoute {
bool operator!=(const NetworkRoute& nr) const { return !(*this == nr); }
};
-} // namespace cricket
+} // namespace rtc
#endif // WEBRTC_BASE_NETWORKROUTE_H_
diff --git a/chromium/third_party/webrtc/base/nullsocketserver.cc b/chromium/third_party/webrtc/base/nullsocketserver.cc
new file mode 100644
index 00000000000..5dfd49030d9
--- /dev/null
+++ b/chromium/third_party/webrtc/base/nullsocketserver.cc
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/nullsocketserver.h"
+
+namespace rtc {
+
+NullSocketServer::NullSocketServer() : event_(false, false) {}
+NullSocketServer::~NullSocketServer() {}
+
+bool NullSocketServer::Wait(int cms, bool process_io) {
+ event_.Wait(cms);
+ return true;
+}
+
+void NullSocketServer::WakeUp() {
+ event_.Set();
+}
+
+rtc::Socket* NullSocketServer::CreateSocket(int /* type */) {
+ RTC_NOTREACHED();
+ return nullptr;
+}
+
+rtc::Socket* NullSocketServer::CreateSocket(int /* family */, int /* type */) {
+ RTC_NOTREACHED();
+ return nullptr;
+}
+
+rtc::AsyncSocket* NullSocketServer::CreateAsyncSocket(int /* type */) {
+ RTC_NOTREACHED();
+ return nullptr;
+}
+
+rtc::AsyncSocket* NullSocketServer::CreateAsyncSocket(int /* family */,
+ int /* type */) {
+ RTC_NOTREACHED();
+ return nullptr;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/nullsocketserver.h b/chromium/third_party/webrtc/base/nullsocketserver.h
index 5378e43158c..e59f2fafe58 100644
--- a/chromium/third_party/webrtc/base/nullsocketserver.h
+++ b/chromium/third_party/webrtc/base/nullsocketserver.h
@@ -12,48 +12,25 @@
#define WEBRTC_BASE_NULLSOCKETSERVER_H_
#include "webrtc/base/event.h"
-#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/socketserver.h"
namespace rtc {
-// NullSocketServer
-
-class NullSocketServer : public rtc::SocketServer {
+class NullSocketServer : public SocketServer {
public:
- NullSocketServer() : event_(false, false) {}
-
- virtual bool Wait(int cms, bool process_io) {
- event_.Wait(cms);
- return true;
- }
-
- virtual void WakeUp() {
- event_.Set();
- }
-
- virtual rtc::Socket* CreateSocket(int type) {
- ASSERT(false);
- return NULL;
- }
-
- virtual rtc::Socket* CreateSocket(int family, int type) {
- ASSERT(false);
- return NULL;
- }
-
- virtual rtc::AsyncSocket* CreateAsyncSocket(int type) {
- ASSERT(false);
- return NULL;
- }
+ NullSocketServer();
+ ~NullSocketServer() override;
- virtual rtc::AsyncSocket* CreateAsyncSocket(int family, int type) {
- ASSERT(false);
- return NULL;
- }
+ bool Wait(int cms, bool process_io) override;
+ void WakeUp() override;
+ Socket* CreateSocket(int type) override;
+ Socket* CreateSocket(int family, int type) override;
+ AsyncSocket* CreateAsyncSocket(int type) override;
+ AsyncSocket* CreateAsyncSocket(int family, int type) override;
private:
- rtc::Event event_;
+ Event event_;
};
} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/nullsocketserver_unittest.cc b/chromium/third_party/webrtc/base/nullsocketserver_unittest.cc
index 4f22c382d80..e18afb2e943 100644
--- a/chromium/third_party/webrtc/base/nullsocketserver_unittest.cc
+++ b/chromium/third_party/webrtc/base/nullsocketserver_unittest.cc
@@ -37,7 +37,7 @@ TEST_F(NullSocketServerTest, WaitAndSet) {
}
TEST_F(NullSocketServerTest, TestWait) {
- uint32_t start = Time();
+ int64_t start = TimeMillis();
ss_.Wait(200, true);
// The actual wait time is dependent on the resolution of the timer used by
// the Event class. Allow for the event to signal ~20ms early.
diff --git a/chromium/third_party/webrtc/base/objc/OWNERS b/chromium/third_party/webrtc/base/objc/OWNERS
deleted file mode 100644
index cd06158b7fc..00000000000
--- a/chromium/third_party/webrtc/base/objc/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-tkchin@webrtc.org
diff --git a/chromium/third_party/webrtc/base/onetimeevent.h b/chromium/third_party/webrtc/base/onetimeevent.h
new file mode 100644
index 00000000000..240cf14c584
--- /dev/null
+++ b/chromium/third_party/webrtc/base/onetimeevent.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_ONETIMEEVENT_H_
+#define WEBRTC_BASE_ONETIMEEVENT_H_
+
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+// Provides a simple way to perform an operation (such as logging) one
+// time in a certain scope.
+// Example:
+// OneTimeEvent firstFrame;
+// ...
+// if (firstFrame()) {
+// LOG(LS_INFO) << "This is the first frame".
+// }
+class OneTimeEvent {
+ public:
+ OneTimeEvent() {}
+ bool operator()() {
+ rtc::CritScope cs(&critsect_);
+ if (happened_) {
+ return false;
+ }
+ happened_ = true;
+ return true;
+ }
+
+ private:
+ bool happened_ = false;
+ rtc::CriticalSection critsect_;
+};
+
+// A non-thread-safe, ligher-weight version of the OneTimeEvent class.
+class ThreadUnsafeOneTimeEvent {
+ public:
+ ThreadUnsafeOneTimeEvent() {}
+ bool operator()() {
+ if (happened_) {
+ return false;
+ }
+ happened_ = true;
+ return true;
+ }
+
+ private:
+ bool happened_ = false;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_BASE_ONETIMEEVENT_H_
diff --git a/chromium/third_party/webrtc/base/onetimeevent_unittest.cc b/chromium/third_party/webrtc/base/onetimeevent_unittest.cc
new file mode 100644
index 00000000000..4ebc139b2a8
--- /dev/null
+++ b/chromium/third_party/webrtc/base/onetimeevent_unittest.cc
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/onetimeevent.h"
+
+namespace webrtc {
+
+TEST(OneTimeEventTest, ThreadSafe) {
+ OneTimeEvent ot;
+
+ // The one time event is expected to evaluate to true only the first time.
+ EXPECT_TRUE(ot());
+ EXPECT_FALSE(ot());
+ EXPECT_FALSE(ot());
+}
+
+TEST(OneTimeEventTest, ThreadUnsafe) {
+ ThreadUnsafeOneTimeEvent ot;
+
+ EXPECT_TRUE(ot());
+ EXPECT_FALSE(ot());
+ EXPECT_FALSE(ot());
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/base/opensslidentity.cc b/chromium/third_party/webrtc/base/opensslidentity.cc
index 9c2112e157c..58a0cd8adeb 100644
--- a/chromium/third_party/webrtc/base/opensslidentity.cc
+++ b/chromium/third_party/webrtc/base/opensslidentity.cc
@@ -12,6 +12,8 @@
#include "webrtc/base/opensslidentity.h"
+#include <memory>
+
// Must be included first before openssl headers.
#include "webrtc/base/win32.h" // NOLINT
@@ -164,6 +166,29 @@ OpenSSLKeyPair* OpenSSLKeyPair::Generate(const KeyParams& key_params) {
return new OpenSSLKeyPair(pkey);
}
+OpenSSLKeyPair* OpenSSLKeyPair::FromPrivateKeyPEMString(
+ const std::string& pem_string) {
+ BIO* bio = BIO_new_mem_buf(const_cast<char*>(pem_string.c_str()), -1);
+ if (!bio) {
+ LOG(LS_ERROR) << "Failed to create a new BIO buffer.";
+ return nullptr;
+ }
+ BIO_set_mem_eof_return(bio, 0);
+ EVP_PKEY* pkey =
+ PEM_read_bio_PrivateKey(bio, nullptr, nullptr, const_cast<char*>("\0"));
+ BIO_free(bio); // Frees the BIO, but not the pointed-to string.
+ if (!pkey) {
+ LOG(LS_ERROR) << "Failed to create the private key from PEM string.";
+ return nullptr;
+ }
+ if (EVP_PKEY_missing_parameters(pkey) != 0) {
+ LOG(LS_ERROR) << "The resulting key pair is missing public key parameters.";
+ EVP_PKEY_free(pkey);
+ return nullptr;
+ }
+ return new OpenSSLKeyPair(pkey);
+}
+
OpenSSLKeyPair::~OpenSSLKeyPair() {
EVP_PKEY_free(pkey_);
}
@@ -181,6 +206,57 @@ void OpenSSLKeyPair::AddReference() {
#endif
}
+std::string OpenSSLKeyPair::PrivateKeyToPEMString() const {
+ BIO* temp_memory_bio = BIO_new(BIO_s_mem());
+ if (!temp_memory_bio) {
+ LOG_F(LS_ERROR) << "Failed to allocate temporary memory bio";
+ RTC_NOTREACHED();
+ return "";
+ }
+ if (!PEM_write_bio_PrivateKey(
+ temp_memory_bio, pkey_, nullptr, nullptr, 0, nullptr, nullptr)) {
+ LOG_F(LS_ERROR) << "Failed to write private key";
+ BIO_free(temp_memory_bio);
+ RTC_NOTREACHED();
+ return "";
+ }
+ BIO_write(temp_memory_bio, "\0", 1);
+ char* buffer;
+ BIO_get_mem_data(temp_memory_bio, &buffer);
+ std::string priv_key_str = buffer;
+ BIO_free(temp_memory_bio);
+ return priv_key_str;
+}
+
+std::string OpenSSLKeyPair::PublicKeyToPEMString() const {
+ BIO* temp_memory_bio = BIO_new(BIO_s_mem());
+ if (!temp_memory_bio) {
+ LOG_F(LS_ERROR) << "Failed to allocate temporary memory bio";
+ RTC_NOTREACHED();
+ return "";
+ }
+ if (!PEM_write_bio_PUBKEY(temp_memory_bio, pkey_)) {
+ LOG_F(LS_ERROR) << "Failed to write public key";
+ BIO_free(temp_memory_bio);
+ RTC_NOTREACHED();
+ return "";
+ }
+ BIO_write(temp_memory_bio, "\0", 1);
+ char* buffer;
+ BIO_get_mem_data(temp_memory_bio, &buffer);
+ std::string pub_key_str = buffer;
+ BIO_free(temp_memory_bio);
+ return pub_key_str;
+}
+
+bool OpenSSLKeyPair::operator==(const OpenSSLKeyPair& other) const {
+ return EVP_PKEY_cmp(this->pkey_, other.pkey_) == 1;
+}
+
+bool OpenSSLKeyPair::operator!=(const OpenSSLKeyPair& other) const {
+ return !(*this == other);
+}
+
#if !defined(NDEBUG)
// Print a certificate to the log, for debugging.
static void PrintCert(X509* x509) {
@@ -280,7 +356,7 @@ bool OpenSSLCertificate::GetSignatureDigestAlgorithm(
return true;
}
-rtc::scoped_ptr<SSLCertChain> OpenSSLCertificate::GetChain() const {
+std::unique_ptr<SSLCertChain> OpenSSLCertificate::GetChain() const {
// Chains are not yet supported when using OpenSSL.
// OpenSSLStreamAdapter::SSLVerifyCallback currently requires the remote
// certificate to be self-signed.
@@ -368,6 +444,14 @@ void OpenSSLCertificate::AddReference() const {
#endif
}
+bool OpenSSLCertificate::operator==(const OpenSSLCertificate& other) const {
+ return X509_cmp(this->x509_, other.x509_) == 0;
+}
+
+bool OpenSSLCertificate::operator!=(const OpenSSLCertificate& other) const {
+ return !(*this == other);
+}
+
// Documented in sslidentity.h.
int64_t OpenSSLCertificate::CertificateExpirationTime() const {
ASN1_TIME* expire_time = X509_get_notAfter(x509_);
@@ -430,29 +514,21 @@ OpenSSLIdentity* OpenSSLIdentity::GenerateForTest(
SSLIdentity* OpenSSLIdentity::FromPEMStrings(
const std::string& private_key,
const std::string& certificate) {
- scoped_ptr<OpenSSLCertificate> cert(
+ std::unique_ptr<OpenSSLCertificate> cert(
OpenSSLCertificate::FromPEMString(certificate));
if (!cert) {
LOG(LS_ERROR) << "Failed to create OpenSSLCertificate from PEM string.";
- return NULL;
- }
-
- BIO* bio = BIO_new_mem_buf(const_cast<char*>(private_key.c_str()), -1);
- if (!bio) {
- LOG(LS_ERROR) << "Failed to create a new BIO buffer.";
- return NULL;
+ return nullptr;
}
- BIO_set_mem_eof_return(bio, 0);
- EVP_PKEY* pkey =
- PEM_read_bio_PrivateKey(bio, NULL, NULL, const_cast<char*>("\0"));
- BIO_free(bio); // Frees the BIO, but not the pointed-to string.
- if (!pkey) {
- LOG(LS_ERROR) << "Failed to create the private key from PEM string.";
- return NULL;
+ OpenSSLKeyPair* key_pair =
+ OpenSSLKeyPair::FromPrivateKeyPEMString(private_key);
+ if (!key_pair) {
+ LOG(LS_ERROR) << "Failed to create key pair from PEM string.";
+ return nullptr;
}
- return new OpenSSLIdentity(new OpenSSLKeyPair(pkey),
+ return new OpenSSLIdentity(key_pair,
cert.release());
}
@@ -475,6 +551,23 @@ bool OpenSSLIdentity::ConfigureIdentity(SSL_CTX* ctx) {
return true;
}
+std::string OpenSSLIdentity::PrivateKeyToPEMString() const {
+ return key_pair_->PrivateKeyToPEMString();
+}
+
+std::string OpenSSLIdentity::PublicKeyToPEMString() const {
+ return key_pair_->PublicKeyToPEMString();
+}
+
+bool OpenSSLIdentity::operator==(const OpenSSLIdentity& other) const {
+ return *this->key_pair_ == *other.key_pair_ &&
+ *this->certificate_ == *other.certificate_;
+}
+
+bool OpenSSLIdentity::operator!=(const OpenSSLIdentity& other) const {
+ return !(*this == other);
+}
+
} // namespace rtc
#endif // HAVE_OPENSSL_SSL_H
diff --git a/chromium/third_party/webrtc/base/opensslidentity.h b/chromium/third_party/webrtc/base/opensslidentity.h
index df495087e35..316572c48bb 100644
--- a/chromium/third_party/webrtc/base/opensslidentity.h
+++ b/chromium/third_party/webrtc/base/opensslidentity.h
@@ -14,10 +14,11 @@
#include <openssl/evp.h>
#include <openssl/x509.h>
+#include <memory>
#include <string>
#include "webrtc/base/common.h"
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/sslidentity.h"
typedef struct ssl_ctx_st SSL_CTX;
@@ -33,12 +34,20 @@ class OpenSSLKeyPair {
}
static OpenSSLKeyPair* Generate(const KeyParams& key_params);
+ // Constructs a key pair from the private key PEM string. This must not result
+ // in missing public key parameters. Returns null on error.
+ static OpenSSLKeyPair* FromPrivateKeyPEMString(
+ const std::string& pem_string);
virtual ~OpenSSLKeyPair();
virtual OpenSSLKeyPair* GetReference();
EVP_PKEY* pkey() const { return pkey_; }
+ std::string PrivateKeyToPEMString() const;
+ std::string PublicKeyToPEMString() const;
+ bool operator==(const OpenSSLKeyPair& other) const;
+ bool operator!=(const OpenSSLKeyPair& other) const;
private:
void AddReference();
@@ -68,8 +77,9 @@ class OpenSSLCertificate : public SSLCertificate {
X509* x509() const { return x509_; }
std::string ToPEMString() const override;
-
void ToDER(Buffer* der_buffer) const override;
+ bool operator==(const OpenSSLCertificate& other) const;
+ bool operator!=(const OpenSSLCertificate& other) const;
// Compute the digest of the certificate given algorithm
bool ComputeDigest(const std::string& algorithm,
@@ -85,7 +95,7 @@ class OpenSSLCertificate : public SSLCertificate {
size_t* length);
bool GetSignatureDigestAlgorithm(std::string* algorithm) const override;
- rtc::scoped_ptr<SSLCertChain> GetChain() const override;
+ std::unique_ptr<SSLCertChain> GetChain() const override;
int64_t CertificateExpirationTime() const override;
@@ -115,13 +125,18 @@ class OpenSSLIdentity : public SSLIdentity {
// Configure an SSL context object to use our key and certificate.
bool ConfigureIdentity(SSL_CTX* ctx);
+ std::string PrivateKeyToPEMString() const override;
+ std::string PublicKeyToPEMString() const override;
+ bool operator==(const OpenSSLIdentity& other) const;
+ bool operator!=(const OpenSSLIdentity& other) const;
+
private:
OpenSSLIdentity(OpenSSLKeyPair* key_pair, OpenSSLCertificate* certificate);
static OpenSSLIdentity* GenerateInternal(const SSLIdentityParams& params);
- scoped_ptr<OpenSSLKeyPair> key_pair_;
- scoped_ptr<OpenSSLCertificate> certificate_;
+ std::unique_ptr<OpenSSLKeyPair> key_pair_;
+ std::unique_ptr<OpenSSLCertificate> certificate_;
RTC_DISALLOW_COPY_AND_ASSIGN(OpenSSLIdentity);
};
diff --git a/chromium/third_party/webrtc/base/opensslstreamadapter.cc b/chromium/third_party/webrtc/base/opensslstreamadapter.cc
index 16dd9803b40..abdf5e4834c 100644
--- a/chromium/third_party/webrtc/base/opensslstreamadapter.cc
+++ b/chromium/third_party/webrtc/base/opensslstreamadapter.cc
@@ -18,7 +18,11 @@
#include <openssl/rand.h>
#include <openssl/tls1.h>
#include <openssl/x509v3.h>
+#ifndef OPENSSL_IS_BORINGSSL
+#include <openssl/dtls1.h>
+#endif
+#include <memory>
#include <vector>
#include "webrtc/base/common.h"
@@ -290,9 +294,9 @@ void OpenSSLStreamAdapter::SetServerRole(SSLRole role) {
role_ = role;
}
-rtc::scoped_ptr<SSLCertificate> OpenSSLStreamAdapter::GetPeerCertificate()
+std::unique_ptr<SSLCertificate> OpenSSLStreamAdapter::GetPeerCertificate()
const {
- return peer_certificate_ ? rtc::scoped_ptr<SSLCertificate>(
+ return peer_certificate_ ? std::unique_ptr<SSLCertificate>(
peer_certificate_->GetReference())
: nullptr;
}
@@ -1138,7 +1142,9 @@ static const cipher_list OK_RSA_ciphers[] = {
#ifdef TLS1_CK_ECDHE_RSA_WITH_AES_256_GCM_SHA256
CDEF(ECDHE_RSA_WITH_AES_256_GCM_SHA256),
#endif
+#ifdef TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256
CDEF(ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256),
+#endif
};
static const cipher_list OK_ECDSA_ciphers[] = {
@@ -1148,7 +1154,9 @@ static const cipher_list OK_ECDSA_ciphers[] = {
#ifdef TLS1_CK_ECDHE_ECDSA_WITH_AES_256_GCM_SHA256
CDEF(ECDHE_ECDSA_WITH_AES_256_GCM_SHA256),
#endif
+#ifdef TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256
CDEF(ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256),
+#endif
};
#undef CDEF
diff --git a/chromium/third_party/webrtc/base/opensslstreamadapter.h b/chromium/third_party/webrtc/base/opensslstreamadapter.h
index 463c8f2280b..1e90bacc0e4 100644
--- a/chromium/third_party/webrtc/base/opensslstreamadapter.h
+++ b/chromium/third_party/webrtc/base/opensslstreamadapter.h
@@ -12,6 +12,7 @@
#define WEBRTC_BASE_OPENSSLSTREAMADAPTER_H__
#include <string>
+#include <memory>
#include <vector>
#include "webrtc/base/buffer.h"
@@ -69,7 +70,7 @@ class OpenSSLStreamAdapter : public SSLStreamAdapter {
const unsigned char* digest_val,
size_t digest_len) override;
- rtc::scoped_ptr<SSLCertificate> GetPeerCertificate() const override;
+ std::unique_ptr<SSLCertificate> GetPeerCertificate() const override;
int StartSSLWithServer(const char* server_name) override;
int StartSSLWithPeer() override;
@@ -184,13 +185,13 @@ class OpenSSLStreamAdapter : public SSLStreamAdapter {
SSL_CTX* ssl_ctx_;
// Our key and certificate, mostly useful in peer-to-peer mode.
- scoped_ptr<OpenSSLIdentity> identity_;
+ std::unique_ptr<OpenSSLIdentity> identity_;
// in traditional mode, the server name that the server's certificate
// must specify. Empty in peer-to-peer mode.
std::string ssl_server_name_;
// The certificate that the peer must present or did present. Initially
// null in traditional mode, until the connection is established.
- scoped_ptr<OpenSSLCertificate> peer_certificate_;
+ std::unique_ptr<OpenSSLCertificate> peer_certificate_;
// In peer-to-peer mode, the digest of the certificate that
// the peer must present.
Buffer peer_certificate_digest_value_;
diff --git a/chromium/third_party/webrtc/base/optional.h b/chromium/third_party/webrtc/base/optional.h
index b8071e63587..25cfbfe4175 100644
--- a/chromium/third_party/webrtc/base/optional.h
+++ b/chromium/third_party/webrtc/base/optional.h
@@ -12,17 +12,14 @@
#define WEBRTC_BASE_OPTIONAL_H_
#include <algorithm>
+#include <memory>
#include <utility>
#include "webrtc/base/checks.h"
namespace rtc {
-// Simple std::experimental::optional-wannabe. It either contains a T or not.
-// In order to keep the implementation simple and portable, this implementation
-// actually contains a (default-constructed) T even when it supposedly doesn't
-// contain a value; use e.g. rtc::scoped_ptr<T> instead if that's too
-// expensive.
+// Simple std::optional-wannabe. It either contains a T or not.
//
// A moved-from Optional<T> may only be destroyed, and assigned to if T allows
// being assigned to after having been moved from. Specifically, you may not
@@ -65,28 +62,90 @@ class Optional final {
Optional() : has_value_(false) {}
// Construct an Optional that contains a value.
- explicit Optional(const T& val) : value_(val), has_value_(true) {}
- explicit Optional(T&& val) : value_(std::move(val)), has_value_(true) {}
-
- // Copy and move constructors.
- // TODO(kwiberg): =default the move constructor when MSVC supports it.
- Optional(const Optional&) = default;
- Optional(Optional&& m)
- : value_(std::move(m.value_)), has_value_(m.has_value_) {}
-
- // Assignment.
- // TODO(kwiberg): =default the move assignment op when MSVC supports it.
- Optional& operator=(const Optional&) = default;
+ explicit Optional(const T& value) : has_value_(true) {
+ new (&value_) T(value);
+ }
+ explicit Optional(T&& value) : has_value_(true) {
+ new (&value_) T(std::move(value));
+ }
+
+ // Copy constructor: copies the value from m if it has one.
+ Optional(const Optional& m) : has_value_(m.has_value_) {
+ if (has_value_)
+ new (&value_) T(m.value_);
+ }
+
+ // Move constructor: if m has a value, moves the value from m, leaving m
+ // still in a state where it has a value, but a moved-from one (the
+ // properties of which depends on T; the only general guarantee is that we
+ // can destroy m).
+ Optional(Optional&& m) : has_value_(m.has_value_) {
+ if (has_value_)
+ new (&value_) T(std::move(m.value_));
+ }
+
+ ~Optional() {
+ if (has_value_)
+ value_.~T();
+ }
+
+ // Copy assignment. Uses T's copy assignment if both sides have a value, T's
+ // copy constructor if only the right-hand side has a value.
+ Optional& operator=(const Optional& m) {
+ if (m.has_value_) {
+ if (has_value_) {
+ value_ = m.value_; // T's copy assignment.
+ } else {
+ new (&value_) T(m.value_); // T's copy constructor.
+ has_value_ = true;
+ }
+ } else if (has_value_) {
+ value_.~T();
+ has_value_ = false;
+ }
+ return *this;
+ }
+
+ // Move assignment. Uses T's move assignment if both sides have a value, T's
+ // move constructor if only the right-hand side has a value. The state of m
+ // after it's been moved from is as for the move constructor.
Optional& operator=(Optional&& m) {
- value_ = std::move(m.value_);
- has_value_ = m.has_value_;
+ if (m.has_value_) {
+ if (has_value_) {
+ value_ = std::move(m.value_); // T's move assignment.
+ } else {
+ new (&value_) T(std::move(m.value_)); // T's move constructor.
+ has_value_ = true;
+ }
+ } else if (has_value_) {
+ value_.~T();
+ has_value_ = false;
+ }
return *this;
}
+ // Swap the values if both m1 and m2 have values; move the value if only one
+ // of them has one.
friend void swap(Optional& m1, Optional& m2) {
- using std::swap;
- swap(m1.value_, m2.value_);
- swap(m1.has_value_, m2.has_value_);
+ if (m1.has_value_) {
+ if (m2.has_value_) {
+ // Both have values: swap.
+ using std::swap;
+ swap(m1.value_, m2.value_);
+ } else {
+ // Only m1 has a value: move it to m2.
+ new (&m2.value_) T(std::move(m1.value_));
+ m1.value_.~T(); // Destroy the moved-from value.
+ m1.has_value_ = false;
+ m2.has_value_ = true;
+ }
+ } else if (m2.has_value_) {
+ // Only m2 has a value: move it to m1.
+ new (&m1.value_) T(std::move(m2.value_));
+ m2.value_.~T(); // Destroy the moved-from value.
+ m1.has_value_ = true;
+ m2.has_value_ = false;
+ }
}
// Conversion to bool to test if we have a value.
@@ -128,10 +187,15 @@ class Optional final {
}
private:
- // Invariant: Unless *this has been moved from, value_ is default-initialized
- // (or copied or moved from a default-initialized T) if !has_value_.
- T value_;
- bool has_value_;
+ bool has_value_; // True iff value_ contains a live value.
+ union {
+ // By placing value_ in a union, we get to manage its construction and
+ // destruction manually: the Optional constructors won't automatically
+ // construct it, and the Optional destructor won't automatically destroy
+ // it. Basically, this just allocates a properly sized and aligned block of
+ // memory in which we can manually put a T with placement new.
+ T value_;
+ };
};
} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/optional_unittest.cc b/chromium/third_party/webrtc/base/optional_unittest.cc
index 8ddbebadb9a..b51701f6b26 100644
--- a/chromium/third_party/webrtc/base/optional_unittest.cc
+++ b/chromium/third_party/webrtc/base/optional_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <sstream>
#include <string>
#include <utility>
@@ -64,8 +65,8 @@ class Logger {
}
void Foo() { Log("Foo()"); }
void Foo() const { Log("Foo() const"); }
- static rtc::scoped_ptr<std::vector<std::string>> Setup() {
- rtc::scoped_ptr<std::vector<std::string>> s(new std::vector<std::string>);
+ static std::unique_ptr<std::vector<std::string>> Setup() {
+ std::unique_ptr<std::vector<std::string>> s(new std::vector<std::string>);
g_log = s.get();
g_next_id = 0;
return s;
@@ -124,7 +125,7 @@ TEST(OptionalTest, TestConstructDefault) {
Optional<Logger> x;
EXPECT_FALSE(x);
}
- EXPECT_EQ(V("0:0. default constructor", "0:0. destructor"), *log);
+ EXPECT_EQ(V(), *log);
}
TEST(OptionalTest, TestConstructCopyEmpty) {
@@ -135,9 +136,7 @@ TEST(OptionalTest, TestConstructCopyEmpty) {
auto y = x;
EXPECT_FALSE(y);
}
- EXPECT_EQ(V("0:0. default constructor", "1:0. copy constructor (from 0:0)",
- "1:0. destructor", "0:0. destructor"),
- *log);
+ EXPECT_EQ(V(), *log);
}
TEST(OptionalTest, TestConstructCopyFull) {
@@ -165,9 +164,7 @@ TEST(OptionalTest, TestConstructMoveEmpty) {
auto y = std::move(x);
EXPECT_FALSE(y);
}
- EXPECT_EQ(V("0:0. default constructor", "1:0. move constructor (from 0:0)",
- "1:0. destructor", "0:0. destructor"),
- *log);
+ EXPECT_EQ(V(), *log);
}
TEST(OptionalTest, TestConstructMoveFull) {
@@ -194,10 +191,7 @@ TEST(OptionalTest, TestCopyAssignToEmptyFromEmpty) {
Optional<Logger> x, y;
x = y;
}
- EXPECT_EQ(
- V("0:0. default constructor", "1:1. default constructor",
- "0:1. operator= copy (from 1:1)", "1:1. destructor", "0:1. destructor"),
- *log);
+ EXPECT_EQ(V(), *log);
}
TEST(OptionalTest, TestCopyAssignToFullFromEmpty) {
@@ -211,9 +205,7 @@ TEST(OptionalTest, TestCopyAssignToFullFromEmpty) {
}
EXPECT_EQ(
V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
- "0:17. destructor", "2:2. default constructor", "---",
- "1:2. operator= copy (from 2:2)", "---", "2:2. destructor",
- "1:2. destructor"),
+ "0:17. destructor", "---", "1:17. destructor", "---"),
*log);
}
@@ -226,11 +218,11 @@ TEST(OptionalTest, TestCopyAssignToEmptyFromFull) {
x = y;
log->push_back("---");
}
- EXPECT_EQ(V("0:0. default constructor", "1:17. explicit constructor",
- "2:17. move constructor (from 1:17)", "1:17. destructor", "---",
- "0:17. operator= copy (from 2:17)", "---", "2:17. destructor",
- "0:17. destructor"),
- *log);
+ EXPECT_EQ(
+ V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
+ "0:17. destructor", "---", "2:17. copy constructor (from 1:17)", "---",
+ "1:17. destructor", "2:17. destructor"),
+ *log);
}
TEST(OptionalTest, TestCopyAssignToFullFromFull) {
@@ -260,10 +252,10 @@ TEST(OptionalTest, TestCopyAssignToEmptyFromT) {
x = Optional<Logger>(y);
log->push_back("---");
}
- EXPECT_EQ(V("0:0. default constructor", "1:17. explicit constructor", "---",
- "2:17. copy constructor (from 1:17)",
- "0:17. operator= move (from 2:17)", "2:17. destructor", "---",
- "1:17. destructor", "0:17. destructor"),
+ EXPECT_EQ(V("0:17. explicit constructor", "---",
+ "1:17. copy constructor (from 0:17)",
+ "2:17. move constructor (from 1:17)", "1:17. destructor", "---",
+ "0:17. destructor", "2:17. destructor"),
*log);
}
@@ -291,10 +283,7 @@ TEST(OptionalTest, TestMoveAssignToEmptyFromEmpty) {
Optional<Logger> x, y;
x = std::move(y);
}
- EXPECT_EQ(
- V("0:0. default constructor", "1:1. default constructor",
- "0:1. operator= move (from 1:1)", "1:1. destructor", "0:1. destructor"),
- *log);
+ EXPECT_EQ(V(), *log);
}
TEST(OptionalTest, TestMoveAssignToFullFromEmpty) {
@@ -308,9 +297,7 @@ TEST(OptionalTest, TestMoveAssignToFullFromEmpty) {
}
EXPECT_EQ(
V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
- "0:17. destructor", "2:2. default constructor", "---",
- "1:2. operator= move (from 2:2)", "---", "2:2. destructor",
- "1:2. destructor"),
+ "0:17. destructor", "---", "1:17. destructor", "---"),
*log);
}
@@ -323,11 +310,11 @@ TEST(OptionalTest, TestMoveAssignToEmptyFromFull) {
x = std::move(y);
log->push_back("---");
}
- EXPECT_EQ(V("0:0. default constructor", "1:17. explicit constructor",
- "2:17. move constructor (from 1:17)", "1:17. destructor", "---",
- "0:17. operator= move (from 2:17)", "---", "2:17. destructor",
- "0:17. destructor"),
- *log);
+ EXPECT_EQ(
+ V("0:17. explicit constructor", "1:17. move constructor (from 0:17)",
+ "0:17. destructor", "---", "2:17. move constructor (from 1:17)", "---",
+ "1:17. destructor", "2:17. destructor"),
+ *log);
}
TEST(OptionalTest, TestMoveAssignToFullFromFull) {
@@ -357,10 +344,10 @@ TEST(OptionalTest, TestMoveAssignToEmptyFromT) {
x = Optional<Logger>(std::move(y));
log->push_back("---");
}
- EXPECT_EQ(V("0:0. default constructor", "1:17. explicit constructor", "---",
- "2:17. move constructor (from 1:17)",
- "0:17. operator= move (from 2:17)", "2:17. destructor", "---",
- "1:17. destructor", "0:17. destructor"),
+ EXPECT_EQ(V("0:17. explicit constructor", "---",
+ "1:17. move constructor (from 0:17)",
+ "2:17. move constructor (from 1:17)", "1:17. destructor", "---",
+ "0:17. destructor", "2:17. destructor"),
*log);
}
@@ -425,14 +412,13 @@ TEST(OptionalTest, TestDereferenceWithDefault) {
}
EXPECT_EQ(
V("0:17. explicit constructor", "1:42. explicit constructor",
- "2:17. copy constructor (from 0:17)", "3:3. default constructor", "-1-",
- "4:42. explicit constructor", "operator== 0:17, 2:17",
- "4:42. destructor", "-2-", "5:42. explicit constructor",
- "operator== 1:42, 5:42", "5:42. destructor", "-3-",
- "6:17. explicit constructor", "7:17. move constructor (from 6:17)",
- "operator== 0:17, 7:17", "7:17. destructor", "6:17. destructor", "-4-",
- "8:8. default constructor", "operator== 1:42, 1:42", "8:8. destructor",
- "-5-", "3:3. destructor", "2:17. destructor", "1:42. destructor",
+ "2:17. copy constructor (from 0:17)", "-1-",
+ "3:42. explicit constructor", "operator== 0:17, 2:17",
+ "3:42. destructor", "-2-", "4:42. explicit constructor",
+ "operator== 1:42, 4:42", "4:42. destructor", "-3-",
+ "5:17. explicit constructor", "6:17. move constructor (from 5:17)",
+ "operator== 0:17, 6:17", "6:17. destructor", "5:17. destructor", "-4-",
+ "operator== 1:42, 1:42", "-5-", "2:17. destructor", "1:42. destructor",
"0:17. destructor"),
*log);
}
@@ -451,16 +437,15 @@ TEST(OptionalTest, TestEquality) {
EXPECT_EQ(me1, me2);
log->push_back("---");
}
- EXPECT_EQ(V("0:17. explicit constructor", "1:42. explicit constructor",
- "2:17. copy constructor (from 0:17)",
- "3:17. copy constructor (from 0:17)",
- "4:42. copy constructor (from 1:42)", "5:5. default constructor",
- "6:6. default constructor", "---", "operator== 2:17, 2:17",
- "operator== 2:17, 3:17", "operator!= 2:17, 4:42", "---",
- "6:6. destructor", "5:5. destructor", "4:42. destructor",
- "3:17. destructor", "2:17. destructor", "1:42. destructor",
- "0:17. destructor"),
- *log);
+ EXPECT_EQ(
+ V("0:17. explicit constructor", "1:42. explicit constructor",
+ "2:17. copy constructor (from 0:17)",
+ "3:17. copy constructor (from 0:17)",
+ "4:42. copy constructor (from 1:42)", "---", "operator== 2:17, 2:17",
+ "operator== 2:17, 3:17", "operator!= 2:17, 4:42", "---",
+ "4:42. destructor", "3:17. destructor", "2:17. destructor",
+ "1:42. destructor", "0:17. destructor"),
+ *log);
}
TEST(OptionalTest, TestSwap) {
@@ -477,11 +462,9 @@ TEST(OptionalTest, TestSwap) {
EXPECT_EQ(V("0:17. explicit constructor", "1:42. explicit constructor",
"2:17. copy constructor (from 0:17)",
"3:42. copy constructor (from 1:42)",
- "4:17. copy constructor (from 0:17)", "5:5. default constructor",
- "6:6. default constructor", "7:7. default constructor", "---",
- "swap 2:42, 3:17", "swap 4:5, 5:17", "swap 6:7, 7:6", "---",
- "7:6. destructor", "6:7. destructor", "5:17. destructor",
- "4:5. destructor", "3:17. destructor", "2:42. destructor",
+ "4:17. copy constructor (from 0:17)", "---", "swap 2:42, 3:17",
+ "5:17. move constructor (from 4:17)", "4:17. destructor", "---",
+ "5:17. destructor", "3:17. destructor", "2:42. destructor",
"1:42. destructor", "0:17. destructor"),
*log);
}
diff --git a/chromium/third_party/webrtc/base/optionsfile_unittest.cc b/chromium/third_party/webrtc/base/optionsfile_unittest.cc
index bcfb3efcf6f..f22a2f1f07e 100644
--- a/chromium/third_party/webrtc/base/optionsfile_unittest.cc
+++ b/chromium/third_party/webrtc/base/optionsfile_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/base/fileutils.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/optionsfile.h"
@@ -51,7 +53,7 @@ class OptionsFileTest : public testing::Test {
store_.reset(new OptionsFile(test_file_));
}
- rtc::scoped_ptr<OptionsFile> store_;
+ std::unique_ptr<OptionsFile> store_;
private:
std::string test_file_;
diff --git a/chromium/third_party/webrtc/base/pathutils.cc b/chromium/third_party/webrtc/base/pathutils.cc
index b5227ecb100..a420eb5136a 100644
--- a/chromium/third_party/webrtc/base/pathutils.cc
+++ b/chromium/third_party/webrtc/base/pathutils.cc
@@ -13,7 +13,7 @@
#include <shellapi.h>
#include <shlobj.h>
#include <tchar.h>
-#endif // WEBRTC_WIN
+#endif // WEBRTC_WIN
#include "webrtc/base/common.h"
#include "webrtc/base/fileutils.h"
@@ -35,9 +35,9 @@ const char* const FOLDER_DELIMS = "/\\";
// DEFAULT_FOLDER_DELIM is the preferred delimiter for this platform
#if WEBRTC_WIN
const char DEFAULT_FOLDER_DELIM = '\\';
-#else // !WEBRTC_WIN
+#else // !WEBRTC_WIN
const char DEFAULT_FOLDER_DELIM = '/';
-#endif // !WEBRTC_WIN
+#endif // !WEBRTC_WIN
///////////////////////////////////////////////////////////////////////////////
// Pathname - parsing of pathnames into components, and vice versa
@@ -55,6 +55,9 @@ Pathname::Pathname()
: folder_delimiter_(DEFAULT_FOLDER_DELIM) {
}
+Pathname::Pathname(const Pathname&) = default;
+Pathname::Pathname(Pathname&&) = default;
+
Pathname::Pathname(const std::string& pathname)
: folder_delimiter_(DEFAULT_FOLDER_DELIM) {
SetPathname(pathname);
@@ -65,6 +68,9 @@ Pathname::Pathname(const std::string& folder, const std::string& filename)
SetPathname(folder, filename);
}
+Pathname& Pathname::operator=(const Pathname&) = default;
+Pathname& Pathname::operator=(Pathname&&) = default;
+
void Pathname::SetFolderDelimiter(char delimiter) {
ASSERT(IsFolderDelimiter(delimiter));
folder_delimiter_ = delimiter;
diff --git a/chromium/third_party/webrtc/base/pathutils.h b/chromium/third_party/webrtc/base/pathutils.h
index 2d5819f1b61..2a0efa9763f 100644
--- a/chromium/third_party/webrtc/base/pathutils.h
+++ b/chromium/third_party/webrtc/base/pathutils.h
@@ -44,9 +44,14 @@ public:
static char DefaultFolderDelimiter();
Pathname();
+ Pathname(const Pathname&);
+ Pathname(Pathname&&);
Pathname(const std::string& pathname);
Pathname(const std::string& folder, const std::string& filename);
+ Pathname& operator=(const Pathname&);
+ Pathname& operator=(Pathname&&);
+
// Set's the default folder delimiter for this Pathname
char folder_delimiter() const { return folder_delimiter_; }
void SetFolderDelimiter(char delimiter);
diff --git a/chromium/third_party/webrtc/base/physicalsocketserver.cc b/chromium/third_party/webrtc/base/physicalsocketserver.cc
index 708499df531..0230077a52c 100644
--- a/chromium/third_party/webrtc/base/physicalsocketserver.cc
+++ b/chromium/third_party/webrtc/base/physicalsocketserver.cc
@@ -46,6 +46,7 @@
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/networkmonitor.h"
+#include "webrtc/base/nullsocketserver.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/base/winping.h"
#include "webrtc/base/win32socketinit.h"
@@ -62,6 +63,14 @@ typedef char* SockOptArg;
namespace rtc {
+std::unique_ptr<SocketServer> SocketServer::CreateDefault() {
+#if defined(__native_client__)
+ return std::unique_ptr<SocketServer>(new rtc::NullSocketServer);
+#else
+ return std::unique_ptr<SocketServer>(new rtc::PhysicalSocketServer);
+#endif
+}
+
#if defined(WEBRTC_WIN)
// Standard MTUs, from RFC 1191
const uint16_t PACKET_MAXIMUMS[] = {
@@ -1461,9 +1470,9 @@ bool PhysicalSocketServer::InstallSignal(int signum, void (*handler)(int)) {
#if defined(WEBRTC_WIN)
bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) {
- int cmsTotal = cmsWait;
- int cmsElapsed = 0;
- uint32_t msStart = Time();
+ int64_t cmsTotal = cmsWait;
+ int64_t cmsElapsed = 0;
+ int64_t msStart = Time();
fWait_ = true;
while (fWait_) {
@@ -1500,18 +1509,18 @@ bool PhysicalSocketServer::Wait(int cmsWait, bool process_io) {
// Which is shorter, the delay wait or the asked wait?
- int cmsNext;
+ int64_t cmsNext;
if (cmsWait == kForever) {
cmsNext = cmsWait;
} else {
- cmsNext = std::max(0, cmsTotal - cmsElapsed);
+ cmsNext = std::max<int64_t>(0, cmsTotal - cmsElapsed);
}
// Wait for one of the events to signal
DWORD dw = WSAWaitForMultipleEvents(static_cast<DWORD>(events.size()),
&events[0],
false,
- cmsNext,
+ static_cast<DWORD>(cmsNext),
false);
if (dw == WSA_WAIT_FAILED) {
diff --git a/chromium/third_party/webrtc/base/physicalsocketserver.h b/chromium/third_party/webrtc/base/physicalsocketserver.h
index 583306c31d7..f5867d25f92 100644
--- a/chromium/third_party/webrtc/base/physicalsocketserver.h
+++ b/chromium/third_party/webrtc/base/physicalsocketserver.h
@@ -11,11 +11,11 @@
#ifndef WEBRTC_BASE_PHYSICALSOCKETSERVER_H__
#define WEBRTC_BASE_PHYSICALSOCKETSERVER_H__
+#include <memory>
#include <vector>
#include "webrtc/base/asyncfile.h"
#include "webrtc/base/nethelpers.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socketserver.h"
#include "webrtc/base/criticalsection.h"
@@ -104,7 +104,7 @@ class PhysicalSocketServer : public SocketServer {
#if defined(WEBRTC_POSIX)
static bool InstallSignal(int signum, void (*handler)(int));
- scoped_ptr<PosixSignalDispatcher> signal_dispatcher_;
+ std::unique_ptr<PosixSignalDispatcher> signal_dispatcher_;
#endif
DispatcherList dispatchers_;
IteratorList iterators_;
diff --git a/chromium/third_party/webrtc/base/physicalsocketserver_unittest.cc b/chromium/third_party/webrtc/base/physicalsocketserver_unittest.cc
index c53441d1a05..a04362d919d 100644
--- a/chromium/third_party/webrtc/base/physicalsocketserver_unittest.cc
+++ b/chromium/third_party/webrtc/base/physicalsocketserver_unittest.cc
@@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <signal.h>
#include <stdarg.h>
#include "webrtc/base/gunit.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/physicalsocketserver.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socket_unittest.h"
#include "webrtc/base/testutils.h"
#include "webrtc/base/thread.h"
@@ -100,7 +100,7 @@ class PhysicalSocketTest : public SocketTest {
void ConnectInternalAcceptError(const IPAddress& loopback);
void WritableAfterPartialWrite(const IPAddress& loopback);
- rtc::scoped_ptr<FakePhysicalSocketServer> server_;
+ std::unique_ptr<FakePhysicalSocketServer> server_;
SocketServerScope scope_;
bool fail_accept_;
int max_send_size_;
@@ -172,20 +172,20 @@ void PhysicalSocketTest::ConnectInternalAcceptError(const IPAddress& loopback) {
SocketAddress accept_addr;
// Create two clients.
- scoped_ptr<AsyncSocket> client1(server_->CreateAsyncSocket(loopback.family(),
- SOCK_STREAM));
+ std::unique_ptr<AsyncSocket> client1(
+ server_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(client1.get());
EXPECT_EQ(AsyncSocket::CS_CLOSED, client1->GetState());
EXPECT_PRED1(IsUnspecOrEmptyIP, client1->GetLocalAddress().ipaddr());
- scoped_ptr<AsyncSocket> client2(server_->CreateAsyncSocket(loopback.family(),
- SOCK_STREAM));
+ std::unique_ptr<AsyncSocket> client2(
+ server_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(client2.get());
EXPECT_EQ(AsyncSocket::CS_CLOSED, client2->GetState());
EXPECT_PRED1(IsUnspecOrEmptyIP, client2->GetLocalAddress().ipaddr());
// Create server and listen.
- scoped_ptr<AsyncSocket> server(
+ std::unique_ptr<AsyncSocket> server(
server_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(server.get());
EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
@@ -211,7 +211,7 @@ void PhysicalSocketTest::ConnectInternalAcceptError(const IPAddress& loopback) {
EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
// Simulate "::accept" returning an error.
SetFailAccept(true);
- scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ std::unique_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
EXPECT_FALSE(accepted);
ASSERT_TRUE(accept_addr.IsNil());
@@ -233,7 +233,7 @@ void PhysicalSocketTest::ConnectInternalAcceptError(const IPAddress& loopback) {
// Server has pending connection, try to accept it (will succeed).
EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
SetFailAccept(false);
- scoped_ptr<AsyncSocket> accepted2(server->Accept(&accept_addr));
+ std::unique_ptr<AsyncSocket> accepted2(server->Accept(&accept_addr));
ASSERT_TRUE(accepted2);
EXPECT_FALSE(accept_addr.IsNil());
EXPECT_EQ(accepted2->GetRemoteAddress(), accept_addr);
@@ -515,7 +515,7 @@ class PosixSignalDeliveryTest : public testing::Test {
static std::vector<int> signals_received_;
static Thread *signaled_thread_;
- scoped_ptr<PhysicalSocketServer> ss_;
+ std::unique_ptr<PhysicalSocketServer> ss_;
};
std::vector<int> PosixSignalDeliveryTest::signals_received_;
@@ -583,8 +583,8 @@ TEST_F(PosixSignalDeliveryTest, SignalOnDifferentThread) {
// Start a new thread that raises it. It will have to be delivered to that
// thread. Our implementation should safely handle it and dispatch
// RecordSignal() on this thread.
- scoped_ptr<Thread> thread(new Thread());
- scoped_ptr<RaiseSigTermRunnable> runnable(new RaiseSigTermRunnable());
+ std::unique_ptr<Thread> thread(new Thread());
+ std::unique_ptr<RaiseSigTermRunnable> runnable(new RaiseSigTermRunnable());
thread->Start(runnable.get());
EXPECT_TRUE(ss_->Wait(1500, true));
EXPECT_TRUE(ExpectSignal(SIGTERM));
diff --git a/chromium/third_party/webrtc/base/platform_thread.cc b/chromium/third_party/webrtc/base/platform_thread.cc
index b6fd8732aaa..286bee95f12 100644
--- a/chromium/third_party/webrtc/base/platform_thread.cc
+++ b/chromium/third_party/webrtc/base/platform_thread.cc
@@ -99,7 +99,8 @@ PlatformThread::PlatformThread(ThreadRunFunction func,
name_(thread_name ? thread_name : "webrtc"),
#if defined(WEBRTC_WIN)
stop_(false),
- thread_(NULL) {
+ thread_(NULL),
+ thread_id_(0) {
#else
stop_event_(false, false),
thread_(0) {
@@ -112,6 +113,7 @@ PlatformThread::~PlatformThread() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
#if defined(WEBRTC_WIN)
RTC_DCHECK(!thread_);
+ RTC_DCHECK(!thread_id_);
#endif // defined(WEBRTC_WIN)
}
@@ -136,10 +138,10 @@ void PlatformThread::Start() {
// See bug 2902 for background on STACK_SIZE_PARAM_IS_A_RESERVATION.
// Set the reserved stack stack size to 1M, which is the default on Windows
// and Linux.
- DWORD thread_id;
thread_ = ::CreateThread(NULL, 1024 * 1024, &StartThread, this,
- STACK_SIZE_PARAM_IS_A_RESERVATION, &thread_id);
+ STACK_SIZE_PARAM_IS_A_RESERVATION, &thread_id_);
RTC_CHECK(thread_) << "CreateThread failed";
+ RTC_DCHECK(thread_id_);
#else
ThreadAttributes attr;
// Set the stack stack size to 1M.
@@ -157,6 +159,14 @@ bool PlatformThread::IsRunning() const {
#endif // defined(WEBRTC_WIN)
}
+PlatformThreadRef PlatformThread::GetThreadRef() const {
+#if defined(WEBRTC_WIN)
+ return thread_id_;
+#else
+ return thread_;
+#endif // defined(WEBRTC_WIN)
+}
+
void PlatformThread::Stop() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
if (!IsRunning())
@@ -164,10 +174,13 @@ void PlatformThread::Stop() {
#if defined(WEBRTC_WIN)
// Set stop_ to |true| on the worker thread.
- QueueUserAPC(&RaiseFlag, thread_, reinterpret_cast<ULONG_PTR>(&stop_));
+ bool queued = QueueAPC(&RaiseFlag, reinterpret_cast<ULONG_PTR>(&stop_));
+ // Queuing the APC can fail if the thread is being terminated.
+ RTC_CHECK(queued || GetLastError() == ERROR_GEN_FAILURE);
WaitForSingleObject(thread_, INFINITE);
CloseHandle(thread_);
thread_ = nullptr;
+ thread_id_ = 0;
#else
stop_event_.Set();
RTC_CHECK_EQ(0, pthread_join(thread_, nullptr));
@@ -247,4 +260,13 @@ bool PlatformThread::SetPriority(ThreadPriority priority) {
#endif // defined(WEBRTC_WIN)
}
+#if defined(WEBRTC_WIN)
+bool PlatformThread::QueueAPC(PAPCFUNC function, ULONG_PTR data) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(IsRunning());
+
+ return QueueUserAPC(function, thread_, data) != FALSE;
+}
+#endif
+
} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/platform_thread.h b/chromium/third_party/webrtc/base/platform_thread.h
index 53465e4b17a..d74aec28114 100644
--- a/chromium/third_party/webrtc/base/platform_thread.h
+++ b/chromium/third_party/webrtc/base/platform_thread.h
@@ -16,7 +16,6 @@
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/event.h"
#include "webrtc/base/platform_thread_types.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_checker.h"
namespace rtc {
@@ -59,18 +58,30 @@ class PlatformThread {
PlatformThread(ThreadRunFunction func, void* obj, const char* thread_name);
virtual ~PlatformThread();
+ const std::string& name() const { return name_; }
+
// Spawns a thread and tries to set thread priority according to the priority
// from when CreateThread was called.
void Start();
bool IsRunning() const;
+ // Returns an identifier for the worker thread that can be used to do
+ // thread checks.
+ PlatformThreadRef GetThreadRef() const;
+
// Stops (joins) the spawned thread.
void Stop();
// Set the priority of the thread. Must be called when thread is running.
bool SetPriority(ThreadPriority priority);
+ protected:
+#if defined(WEBRTC_WIN)
+ // Exposed to derived classes to allow for special cases specific to Windows.
+ bool QueueAPC(PAPCFUNC apc_function, ULONG_PTR data);
+#endif
+
private:
void Run();
@@ -85,6 +96,7 @@ class PlatformThread {
bool stop_;
HANDLE thread_;
+ DWORD thread_id_;
#else
static void* StartThread(void* param);
diff --git a/chromium/third_party/webrtc/base/platform_thread_unittest.cc b/chromium/third_party/webrtc/base/platform_thread_unittest.cc
index f9db8e34a30..847946aaca4 100644
--- a/chromium/third_party/webrtc/base/platform_thread_unittest.cc
+++ b/chromium/third_party/webrtc/base/platform_thread_unittest.cc
@@ -11,34 +11,49 @@
#include "webrtc/base/platform_thread.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/system_wrappers/include/sleep.h"
-namespace webrtc {
-
+namespace rtc {
+namespace {
// Function that does nothing, and reports success.
bool NullRunFunction(void* obj) {
- SleepMs(0); // Hand over timeslice, prevents busy looping.
+ webrtc::SleepMs(0); // Hand over timeslice, prevents busy looping.
return true;
}
-TEST(PlatformThreadTest, StartStop) {
- rtc::PlatformThread thread(&NullRunFunction, nullptr, "PlatformThreadTest");
- thread.Start();
- thread.Stop();
-}
-
// Function that sets a boolean.
bool SetFlagRunFunction(void* obj) {
bool* obj_as_bool = static_cast<bool*>(obj);
*obj_as_bool = true;
- SleepMs(0); // Hand over timeslice, prevents busy looping.
+ webrtc::SleepMs(0); // Hand over timeslice, prevents busy looping.
return true;
}
+} // namespace
+
+TEST(PlatformThreadTest, StartStop) {
+ PlatformThread thread(&NullRunFunction, nullptr, "PlatformThreadTest");
+ EXPECT_TRUE(thread.name() == "PlatformThreadTest");
+ EXPECT_TRUE(thread.GetThreadRef() == 0);
+ thread.Start();
+ EXPECT_TRUE(thread.GetThreadRef() != 0);
+ thread.Stop();
+ EXPECT_TRUE(thread.GetThreadRef() == 0);
+}
+
+TEST(PlatformThreadTest, StartStop2) {
+ PlatformThread thread1(&NullRunFunction, nullptr, "PlatformThreadTest1");
+ PlatformThread thread2(&NullRunFunction, nullptr, "PlatformThreadTest2");
+ EXPECT_TRUE(thread1.GetThreadRef() == thread2.GetThreadRef());
+ thread1.Start();
+ thread2.Start();
+ EXPECT_TRUE(thread1.GetThreadRef() != thread2.GetThreadRef());
+ thread2.Stop();
+ thread1.Stop();
+}
TEST(PlatformThreadTest, RunFunctionIsCalled) {
bool flag = false;
- rtc::PlatformThread thread(&SetFlagRunFunction, &flag, "RunFunctionIsCalled");
+ PlatformThread thread(&SetFlagRunFunction, &flag, "RunFunctionIsCalled");
thread.Start();
// At this point, the flag may be either true or false.
@@ -47,5 +62,4 @@ TEST(PlatformThreadTest, RunFunctionIsCalled) {
// We expect the thread to have run at least once.
EXPECT_TRUE(flag);
}
-
-} // namespace webrtc
+} // rtc
diff --git a/chromium/third_party/webrtc/base/profiler.h b/chromium/third_party/webrtc/base/profiler.h
index 419763fc8a1..4dd35f5baca 100644
--- a/chromium/third_party/webrtc/base/profiler.h
+++ b/chromium/third_party/webrtc/base/profiler.h
@@ -36,6 +36,7 @@
#include "webrtc/base/basictypes.h"
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/sharedexclusivelock.h"
diff --git a/chromium/third_party/webrtc/base/proxy_unittest.cc b/chromium/third_party/webrtc/base/proxy_unittest.cc
index d8a523fe17e..4dba0dd8f34 100644
--- a/chromium/third_party/webrtc/base/proxy_unittest.cc
+++ b/chromium/third_party/webrtc/base/proxy_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "webrtc/base/autodetectproxy.h"
#include "webrtc/base/gunit.h"
@@ -67,10 +68,10 @@ class ProxyTest : public testing::Test {
}
private:
- rtc::scoped_ptr<rtc::SocketServer> ss_;
- rtc::scoped_ptr<rtc::SocksProxyServer> socks_;
+ std::unique_ptr<rtc::SocketServer> ss_;
+ std::unique_ptr<rtc::SocksProxyServer> socks_;
// TODO: Make this a real HTTPS proxy server.
- rtc::scoped_ptr<rtc::HttpListenServer> https_;
+ std::unique_ptr<rtc::HttpListenServer> https_;
};
// Tests whether we can use a SOCKS5 proxy to connect to a server.
diff --git a/chromium/third_party/webrtc/base/proxydetect.cc b/chromium/third_party/webrtc/base/proxydetect.cc
index abb8f0a9279..10e7a02241c 100644
--- a/chromium/third_party/webrtc/base/proxydetect.cc
+++ b/chromium/third_party/webrtc/base/proxydetect.cc
@@ -29,6 +29,7 @@
#endif
#include <map>
+#include <memory>
#include "webrtc/base/arraysize.h"
#include "webrtc/base/fileutils.h"
@@ -430,7 +431,7 @@ bool GetDefaultFirefoxProfile(Pathname* profile_path) {
// Note: we are looking for the first entry with "Default=1", or the last
// entry in the file
path.SetFilename("profiles.ini");
- scoped_ptr<FileStream> fs(Filesystem::OpenFile(path, "r"));
+ std::unique_ptr<FileStream> fs(Filesystem::OpenFile(path, "r"));
if (!fs) {
return false;
}
@@ -495,7 +496,7 @@ bool GetDefaultFirefoxProfile(Pathname* profile_path) {
bool ReadFirefoxPrefs(const Pathname& filename,
const char * prefix,
StringMap* settings) {
- scoped_ptr<FileStream> fs(Filesystem::OpenFile(filename, "r"));
+ std::unique_ptr<FileStream> fs(Filesystem::OpenFile(filename, "r"));
if (!fs) {
LOG(LS_ERROR) << "Failed to open file: " << filename.pathname();
return false;
diff --git a/chromium/third_party/webrtc/base/proxyserver.h b/chromium/third_party/webrtc/base/proxyserver.h
index adb26ae9d04..86007c3606f 100644
--- a/chromium/third_party/webrtc/base/proxyserver.h
+++ b/chromium/third_party/webrtc/base/proxyserver.h
@@ -12,7 +12,9 @@
#define WEBRTC_BASE_PROXYSERVER_H_
#include <list>
+#include <memory>
#include "webrtc/base/asyncsocket.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/socketadapters.h"
#include "webrtc/base/socketaddress.h"
#include "webrtc/base/stream.h"
@@ -50,8 +52,8 @@ class ProxyBinding : public sigslot::has_slots<> {
void Destroy();
static const int kBufferSize = 4096;
- scoped_ptr<AsyncProxyServerSocket> int_socket_;
- scoped_ptr<AsyncSocket> ext_socket_;
+ std::unique_ptr<AsyncProxyServerSocket> int_socket_;
+ std::unique_ptr<AsyncSocket> ext_socket_;
bool connected_;
FifoBuffer out_buffer_;
FifoBuffer in_buffer_;
@@ -76,7 +78,7 @@ class ProxyServer : public sigslot::has_slots<> {
typedef std::list<ProxyBinding*> BindingList;
SocketFactory* ext_factory_;
SocketAddress ext_ip_;
- scoped_ptr<AsyncSocket> server_socket_;
+ std::unique_ptr<AsyncSocket> server_socket_;
BindingList bindings_;
RTC_DISALLOW_COPY_AND_ASSIGN(ProxyServer);
};
diff --git a/chromium/third_party/webrtc/base/rate_statistics.cc b/chromium/third_party/webrtc/base/rate_statistics.cc
index 8db2851e682..6529aa1f7a6 100644
--- a/chromium/third_party/webrtc/base/rate_statistics.cc
+++ b/chromium/third_party/webrtc/base/rate_statistics.cc
@@ -10,7 +10,9 @@
#include "webrtc/base/rate_statistics.h"
-#include <assert.h>
+#include <algorithm>
+
+#include "webrtc/base/checks.h"
namespace webrtc {
@@ -20,7 +22,7 @@ RateStatistics::RateStatistics(uint32_t window_size_ms, float scale)
accumulated_count_(0),
oldest_time_(0),
oldest_index_(0),
- scale_(scale / (num_buckets_ - 1)) {}
+ scale_(scale) {}
RateStatistics::~RateStatistics() {}
@@ -42,7 +44,7 @@ void RateStatistics::Update(size_t count, int64_t now_ms) {
EraseOld(now_ms);
int now_offset = static_cast<int>(now_ms - oldest_time_);
- assert(now_offset < num_buckets_);
+ RTC_DCHECK_LT(now_offset, num_buckets_);
int index = oldest_index_ + now_offset;
if (index >= num_buckets_) {
index -= num_buckets_;
@@ -53,18 +55,20 @@ void RateStatistics::Update(size_t count, int64_t now_ms) {
uint32_t RateStatistics::Rate(int64_t now_ms) {
EraseOld(now_ms);
- return static_cast<uint32_t>(accumulated_count_ * scale_ + 0.5f);
+ float scale = scale_ / (now_ms - oldest_time_ + 1);
+ return static_cast<uint32_t>(accumulated_count_ * scale + 0.5f);
}
void RateStatistics::EraseOld(int64_t now_ms) {
int64_t new_oldest_time = now_ms - num_buckets_ + 1;
if (new_oldest_time <= oldest_time_) {
+ if (accumulated_count_ == 0)
+ oldest_time_ = now_ms;
return;
}
-
while (oldest_time_ < new_oldest_time) {
size_t count_in_oldest_bucket = buckets_[oldest_index_];
- assert(accumulated_count_ >= count_in_oldest_bucket);
+ RTC_DCHECK_GE(accumulated_count_, count_in_oldest_bucket);
accumulated_count_ -= count_in_oldest_bucket;
buckets_[oldest_index_] = 0;
if (++oldest_index_ >= num_buckets_) {
@@ -74,6 +78,7 @@ void RateStatistics::EraseOld(int64_t now_ms) {
if (accumulated_count_ == 0) {
// This guarantees we go through all the buckets at most once, even if
// |new_oldest_time| is far greater than |oldest_time_|.
+ new_oldest_time = now_ms;
break;
}
}
diff --git a/chromium/third_party/webrtc/base/rate_statistics.h b/chromium/third_party/webrtc/base/rate_statistics.h
index 21f6ce61607..aea8d793fed 100644
--- a/chromium/third_party/webrtc/base/rate_statistics.h
+++ b/chromium/third_party/webrtc/base/rate_statistics.h
@@ -11,7 +11,8 @@
#ifndef WEBRTC_BASE_RATE_STATISTICS_H_
#define WEBRTC_BASE_RATE_STATISTICS_H_
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -34,7 +35,7 @@ class RateStatistics {
// Counters are kept in buckets (circular buffer), with one bucket
// per millisecond.
const int num_buckets_;
- rtc::scoped_ptr<size_t[]> buckets_;
+ std::unique_ptr<size_t[]> buckets_;
// Total count recorded in buckets.
size_t accumulated_count_;
diff --git a/chromium/third_party/webrtc/base/rate_statistics_unittest.cc b/chromium/third_party/webrtc/base/rate_statistics_unittest.cc
index 0270253d5e2..9702da0699d 100644
--- a/chromium/third_party/webrtc/base/rate_statistics_unittest.cc
+++ b/chromium/third_party/webrtc/base/rate_statistics_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <algorithm>
+
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/rate_statistics.h"
@@ -15,9 +17,11 @@ namespace {
using webrtc::RateStatistics;
+const int64_t kWindowMs = 500;
+
class RateStatisticsTest : public ::testing::Test {
protected:
- RateStatisticsTest() : stats_(500, 8000) {}
+ RateStatisticsTest() : stats_(kWindowMs, 8000) {}
RateStatistics stats_;
};
@@ -26,8 +30,9 @@ TEST_F(RateStatisticsTest, TestStrictMode) {
// Should be initialized to 0.
EXPECT_EQ(0u, stats_.Rate(now_ms));
stats_.Update(1500, now_ms);
- // Expecting 24 kbps given a 500 ms window with one 1500 bytes packet.
- EXPECT_EQ(24000u, stats_.Rate(now_ms));
+ // Expecting 1200 kbps since the window is initially kept small and grows as
+ // we have more data.
+ EXPECT_EQ(12000000u, stats_.Rate(now_ms));
stats_.Reset();
// Expecting 0 after init.
EXPECT_EQ(0u, stats_.Rate(now_ms));
@@ -37,12 +42,12 @@ TEST_F(RateStatisticsTest, TestStrictMode) {
}
// Approximately 1200 kbps expected. Not exact since when packets
// are removed we will jump 10 ms to the next packet.
- if (now_ms > 0 && now_ms % 500 == 0) {
- EXPECT_NEAR(1200000u, stats_.Rate(now_ms), 24000u);
+ if (now_ms > 0 && now_ms % kWindowMs == 0) {
+ EXPECT_NEAR(1200000u, stats_.Rate(now_ms), 22000u);
}
now_ms += 1;
}
- now_ms += 500;
+ now_ms += kWindowMs;
// The window is 2 seconds. If nothing has been received for that time
// the estimate should be 0.
EXPECT_EQ(0u, stats_.Rate(now_ms));
@@ -54,25 +59,26 @@ TEST_F(RateStatisticsTest, IncreasingThenDecreasingBitrate) {
// Expecting 0 after init.
uint32_t bitrate = stats_.Rate(now_ms);
EXPECT_EQ(0u, bitrate);
+ const uint32_t kExpectedBitrate = 8000000;
// 1000 bytes per millisecond until plateau is reached.
+ int prev_error = kExpectedBitrate;
while (++now_ms < 10000) {
stats_.Update(1000, now_ms);
- uint32_t new_bitrate = stats_.Rate(now_ms);
- if (new_bitrate != bitrate) {
- // New bitrate must be higher than previous one.
- EXPECT_GT(new_bitrate, bitrate);
- } else {
- // Plateau reached, 8000 kbps expected.
- EXPECT_NEAR(8000000u, bitrate, 80000u);
- break;
- }
- bitrate = new_bitrate;
+ bitrate = stats_.Rate(now_ms);
+ int error = kExpectedBitrate - bitrate;
+ error = std::abs(error);
+ // Expect the estimation error to decrease as the window is extended.
+ EXPECT_LE(error, prev_error + 1);
+ prev_error = error;
}
+ // Window filled, expect to be close to 8000000.
+ EXPECT_EQ(kExpectedBitrate, bitrate);
+
// 1000 bytes per millisecond until 10-second mark, 8000 kbps expected.
while (++now_ms < 10000) {
stats_.Update(1000, now_ms);
bitrate = stats_.Rate(now_ms);
- EXPECT_NEAR(8000000u, bitrate, 80000u);
+ EXPECT_EQ(kExpectedBitrate, bitrate);
}
// Zero bytes per millisecond until 0 is reached.
while (++now_ms < 20000) {
@@ -94,4 +100,33 @@ TEST_F(RateStatisticsTest, IncreasingThenDecreasingBitrate) {
EXPECT_EQ(0u, stats_.Rate(now_ms));
}
}
+
+TEST_F(RateStatisticsTest, ResetAfterSilence) {
+ int64_t now_ms = 0;
+ stats_.Reset();
+ // Expecting 0 after init.
+ uint32_t bitrate = stats_.Rate(now_ms);
+ EXPECT_EQ(0u, bitrate);
+ const uint32_t kExpectedBitrate = 8000000;
+ // 1000 bytes per millisecond until the window has been filled.
+ int prev_error = kExpectedBitrate;
+ while (++now_ms < 10000) {
+ stats_.Update(1000, now_ms);
+ bitrate = stats_.Rate(now_ms);
+ int error = kExpectedBitrate - bitrate;
+ error = std::abs(error);
+ // Expect the estimation error to decrease as the window is extended.
+ EXPECT_LE(error, prev_error + 1);
+ prev_error = error;
+ }
+ // Window filled, expect to be close to 8000000.
+ EXPECT_EQ(kExpectedBitrate, bitrate);
+
+ now_ms += kWindowMs + 1;
+ EXPECT_EQ(0u, stats_.Rate(now_ms));
+ stats_.Update(1000, now_ms);
+ // We expect one sample of 1000 bytes, and that the bitrate is measured over
+ // 1 ms, i.e., 8 * 1000 / 0.001 = 8000000.
+ EXPECT_EQ(kExpectedBitrate, stats_.Rate(now_ms));
+}
} // namespace
diff --git a/chromium/third_party/webrtc/base/ratetracker.cc b/chromium/third_party/webrtc/base/ratetracker.cc
index c1ad2d5e844..a59ec2fc3f4 100644
--- a/chromium/third_party/webrtc/base/ratetracker.cc
+++ b/chromium/third_party/webrtc/base/ratetracker.cc
@@ -19,14 +19,16 @@
namespace rtc {
-RateTracker::RateTracker(uint32_t bucket_milliseconds, size_t bucket_count)
+static const int64_t kTimeUnset = -1;
+
+RateTracker::RateTracker(int64_t bucket_milliseconds, size_t bucket_count)
: bucket_milliseconds_(bucket_milliseconds),
bucket_count_(bucket_count),
sample_buckets_(new size_t[bucket_count + 1]),
total_sample_count_(0u),
- bucket_start_time_milliseconds_(~0u) {
- RTC_CHECK(bucket_milliseconds > 0u);
- RTC_CHECK(bucket_count > 0u);
+ bucket_start_time_milliseconds_(kTimeUnset) {
+ RTC_CHECK(bucket_milliseconds > 0);
+ RTC_CHECK(bucket_count > 0);
}
RateTracker::~RateTracker() {
@@ -34,33 +36,33 @@ RateTracker::~RateTracker() {
}
double RateTracker::ComputeRateForInterval(
- uint32_t interval_milliseconds) const {
- if (bucket_start_time_milliseconds_ == ~0u) {
+ int64_t interval_milliseconds) const {
+ if (bucket_start_time_milliseconds_ == kTimeUnset) {
return 0.0;
}
- uint32_t current_time = Time();
+ int64_t current_time = Time();
// Calculate which buckets to sum up given the current time. If the time
// has passed to a new bucket then we have to skip some of the oldest buckets.
- uint32_t available_interval_milliseconds = std::min<uint32_t>(
- interval_milliseconds,
- bucket_milliseconds_ * static_cast<uint32_t>(bucket_count_));
+ int64_t available_interval_milliseconds =
+ std::min(interval_milliseconds,
+ bucket_milliseconds_ * static_cast<int64_t>(bucket_count_));
// number of old buckets (i.e. after the current bucket in the ring buffer)
// that are expired given our current time interval.
size_t buckets_to_skip;
// Number of milliseconds of the first bucket that are not a portion of the
// current interval.
- uint32_t milliseconds_to_skip;
+ int64_t milliseconds_to_skip;
if (current_time >
initialization_time_milliseconds_ + available_interval_milliseconds) {
- uint32_t time_to_skip =
+ int64_t time_to_skip =
current_time - bucket_start_time_milliseconds_ +
- static_cast<uint32_t>(bucket_count_) * bucket_milliseconds_ -
+ static_cast<int64_t>(bucket_count_) * bucket_milliseconds_ -
available_interval_milliseconds;
buckets_to_skip = time_to_skip / bucket_milliseconds_;
milliseconds_to_skip = time_to_skip % bucket_milliseconds_;
} else {
buckets_to_skip = bucket_count_ - current_bucket_;
- milliseconds_to_skip = 0u;
+ milliseconds_to_skip = 0;
available_interval_milliseconds =
TimeDiff(current_time, initialization_time_milliseconds_);
// Let one bucket interval pass after initialization before reporting.
@@ -70,8 +72,7 @@ double RateTracker::ComputeRateForInterval(
}
// If we're skipping all buckets that means that there have been no samples
// within the sampling interval so report 0.
- if (buckets_to_skip > bucket_count_ ||
- available_interval_milliseconds == 0u) {
+ if (buckets_to_skip > bucket_count_ || available_interval_milliseconds == 0) {
return 0.0;
}
size_t start_bucket = NextBucketIndex(current_bucket_ + buckets_to_skip);
@@ -88,21 +89,21 @@ double RateTracker::ComputeRateForInterval(
total_samples += sample_buckets_[i];
}
// Convert to samples per second.
- return static_cast<double>(total_samples * 1000u) /
- static_cast<double>(available_interval_milliseconds);
+ return static_cast<double>(total_samples * 1000) /
+ static_cast<double>(available_interval_milliseconds);
}
double RateTracker::ComputeTotalRate() const {
- if (bucket_start_time_milliseconds_ == ~0u) {
+ if (bucket_start_time_milliseconds_ == kTimeUnset) {
return 0.0;
}
- uint32_t current_time = Time();
- if (TimeIsLaterOrEqual(current_time, initialization_time_milliseconds_)) {
+ int64_t current_time = Time();
+ if (current_time <= initialization_time_milliseconds_) {
return 0.0;
}
- return static_cast<double>(total_sample_count_ * 1000u) /
- static_cast<double>(
- TimeDiff(current_time, initialization_time_milliseconds_));
+ return static_cast<double>(total_sample_count_ * 1000) /
+ static_cast<double>(
+ TimeDiff(current_time, initialization_time_milliseconds_));
}
size_t RateTracker::TotalSampleCount() const {
@@ -111,15 +112,16 @@ size_t RateTracker::TotalSampleCount() const {
void RateTracker::AddSamples(size_t sample_count) {
EnsureInitialized();
- uint32_t current_time = Time();
+ int64_t current_time = Time();
// Advance the current bucket as needed for the current time, and reset
// bucket counts as we advance.
- for (size_t i = 0u; i <= bucket_count_ &&
- current_time >= bucket_start_time_milliseconds_ + bucket_milliseconds_;
- ++i) {
+ for (size_t i = 0;
+ i <= bucket_count_ &&
+ current_time >= bucket_start_time_milliseconds_ + bucket_milliseconds_;
+ ++i) {
bucket_start_time_milliseconds_ += bucket_milliseconds_;
current_bucket_ = NextBucketIndex(current_bucket_);
- sample_buckets_[current_bucket_] = 0u;
+ sample_buckets_[current_bucket_] = 0;
}
// Ensure that bucket_start_time_milliseconds_ is updated appropriately if
// the entire buffer of samples has been expired.
@@ -130,18 +132,18 @@ void RateTracker::AddSamples(size_t sample_count) {
total_sample_count_ += sample_count;
}
-uint32_t RateTracker::Time() const {
- return rtc::Time();
+int64_t RateTracker::Time() const {
+ return rtc::TimeMillis();
}
void RateTracker::EnsureInitialized() {
- if (bucket_start_time_milliseconds_ == ~0u) {
+ if (bucket_start_time_milliseconds_ == kTimeUnset) {
initialization_time_milliseconds_ = Time();
bucket_start_time_milliseconds_ = initialization_time_milliseconds_;
- current_bucket_ = 0u;
+ current_bucket_ = 0;
// We only need to initialize the first bucket because we reset buckets when
// current_bucket_ increments.
- sample_buckets_[current_bucket_] = 0u;
+ sample_buckets_[current_bucket_] = 0;
}
}
diff --git a/chromium/third_party/webrtc/base/ratetracker.h b/chromium/third_party/webrtc/base/ratetracker.h
index d49d7cacdd5..6ae9bec119d 100644
--- a/chromium/third_party/webrtc/base/ratetracker.h
+++ b/chromium/third_party/webrtc/base/ratetracker.h
@@ -21,19 +21,19 @@ namespace rtc {
// that over each bucket the rate was constant.
class RateTracker {
public:
- RateTracker(uint32_t bucket_milliseconds, size_t bucket_count);
+ RateTracker(int64_t bucket_milliseconds, size_t bucket_count);
virtual ~RateTracker();
// Computes the average rate over the most recent interval_milliseconds,
// or if the first sample was added within this period, computes the rate
// since the first sample was added.
- double ComputeRateForInterval(uint32_t interval_milliseconds) const;
+ double ComputeRateForInterval(int64_t interval_milliseconds) const;
// Computes the average rate over the rate tracker's recording interval
// of bucket_milliseconds * bucket_count.
double ComputeRate() const {
return ComputeRateForInterval(bucket_milliseconds_ *
- static_cast<uint32_t>(bucket_count_));
+ static_cast<int64_t>(bucket_count_));
}
// Computes the average rate since the first sample was added to the
@@ -49,19 +49,19 @@ class RateTracker {
protected:
// overrideable for tests
- virtual uint32_t Time() const;
+ virtual int64_t Time() const;
private:
void EnsureInitialized();
size_t NextBucketIndex(size_t bucket_index) const;
- const uint32_t bucket_milliseconds_;
+ const int64_t bucket_milliseconds_;
const size_t bucket_count_;
size_t* sample_buckets_;
size_t total_sample_count_;
size_t current_bucket_;
- uint32_t bucket_start_time_milliseconds_;
- uint32_t initialization_time_milliseconds_;
+ int64_t bucket_start_time_milliseconds_;
+ int64_t initialization_time_milliseconds_;
};
} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/ratetracker_unittest.cc b/chromium/third_party/webrtc/base/ratetracker_unittest.cc
index 75fec558188..136934fea34 100644
--- a/chromium/third_party/webrtc/base/ratetracker_unittest.cc
+++ b/chromium/third_party/webrtc/base/ratetracker_unittest.cc
@@ -19,11 +19,11 @@ namespace {
class RateTrackerForTest : public RateTracker {
public:
RateTrackerForTest() : RateTracker(kBucketIntervalMs, 10u), time_(0) {}
- virtual uint32_t Time() const { return time_; }
- void AdvanceTime(uint32_t delta) { time_ += delta; }
+ virtual int64_t Time() const { return time_; }
+ void AdvanceTime(int delta) { time_ += delta; }
private:
- uint32_t time_;
+ int64_t time_;
};
TEST(RateTrackerTest, Test30FPS) {
@@ -36,7 +36,7 @@ TEST(RateTrackerTest, Test30FPS) {
tracker.AdvanceTime(1);
}
}
- EXPECT_DOUBLE_EQ(30.0, tracker.ComputeRateForInterval(50000u));
+ EXPECT_DOUBLE_EQ(30.0, tracker.ComputeRateForInterval(50000));
}
TEST(RateTrackerTest, Test60FPS) {
@@ -49,12 +49,12 @@ TEST(RateTrackerTest, Test60FPS) {
tracker.AdvanceTime(1);
}
}
- EXPECT_DOUBLE_EQ(60.0, tracker.ComputeRateForInterval(1000u));
+ EXPECT_DOUBLE_EQ(60.0, tracker.ComputeRateForInterval(1000));
}
TEST(RateTrackerTest, TestRateTrackerBasics) {
RateTrackerForTest tracker;
- EXPECT_DOUBLE_EQ(0.0, tracker.ComputeRateForInterval(1000u));
+ EXPECT_DOUBLE_EQ(0.0, tracker.ComputeRateForInterval(1000));
// Add a sample.
tracker.AddSamples(1234);
@@ -63,7 +63,7 @@ TEST(RateTrackerTest, TestRateTrackerBasics) {
EXPECT_DOUBLE_EQ(0.0, tracker.ComputeRate());
// Advance the clock by 100 ms (one bucket interval).
tracker.AdvanceTime(1);
- EXPECT_DOUBLE_EQ(12340.0, tracker.ComputeRateForInterval(1000u));
+ EXPECT_DOUBLE_EQ(12340.0, tracker.ComputeRateForInterval(1000));
EXPECT_DOUBLE_EQ(12340.0, tracker.ComputeRate());
EXPECT_EQ(1234U, tracker.TotalSampleCount());
EXPECT_DOUBLE_EQ(12340.0, tracker.ComputeTotalRate());
@@ -71,7 +71,7 @@ TEST(RateTrackerTest, TestRateTrackerBasics) {
// Repeat.
tracker.AddSamples(1234);
tracker.AdvanceTime(100);
- EXPECT_DOUBLE_EQ(12340.0, tracker.ComputeRateForInterval(1000u));
+ EXPECT_DOUBLE_EQ(12340.0, tracker.ComputeRateForInterval(1000));
EXPECT_DOUBLE_EQ(12340.0, tracker.ComputeRate());
EXPECT_EQ(1234U * 2, tracker.TotalSampleCount());
EXPECT_DOUBLE_EQ(12340.0, tracker.ComputeTotalRate());
@@ -79,20 +79,20 @@ TEST(RateTrackerTest, TestRateTrackerBasics) {
// Advance the clock by 800 ms, so we've elapsed a full second.
// units_second should now be filled in properly.
tracker.AdvanceTime(800);
- EXPECT_DOUBLE_EQ(1234.0 * 2.0, tracker.ComputeRateForInterval(1000u));
+ EXPECT_DOUBLE_EQ(1234.0 * 2.0, tracker.ComputeRateForInterval(1000));
EXPECT_DOUBLE_EQ(1234.0 * 2.0, tracker.ComputeRate());
EXPECT_EQ(1234U * 2, tracker.TotalSampleCount());
EXPECT_DOUBLE_EQ(1234.0 * 2.0, tracker.ComputeTotalRate());
// Poll the tracker again immediately. The reported rate should stay the same.
- EXPECT_DOUBLE_EQ(1234.0 * 2.0, tracker.ComputeRateForInterval(1000u));
+ EXPECT_DOUBLE_EQ(1234.0 * 2.0, tracker.ComputeRateForInterval(1000));
EXPECT_DOUBLE_EQ(1234.0 * 2.0, tracker.ComputeRate());
EXPECT_EQ(1234U * 2, tracker.TotalSampleCount());
EXPECT_DOUBLE_EQ(1234.0 * 2.0, tracker.ComputeTotalRate());
// Do nothing and advance by a second. We should drop down to zero.
tracker.AdvanceTime(1000);
- EXPECT_DOUBLE_EQ(0.0, tracker.ComputeRateForInterval(1000u));
+ EXPECT_DOUBLE_EQ(0.0, tracker.ComputeRateForInterval(1000));
EXPECT_DOUBLE_EQ(0.0, tracker.ComputeRate());
EXPECT_EQ(1234U * 2, tracker.TotalSampleCount());
EXPECT_DOUBLE_EQ(1234.0, tracker.ComputeTotalRate());
@@ -103,7 +103,7 @@ TEST(RateTrackerTest, TestRateTrackerBasics) {
tracker.AddSamples(9876U);
tracker.AdvanceTime(100);
}
- EXPECT_DOUBLE_EQ(9876.0 * 10.0, tracker.ComputeRateForInterval(1000u));
+ EXPECT_DOUBLE_EQ(9876.0 * 10.0, tracker.ComputeRateForInterval(1000));
EXPECT_DOUBLE_EQ(9876.0 * 10.0, tracker.ComputeRate());
EXPECT_EQ(1234U * 2 + 9876U * 55, tracker.TotalSampleCount());
EXPECT_DOUBLE_EQ((1234.0 * 2.0 + 9876.0 * 55.0) / 7.5,
@@ -112,14 +112,14 @@ TEST(RateTrackerTest, TestRateTrackerBasics) {
// Advance the clock by 500 ms. Since we sent nothing over this half-second,
// the reported rate should be reduced by half.
tracker.AdvanceTime(500);
- EXPECT_DOUBLE_EQ(9876.0 * 5.0, tracker.ComputeRateForInterval(1000u));
+ EXPECT_DOUBLE_EQ(9876.0 * 5.0, tracker.ComputeRateForInterval(1000));
EXPECT_DOUBLE_EQ(9876.0 * 5.0, tracker.ComputeRate());
EXPECT_EQ(1234U * 2 + 9876U * 55, tracker.TotalSampleCount());
EXPECT_DOUBLE_EQ((1234.0 * 2.0 + 9876.0 * 55.0) / 8.0,
tracker.ComputeTotalRate());
// Rate over the last half second should be zero.
- EXPECT_DOUBLE_EQ(0.0, tracker.ComputeRateForInterval(500u));
+ EXPECT_DOUBLE_EQ(0.0, tracker.ComputeRateForInterval(500));
}
TEST(RateTrackerTest, TestLongPeriodBetweenSamples) {
@@ -149,7 +149,7 @@ TEST(RateTrackerTest, TestRolloff) {
tracker.AdvanceTime(50);
}
EXPECT_DOUBLE_EQ(15.0, tracker.ComputeRate());
- EXPECT_DOUBLE_EQ(20.0, tracker.ComputeRateForInterval(500u));
+ EXPECT_DOUBLE_EQ(20.0, tracker.ComputeRateForInterval(500));
for (int i = 0; i < 10; ++i) {
tracker.AddSamples(1U);
@@ -162,7 +162,7 @@ TEST(RateTrackerTest, TestGetUnitSecondsAfterInitialValue) {
RateTrackerForTest tracker;
tracker.AddSamples(1234);
tracker.AdvanceTime(1000);
- EXPECT_DOUBLE_EQ(1234.0, tracker.ComputeRateForInterval(1000u));
+ EXPECT_DOUBLE_EQ(1234.0, tracker.ComputeRateForInterval(1000));
}
} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/referencecountedsingletonfactory.h b/chromium/third_party/webrtc/base/referencecountedsingletonfactory.h
index f9559868274..500150f6fd5 100644
--- a/chromium/third_party/webrtc/base/referencecountedsingletonfactory.h
+++ b/chromium/third_party/webrtc/base/referencecountedsingletonfactory.h
@@ -11,10 +11,12 @@
#ifndef WEBRTC_BASE_REFERENCECOUNTEDSINGLETONFACTORY_H_
#define WEBRTC_BASE_REFERENCECOUNTEDSINGLETONFACTORY_H_
+#include <memory>
+
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
namespace rtc {
@@ -41,11 +43,11 @@ class ReferenceCountedSingletonFactory {
protected:
// Must be implemented in a sub-class. The sub-class may choose whether or not
// to cache the instance across lifetimes by either reset()'ing or not
- // reset()'ing the scoped_ptr in CleanupInstance().
+ // reset()'ing the unique_ptr in CleanupInstance().
virtual bool SetupInstance() = 0;
virtual void CleanupInstance() = 0;
- scoped_ptr<Interface> instance_;
+ std::unique_ptr<Interface> instance_;
private:
Interface* GetInstance() {
diff --git a/chromium/third_party/webrtc/base/rollingaccumulator.h b/chromium/third_party/webrtc/base/rollingaccumulator.h
index 0c8e5fb8bdc..72415ad758d 100644
--- a/chromium/third_party/webrtc/base/rollingaccumulator.h
+++ b/chromium/third_party/webrtc/base/rollingaccumulator.h
@@ -15,6 +15,7 @@
#include <vector>
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
namespace rtc {
diff --git a/chromium/third_party/webrtc/base/rtccertificate.cc b/chromium/third_party/webrtc/base/rtccertificate.cc
index 7b764bd72e6..574bf75bf29 100644
--- a/chromium/third_party/webrtc/base/rtccertificate.cc
+++ b/chromium/third_party/webrtc/base/rtccertificate.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/base/rtccertificate.h"
#include "webrtc/base/checks.h"
@@ -15,7 +17,7 @@
namespace rtc {
scoped_refptr<RTCCertificate> RTCCertificate::Create(
- scoped_ptr<SSLIdentity> identity) {
+ std::unique_ptr<SSLIdentity> identity) {
return new RefCountedObject<RTCCertificate>(identity.release());
}
@@ -43,4 +45,24 @@ const SSLCertificate& RTCCertificate::ssl_certificate() const {
return identity_->certificate();
}
+RTCCertificatePEM RTCCertificate::ToPEM() const {
+ return RTCCertificatePEM(identity_->PrivateKeyToPEMString(),
+ ssl_certificate().ToPEMString());
+}
+
+scoped_refptr<RTCCertificate> RTCCertificate::FromPEM(
+ const RTCCertificatePEM& pem) {
+ std::unique_ptr<SSLIdentity> identity(SSLIdentity::FromPEMStrings(
+ pem.private_key(), pem.certificate()));
+ return new RefCountedObject<RTCCertificate>(identity.release());
+}
+
+bool RTCCertificate::operator==(const RTCCertificate& certificate) const {
+ return *this->identity_ == *certificate.identity_;
+}
+
+bool RTCCertificate::operator!=(const RTCCertificate& certificate) const {
+ return !(*this == certificate);
+}
+
} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/rtccertificate.h b/chromium/third_party/webrtc/base/rtccertificate.h
index 600739bc86a..46d6fd427cf 100644
--- a/chromium/third_party/webrtc/base/rtccertificate.h
+++ b/chromium/third_party/webrtc/base/rtccertificate.h
@@ -11,21 +11,45 @@
#ifndef WEBRTC_BASE_RTCCERTIFICATE_H_
#define WEBRTC_BASE_RTCCERTIFICATE_H_
+#include <memory>
+
#include "webrtc/base/basictypes.h"
#include "webrtc/base/refcount.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/sslidentity.h"
namespace rtc {
+// This class contains PEM strings of an RTCCertificate's private key and
+// certificate and acts as a text representation of RTCCertificate. Certificates
+// can be serialized and deserialized to and from this format, which allows for
+// cloning and storing of certificates to disk. The PEM format is that of
+// |SSLIdentity::PrivateKeyToPEMString| and |SSLCertificate::ToPEMString|, e.g.
+// the string representations used by OpenSSL.
+class RTCCertificatePEM {
+ public:
+ RTCCertificatePEM(
+ const std::string& private_key,
+ const std::string& certificate)
+ : private_key_(private_key),
+ certificate_(certificate) {}
+
+ const std::string& private_key() const { return private_key_; }
+ const std::string& certificate() const { return certificate_; }
+
+ private:
+ std::string private_key_;
+ std::string certificate_;
+};
+
// A thin abstraction layer between "lower level crypto stuff" like
// SSLCertificate and WebRTC usage. Takes ownership of some lower level objects,
// reference counting protects these from premature destruction.
class RTCCertificate : public RefCountInterface {
public:
// Takes ownership of |identity|.
- static scoped_refptr<RTCCertificate> Create(scoped_ptr<SSLIdentity> identity);
+ static scoped_refptr<RTCCertificate> Create(
+ std::unique_ptr<SSLIdentity> identity);
// Returns the expiration time in ms relative to epoch, 1970-01-01T00:00:00Z.
uint64_t Expires() const;
@@ -40,6 +64,12 @@ class RTCCertificate : public RefCountInterface {
// However, some places might need SSLIdentity* for its public/private key...
SSLIdentity* identity() const { return identity_.get(); }
+ // To/from PEM, a text representation of the RTCCertificate.
+ RTCCertificatePEM ToPEM() const;
+ static scoped_refptr<RTCCertificate> FromPEM(const RTCCertificatePEM& pem);
+ bool operator==(const RTCCertificate& certificate) const;
+ bool operator!=(const RTCCertificate& certificate) const;
+
protected:
explicit RTCCertificate(SSLIdentity* identity);
~RTCCertificate() override;
@@ -47,7 +77,7 @@ class RTCCertificate : public RefCountInterface {
private:
// The SSLIdentity is the owner of the SSLCertificate. To protect our
// ssl_certificate() we take ownership of |identity_|.
- scoped_ptr<SSLIdentity> identity_;
+ std::unique_ptr<SSLIdentity> identity_;
};
} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/rtccertificate_unittests.cc b/chromium/third_party/webrtc/base/rtccertificate_unittest.cc
index 84c854478b2..f5df7f1130b 100644
--- a/chromium/third_party/webrtc/base/rtccertificate_unittests.cc
+++ b/chromium/third_party/webrtc/base/rtccertificate_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <utility>
#include "webrtc/base/checks.h"
@@ -16,7 +17,6 @@
#include "webrtc/base/logging.h"
#include "webrtc/base/rtccertificate.h"
#include "webrtc/base/safe_conversions.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sslidentity.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/timeutils.h"
@@ -35,6 +35,13 @@ class RTCCertificateTest : public testing::Test {
~RTCCertificateTest() {}
protected:
+ scoped_refptr<RTCCertificate> GenerateECDSA() {
+ std::unique_ptr<SSLIdentity> identity(
+ SSLIdentity::Generate(kTestCertCommonName, KeyParams::ECDSA()));
+ RTC_CHECK(identity);
+ return RTCCertificate::Create(std::move(identity));
+ }
+
// Timestamp note:
// All timestamps in this unittest are expressed in number of seconds since
// epoch, 1970-01-01T00:00:00Z (UTC). The RTCCertificate interface uses ms,
@@ -77,7 +84,7 @@ class RTCCertificateTest : public testing::Test {
// is fast to generate.
params.key_params = KeyParams::ECDSA();
- scoped_ptr<SSLIdentity> identity(SSLIdentity::GenerateForTest(params));
+ std::unique_ptr<SSLIdentity> identity(SSLIdentity::GenerateForTest(params));
return RTCCertificate::Create(std::move(identity));
}
};
@@ -85,10 +92,7 @@ class RTCCertificateTest : public testing::Test {
TEST_F(RTCCertificateTest, NewCertificateNotExpired) {
// Generate a real certificate without specifying the expiration time.
// Certificate type doesn't matter, using ECDSA because it's fast to generate.
- scoped_ptr<SSLIdentity> identity(
- SSLIdentity::Generate(kTestCertCommonName, KeyParams::ECDSA()));
- scoped_refptr<RTCCertificate> certificate =
- RTCCertificate::Create(std::move(identity));
+ scoped_refptr<RTCCertificate> certificate = GenerateECDSA();
uint64_t now = NowSeconds();
EXPECT_FALSE(HasExpiredSeconds(certificate, now));
@@ -115,4 +119,22 @@ TEST_F(RTCCertificateTest, ExpiresInOneSecond) {
EXPECT_TRUE(HasExpiredSeconds(certificate, now + 2));
}
+TEST_F(RTCCertificateTest, DifferentCertificatesNotEqual) {
+ scoped_refptr<RTCCertificate> a = GenerateECDSA();
+ scoped_refptr<RTCCertificate> b = GenerateECDSA();
+ EXPECT_TRUE(*a != *b);
+}
+
+TEST_F(RTCCertificateTest, CloneWithPEMSerialization) {
+ scoped_refptr<RTCCertificate> orig = GenerateECDSA();
+
+ // To PEM.
+ RTCCertificatePEM orig_pem = orig->ToPEM();
+ // Clone from PEM.
+ scoped_refptr<RTCCertificate> clone = RTCCertificate::FromPEM(orig_pem);
+ EXPECT_TRUE(clone);
+ EXPECT_TRUE(*orig == *clone);
+ EXPECT_EQ(orig->Expires(), clone->Expires());
+}
+
} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/rtccertificategenerator.cc b/chromium/third_party/webrtc/base/rtccertificategenerator.cc
new file mode 100644
index 00000000000..f0d72e7fe4d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/rtccertificategenerator.cc
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/rtccertificategenerator.h"
+
+#include <algorithm>
+#include <memory>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/sslidentity.h"
+
+namespace rtc {
+
+namespace {
+
+// A certificates' subject and issuer name.
+const char kIdentityName[] = "WebRTC";
+
+uint64_t kYearInSeconds = 365 * 24 * 60 * 60;
+
+enum {
+ MSG_GENERATE,
+ MSG_GENERATE_DONE,
+};
+
+// Helper class for generating certificates asynchronously; a single task
+// instance is responsible for a single asynchronous certificate generation
+// request. We are using a separate helper class so that a generation request
+// can outlive the |RTCCertificateGenerator| that spawned it.
+class RTCCertificateGenerationTask : public RefCountInterface,
+ public MessageHandler {
+ public:
+ RTCCertificateGenerationTask(
+ Thread* signaling_thread,
+ Thread* worker_thread,
+ const KeyParams& key_params,
+ const Optional<uint64_t>& expires_ms,
+ const scoped_refptr<RTCCertificateGeneratorCallback>& callback)
+ : signaling_thread_(signaling_thread),
+ worker_thread_(worker_thread),
+ key_params_(key_params),
+ expires_ms_(expires_ms),
+ callback_(callback) {
+ RTC_DCHECK(signaling_thread_);
+ RTC_DCHECK(worker_thread_);
+ RTC_DCHECK(callback_);
+ }
+ ~RTCCertificateGenerationTask() override {}
+
+ // Handles |MSG_GENERATE| and its follow-up |MSG_GENERATE_DONE|.
+ void OnMessage(Message* msg) override {
+ switch (msg->message_id) {
+ case MSG_GENERATE:
+ RTC_DCHECK(worker_thread_->IsCurrent());
+
+ // Perform the certificate generation work here on the worker thread.
+ certificate_ = RTCCertificateGenerator::GenerateCertificate(
+ key_params_, expires_ms_);
+
+ // Handle callbacks on signaling thread. Pass on the |msg->pdata|
+ // (which references |this| with ref counting) to that thread.
+ signaling_thread_->Post(this, MSG_GENERATE_DONE, msg->pdata);
+ break;
+ case MSG_GENERATE_DONE:
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+
+ // Perform callback with result here on the signaling thread.
+ if (certificate_) {
+ callback_->OnSuccess(certificate_);
+ } else {
+ callback_->OnFailure();
+ }
+
+ // Destroy |msg->pdata| which references |this| with ref counting. This
+ // may result in |this| being deleted - do not touch member variables
+ // after this line.
+ delete msg->pdata;
+ return;
+ default:
+ RTC_NOTREACHED();
+ }
+ }
+
+ private:
+ Thread* const signaling_thread_;
+ Thread* const worker_thread_;
+ const KeyParams key_params_;
+ const Optional<uint64_t> expires_ms_;
+ const scoped_refptr<RTCCertificateGeneratorCallback> callback_;
+ scoped_refptr<RTCCertificate> certificate_;
+};
+
+} // namespace
+
+// static
+scoped_refptr<RTCCertificate>
+RTCCertificateGenerator::GenerateCertificate(
+ const KeyParams& key_params,
+ const Optional<uint64_t>& expires_ms) {
+ if (!key_params.IsValid())
+ return nullptr;
+ SSLIdentity* identity;
+ if (!expires_ms) {
+ identity = SSLIdentity::Generate(kIdentityName, key_params);
+ } else {
+ uint64_t expires_s = *expires_ms / 1000;
+ // Limit the expiration time to something reasonable (a year). This was
+ // somewhat arbitrarily chosen. It also ensures that the value is not too
+ // large for the unspecified |time_t|.
+ expires_s = std::min(expires_s, kYearInSeconds);
+ // TODO(torbjorng): Stop using |time_t|, its type is unspecified. It it safe
+ // to assume it can hold up to a year's worth of seconds (and more), but
+ // |SSLIdentity::Generate| should stop relying on |time_t|.
+ // See bugs.webrtc.org/5720.
+ time_t cert_lifetime_s = static_cast<time_t>(expires_s);
+ identity = SSLIdentity::GenerateWithExpiration(
+ kIdentityName, key_params, cert_lifetime_s);
+ }
+ if (!identity)
+ return nullptr;
+ std::unique_ptr<SSLIdentity> identity_sptr(identity);
+ return RTCCertificate::Create(std::move(identity_sptr));
+}
+
+RTCCertificateGenerator::RTCCertificateGenerator(
+ Thread* signaling_thread, Thread* worker_thread)
+ : signaling_thread_(signaling_thread),
+ worker_thread_(worker_thread) {
+ RTC_DCHECK(signaling_thread_);
+ RTC_DCHECK(worker_thread_);
+}
+
+void RTCCertificateGenerator::GenerateCertificateAsync(
+ const KeyParams& key_params,
+ const Optional<uint64_t>& expires_ms,
+ const scoped_refptr<RTCCertificateGeneratorCallback>& callback) {
+ RTC_DCHECK(signaling_thread_->IsCurrent());
+ RTC_DCHECK(callback);
+
+ // Create a new |RTCCertificateGenerationTask| for this generation request. It
+ // is reference counted and referenced by the message data, ensuring it lives
+ // until the task has completed (independent of |RTCCertificateGenerator|).
+ ScopedRefMessageData<RTCCertificateGenerationTask>* msg_data =
+ new ScopedRefMessageData<RTCCertificateGenerationTask>(
+ new RefCountedObject<RTCCertificateGenerationTask>(
+ signaling_thread_, worker_thread_, key_params, expires_ms,
+ callback));
+ worker_thread_->Post(msg_data->data().get(), MSG_GENERATE, msg_data);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/rtccertificategenerator.h b/chromium/third_party/webrtc/base/rtccertificategenerator.h
new file mode 100644
index 00000000000..08fe67108d4
--- /dev/null
+++ b/chromium/third_party/webrtc/base/rtccertificategenerator.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_RTCCERTIFICATEGENERATOR_H_
+#define WEBRTC_BASE_RTCCERTIFICATEGENERATOR_H_
+
+#include "webrtc/base/optional.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/rtccertificate.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/sslidentity.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+class RTCCertificateGeneratorCallback : public RefCountInterface {
+ public:
+ virtual void OnSuccess(
+ const scoped_refptr<RTCCertificate>& certificate) = 0;
+ virtual void OnFailure() = 0;
+
+ protected:
+ ~RTCCertificateGeneratorCallback() override {}
+};
+
+// Generates |RTCCertificate|s.
+// The static function |GenerateCertificate| generates a certificate on the
+// current thread. The |RTCCertificateGenerator| instance generates certificates
+// asynchronously on the worker thread with |GenerateCertificateAsync|.
+class RTCCertificateGenerator {
+ public:
+ // Generates a certificate on the current thread. Returns null on failure.
+ // If |expires_ms| is specified, the certificate will expire in approximately
+ // that many milliseconds from now. |expires_ms| is limited to a year, a
+ // larger value than that is clamped down to a year. If |expires_ms| is not
+ // specified, a default expiration time is used.
+ static scoped_refptr<RTCCertificate> GenerateCertificate(
+ const KeyParams& key_params,
+ const Optional<uint64_t>& expires_ms);
+
+ RTCCertificateGenerator(Thread* signaling_thread, Thread* worker_thread);
+
+ // Generates a certificate asynchronously on the worker thread.
+ // Must be called on the signaling thread. The |callback| is invoked with the
+ // result on the signaling thread. If |expires_ms| is specified, the
+ // certificate will expire in approximately that many milliseconds from now.
+ // |expires_ms| is limited to a year, a larger value than that is clamped down
+ // to a year. If |expires_ms| is not specified, a default expiration time is
+ // used.
+ void GenerateCertificateAsync(
+ const KeyParams& key_params,
+ const Optional<uint64_t>& expires_ms,
+ const scoped_refptr<RTCCertificateGeneratorCallback>& callback);
+
+ private:
+ Thread* const signaling_thread_;
+ Thread* const worker_thread_;
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_RTCCERTIFICATEGENERATOR_H_
diff --git a/chromium/third_party/webrtc/base/rtccertificategenerator_unittest.cc b/chromium/third_party/webrtc/base/rtccertificategenerator_unittest.cc
new file mode 100644
index 00000000000..a6e88a1a866
--- /dev/null
+++ b/chromium/third_party/webrtc/base/rtccertificategenerator_unittest.cc
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/rtccertificategenerator.h"
+
+#include <memory>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/optional.h"
+#include "webrtc/base/thread.h"
+
+namespace rtc {
+
+class RTCCertificateGeneratorFixture : public RTCCertificateGeneratorCallback {
+ public:
+ RTCCertificateGeneratorFixture()
+ : signaling_thread_(Thread::Current()),
+ worker_thread_(new Thread()),
+ generate_async_completed_(false) {
+ RTC_CHECK(signaling_thread_);
+ RTC_CHECK(worker_thread_->Start());
+ generator_.reset(
+ new RTCCertificateGenerator(signaling_thread_, worker_thread_.get()));
+ }
+ ~RTCCertificateGeneratorFixture() override {}
+
+ RTCCertificateGenerator* generator() const { return generator_.get(); }
+ RTCCertificate* certificate() const { return certificate_.get(); }
+
+ void OnSuccess(const scoped_refptr<RTCCertificate>& certificate) override {
+ RTC_CHECK(signaling_thread_->IsCurrent());
+ RTC_CHECK(certificate);
+ certificate_ = certificate;
+ generate_async_completed_ = true;
+ }
+ void OnFailure() override {
+ RTC_CHECK(signaling_thread_->IsCurrent());
+ certificate_ = nullptr;
+ generate_async_completed_ = true;
+ }
+
+ bool GenerateAsyncCompleted() {
+ RTC_CHECK(signaling_thread_->IsCurrent());
+ if (generate_async_completed_) {
+ // Reset flag so that future generation requests are not considered done.
+ generate_async_completed_ = false;
+ return true;
+ }
+ return false;
+ }
+
+ protected:
+ Thread* const signaling_thread_;
+ std::unique_ptr<Thread> worker_thread_;
+ std::unique_ptr<RTCCertificateGenerator> generator_;
+ scoped_refptr<RTCCertificate> certificate_;
+ bool generate_async_completed_;
+};
+
+class RTCCertificateGeneratorTest
+ : public testing::Test {
+ public:
+ RTCCertificateGeneratorTest()
+ : fixture_(new RefCountedObject<RTCCertificateGeneratorFixture>()) {}
+ ~RTCCertificateGeneratorTest() {}
+
+ protected:
+ static const int kGenerationTimeoutMs = 1000;
+
+ scoped_refptr<RTCCertificateGeneratorFixture> fixture_;
+};
+
+TEST_F(RTCCertificateGeneratorTest, GenerateECDSA) {
+ EXPECT_TRUE(RTCCertificateGenerator::GenerateCertificate(
+ KeyParams::ECDSA(),
+ Optional<uint64_t>()));
+}
+
+TEST_F(RTCCertificateGeneratorTest, GenerateRSA) {
+ EXPECT_TRUE(RTCCertificateGenerator::GenerateCertificate(
+ KeyParams::RSA(),
+ Optional<uint64_t>()));
+}
+
+TEST_F(RTCCertificateGeneratorTest, GenerateAsyncECDSA) {
+ EXPECT_FALSE(fixture_->certificate());
+ fixture_->generator()->GenerateCertificateAsync(
+ KeyParams::ECDSA(),
+ Optional<uint64_t>(),
+ fixture_);
+ // Until generation has completed, the certificate is null. Since this is an
+ // async call, generation must not have completed until we process messages
+ // posted to this thread (which is done by |EXPECT_TRUE_WAIT|).
+ EXPECT_FALSE(fixture_->GenerateAsyncCompleted());
+ EXPECT_FALSE(fixture_->certificate());
+ EXPECT_TRUE_WAIT(fixture_->GenerateAsyncCompleted(), kGenerationTimeoutMs);
+ EXPECT_TRUE(fixture_->certificate());
+}
+
+TEST_F(RTCCertificateGeneratorTest, GenerateWithExpires) {
+ // By generating two certificates with different expiration we can compare the
+ // two expiration times relative to each other without knowing the current
+ // time relative to epoch, 1970-01-01T00:00:00Z. This verifies that the
+ // expiration parameter is correctly used relative to the generator's clock,
+ // but does not verify that this clock is relative to epoch.
+
+ // Generate a certificate that expires immediately.
+ scoped_refptr<RTCCertificate> cert_a =
+ RTCCertificateGenerator::GenerateCertificate(
+ KeyParams::ECDSA(), Optional<uint64_t>(0));
+ EXPECT_TRUE(cert_a);
+
+ // Generate a certificate that expires in one minute.
+ const uint64_t kExpiresMs = 60000;
+ scoped_refptr<RTCCertificate> cert_b =
+ RTCCertificateGenerator::GenerateCertificate(
+ KeyParams::ECDSA(), Optional<uint64_t>(kExpiresMs));
+ EXPECT_TRUE(cert_b);
+
+ // Verify that |cert_b| expires approximately |kExpiresMs| after |cert_a|
+ // (allowing a +/- 1 second plus maximum generation time difference).
+ EXPECT_GT(cert_b->Expires(), cert_a->Expires());
+ uint64_t expires_diff = cert_b->Expires() - cert_a->Expires();
+ EXPECT_GE(expires_diff, kExpiresMs);
+ EXPECT_LE(expires_diff, kExpiresMs + 2*kGenerationTimeoutMs + 1000);
+}
+
+TEST_F(RTCCertificateGeneratorTest, GenerateWithInvalidParamsShouldFail) {
+ KeyParams invalid_params = KeyParams::RSA(0, 0);
+ EXPECT_FALSE(invalid_params.IsValid());
+
+ EXPECT_FALSE(RTCCertificateGenerator::GenerateCertificate(
+ invalid_params, Optional<uint64_t>()));
+
+ fixture_->generator()->GenerateCertificateAsync(
+ invalid_params,
+ Optional<uint64_t>(),
+ fixture_);
+ EXPECT_TRUE_WAIT(fixture_->GenerateAsyncCompleted(), kGenerationTimeoutMs);
+ EXPECT_FALSE(fixture_->certificate());
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/scoped_autorelease_pool.h b/chromium/third_party/webrtc/base/scoped_autorelease_pool.h
index 9aac112793e..808231b5d25 100644
--- a/chromium/third_party/webrtc/base/scoped_autorelease_pool.h
+++ b/chromium/third_party/webrtc/base/scoped_autorelease_pool.h
@@ -23,6 +23,7 @@
#if defined(WEBRTC_MAC)
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
// This header may be included from Obj-C files or C++ files.
#ifdef __OBJC__
diff --git a/chromium/third_party/webrtc/base/scoped_ptr.h b/chromium/third_party/webrtc/base/scoped_ptr.h
deleted file mode 100644
index b7e94989e57..00000000000
--- a/chromium/third_party/webrtc/base/scoped_ptr.h
+++ /dev/null
@@ -1,626 +0,0 @@
-/*
- * Copyright 2012 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Borrowed from Chromium's src/base/memory/scoped_ptr.h.
-
-// Scopers help you manage ownership of a pointer, helping you easily manage a
-// pointer within a scope, and automatically destroying the pointer at the end
-// of a scope. There are two main classes you will use, which correspond to the
-// operators new/delete and new[]/delete[].
-//
-// Example usage (scoped_ptr<T>):
-// {
-// scoped_ptr<Foo> foo(new Foo("wee"));
-// } // foo goes out of scope, releasing the pointer with it.
-//
-// {
-// scoped_ptr<Foo> foo; // No pointer managed.
-// foo.reset(new Foo("wee")); // Now a pointer is managed.
-// foo.reset(new Foo("wee2")); // Foo("wee") was destroyed.
-// foo.reset(new Foo("wee3")); // Foo("wee2") was destroyed.
-// foo->Method(); // Foo::Method() called.
-// foo.get()->Method(); // Foo::Method() called.
-// SomeFunc(foo.release()); // SomeFunc takes ownership, foo no longer
-// // manages a pointer.
-// foo.reset(new Foo("wee4")); // foo manages a pointer again.
-// foo.reset(); // Foo("wee4") destroyed, foo no longer
-// // manages a pointer.
-// } // foo wasn't managing a pointer, so nothing was destroyed.
-//
-// Example usage (scoped_ptr<T[]>):
-// {
-// scoped_ptr<Foo[]> foo(new Foo[100]);
-// foo.get()->Method(); // Foo::Method on the 0th element.
-// foo[10].Method(); // Foo::Method on the 10th element.
-// }
-//
-// These scopers also implement part of the functionality of C++11 unique_ptr
-// in that they are "movable but not copyable." You can use the scopers in the
-// parameter and return types of functions to signify ownership transfer in to
-// and out of a function. When calling a function that has a scoper as the
-// argument type, it must be called with the result of calling std::move on an
-// analogous scoper, or another function that generates a temporary; passing by
-// copy will NOT work. Here is an example using scoped_ptr:
-//
-// void TakesOwnership(scoped_ptr<Foo> arg) {
-// // Do something with arg
-// }
-// scoped_ptr<Foo> CreateFoo() {
-// // No need for calling std::move because we are constructing a temporary
-// // for the return value.
-// return scoped_ptr<Foo>(new Foo("new"));
-// }
-// scoped_ptr<Foo> PassThru(scoped_ptr<Foo> arg) {
-// return std::move(arg);
-// }
-//
-// {
-// scoped_ptr<Foo> ptr(new Foo("yay")); // ptr manages Foo("yay").
-// TakesOwnership(std::move(ptr)); // ptr no longer owns Foo("yay").
-// scoped_ptr<Foo> ptr2 = CreateFoo(); // ptr2 owns the return Foo.
-// scoped_ptr<Foo> ptr3 = // ptr3 now owns what was in ptr2.
-// PassThru(std::move(ptr2)); // ptr2 is correspondingly nullptr.
-// }
-//
-// Notice that if you do not call std::move when returning from PassThru(), or
-// when invoking TakesOwnership(), the code will not compile because scopers
-// are not copyable; they only implement move semantics which require calling
-// std::move to signify a destructive transfer of state. CreateFoo() is
-// different though because we are constructing a temporary on the return line
-// and thus can avoid needing to call std::move.
-
-#ifndef WEBRTC_BASE_SCOPED_PTR_H__
-#define WEBRTC_BASE_SCOPED_PTR_H__
-
-// This is an implementation designed to match the anticipated future TR2
-// implementation of the scoped_ptr class.
-
-#include <assert.h>
-#include <stddef.h>
-#include <stdlib.h>
-
-#include <algorithm> // For std::swap().
-#include <cstddef>
-#include <memory>
-
-#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/template_util.h"
-#include "webrtc/typedefs.h"
-
-namespace rtc {
-
-// Function object which deletes its parameter, which must be a pointer.
-// If C is an array type, invokes 'delete[]' on the parameter; otherwise,
-// invokes 'delete'. The default deleter for scoped_ptr<T>.
-template <class T>
-struct DefaultDeleter {
- DefaultDeleter() {}
- template <typename U> DefaultDeleter(const DefaultDeleter<U>& other) {
- // IMPLEMENTATION NOTE: C++11 20.7.1.1.2p2 only provides this constructor
- // if U* is implicitly convertible to T* and U is not an array type.
- //
- // Correct implementation should use SFINAE to disable this
- // constructor. However, since there are no other 1-argument constructors,
- // using a static_assert based on is_convertible<> and requiring
- // complete types is simpler and will cause compile failures for equivalent
- // misuses.
- //
- // Note, the is_convertible<U*, T*> check also ensures that U is not an
- // array. T is guaranteed to be a non-array, so any U* where U is an array
- // cannot convert to T*.
- enum { T_must_be_complete = sizeof(T) };
- enum { U_must_be_complete = sizeof(U) };
- static_assert(rtc::is_convertible<U*, T*>::value,
- "U* must implicitly convert to T*");
- }
- inline void operator()(T* ptr) const {
- enum { type_must_be_complete = sizeof(T) };
- delete ptr;
- }
-};
-
-// Specialization of DefaultDeleter for array types.
-template <class T>
-struct DefaultDeleter<T[]> {
- inline void operator()(T* ptr) const {
- enum { type_must_be_complete = sizeof(T) };
- delete[] ptr;
- }
-
- private:
- // Disable this operator for any U != T because it is undefined to execute
- // an array delete when the static type of the array mismatches the dynamic
- // type.
- //
- // References:
- // C++98 [expr.delete]p3
- // http://cplusplus.github.com/LWG/lwg-defects.html#938
- template <typename U> void operator()(U* array) const;
-};
-
-template <class T, int n>
-struct DefaultDeleter<T[n]> {
- // Never allow someone to declare something like scoped_ptr<int[10]>.
- static_assert(sizeof(T) == -1, "do not use array with size as type");
-};
-
-// Function object which invokes 'free' on its parameter, which must be
-// a pointer. Can be used to store malloc-allocated pointers in scoped_ptr:
-//
-// scoped_ptr<int, rtc::FreeDeleter> foo_ptr(
-// static_cast<int*>(malloc(sizeof(int))));
-struct FreeDeleter {
- inline void operator()(void* ptr) const {
- free(ptr);
- }
-};
-
-namespace internal {
-
-template <typename T>
-struct ShouldAbortOnSelfReset {
- template <typename U>
- static rtc::internal::NoType Test(const typename U::AllowSelfReset*);
-
- template <typename U>
- static rtc::internal::YesType Test(...);
-
- static const bool value =
- sizeof(Test<T>(0)) == sizeof(rtc::internal::YesType);
-};
-
-// Minimal implementation of the core logic of scoped_ptr, suitable for
-// reuse in both scoped_ptr and its specializations.
-template <class T, class D>
-class scoped_ptr_impl {
- public:
- explicit scoped_ptr_impl(T* p) : data_(p) {}
-
- // Initializer for deleters that have data parameters.
- scoped_ptr_impl(T* p, const D& d) : data_(p, d) {}
-
- // Templated constructor that destructively takes the value from another
- // scoped_ptr_impl.
- template <typename U, typename V>
- scoped_ptr_impl(scoped_ptr_impl<U, V>* other)
- : data_(other->release(), other->get_deleter()) {
- // We do not support move-only deleters. We could modify our move
- // emulation to have rtc::subtle::move() and rtc::subtle::forward()
- // functions that are imperfect emulations of their C++11 equivalents,
- // but until there's a requirement, just assume deleters are copyable.
- }
-
- template <typename U, typename V>
- void TakeState(scoped_ptr_impl<U, V>* other) {
- // See comment in templated constructor above regarding lack of support
- // for move-only deleters.
- reset(other->release());
- get_deleter() = other->get_deleter();
- }
-
- ~scoped_ptr_impl() {
- if (data_.ptr != nullptr) {
- // Not using get_deleter() saves one function call in non-optimized
- // builds.
- static_cast<D&>(data_)(data_.ptr);
- }
- }
-
- void reset(T* p) {
- // This is a self-reset, which is no longer allowed for default deleters:
- // https://crbug.com/162971
- assert(!ShouldAbortOnSelfReset<D>::value || p == nullptr || p != data_.ptr);
-
- // Note that running data_.ptr = p can lead to undefined behavior if
- // get_deleter()(get()) deletes this. In order to prevent this, reset()
- // should update the stored pointer before deleting its old value.
- //
- // However, changing reset() to use that behavior may cause current code to
- // break in unexpected ways. If the destruction of the owned object
- // dereferences the scoped_ptr when it is destroyed by a call to reset(),
- // then it will incorrectly dispatch calls to |p| rather than the original
- // value of |data_.ptr|.
- //
- // During the transition period, set the stored pointer to nullptr while
- // deleting the object. Eventually, this safety check will be removed to
- // prevent the scenario initially described from occurring and
- // http://crbug.com/176091 can be closed.
- T* old = data_.ptr;
- data_.ptr = nullptr;
- if (old != nullptr)
- static_cast<D&>(data_)(old);
- data_.ptr = p;
- }
-
- T* get() const { return data_.ptr; }
-
- D& get_deleter() { return data_; }
- const D& get_deleter() const { return data_; }
-
- void swap(scoped_ptr_impl& p2) {
- // Standard swap idiom: 'using std::swap' ensures that std::swap is
- // present in the overload set, but we call swap unqualified so that
- // any more-specific overloads can be used, if available.
- using std::swap;
- swap(static_cast<D&>(data_), static_cast<D&>(p2.data_));
- swap(data_.ptr, p2.data_.ptr);
- }
-
- T* release() {
- T* old_ptr = data_.ptr;
- data_.ptr = nullptr;
- return old_ptr;
- }
-
- T** accept() {
- reset(nullptr);
- return &(data_.ptr);
- }
-
- T** use() {
- return &(data_.ptr);
- }
-
- private:
- // Needed to allow type-converting constructor.
- template <typename U, typename V> friend class scoped_ptr_impl;
-
- // Use the empty base class optimization to allow us to have a D
- // member, while avoiding any space overhead for it when D is an
- // empty class. See e.g. http://www.cantrip.org/emptyopt.html for a good
- // discussion of this technique.
- struct Data : public D {
- explicit Data(T* ptr_in) : ptr(ptr_in) {}
- Data(T* ptr_in, const D& other) : D(other), ptr(ptr_in) {}
- T* ptr;
- };
-
- Data data_;
-
- RTC_DISALLOW_COPY_AND_ASSIGN(scoped_ptr_impl);
-};
-
-} // namespace internal
-
-// A scoped_ptr<T> is like a T*, except that the destructor of scoped_ptr<T>
-// automatically deletes the pointer it holds (if any).
-// That is, scoped_ptr<T> owns the T object that it points to.
-// Like a T*, a scoped_ptr<T> may hold either nullptr or a pointer to a T
-// object. Also like T*, scoped_ptr<T> is thread-compatible, and once you
-// dereference it, you get the thread safety guarantees of T.
-//
-// The size of scoped_ptr is small. On most compilers, when using the
-// DefaultDeleter, sizeof(scoped_ptr<T>) == sizeof(T*). Custom deleters will
-// increase the size proportional to whatever state they need to have. See
-// comments inside scoped_ptr_impl<> for details.
-//
-// Current implementation targets having a strict subset of C++11's
-// unique_ptr<> features. Known deficiencies include not supporting move-only
-// deleters, function pointers as deleters, and deleters with reference
-// types.
-template <class T, class D = rtc::DefaultDeleter<T> >
-class scoped_ptr {
-
- // TODO(ajm): If we ever import RefCountedBase, this check needs to be
- // enabled.
- //static_assert(rtc::internal::IsNotRefCounted<T>::value,
- // "T is refcounted type and needs scoped refptr");
-
- public:
- // The element and deleter types.
- typedef T element_type;
- typedef D deleter_type;
-
- // Constructor. Defaults to initializing with nullptr.
- scoped_ptr() : impl_(nullptr) {}
-
- // Constructor. Takes ownership of p.
- explicit scoped_ptr(element_type* p) : impl_(p) {}
-
- // Constructor. Allows initialization of a stateful deleter.
- scoped_ptr(element_type* p, const D& d) : impl_(p, d) {}
-
- // Constructor. Allows construction from a nullptr.
- scoped_ptr(std::nullptr_t) : impl_(nullptr) {}
-
- // Constructor. Allows construction from a scoped_ptr rvalue for a
- // convertible type and deleter.
- //
- // IMPLEMENTATION NOTE: C++11 unique_ptr<> keeps this constructor distinct
- // from the normal move constructor. By C++11 20.7.1.2.1.21, this constructor
- // has different post-conditions if D is a reference type. Since this
- // implementation does not support deleters with reference type,
- // we do not need a separate move constructor allowing us to avoid one
- // use of SFINAE. You only need to care about this if you modify the
- // implementation of scoped_ptr.
- template <typename U, typename V>
- scoped_ptr(scoped_ptr<U, V>&& other)
- : impl_(&other.impl_) {
- static_assert(!rtc::is_array<U>::value, "U cannot be an array");
- }
-
- // operator=. Allows assignment from a scoped_ptr rvalue for a convertible
- // type and deleter.
- //
- // IMPLEMENTATION NOTE: C++11 unique_ptr<> keeps this operator= distinct from
- // the normal move assignment operator. By C++11 20.7.1.2.3.4, this templated
- // form has different requirements on for move-only Deleters. Since this
- // implementation does not support move-only Deleters, we do not need a
- // separate move assignment operator allowing us to avoid one use of SFINAE.
- // You only need to care about this if you modify the implementation of
- // scoped_ptr.
- template <typename U, typename V>
- scoped_ptr& operator=(scoped_ptr<U, V>&& rhs) {
- static_assert(!rtc::is_array<U>::value, "U cannot be an array");
- impl_.TakeState(&rhs.impl_);
- return *this;
- }
-
- // operator=. Allows assignment from a nullptr. Deletes the currently owned
- // object, if any.
- scoped_ptr& operator=(std::nullptr_t) {
- reset();
- return *this;
- }
-
- // Deleted copy constructor and copy assignment, to make the type move-only.
- scoped_ptr(const scoped_ptr& other) = delete;
- scoped_ptr& operator=(const scoped_ptr& other) = delete;
-
- // Reset. Deletes the currently owned object, if any.
- // Then takes ownership of a new object, if given.
- void reset(element_type* p = nullptr) { impl_.reset(p); }
-
- // Accessors to get the owned object.
- // operator* and operator-> will assert() if there is no current object.
- element_type& operator*() const {
- assert(impl_.get() != nullptr);
- return *impl_.get();
- }
- element_type* operator->() const {
- assert(impl_.get() != nullptr);
- return impl_.get();
- }
- element_type* get() const { return impl_.get(); }
-
- // Access to the deleter.
- deleter_type& get_deleter() { return impl_.get_deleter(); }
- const deleter_type& get_deleter() const { return impl_.get_deleter(); }
-
- // Allow scoped_ptr<element_type> to be used in boolean expressions, but not
- // implicitly convertible to a real bool (which is dangerous).
- //
- // Note that this trick is only safe when the == and != operators
- // are declared explicitly, as otherwise "scoped_ptr1 ==
- // scoped_ptr2" will compile but do the wrong thing (i.e., convert
- // to Testable and then do the comparison).
- private:
- typedef rtc::internal::scoped_ptr_impl<element_type, deleter_type>
- scoped_ptr::*Testable;
-
- public:
- operator Testable() const {
- return impl_.get() ? &scoped_ptr::impl_ : nullptr;
- }
-
- // Comparison operators.
- // These return whether two scoped_ptr refer to the same object, not just to
- // two different but equal objects.
- bool operator==(const element_type* p) const { return impl_.get() == p; }
- bool operator!=(const element_type* p) const { return impl_.get() != p; }
-
- // Swap two scoped pointers.
- void swap(scoped_ptr& p2) {
- impl_.swap(p2.impl_);
- }
-
- // Release a pointer.
- // The return value is the current pointer held by this object. If this object
- // holds a nullptr, the return value is nullptr. After this operation, this
- // object will hold a nullptr, and will not own the object any more.
- element_type* release() WARN_UNUSED_RESULT {
- return impl_.release();
- }
-
- // Delete the currently held pointer and return a pointer
- // to allow overwriting of the current pointer address.
- element_type** accept() WARN_UNUSED_RESULT {
- return impl_.accept();
- }
-
- // Return a pointer to the current pointer address.
- element_type** use() WARN_UNUSED_RESULT {
- return impl_.use();
- }
-
- private:
- // Needed to reach into |impl_| in the constructor.
- template <typename U, typename V> friend class scoped_ptr;
- rtc::internal::scoped_ptr_impl<element_type, deleter_type> impl_;
-
- // Forbidden for API compatibility with std::unique_ptr.
- explicit scoped_ptr(int disallow_construction_from_null);
-
- // Forbid comparison of scoped_ptr types. If U != T, it totally
- // doesn't make sense, and if U == T, it still doesn't make sense
- // because you should never have the same object owned by two different
- // scoped_ptrs.
- template <class U> bool operator==(scoped_ptr<U> const& p2) const;
- template <class U> bool operator!=(scoped_ptr<U> const& p2) const;
-};
-
-template <class T, class D>
-class scoped_ptr<T[], D> {
- public:
- // The element and deleter types.
- typedef T element_type;
- typedef D deleter_type;
-
- // Constructor. Defaults to initializing with nullptr.
- scoped_ptr() : impl_(nullptr) {}
-
- // Constructor. Stores the given array. Note that the argument's type
- // must exactly match T*. In particular:
- // - it cannot be a pointer to a type derived from T, because it is
- // inherently unsafe in the general case to access an array through a
- // pointer whose dynamic type does not match its static type (eg., if
- // T and the derived types had different sizes access would be
- // incorrectly calculated). Deletion is also always undefined
- // (C++98 [expr.delete]p3). If you're doing this, fix your code.
- // - it cannot be const-qualified differently from T per unique_ptr spec
- // (http://cplusplus.github.com/LWG/lwg-active.html#2118). Users wanting
- // to work around this may use implicit_cast<const T*>().
- // However, because of the first bullet in this comment, users MUST
- // NOT use implicit_cast<Base*>() to upcast the static type of the array.
- explicit scoped_ptr(element_type* array) : impl_(array) {}
-
- // Constructor. Allows construction from a nullptr.
- scoped_ptr(std::nullptr_t) : impl_(nullptr) {}
-
- // Constructor. Allows construction from a scoped_ptr rvalue.
- scoped_ptr(scoped_ptr&& other) : impl_(&other.impl_) {}
-
- // operator=. Allows assignment from a scoped_ptr rvalue.
- scoped_ptr& operator=(scoped_ptr&& rhs) {
- impl_.TakeState(&rhs.impl_);
- return *this;
- }
-
- // operator=. Allows assignment from a nullptr. Deletes the currently owned
- // array, if any.
- scoped_ptr& operator=(std::nullptr_t) {
- reset();
- return *this;
- }
-
- // Deleted copy constructor and copy assignment, to make the type move-only.
- scoped_ptr(const scoped_ptr& other) = delete;
- scoped_ptr& operator=(const scoped_ptr& other) = delete;
-
- // Reset. Deletes the currently owned array, if any.
- // Then takes ownership of a new object, if given.
- void reset(element_type* array = nullptr) { impl_.reset(array); }
-
- // Accessors to get the owned array.
- element_type& operator[](size_t i) const {
- assert(impl_.get() != nullptr);
- return impl_.get()[i];
- }
- element_type* get() const { return impl_.get(); }
-
- // Access to the deleter.
- deleter_type& get_deleter() { return impl_.get_deleter(); }
- const deleter_type& get_deleter() const { return impl_.get_deleter(); }
-
- // Allow scoped_ptr<element_type> to be used in boolean expressions, but not
- // implicitly convertible to a real bool (which is dangerous).
- private:
- typedef rtc::internal::scoped_ptr_impl<element_type, deleter_type>
- scoped_ptr::*Testable;
-
- public:
- operator Testable() const {
- return impl_.get() ? &scoped_ptr::impl_ : nullptr;
- }
-
- // Comparison operators.
- // These return whether two scoped_ptr refer to the same object, not just to
- // two different but equal objects.
- bool operator==(element_type* array) const { return impl_.get() == array; }
- bool operator!=(element_type* array) const { return impl_.get() != array; }
-
- // Swap two scoped pointers.
- void swap(scoped_ptr& p2) {
- impl_.swap(p2.impl_);
- }
-
- // Release a pointer.
- // The return value is the current pointer held by this object. If this object
- // holds a nullptr, the return value is nullptr. After this operation, this
- // object will hold a nullptr, and will not own the object any more.
- element_type* release() WARN_UNUSED_RESULT {
- return impl_.release();
- }
-
- // Delete the currently held pointer and return a pointer
- // to allow overwriting of the current pointer address.
- element_type** accept() WARN_UNUSED_RESULT {
- return impl_.accept();
- }
-
- // Return a pointer to the current pointer address.
- element_type** use() WARN_UNUSED_RESULT {
- return impl_.use();
- }
-
- private:
- // Force element_type to be a complete type.
- enum { type_must_be_complete = sizeof(element_type) };
-
- // Actually hold the data.
- rtc::internal::scoped_ptr_impl<element_type, deleter_type> impl_;
-
- // Disable initialization from any type other than element_type*, by
- // providing a constructor that matches such an initialization, but is
- // private and has no definition. This is disabled because it is not safe to
- // call delete[] on an array whose static type does not match its dynamic
- // type.
- template <typename U> explicit scoped_ptr(U* array);
- explicit scoped_ptr(int disallow_construction_from_null);
-
- // Disable reset() from any type other than element_type*, for the same
- // reasons as the constructor above.
- template <typename U> void reset(U* array);
- void reset(int disallow_reset_from_null);
-
- // Forbid comparison of scoped_ptr types. If U != T, it totally
- // doesn't make sense, and if U == T, it still doesn't make sense
- // because you should never have the same object owned by two different
- // scoped_ptrs.
- template <class U> bool operator==(scoped_ptr<U> const& p2) const;
- template <class U> bool operator!=(scoped_ptr<U> const& p2) const;
-};
-
-template <class T, class D>
-void swap(rtc::scoped_ptr<T, D>& p1, rtc::scoped_ptr<T, D>& p2) {
- p1.swap(p2);
-}
-
-// Convert between the most common kinds of scoped_ptr and unique_ptr.
-template <typename T>
-std::unique_ptr<T> ScopedToUnique(scoped_ptr<T> sp) {
- return std::unique_ptr<T>(sp.release());
-}
-template <typename T>
-scoped_ptr<T> UniqueToScoped(std::unique_ptr<T> up) {
- return scoped_ptr<T>(up.release());
-}
-
-} // namespace rtc
-
-template <class T, class D>
-bool operator==(T* p1, const rtc::scoped_ptr<T, D>& p2) {
- return p1 == p2.get();
-}
-
-template <class T, class D>
-bool operator!=(T* p1, const rtc::scoped_ptr<T, D>& p2) {
- return p1 != p2.get();
-}
-
-// A function to convert T* into scoped_ptr<T>
-// Doing e.g. make_scoped_ptr(new FooBarBaz<type>(arg)) is a shorter notation
-// for scoped_ptr<FooBarBaz<type> >(new FooBarBaz<type>(arg))
-template <typename T>
-rtc::scoped_ptr<T> rtc_make_scoped_ptr(T* ptr) {
- return rtc::scoped_ptr<T>(ptr);
-}
-
-#endif // #ifndef WEBRTC_BASE_SCOPED_PTR_H__
diff --git a/chromium/third_party/webrtc/base/scopedptrcollection_unittest.cc b/chromium/third_party/webrtc/base/scopedptrcollection_unittest.cc
index 933173e3faf..929fd0f324c 100644
--- a/chromium/third_party/webrtc/base/scopedptrcollection_unittest.cc
+++ b/chromium/third_party/webrtc/base/scopedptrcollection_unittest.cc
@@ -8,6 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scopedptrcollection.h"
#include "webrtc/base/gunit.h"
@@ -41,7 +44,7 @@ class ScopedPtrCollectionTest : public testing::Test {
}
int num_instances_;
- scoped_ptr<ScopedPtrCollection<InstanceCounter> > collection_;
+ std::unique_ptr<ScopedPtrCollection<InstanceCounter> > collection_;
};
TEST_F(ScopedPtrCollectionTest, PushBack) {
diff --git a/chromium/third_party/webrtc/base/sharedexclusivelock_unittest.cc b/chromium/third_party/webrtc/base/sharedexclusivelock_unittest.cc
index 9b64ed760af..45902af39e6 100644
--- a/chromium/third_party/webrtc/base/sharedexclusivelock_unittest.cc
+++ b/chromium/third_party/webrtc/base/sharedexclusivelock_unittest.cc
@@ -8,15 +8,26 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/base/common.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/messagehandler.h"
#include "webrtc/base/messagequeue.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sharedexclusivelock.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/timeutils.h"
+#if defined(MEMORY_SANITIZER)
+// Flaky under MemorySanitizer, see
+// https://bugs.chromium.org/p/webrtc/issues/detail?id=5824
+#define MAYBE_TestSharedExclusive DISABLED_TestSharedExclusive
+#define MAYBE_TestExclusiveExclusive DISABLED_TestExclusiveExclusive
+#else
+#define MAYBE_TestSharedExclusive TestSharedExclusive
+#define MAYBE_TestExclusiveExclusive TestExclusiveExclusive
+#endif
+
namespace rtc {
static const uint32_t kMsgRead = 0;
@@ -39,12 +50,12 @@ class SharedExclusiveTask : public MessageHandler {
worker_thread_->Start();
}
- int waiting_time_in_ms() const { return waiting_time_in_ms_; }
+ int64_t waiting_time_in_ms() const { return waiting_time_in_ms_; }
protected:
- scoped_ptr<Thread> worker_thread_;
+ std::unique_ptr<Thread> worker_thread_;
SharedExclusiveLock* shared_exclusive_lock_;
- int waiting_time_in_ms_;
+ int64_t waiting_time_in_ms_;
int* value_;
bool* done_;
};
@@ -68,10 +79,10 @@ class ReadTask : public SharedExclusiveTask {
TypedMessageData<int*>* message_data =
static_cast<TypedMessageData<int*>*>(message->pdata);
- uint32_t start_time = Time();
+ int64_t start_time = TimeMillis();
{
SharedScope ss(shared_exclusive_lock_);
- waiting_time_in_ms_ = TimeDiff(Time(), start_time);
+ waiting_time_in_ms_ = TimeDiff(TimeMillis(), start_time);
Thread::SleepMs(kProcessTimeInMs);
*message_data->data() = *value_;
@@ -101,10 +112,10 @@ class WriteTask : public SharedExclusiveTask {
TypedMessageData<int>* message_data =
static_cast<TypedMessageData<int>*>(message->pdata);
- uint32_t start_time = Time();
+ int64_t start_time = TimeMillis();
{
ExclusiveScope es(shared_exclusive_lock_);
- waiting_time_in_ms_ = TimeDiff(Time(), start_time);
+ waiting_time_in_ms_ = TimeDiff(TimeMillis(), start_time);
Thread::SleepMs(kProcessTimeInMs);
*value_ = message_data->data();
@@ -127,7 +138,7 @@ class SharedExclusiveLockTest
}
protected:
- scoped_ptr<SharedExclusiveLock> shared_exclusive_lock_;
+ std::unique_ptr<SharedExclusiveLock> shared_exclusive_lock_;
int value_;
};
@@ -157,7 +168,7 @@ TEST_F(SharedExclusiveLockTest, TestSharedShared) {
EXPECT_LE(reader1.waiting_time_in_ms(), kNoWaitThresholdInMs);
}
-TEST_F(SharedExclusiveLockTest, TestSharedExclusive) {
+TEST_F(SharedExclusiveLockTest, MAYBE_TestSharedExclusive) {
bool done;
WriteTask writer(shared_exclusive_lock_.get(), &value_, &done);
@@ -196,7 +207,7 @@ TEST_F(SharedExclusiveLockTest, TestExclusiveShared) {
EXPECT_GE(reader.waiting_time_in_ms(), kWaitThresholdInMs);
}
-TEST_F(SharedExclusiveLockTest, TestExclusiveExclusive) {
+TEST_F(SharedExclusiveLockTest, MAYBE_TestExclusiveExclusive) {
bool done;
WriteTask writer(shared_exclusive_lock_.get(), &value_, &done);
diff --git a/chromium/third_party/webrtc/base/signalthread_unittest.cc b/chromium/third_party/webrtc/base/signalthread_unittest.cc
index a583aefcb5c..57190128c53 100644
--- a/chromium/third_party/webrtc/base/signalthread_unittest.cc
+++ b/chromium/third_party/webrtc/base/signalthread_unittest.cc
@@ -8,6 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/signalthread.h"
#include "webrtc/base/thread.h"
@@ -135,7 +138,7 @@ class OwnerThread : public Thread, public sigslot::has_slots<> {
// when shutting down the process.
TEST_F(SignalThreadTest, OwnerThreadGoesAway) {
{
- scoped_ptr<OwnerThread> owner(new OwnerThread(this));
+ std::unique_ptr<OwnerThread> owner(new OwnerThread(this));
main_thread_ = owner.get();
owner->Start();
while (!owner->has_run()) {
diff --git a/chromium/third_party/webrtc/base/socket_unittest.cc b/chromium/third_party/webrtc/base/socket_unittest.cc
index d1369e2f78c..5f6de42a2f1 100644
--- a/chromium/third_party/webrtc/base/socket_unittest.cc
+++ b/chromium/third_party/webrtc/base/socket_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/base/socket_unittest.h"
#include "webrtc/base/arraysize.h"
@@ -198,14 +200,14 @@ void SocketTest::ConnectInternal(const IPAddress& loopback) {
SocketAddress accept_addr;
// Create client.
- scoped_ptr<AsyncSocket> client(ss_->CreateAsyncSocket(loopback.family(),
- SOCK_STREAM));
+ std::unique_ptr<AsyncSocket> client(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(client.get());
EXPECT_EQ(AsyncSocket::CS_CLOSED, client->GetState());
EXPECT_PRED1(IsUnspecOrEmptyIP, client->GetLocalAddress().ipaddr());
// Create server and listen.
- scoped_ptr<AsyncSocket> server(
+ std::unique_ptr<AsyncSocket> server(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(server.get());
EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
@@ -229,7 +231,7 @@ void SocketTest::ConnectInternal(const IPAddress& loopback) {
// Server has pending connection, accept it.
EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
- scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ std::unique_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
ASSERT_TRUE(accepted);
EXPECT_FALSE(accept_addr.IsNil());
EXPECT_EQ(accepted->GetRemoteAddress(), accept_addr);
@@ -253,12 +255,12 @@ void SocketTest::ConnectWithDnsLookupInternal(const IPAddress& loopback,
SocketAddress accept_addr;
// Create client.
- scoped_ptr<AsyncSocket> client(
+ std::unique_ptr<AsyncSocket> client(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(client.get());
// Create server and listen.
- scoped_ptr<AsyncSocket> server(
+ std::unique_ptr<AsyncSocket> server(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(server.get());
EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
@@ -278,7 +280,7 @@ void SocketTest::ConnectWithDnsLookupInternal(const IPAddress& loopback,
// Server has pending connection, accept it.
EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
- scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ std::unique_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
ASSERT_TRUE(accepted);
EXPECT_FALSE(accept_addr.IsNil());
EXPECT_EQ(accepted->GetRemoteAddress(), accept_addr);
@@ -301,12 +303,12 @@ void SocketTest::ConnectFailInternal(const IPAddress& loopback) {
SocketAddress accept_addr;
// Create client.
- scoped_ptr<AsyncSocket> client(
+ std::unique_ptr<AsyncSocket> client(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(client.get());
// Create server, but don't listen yet.
- scoped_ptr<AsyncSocket> server(
+ std::unique_ptr<AsyncSocket> server(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(server.get());
EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
@@ -334,12 +336,12 @@ void SocketTest::ConnectWithDnsLookupFailInternal(const IPAddress& loopback) {
SocketAddress accept_addr;
// Create client.
- scoped_ptr<AsyncSocket> client(
+ std::unique_ptr<AsyncSocket> client(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(client.get());
// Create server, but don't listen yet.
- scoped_ptr<AsyncSocket> server(
+ std::unique_ptr<AsyncSocket> server(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(server.get());
EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
@@ -372,13 +374,13 @@ void SocketTest::ConnectWithDnsLookupFailInternal(const IPAddress& loopback) {
void SocketTest::ConnectWithClosedSocketInternal(const IPAddress& loopback) {
// Create server and listen.
- scoped_ptr<AsyncSocket> server(
+ std::unique_ptr<AsyncSocket> server(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
EXPECT_EQ(0, server->Listen(5));
// Create a client and put in to CS_CLOSED state.
- scoped_ptr<AsyncSocket> client(
+ std::unique_ptr<AsyncSocket> client(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
EXPECT_EQ(0, client->Close());
EXPECT_EQ(AsyncSocket::CS_CLOSED, client->GetState());
@@ -391,13 +393,13 @@ void SocketTest::ConnectWithClosedSocketInternal(const IPAddress& loopback) {
void SocketTest::ConnectWhileNotClosedInternal(const IPAddress& loopback) {
// Create server and listen.
testing::StreamSink sink;
- scoped_ptr<AsyncSocket> server(
+ std::unique_ptr<AsyncSocket> server(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(server.get());
EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
EXPECT_EQ(0, server->Listen(5));
// Create client, connect.
- scoped_ptr<AsyncSocket> client(
+ std::unique_ptr<AsyncSocket> client(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
EXPECT_EQ(0, client->Connect(SocketAddress(server->GetLocalAddress())));
EXPECT_EQ(AsyncSocket::CS_CONNECTING, client->GetState());
@@ -408,7 +410,7 @@ void SocketTest::ConnectWhileNotClosedInternal(const IPAddress& loopback) {
// Accept the original connection.
SocketAddress accept_addr;
EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
- scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ std::unique_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
ASSERT_TRUE(accepted);
EXPECT_FALSE(accept_addr.IsNil());
@@ -435,12 +437,12 @@ void SocketTest::ServerCloseDuringConnectInternal(const IPAddress& loopback) {
testing::StreamSink sink;
// Create client.
- scoped_ptr<AsyncSocket> client(
+ std::unique_ptr<AsyncSocket> client(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(client.get());
// Create server and listen.
- scoped_ptr<AsyncSocket> server(
+ std::unique_ptr<AsyncSocket> server(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(server.get());
EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
@@ -464,12 +466,12 @@ void SocketTest::ClientCloseDuringConnectInternal(const IPAddress& loopback) {
SocketAddress accept_addr;
// Create client.
- scoped_ptr<AsyncSocket> client(
+ std::unique_ptr<AsyncSocket> client(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(client.get());
// Create server and listen.
- scoped_ptr<AsyncSocket> server(
+ std::unique_ptr<AsyncSocket> server(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(server.get());
EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
@@ -483,7 +485,7 @@ void SocketTest::ClientCloseDuringConnectInternal(const IPAddress& loopback) {
client->Close();
// The connection should still be able to be accepted.
- scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ std::unique_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
ASSERT_TRUE(accepted);
sink.Monitor(accepted.get());
EXPECT_EQ(AsyncSocket::CS_CONNECTED, accepted->GetState());
@@ -502,12 +504,12 @@ void SocketTest::ServerCloseInternal(const IPAddress& loopback) {
SocketAddress accept_addr;
// Create client.
- scoped_ptr<AsyncSocket> client(
+ std::unique_ptr<AsyncSocket> client(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(client.get());
// Create server and listen.
- scoped_ptr<AsyncSocket> server(
+ std::unique_ptr<AsyncSocket> server(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(server.get());
EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
@@ -518,7 +520,7 @@ void SocketTest::ServerCloseInternal(const IPAddress& loopback) {
// Accept connection.
EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
- scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ std::unique_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
ASSERT_TRUE(accepted);
sink.Monitor(accepted.get());
@@ -576,13 +578,13 @@ void SocketTest::CloseInClosedCallbackInternal(const IPAddress& loopback) {
SocketAddress accept_addr;
// Create client.
- scoped_ptr<AsyncSocket> client(
+ std::unique_ptr<AsyncSocket> client(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(client.get());
client->SignalCloseEvent.connect(&closer, &SocketCloser::OnClose);
// Create server and listen.
- scoped_ptr<AsyncSocket> server(
+ std::unique_ptr<AsyncSocket> server(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(server.get());
EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
@@ -593,7 +595,7 @@ void SocketTest::CloseInClosedCallbackInternal(const IPAddress& loopback) {
// Accept connection.
EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
- scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ std::unique_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
ASSERT_TRUE(accepted);
sink.Monitor(accepted.get());
@@ -630,9 +632,9 @@ void SocketTest::SocketServerWaitInternal(const IPAddress& loopback) {
SocketAddress accept_addr;
// Create & connect server and client sockets.
- scoped_ptr<AsyncSocket> client(
+ std::unique_ptr<AsyncSocket> client(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
- scoped_ptr<AsyncSocket> server(
+ std::unique_ptr<AsyncSocket> server(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(client.get());
sink.Monitor(server.get());
@@ -642,7 +644,7 @@ void SocketTest::SocketServerWaitInternal(const IPAddress& loopback) {
EXPECT_EQ(0, client->Connect(server->GetLocalAddress()));
EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
- scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ std::unique_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
ASSERT_TRUE(accepted);
sink.Monitor(accepted.get());
EXPECT_EQ(AsyncSocket::CS_CONNECTED, accepted->GetState());
@@ -663,7 +665,7 @@ void SocketTest::SocketServerWaitInternal(const IPAddress& loopback) {
EXPECT_FALSE(sink.Check(accepted.get(), testing::SSE_READ));
// Shouldn't signal when blocked in a thread Send, where process_io is false.
- scoped_ptr<Thread> thread(new Thread());
+ std::unique_ptr<Thread> thread(new Thread());
thread->Start();
Sleeper sleeper;
TypedMessageData<AsyncSocket*> data(client.get());
@@ -681,12 +683,12 @@ void SocketTest::TcpInternal(const IPAddress& loopback, size_t data_size,
SocketAddress accept_addr;
// Create receiving client.
- scoped_ptr<AsyncSocket> receiver(
+ std::unique_ptr<AsyncSocket> receiver(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(receiver.get());
// Create server and listen.
- scoped_ptr<AsyncSocket> server(
+ std::unique_ptr<AsyncSocket> server(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(server.get());
EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
@@ -697,7 +699,7 @@ void SocketTest::TcpInternal(const IPAddress& loopback, size_t data_size,
// Accept connection which will be used for sending.
EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
- scoped_ptr<AsyncSocket> sender(server->Accept(&accept_addr));
+ std::unique_ptr<AsyncSocket> sender(server->Accept(&accept_addr));
ASSERT_TRUE(sender);
sink.Monitor(sender.get());
@@ -809,12 +811,12 @@ void SocketTest::SingleFlowControlCallbackInternal(const IPAddress& loopback) {
SocketAddress accept_addr;
// Create client.
- scoped_ptr<AsyncSocket> client(
+ std::unique_ptr<AsyncSocket> client(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(client.get());
// Create server and listen.
- scoped_ptr<AsyncSocket> server(
+ std::unique_ptr<AsyncSocket> server(
ss_->CreateAsyncSocket(loopback.family(), SOCK_STREAM));
sink.Monitor(server.get());
EXPECT_EQ(0, server->Bind(SocketAddress(loopback, 0)));
@@ -825,7 +827,7 @@ void SocketTest::SingleFlowControlCallbackInternal(const IPAddress& loopback) {
// Accept connection.
EXPECT_TRUE_WAIT((sink.Check(server.get(), testing::SSE_READ)), kTimeout);
- scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ std::unique_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
ASSERT_TRUE(accepted);
sink.Monitor(accepted.get());
@@ -887,9 +889,9 @@ void SocketTest::UdpInternal(const IPAddress& loopback) {
delete socket;
// Test send/receive behavior.
- scoped_ptr<TestClient> client1(
+ std::unique_ptr<TestClient> client1(
new TestClient(AsyncUDPSocket::Create(ss_, addr1)));
- scoped_ptr<TestClient> client2(
+ std::unique_ptr<TestClient> client2(
new TestClient(AsyncUDPSocket::Create(ss_, empty)));
SocketAddress addr2;
@@ -928,10 +930,10 @@ void SocketTest::UdpReadyToSend(const IPAddress& loopback) {
SocketAddress test_addr(dest, 2345);
// Test send
- scoped_ptr<TestClient> client(
+ std::unique_ptr<TestClient> client(
new TestClient(AsyncUDPSocket::Create(ss_, empty)));
int test_packet_size = 1200;
- rtc::scoped_ptr<char[]> test_packet(new char[test_packet_size]);
+ std::unique_ptr<char[]> test_packet(new char[test_packet_size]);
// Init the test packet just to avoid memcheck warning.
memset(test_packet.get(), 0, test_packet_size);
// Set the send buffer size to the same size as the test packet to have a
@@ -965,7 +967,7 @@ void SocketTest::UdpReadyToSend(const IPAddress& loopback) {
}
void SocketTest::GetSetOptionsInternal(const IPAddress& loopback) {
- rtc::scoped_ptr<AsyncSocket> socket(
+ std::unique_ptr<AsyncSocket> socket(
ss_->CreateAsyncSocket(loopback.family(), SOCK_DGRAM));
socket->Bind(SocketAddress(loopback, 0));
@@ -1000,9 +1002,8 @@ void SocketTest::GetSetOptionsInternal(const IPAddress& loopback) {
// Skip the esimate MTU test for IPv6 for now.
if (loopback.family() != AF_INET6) {
// Try estimating MTU.
- rtc::scoped_ptr<AsyncSocket>
- mtu_socket(
- ss_->CreateAsyncSocket(loopback.family(), SOCK_DGRAM));
+ std::unique_ptr<AsyncSocket> mtu_socket(
+ ss_->CreateAsyncSocket(loopback.family(), SOCK_DGRAM));
mtu_socket->Bind(SocketAddress(loopback, 0));
uint16_t mtu;
// should fail until we connect
diff --git a/chromium/third_party/webrtc/base/socketadapters.h b/chromium/third_party/webrtc/base/socketadapters.h
index ae0ed9ac392..970a3b5eb6c 100644
--- a/chromium/third_party/webrtc/base/socketadapters.h
+++ b/chromium/third_party/webrtc/base/socketadapters.h
@@ -15,6 +15,7 @@
#include <string>
#include "webrtc/base/asyncsocket.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/cryptstring.h"
#include "webrtc/base/logging.h"
diff --git a/chromium/third_party/webrtc/base/socketserver.h b/chromium/third_party/webrtc/base/socketserver.h
index 1b4136d74e3..7071f225268 100644
--- a/chromium/third_party/webrtc/base/socketserver.h
+++ b/chromium/third_party/webrtc/base/socketserver.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_BASE_SOCKETSERVER_H_
#define WEBRTC_BASE_SOCKETSERVER_H_
+#include <memory>
#include "webrtc/base/socketfactory.h"
namespace rtc {
@@ -27,6 +28,7 @@ class SocketServer : public SocketFactory {
public:
static const int kForever = -1;
+ static std::unique_ptr<SocketServer> CreateDefault();
// When the socket server is installed into a Thread, this function is
// called to allow the socket server to use the thread's message queue for
// any messaging that it might need to perform.
diff --git a/chromium/third_party/webrtc/base/socketstream.h b/chromium/third_party/webrtc/base/socketstream.h
index fd8b559007b..c64656345d2 100644
--- a/chromium/third_party/webrtc/base/socketstream.h
+++ b/chromium/third_party/webrtc/base/socketstream.h
@@ -13,6 +13,7 @@
#include "webrtc/base/asyncsocket.h"
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/stream.h"
namespace rtc {
diff --git a/chromium/third_party/webrtc/base/ssladapter_unittest.cc b/chromium/third_party/webrtc/base/ssladapter_unittest.cc
index 7869b6eb63b..16e5c2ea322 100644
--- a/chromium/third_party/webrtc/base/ssladapter_unittest.cc
+++ b/chromium/third_party/webrtc/base/ssladapter_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "webrtc/base/gunit.h"
@@ -123,7 +124,7 @@ class SSLAdapterTestDummyClient : public sigslot::has_slots<> {
private:
const rtc::SSLMode ssl_mode_;
- rtc::scoped_ptr<rtc::SSLAdapter> ssl_adapter_;
+ std::unique_ptr<rtc::SSLAdapter> ssl_adapter_;
std::string data_;
};
@@ -259,10 +260,10 @@ class SSLAdapterTestDummyServer : public sigslot::has_slots<> {
const rtc::SSLMode ssl_mode_;
- rtc::scoped_ptr<rtc::AsyncSocket> server_socket_;
- rtc::scoped_ptr<rtc::SSLStreamAdapter> ssl_stream_adapter_;
+ std::unique_ptr<rtc::AsyncSocket> server_socket_;
+ std::unique_ptr<rtc::SSLStreamAdapter> ssl_stream_adapter_;
- rtc::scoped_ptr<rtc::SSLIdentity> ssl_identity_;
+ std::unique_ptr<rtc::SSLIdentity> ssl_identity_;
std::string data_;
};
@@ -339,8 +340,8 @@ class SSLAdapterTestBase : public testing::Test,
const rtc::SocketServerScope ss_scope_;
- rtc::scoped_ptr<SSLAdapterTestDummyServer> server_;
- rtc::scoped_ptr<SSLAdapterTestDummyClient> client_;
+ std::unique_ptr<SSLAdapterTestDummyServer> server_;
+ std::unique_ptr<SSLAdapterTestDummyClient> client_;
int handshake_wait_;
};
diff --git a/chromium/third_party/webrtc/base/sslfingerprint.cc b/chromium/third_party/webrtc/base/sslfingerprint.cc
index 1939b4fd0bd..2c3e1e974be 100644
--- a/chromium/third_party/webrtc/base/sslfingerprint.cc
+++ b/chromium/third_party/webrtc/base/sslfingerprint.cc
@@ -85,7 +85,7 @@ std::string SSLFingerprint::GetRfc4572Fingerprint() const {
return fingerprint;
}
-std::string SSLFingerprint::ToString() {
+std::string SSLFingerprint::ToString() const {
std::string fp_str = algorithm;
fp_str.append(" ");
fp_str.append(GetRfc4572Fingerprint());
diff --git a/chromium/third_party/webrtc/base/sslfingerprint.h b/chromium/third_party/webrtc/base/sslfingerprint.h
index 1413a4cd272..4ffb2b0524e 100644
--- a/chromium/third_party/webrtc/base/sslfingerprint.h
+++ b/chromium/third_party/webrtc/base/sslfingerprint.h
@@ -41,7 +41,7 @@ struct SSLFingerprint {
std::string GetRfc4572Fingerprint() const;
- std::string ToString();
+ std::string ToString() const;
std::string algorithm;
rtc::CopyOnWriteBuffer digest;
diff --git a/chromium/third_party/webrtc/base/sslidentity.cc b/chromium/third_party/webrtc/base/sslidentity.cc
index 5fa8bbf6b43..5f3a73fad17 100644
--- a/chromium/third_party/webrtc/base/sslidentity.cc
+++ b/chromium/third_party/webrtc/base/sslidentity.cc
@@ -187,6 +187,14 @@ SSLIdentity* SSLIdentity::FromPEMStrings(const std::string& private_key,
return OpenSSLIdentity::FromPEMStrings(private_key, certificate);
}
+bool operator==(const SSLIdentity& a, const SSLIdentity& b) {
+ return static_cast<const OpenSSLIdentity&>(a) ==
+ static_cast<const OpenSSLIdentity&>(b);
+}
+bool operator!=(const SSLIdentity& a, const SSLIdentity& b) {
+ return !(a == b);
+}
+
#else // !SSL_USE_OPENSSL
#error "No SSL implementation"
diff --git a/chromium/third_party/webrtc/base/sslidentity.h b/chromium/third_party/webrtc/base/sslidentity.h
index 77c9e186c1c..8e7e7861308 100644
--- a/chromium/third_party/webrtc/base/sslidentity.h
+++ b/chromium/third_party/webrtc/base/sslidentity.h
@@ -14,12 +14,13 @@
#define WEBRTC_BASE_SSLIDENTITY_H_
#include <algorithm>
+#include <memory>
#include <string>
#include <vector>
#include "webrtc/base/buffer.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/messagedigest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/timeutils.h"
namespace rtc {
@@ -53,7 +54,7 @@ class SSLCertificate {
// Provides the cert chain, or null. The chain includes a copy of each
// certificate, excluding the leaf.
- virtual rtc::scoped_ptr<SSLCertChain> GetChain() const = 0;
+ virtual std::unique_ptr<SSLCertChain> GetChain() const = 0;
// Returns a PEM encoded string representation of the certificate.
virtual std::string ToPEMString() const = 0;
@@ -113,13 +114,23 @@ class SSLCertChain {
RTC_DISALLOW_COPY_AND_ASSIGN(SSLCertChain);
};
-// KT_DEFAULT is currently an alias for KT_RSA. This is likely to change.
// KT_LAST is intended for vector declarations and loops over all key types;
// it does not represent any key type in itself.
-// TODO(hbos,torbjorng): Don't change KT_DEFAULT without first updating
-// PeerConnectionFactory_nativeCreatePeerConnection's certificate generation
-// code.
-enum KeyType { KT_RSA, KT_ECDSA, KT_LAST, KT_DEFAULT = KT_RSA };
+// KT_DEFAULT is used as the default KeyType for KeyParams.
+enum KeyType {
+ KT_RSA, KT_ECDSA, KT_LAST,
+#if defined(WEBRTC_CHROMIUM_BUILD)
+ // TODO(hbos): Because of an experiment running in Chromium which relies on
+ // RSA being the default (for performance reasons) we have this #if. ECDSA
+ // launches in Chromium by flipping a flag which overrides the default. As
+ // soon as the experiment has ended and there is no risk of RSA being the
+ // default we should make KT_DEFAULT = KT_ECDSA unconditionally.
+ // crbug.com/611698
+ KT_DEFAULT = KT_RSA
+#else
+ KT_DEFAULT = KT_ECDSA
+#endif
+};
static const int kRsaDefaultModSize = 1024;
static const int kRsaDefaultExponent = 0x10001; // = 2^16+1 = 65537
@@ -226,6 +237,8 @@ class SSLIdentity {
// Returns a temporary reference to the certificate.
virtual const SSLCertificate& certificate() const = 0;
+ virtual std::string PrivateKeyToPEMString() const = 0;
+ virtual std::string PublicKeyToPEMString() const = 0;
// Helpers for parsing converting between PEM and DER format.
static bool PemToDer(const std::string& pem_type,
@@ -236,6 +249,9 @@ class SSLIdentity {
size_t length);
};
+bool operator==(const SSLIdentity& a, const SSLIdentity& b);
+bool operator!=(const SSLIdentity& a, const SSLIdentity& b);
+
// Convert from ASN1 time as restricted by RFC 5280 to seconds from 1970-01-01
// 00.00 ("epoch"). If the ASN1 time cannot be read, return -1. The data at
// |s| is not 0-terminated; its char count is defined by |length|.
diff --git a/chromium/third_party/webrtc/base/sslidentity_unittest.cc b/chromium/third_party/webrtc/base/sslidentity_unittest.cc
index f110f76861e..399fe9d4745 100644
--- a/chromium/third_party/webrtc/base/sslidentity_unittest.cc
+++ b/chromium/third_party/webrtc/base/sslidentity_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "webrtc/base/gunit.h"
@@ -173,12 +174,43 @@ class SSLIdentityTest : public testing::Test {
EXPECT_EQ(0, memcmp(digest, expected_digest, expected_len));
}
- private:
- rtc::scoped_ptr<SSLIdentity> identity_rsa1_;
- rtc::scoped_ptr<SSLIdentity> identity_rsa2_;
- rtc::scoped_ptr<SSLIdentity> identity_ecdsa1_;
- rtc::scoped_ptr<SSLIdentity> identity_ecdsa2_;
- rtc::scoped_ptr<rtc::SSLCertificate> test_cert_;
+ void TestCloningIdentity(const SSLIdentity& identity) {
+ // Convert |identity| to PEM strings and create a new identity by converting
+ // back from the string format.
+ std::string priv_pem = identity.PrivateKeyToPEMString();
+ std::string publ_pem = identity.PublicKeyToPEMString();
+ std::string cert_pem = identity.certificate().ToPEMString();
+ std::unique_ptr<SSLIdentity> clone(
+ SSLIdentity::FromPEMStrings(priv_pem, cert_pem));
+ EXPECT_TRUE(clone);
+
+ // Make sure the clone is identical to the original.
+ EXPECT_TRUE(identity == *clone);
+ ASSERT_EQ(identity.certificate().CertificateExpirationTime(),
+ clone->certificate().CertificateExpirationTime());
+
+ // At this point we are confident that the identities are identical. To be
+ // extra sure, we compare PEM strings of the clone with the original. Note
+ // that the PEM strings of two identities are not strictly guaranteed to be
+ // equal (they describe structs whose members could be listed in a different
+ // order, for example). But because the same function is used to produce
+ // both PEMs, its a good enough bet that this comparison will work. If the
+ // assumption stops holding in the future we can always remove this from the
+ // unittest.
+ std::string clone_priv_pem = clone->PrivateKeyToPEMString();
+ std::string clone_publ_pem = clone->PublicKeyToPEMString();
+ std::string clone_cert_pem = clone->certificate().ToPEMString();
+ ASSERT_EQ(priv_pem, clone_priv_pem);
+ ASSERT_EQ(publ_pem, clone_publ_pem);
+ ASSERT_EQ(cert_pem, clone_cert_pem);
+ }
+
+ protected:
+ std::unique_ptr<SSLIdentity> identity_rsa1_;
+ std::unique_ptr<SSLIdentity> identity_rsa2_;
+ std::unique_ptr<SSLIdentity> identity_ecdsa1_;
+ std::unique_ptr<SSLIdentity> identity_ecdsa2_;
+ std::unique_ptr<rtc::SSLCertificate> test_cert_;
};
TEST_F(SSLIdentityTest, FixedDigestSHA1) {
@@ -219,71 +251,118 @@ TEST_F(SSLIdentityTest, DigestSHA512) {
TestDigestForGeneratedCert(rtc::DIGEST_SHA_512, 64);
}
+TEST_F(SSLIdentityTest, IdentityComparison) {
+ EXPECT_TRUE(*identity_rsa1_ == *identity_rsa1_);
+ EXPECT_FALSE(*identity_rsa1_ == *identity_rsa2_);
+ EXPECT_FALSE(*identity_rsa1_ == *identity_ecdsa1_);
+ EXPECT_FALSE(*identity_rsa1_ == *identity_ecdsa2_);
+
+ EXPECT_TRUE(*identity_rsa2_ == *identity_rsa2_);
+ EXPECT_FALSE(*identity_rsa2_ == *identity_ecdsa1_);
+ EXPECT_FALSE(*identity_rsa2_ == *identity_ecdsa2_);
+
+ EXPECT_TRUE(*identity_ecdsa1_ == *identity_ecdsa1_);
+ EXPECT_FALSE(*identity_ecdsa1_ == *identity_ecdsa2_);
+}
+
TEST_F(SSLIdentityTest, FromPEMStringsRSA) {
+ // These PEM strings were created by generating an identity with
+ // |SSLIdentity::Generate| and invoking |identity->PrivateKeyToPEMString()|,
+ // |identity->PublicKeyToPEMString()| and
+ // |identity->certificate().ToPEMString()|. If the crypto library is updated,
+ // and the update changes the string form of the keys, these will have to be
+ // updated too.
static const char kRSA_PRIVATE_KEY_PEM[] =
- "-----BEGIN RSA PRIVATE KEY-----\n"
- "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMYRkbhmI7kVA/rM\n"
- "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
- "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
- "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAECgYAvgOs4FJcgvp+TuREx7YtiYVsH\n"
- "mwQPTum2z/8VzWGwR8BBHBvIpVe1MbD/Y4seyI2aco/7UaisatSgJhsU46/9Y4fq\n"
- "2TwXH9QANf4at4d9n/R6rzwpAJOpgwZgKvdQjkfrKTtgLV+/dawvpxUYkRH4JZM1\n"
- "CVGukMfKNrSVH4Ap4QJBAOJmGV1ASPnB4r4nc99at7JuIJmd7fmuVUwUgYi4XgaR\n"
- "WhScBsgYwZ/JoywdyZJgnbcrTDuVcWG56B3vXbhdpMsCQQDf9zeJrjnPZ3Cqm79y\n"
- "kdqANep0uwZciiNiWxsQrCHztywOvbFhdp8iYVFG9EK8DMY41Y5TxUwsHD+67zao\n"
- "ZNqJAkEA1suLUP/GvL8IwuRneQd2tWDqqRQ/Td3qq03hP7e77XtF/buya3Ghclo5\n"
- "54czUR89QyVfJEC6278nzA7n2h1uVQJAcG6mztNL6ja/dKZjYZye2CY44QjSlLo0\n"
- "MTgTSjdfg/28fFn2Jjtqf9Pi/X+50LWI/RcYMC2no606wRk9kyOuIQJBAK6VSAim\n"
- "1pOEjsYQn0X5KEIrz1G3bfCbB848Ime3U2/FWlCHMr6ch8kCZ5d1WUeJD3LbwMNG\n"
- "UCXiYxSsu20QNVw=\n"
- "-----END RSA PRIVATE KEY-----\n";
-
+ "-----BEGIN PRIVATE KEY-----\n"
+ "MIICdQIBADANBgkqhkiG9w0BAQEFAASCAl8wggJbAgEAAoGBAMQPqDStRlYeDpkX\n"
+ "erRmv+a1naM8vSVSY0gG2plnrnofViWRW3MRqWC+020MsIj3hPZeSAnt/y/FL/nr\n"
+ "4Ea7NXcwdRo1/1xEK7U/f/cjSg1aunyvHCHwcFcMr31HLFvHr0ZgcFwbgIuFLNEl\n"
+ "7kK5HMO9APz1ntUjek8BmBj8yMl9AgMBAAECgYA8FWBC5GcNtSBcIinkZyigF0A7\n"
+ "6j081sa+J/uNz4xUuI257ZXM6biygUhhvuXK06/XoIULJfhyN0fAm1yb0HtNhiUs\n"
+ "kMOYeon6b8FqFaPjrQf7Gr9FMiIHXNK19uegTMKztXyPZoUWlX84X0iawY95x0Y3\n"
+ "73f6P2rN2UOjlVVjAQJBAOKy3l2w3Zj2w0oAJox0eMwl+RxBNt1C42SHrob2mFUT\n"
+ "rytpVVYOasr8CoDI0kjacjI94sLum+buJoXXX6YTGO0CQQDdZwlYIEkoS3ftfxPa\n"
+ "Ai0YTBzAWvHJg0r8Gk/TkHo6IM+LSsZ9ZYUv/vBe4BKLw1I4hZ+bQvBiq+f8ROtk\n"
+ "+TDRAkAPL3ghwoU1h+IRBO2QHwUwd6K2N9AbBi4BP+168O3HVSg4ujeTKigRLMzv\n"
+ "T4R2iNt5bhfQgvdCgtVlxcWMdF8JAkBwDCg3eEdt5BuyjwBt8XH+/O4ED0KUWCTH\n"
+ "x00k5dZlupsuhE5Fwe4QpzXg3gekwdnHjyCCQ/NCDHvgOMTkmhQxAkA9V03KRX9b\n"
+ "bhvEzY/fu8gEp+EzsER96/D79az5z1BaMGL5OPM2xHBPJATKlswnAa7Lp3QKGZGk\n"
+ "TxslfL18J71s\n"
+ "-----END PRIVATE KEY-----\n";
+ static const char kRSA_PUBLIC_KEY_PEM[] =
+ "-----BEGIN PUBLIC KEY-----\n"
+ "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDED6g0rUZWHg6ZF3q0Zr/mtZ2j\n"
+ "PL0lUmNIBtqZZ656H1YlkVtzEalgvtNtDLCI94T2XkgJ7f8vxS/56+BGuzV3MHUa\n"
+ "Nf9cRCu1P3/3I0oNWrp8rxwh8HBXDK99Ryxbx69GYHBcG4CLhSzRJe5CuRzDvQD8\n"
+ "9Z7VI3pPAZgY/MjJfQIDAQAB\n"
+ "-----END PUBLIC KEY-----\n";
static const char kCERT_PEM[] =
"-----BEGIN CERTIFICATE-----\n"
- "MIIBmTCCAQKgAwIBAgIEbzBSAjANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDEwZX\n"
- "ZWJSVEMwHhcNMTQwMTAyMTgyNDQ3WhcNMTQwMjAxMTgyNDQ3WjARMQ8wDQYDVQQD\n"
- "EwZXZWJSVEMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMYRkbhmI7kVA/rM\n"
- "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n"
- "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n"
- "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAEwDQYJKoZIhvcNAQELBQADgYEAUflI\n"
- "VUe5Krqf5RVa5C3u/UTAOAUJBiDS3VANTCLBxjuMsvqOG0WvaYWP3HYPgrz0jXK2\n"
- "LJE/mGw3MyFHEqi81jh95J+ypl6xKW6Rm8jKLR87gUvCaVYn/Z4/P3AqcQTB7wOv\n"
- "UD0A8qfhfDM+LK6rPAnCsVN0NRDY3jvd6rzix9M=\n"
+ "MIIBnDCCAQWgAwIBAgIJAOEHLgeWYwrpMA0GCSqGSIb3DQEBCwUAMBAxDjAMBgNV\n"
+ "BAMMBXRlc3QxMB4XDTE2MDQyNDE4MTAyMloXDTE2MDUyNTE4MTAyMlowEDEOMAwG\n"
+ "A1UEAwwFdGVzdDEwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMQPqDStRlYe\n"
+ "DpkXerRmv+a1naM8vSVSY0gG2plnrnofViWRW3MRqWC+020MsIj3hPZeSAnt/y/F\n"
+ "L/nr4Ea7NXcwdRo1/1xEK7U/f/cjSg1aunyvHCHwcFcMr31HLFvHr0ZgcFwbgIuF\n"
+ "LNEl7kK5HMO9APz1ntUjek8BmBj8yMl9AgMBAAEwDQYJKoZIhvcNAQELBQADgYEA\n"
+ "C3ehaZFl+oEYN069C2ht/gMzuC77L854RF/x7xRtNZzkcg9TVgXXdM3auUvJi8dx\n"
+ "yTpU3ixErjQvoZew5ngXTEvTY8BSQUijJEaLWh8n6NDKRbEGTdAk8nPAmq9hdCFq\n"
+ "e3UkexqNHm3g/VxG4NUC1Y+w29ai0/Rgh+VvgbDwK+Q=\n"
"-----END CERTIFICATE-----\n";
- rtc::scoped_ptr<SSLIdentity> identity(
+ std::unique_ptr<SSLIdentity> identity(
SSLIdentity::FromPEMStrings(kRSA_PRIVATE_KEY_PEM, kCERT_PEM));
EXPECT_TRUE(identity);
+ EXPECT_EQ(kRSA_PRIVATE_KEY_PEM, identity->PrivateKeyToPEMString());
+ EXPECT_EQ(kRSA_PUBLIC_KEY_PEM, identity->PublicKeyToPEMString());
EXPECT_EQ(kCERT_PEM, identity->certificate().ToPEMString());
}
TEST_F(SSLIdentityTest, FromPEMStringsEC) {
- static const char kRSA_PRIVATE_KEY_PEM[] =
- "-----BEGIN EC PRIVATE KEY-----\n"
- "MHcCAQEEIKkIztWLPbs4Y2zWv7VW2Ov4is2ifleCuPgRB8fRv3IkoAoGCCqGSM49\n"
- "AwEHoUQDQgAEDPV33NrhSdhg9cBRkUWUXnVMXc3h17i9ARbSmNgminKcBXb8/y8L\n"
- "A76cMWQPPM0ybHO8OS7ZVg2U/m+TwE1M2g==\n"
- "-----END EC PRIVATE KEY-----\n";
+ // These PEM strings were created by generating an identity with
+ // |SSLIdentity::Generate| and invoking |identity->PrivateKeyToPEMString()|,
+ // |identity->PublicKeyToPEMString()| and
+ // |identity->certificate().ToPEMString()|. If the crypto library is updated,
+ // and the update changes the string form of the keys, these will have to be
+ // updated too.
+ static const char kECDSA_PRIVATE_KEY_PEM[] =
+ "-----BEGIN PRIVATE KEY-----\n"
+ "MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg/AkEA2hklq7dQ2rN\n"
+ "ZxYL6hOUACL4pn7P4FYlA3ZQhIChRANCAAR7YgdO3utP/8IqVRq8G4VZKreMAxeN\n"
+ "rUa12twthv4uFjuHAHa9D9oyAjncmn+xvZZRyVmKrA56jRzENcEEHoAg\n"
+ "-----END PRIVATE KEY-----\n";
+ static const char kECDSA_PUBLIC_KEY_PEM[] =
+ "-----BEGIN PUBLIC KEY-----\n"
+ "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEe2IHTt7rT//CKlUavBuFWSq3jAMX\n"
+ "ja1GtdrcLYb+LhY7hwB2vQ/aMgI53Jp/sb2WUclZiqwOeo0cxDXBBB6AIA==\n"
+ "-----END PUBLIC KEY-----\n";
static const char kCERT_PEM[] =
"-----BEGIN CERTIFICATE-----\n"
- "MIIB0jCCAXmgAwIBAgIJAMCjpFt9t6LMMAoGCCqGSM49BAMCMEUxCzAJBgNVBAYT\n"
- "AkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBXaWRn\n"
- "aXRzIFB0eSBMdGQwIBcNMTUwNjMwMTMwMTIyWhgPMjI4OTA0MTMxMzAxMjJaMEUx\n"
- "CzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRl\n"
- "cm5ldCBXaWRnaXRzIFB0eSBMdGQwWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAAQM\n"
- "9Xfc2uFJ2GD1wFGRRZRedUxdzeHXuL0BFtKY2CaKcpwFdvz/LwsDvpwxZA88zTJs\n"
- "c7w5LtlWDZT+b5PATUzao1AwTjAdBgNVHQ4EFgQUYHq6nxNNIE832ZmaHc/noODO\n"
- "rtAwHwYDVR0jBBgwFoAUYHq6nxNNIE832ZmaHc/noODOrtAwDAYDVR0TBAUwAwEB\n"
- "/zAKBggqhkjOPQQDAgNHADBEAiAQRojsTyZG0BlKoU7gOt5h+yAMLl2cxmDtOIQr\n"
- "GWP/PwIgJynB4AUDsPT0DWmethOXYijB5sY5UPd9DvgmiS/Mr6s=\n"
+ "MIIBFDCBu6ADAgECAgkArpkxjw62sW4wCgYIKoZIzj0EAwIwEDEOMAwGA1UEAwwF\n"
+ "dGVzdDMwHhcNMTYwNDI0MTgxNDM4WhcNMTYwNTI1MTgxNDM4WjAQMQ4wDAYDVQQD\n"
+ "DAV0ZXN0MzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABHtiB07e60//wipVGrwb\n"
+ "hVkqt4wDF42tRrXa3C2G/i4WO4cAdr0P2jICOdyaf7G9llHJWYqsDnqNHMQ1wQQe\n"
+ "gCAwCgYIKoZIzj0EAwIDSAAwRQIhANyreQ/K5yuPPpirsd0e/4WGLHou6bIOSQks\n"
+ "DYzo56NmAiAKOr3u8ol3LmygbUCwEvtWrS8QcJDygxHPACo99hkekw==\n"
"-----END CERTIFICATE-----\n";
- rtc::scoped_ptr<SSLIdentity> identity(
- SSLIdentity::FromPEMStrings(kRSA_PRIVATE_KEY_PEM, kCERT_PEM));
+ std::unique_ptr<SSLIdentity> identity(
+ SSLIdentity::FromPEMStrings(kECDSA_PRIVATE_KEY_PEM, kCERT_PEM));
EXPECT_TRUE(identity);
+ EXPECT_EQ(kECDSA_PRIVATE_KEY_PEM, identity->PrivateKeyToPEMString());
+ EXPECT_EQ(kECDSA_PUBLIC_KEY_PEM, identity->PublicKeyToPEMString());
EXPECT_EQ(kCERT_PEM, identity->certificate().ToPEMString());
}
+TEST_F(SSLIdentityTest, CloneIdentityRSA) {
+ TestCloningIdentity(*identity_rsa1_);
+ TestCloningIdentity(*identity_rsa2_);
+}
+
+TEST_F(SSLIdentityTest, CloneIdentityECDSA) {
+ TestCloningIdentity(*identity_ecdsa1_);
+ TestCloningIdentity(*identity_ecdsa2_);
+}
+
TEST_F(SSLIdentityTest, PemDerConversion) {
std::string der;
EXPECT_TRUE(SSLIdentity::PemToDer("CERTIFICATE", kTestCertificate, &der));
diff --git a/chromium/third_party/webrtc/base/sslsocketfactory.cc b/chromium/third_party/webrtc/base/sslsocketfactory.cc
index d6ec56fd406..7ab58fd04c6 100644
--- a/chromium/third_party/webrtc/base/sslsocketfactory.cc
+++ b/chromium/third_party/webrtc/base/sslsocketfactory.cc
@@ -8,10 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/base/autodetectproxy.h"
#include "webrtc/base/httpcommon.h"
#include "webrtc/base/httpcommon-inl.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socketadapters.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/sslsocketfactory.h"
@@ -167,7 +168,7 @@ AsyncSocket* SslSocketFactory::CreateProxySocket(const ProxyInfo& proxy,
}
if (!hostname_.empty()) {
- rtc::scoped_ptr<SSLAdapter> ssl_adapter(SSLAdapter::Create(socket));
+ std::unique_ptr<SSLAdapter> ssl_adapter(SSLAdapter::Create(socket));
if (!ssl_adapter) {
LOG_F(LS_ERROR) << "SSL unavailable";
delete socket;
diff --git a/chromium/third_party/webrtc/base/sslstreamadapter.h b/chromium/third_party/webrtc/base/sslstreamadapter.h
index f6f0befa05a..c5045f184fb 100644
--- a/chromium/third_party/webrtc/base/sslstreamadapter.h
+++ b/chromium/third_party/webrtc/base/sslstreamadapter.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_BASE_SSLSTREAMADAPTER_H_
#define WEBRTC_BASE_SSLSTREAMADAPTER_H_
+#include <memory>
#include <string>
#include <vector>
@@ -24,8 +25,12 @@ const int TLS_NULL_WITH_NULL_NULL = 0;
// Constants for SRTP profiles.
const int SRTP_INVALID_CRYPTO_SUITE = 0;
+#ifndef SRTP_AES128_CM_SHA1_80
const int SRTP_AES128_CM_SHA1_80 = 0x0001;
+#endif
+#ifndef SRTP_AES128_CM_SHA1_32
const int SRTP_AES128_CM_SHA1_32 = 0x0002;
+#endif
// Cipher suite to use for SRTP. Typically a 80-bit HMAC will be used, except
// in applications (voice) where the additional bandwidth may be significant.
@@ -155,7 +160,7 @@ class SSLStreamAdapter : public StreamAdapterInterface {
// Retrieves the peer's X.509 certificate, if a connection has been
// established. It returns the transmitted over SSL, including the entire
// chain.
- virtual rtc::scoped_ptr<SSLCertificate> GetPeerCertificate() const = 0;
+ virtual std::unique_ptr<SSLCertificate> GetPeerCertificate() const = 0;
// Retrieves the IANA registration id of the cipher suite used for the
// connection (e.g. 0x2F for "TLS_RSA_WITH_AES_128_CBC_SHA").
diff --git a/chromium/third_party/webrtc/base/sslstreamadapter_unittest.cc b/chromium/third_party/webrtc/base/sslstreamadapter_unittest.cc
index 8d5b275db8c..dc62ac081b7 100644
--- a/chromium/third_party/webrtc/base/sslstreamadapter_unittest.cc
+++ b/chromium/third_party/webrtc/base/sslstreamadapter_unittest.cc
@@ -10,13 +10,13 @@
#include <algorithm>
+#include <memory>
#include <set>
#include <string>
#include "webrtc/base/bufferqueue.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/helpers.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/sslconfig.h"
#include "webrtc/base/sslidentity.h"
@@ -474,7 +474,7 @@ class SSLStreamAdapterTestBase : public testing::Test,
return server_ssl_->GetDtlsSrtpCryptoSuite(retval);
}
- rtc::scoped_ptr<rtc::SSLCertificate> GetPeerCertificate(bool client) {
+ std::unique_ptr<rtc::SSLCertificate> GetPeerCertificate(bool client) {
if (client)
return client_ssl_->GetPeerCertificate();
else
@@ -526,8 +526,8 @@ class SSLStreamAdapterTestBase : public testing::Test,
rtc::KeyParams server_key_type_;
SSLDummyStreamBase *client_stream_; // freed by client_ssl_ destructor
SSLDummyStreamBase *server_stream_; // freed by server_ssl_ destructor
- rtc::scoped_ptr<rtc::SSLStreamAdapter> client_ssl_;
- rtc::scoped_ptr<rtc::SSLStreamAdapter> server_ssl_;
+ std::unique_ptr<rtc::SSLStreamAdapter> client_ssl_;
+ std::unique_ptr<rtc::SSLStreamAdapter> server_ssl_;
rtc::SSLIdentity *client_identity_; // freed by client_ssl_ destructor
rtc::SSLIdentity *server_identity_; // freed by server_ssl_ destructor
int delay_;
@@ -562,7 +562,7 @@ class SSLStreamAdapterTestTLS
}
// Test data transfer for TLS
- virtual void TestTransfer(int size) {
+ void TestTransfer(int size) override {
LOG(LS_INFO) << "Starting transfer test with " << size << " bytes";
// Create some dummy data to send.
size_t received;
@@ -591,7 +591,7 @@ class SSLStreamAdapterTestTLS
recv_stream_.GetBuffer(), size));
}
- void WriteData() {
+ void WriteData() override {
size_t position, tosend, size;
rtc::StreamResult rv;
size_t sent;
@@ -627,7 +627,7 @@ class SSLStreamAdapterTestTLS
}
};
- virtual void ReadData(rtc::StreamInterface *stream) {
+ void ReadData(rtc::StreamInterface *stream) override {
char buffer[1600];
size_t bread;
int err2;
@@ -691,7 +691,7 @@ class SSLStreamAdapterTestDTLS
new SSLDummyStreamDTLS(this, "s2c", &server_buffer_, &client_buffer_);
}
- virtual void WriteData() {
+ void WriteData() override {
unsigned char *packet = new unsigned char[1600];
while (sent_ < count_) {
@@ -720,7 +720,7 @@ class SSLStreamAdapterTestDTLS
delete [] packet;
}
- virtual void ReadData(rtc::StreamInterface *stream) {
+ void ReadData(rtc::StreamInterface *stream) override {
unsigned char buffer[2000];
size_t bread;
int err2;
@@ -756,7 +756,7 @@ class SSLStreamAdapterTestDTLS
}
}
- virtual void TestTransfer(int count) {
+ void TestTransfer(int count) override {
count_ = count;
WriteData();
@@ -1043,7 +1043,7 @@ TEST_F(SSLStreamAdapterTestDTLSFromPEMStrings, TestDTLSGetPeerCertificate) {
TestHandshake();
// The client should have a peer certificate after the handshake.
- rtc::scoped_ptr<rtc::SSLCertificate> client_peer_cert =
+ std::unique_ptr<rtc::SSLCertificate> client_peer_cert =
GetPeerCertificate(true);
ASSERT_TRUE(client_peer_cert);
@@ -1055,7 +1055,7 @@ TEST_F(SSLStreamAdapterTestDTLSFromPEMStrings, TestDTLSGetPeerCertificate) {
ASSERT_FALSE(client_peer_cert->GetChain());
// The server should have a peer certificate after the handshake.
- rtc::scoped_ptr<rtc::SSLCertificate> server_peer_cert =
+ std::unique_ptr<rtc::SSLCertificate> server_peer_cert =
GetPeerCertificate(false);
ASSERT_TRUE(server_peer_cert);
diff --git a/chromium/third_party/webrtc/base/stream.h b/chromium/third_party/webrtc/base/stream.h
index e624df55044..bc949065c8f 100644
--- a/chromium/third_party/webrtc/base/stream.h
+++ b/chromium/third_party/webrtc/base/stream.h
@@ -11,15 +11,16 @@
#ifndef WEBRTC_BASE_STREAM_H_
#define WEBRTC_BASE_STREAM_H_
+#include <memory>
#include <stdio.h>
#include "webrtc/base/basictypes.h"
#include "webrtc/base/buffer.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/messagehandler.h"
#include "webrtc/base/messagequeue.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sigslot.h"
namespace rtc {
@@ -334,7 +335,7 @@ class StreamTap : public StreamAdapterInterface {
int* error) override;
private:
- scoped_ptr<StreamInterface> tap_;
+ std::unique_ptr<StreamInterface> tap_;
StreamResult tap_result_;
int tap_error_;
RTC_DISALLOW_COPY_AND_ASSIGN(StreamTap);
@@ -554,7 +555,7 @@ class FifoBuffer : public StreamInterface {
// keeps the opened/closed state of the stream
StreamState state_ GUARDED_BY(crit_);
// the allocated buffer
- scoped_ptr<char[]> buffer_ GUARDED_BY(crit_);
+ std::unique_ptr<char[]> buffer_ GUARDED_BY(crit_);
// size of the allocated buffer
size_t buffer_length_ GUARDED_BY(crit_);
// amount of readable data in the buffer
diff --git a/chromium/third_party/webrtc/base/swap_queue.h b/chromium/third_party/webrtc/base/swap_queue.h
index d3af225eb17..1851309bd73 100644
--- a/chromium/third_party/webrtc/base/swap_queue.h
+++ b/chromium/third_party/webrtc/base/swap_queue.h
@@ -16,6 +16,7 @@
#include <vector>
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/base/task.h b/chromium/third_party/webrtc/base/task.h
index 28702e49a7a..b2a60671534 100644
--- a/chromium/third_party/webrtc/base/task.h
+++ b/chromium/third_party/webrtc/base/task.h
@@ -13,7 +13,6 @@
#include <string>
#include "webrtc/base/basictypes.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/taskparent.h"
diff --git a/chromium/third_party/webrtc/base/task_queue.h b/chromium/third_party/webrtc/base/task_queue.h
new file mode 100644
index 00000000000..dad4f431b75
--- /dev/null
+++ b/chromium/third_party/webrtc/base/task_queue.h
@@ -0,0 +1,277 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_TASK_QUEUE_H_
+#define WEBRTC_BASE_TASK_QUEUE_H_
+
+#include <list>
+#include <memory>
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_BUILD_LIBEVENT)
+#include <dispatch/dispatch.h>
+#endif
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+
+#if defined(WEBRTC_WIN) || defined(WEBRTC_BUILD_LIBEVENT)
+#include "webrtc/base/platform_thread.h"
+#endif
+
+#if defined(WEBRTC_BUILD_LIBEVENT)
+struct event_base;
+struct event;
+#endif
+
+namespace rtc {
+
+// Base interface for asynchronously executed tasks.
+// The interface basically consists of a single function, Run(), that executes
+// on the target queue. For more details see the Run() method and TaskQueue.
+class QueuedTask {
+ public:
+ QueuedTask() {}
+ virtual ~QueuedTask() {}
+
+ // Main routine that will run when the task is executed on the desired queue.
+ // The task should return |true| to indicate that it should be deleted or
+ // |false| to indicate that the queue should consider ownership of the task
+ // having been transferred. Returning |false| can be useful if a task has
+ // re-posted itself to a different queue or is otherwise being re-used.
+ virtual bool Run() = 0;
+
+ private:
+ RTC_DISALLOW_COPY_AND_ASSIGN(QueuedTask);
+};
+
+// Simple implementation of QueuedTask for use with rtc::Bind and lambdas.
+template <class Closure>
+class ClosureTask : public QueuedTask {
+ public:
+ explicit ClosureTask(const Closure& closure) : closure_(closure) {}
+
+ private:
+ bool Run() override {
+ closure_();
+ return true;
+ }
+
+ Closure closure_;
+};
+
+// Extends ClosureTask to also allow specifying cleanup code.
+// This is useful when using lambdas if guaranteeing cleanup, even if a task
+// was dropped (queue is too full), is required.
+template <class Closure, class Cleanup>
+class ClosureTaskWithCleanup : public ClosureTask<Closure> {
+ public:
+ ClosureTaskWithCleanup(const Closure& closure, Cleanup cleanup)
+ : ClosureTask<Closure>(closure), cleanup_(cleanup) {}
+ ~ClosureTaskWithCleanup() { cleanup_(); }
+
+ private:
+ Cleanup cleanup_;
+};
+
+// Convenience function to construct closures that can be passed directly
+// to methods that support std::unique_ptr<QueuedTask> but not template
+// based parameters.
+template <class Closure>
+static std::unique_ptr<QueuedTask> NewClosure(const Closure& closure) {
+ return std::unique_ptr<QueuedTask>(new ClosureTask<Closure>(closure));
+}
+
+template <class Closure, class Cleanup>
+static std::unique_ptr<QueuedTask> NewClosure(const Closure& closure,
+ const Cleanup& cleanup) {
+ return std::unique_ptr<QueuedTask>(
+ new ClosureTaskWithCleanup<Closure, Cleanup>(closure, cleanup));
+}
+
+// Implements a task queue that asynchronously executes tasks in a way that
+// guarantees that they're executed in FIFO order and that tasks never overlap.
+// Tasks may always execute on the same worker thread and they may not.
+// To DCHECK that tasks are executing on a known task queue, use IsCurrent().
+//
+// Here are some usage examples:
+//
+// 1) Asynchronously running a lambda:
+//
+// class MyClass {
+// ...
+// TaskQueue queue_("MyQueue");
+// };
+//
+// void MyClass::StartWork() {
+// queue_.PostTask([]() { Work(); });
+// ...
+//
+// 2) Doing work asynchronously on a worker queue and providing a notification
+// callback on the current queue, when the work has been done:
+//
+// void MyClass::StartWorkAndLetMeKnowWhenDone(
+// std::unique_ptr<QueuedTask> callback) {
+// DCHECK(TaskQueue::Current()) << "Need to be running on a queue";
+// queue_.PostTaskAndReply([]() { Work(); }, std::move(callback));
+// }
+// ...
+// my_class->StartWorkAndLetMeKnowWhenDone(
+// NewClosure([]() { LOG(INFO) << "The work is done!";}));
+//
+// 3) Posting a custom task on a timer. The task posts itself again after
+// every running:
+//
+// class TimerTask : public QueuedTask {
+// public:
+// TimerTask() {}
+// private:
+// bool Run() override {
+// ++count_;
+// TaskQueue::Current()->PostDelayedTask(
+// std::unique_ptr<QueuedTask>(this), 1000);
+// // Ownership has been transferred to the next occurance,
+// // so return false to prevent from being deleted now.
+// return false;
+// }
+// int count_ = 0;
+// };
+// ...
+// queue_.PostDelayedTask(
+// std::unique_ptr<QueuedTask>(new TimerTask()), 1000);
+//
+// For more examples, see task_queue_unittests.cc.
+//
+// A note on destruction:
+//
+// When a TaskQueue is deleted, pending tasks will not be executed but they will
+// be deleted. The deletion of tasks may happen asynchronously after the
+// TaskQueue itself has been deleted or it may happen synchronously while the
+// TaskQueue instance is being deleted. This may vary from one OS to the next
+// so assumptions about lifetimes of pending tasks should not be made.
+class TaskQueue {
+ public:
+ explicit TaskQueue(const char* queue_name);
+ // TODO(tommi): Implement move semantics?
+ ~TaskQueue();
+
+ static TaskQueue* Current();
+
+ // Used for DCHECKing the current queue.
+ static bool IsCurrent(const char* queue_name);
+ bool IsCurrent() const;
+
+ // TODO(tommi): For better debuggability, implement FROM_HERE.
+
+ // Ownership of the task is passed to PostTask.
+ void PostTask(std::unique_ptr<QueuedTask> task);
+ void PostTaskAndReply(std::unique_ptr<QueuedTask> task,
+ std::unique_ptr<QueuedTask> reply,
+ TaskQueue* reply_queue);
+ void PostTaskAndReply(std::unique_ptr<QueuedTask> task,
+ std::unique_ptr<QueuedTask> reply);
+
+ void PostDelayedTask(std::unique_ptr<QueuedTask> task, uint32_t milliseconds);
+
+ template <class Closure>
+ void PostTask(const Closure& closure) {
+ PostTask(std::unique_ptr<QueuedTask>(new ClosureTask<Closure>(closure)));
+ }
+
+ template <class Closure>
+ void PostDelayedTask(const Closure& closure, uint32_t milliseconds) {
+ PostDelayedTask(
+ std::unique_ptr<QueuedTask>(new ClosureTask<Closure>(closure)),
+ milliseconds);
+ }
+
+ template <class Closure1, class Closure2>
+ void PostTaskAndReply(const Closure1& task,
+ const Closure2& reply,
+ TaskQueue* reply_queue) {
+ PostTaskAndReply(
+ std::unique_ptr<QueuedTask>(new ClosureTask<Closure1>(task)),
+ std::unique_ptr<QueuedTask>(new ClosureTask<Closure2>(reply)),
+ reply_queue);
+ }
+
+ template <class Closure>
+ void PostTaskAndReply(std::unique_ptr<QueuedTask> task,
+ const Closure& reply) {
+ PostTaskAndReply(std::move(task), std::unique_ptr<QueuedTask>(
+ new ClosureTask<Closure>(reply)));
+ }
+
+ template <class Closure>
+ void PostTaskAndReply(const Closure& task,
+ std::unique_ptr<QueuedTask> reply) {
+ PostTaskAndReply(
+ std::unique_ptr<QueuedTask>(new ClosureTask<Closure>(task)),
+ std::move(reply));
+ }
+
+ template <class Closure1, class Closure2>
+ void PostTaskAndReply(const Closure1& task, const Closure2& reply) {
+ PostTaskAndReply(
+ std::unique_ptr<QueuedTask>(new ClosureTask<Closure1>(task)),
+ std::unique_ptr<QueuedTask>(new ClosureTask<Closure2>(reply)));
+ }
+
+ private:
+#if defined(WEBRTC_BUILD_LIBEVENT)
+ static bool ThreadMain(void* context);
+ static void OnWakeup(int socket, short flags, void* context); // NOLINT
+ static void RunTask(int fd, short flags, void* context); // NOLINT
+ static void RunTimer(int fd, short flags, void* context); // NOLINT
+
+ class PostAndReplyTask;
+ class SetTimerTask;
+
+ void PrepareReplyTask(PostAndReplyTask* reply_task);
+ void ReplyTaskDone(PostAndReplyTask* reply_task);
+
+ struct QueueContext;
+
+ int wakeup_pipe_in_ = -1;
+ int wakeup_pipe_out_ = -1;
+ event_base* event_base_;
+ std::unique_ptr<event> wakeup_event_;
+ PlatformThread thread_;
+ rtc::CriticalSection pending_lock_;
+ std::list<std::unique_ptr<QueuedTask>> pending_ GUARDED_BY(pending_lock_);
+ std::list<PostAndReplyTask*> pending_replies_ GUARDED_BY(pending_lock_);
+#elif defined(WEBRTC_MAC)
+ struct QueueContext;
+ struct TaskContext;
+ struct PostTaskAndReplyContext;
+ dispatch_queue_t queue_;
+ QueueContext* const context_;
+#elif defined(WEBRTC_WIN)
+ static bool ThreadMain(void* context);
+
+ class WorkerThread : public PlatformThread {
+ public:
+ WorkerThread(ThreadRunFunction func, void* obj, const char* thread_name)
+ : PlatformThread(func, obj, thread_name) {}
+
+ bool QueueAPC(PAPCFUNC apc_function, ULONG_PTR data) {
+ return PlatformThread::QueueAPC(apc_function, data);
+ }
+ };
+ WorkerThread thread_;
+#else
+#error not supported.
+#endif
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(TaskQueue);
+};
+
+} // namespace rtc
+
+#endif // WEBRTC_BASE_TASK_QUEUE_H_
diff --git a/chromium/third_party/webrtc/base/task_queue_gcd.cc b/chromium/third_party/webrtc/base/task_queue_gcd.cc
new file mode 100644
index 00000000000..2c7d649fc95
--- /dev/null
+++ b/chromium/third_party/webrtc/base/task_queue_gcd.cc
@@ -0,0 +1,167 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains the implementation of TaskQueue for Mac and iOS.
+// The implementation uses Grand Central Dispatch queues (GCD) to
+// do the actual task queuing.
+
+#include "webrtc/base/task_queue.h"
+
+#include <string.h>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/task_queue_posix.h"
+
+namespace rtc {
+using internal::GetQueuePtrTls;
+using internal::AutoSetCurrentQueuePtr;
+
+struct TaskQueue::QueueContext {
+ explicit QueueContext(TaskQueue* q) : queue(q), is_active(true) {}
+
+ static void SetNotActive(void* context) {
+ QueueContext* qc = static_cast<QueueContext*>(context);
+ qc->is_active = false;
+ }
+
+ static void DeleteContext(void* context) {
+ QueueContext* qc = static_cast<QueueContext*>(context);
+ delete qc;
+ }
+
+ TaskQueue* const queue;
+ bool is_active;
+};
+
+struct TaskQueue::TaskContext {
+ TaskContext(QueueContext* queue_ctx, std::unique_ptr<QueuedTask> task)
+ : queue_ctx(queue_ctx), task(std::move(task)) {}
+ virtual ~TaskContext() {}
+
+ static void RunTask(void* context) {
+ std::unique_ptr<TaskContext> tc(static_cast<TaskContext*>(context));
+ if (tc->queue_ctx->is_active) {
+ AutoSetCurrentQueuePtr set_current(tc->queue_ctx->queue);
+ if (!tc->task->Run())
+ tc->task.release();
+ }
+ }
+
+ QueueContext* const queue_ctx;
+ std::unique_ptr<QueuedTask> task;
+};
+
+// Special case context for holding two tasks, a |first_task| + the task
+// that's owned by the parent struct, TaskContext, that then becomes the
+// second (i.e. 'reply') task.
+struct TaskQueue::PostTaskAndReplyContext : public TaskQueue::TaskContext {
+ explicit PostTaskAndReplyContext(QueueContext* first_queue_ctx,
+ std::unique_ptr<QueuedTask> first_task,
+ QueueContext* second_queue_ctx,
+ std::unique_ptr<QueuedTask> second_task)
+ : TaskContext(second_queue_ctx, std::move(second_task)),
+ first_queue_ctx(first_queue_ctx),
+ first_task(std::move(first_task)) {
+ // Retain the reply queue for as long as this object lives.
+ // If we don't, we may have memory leaks and/or failures.
+ dispatch_retain(first_queue_ctx->queue->queue_);
+ }
+ ~PostTaskAndReplyContext() override {
+ dispatch_release(first_queue_ctx->queue->queue_);
+ }
+
+ static void RunTask(void* context) {
+ auto* rc = static_cast<PostTaskAndReplyContext*>(context);
+ if (rc->first_queue_ctx->is_active) {
+ AutoSetCurrentQueuePtr set_current(rc->first_queue_ctx->queue);
+ if (!rc->first_task->Run())
+ rc->first_task.release();
+ }
+ // Post the reply task. This hands the work over to the parent struct.
+ // This task will eventually delete |this|.
+ dispatch_async_f(rc->queue_ctx->queue->queue_, rc, &TaskContext::RunTask);
+ }
+
+ QueueContext* const first_queue_ctx;
+ std::unique_ptr<QueuedTask> first_task;
+};
+
+TaskQueue::TaskQueue(const char* queue_name)
+ : queue_(dispatch_queue_create(queue_name, DISPATCH_QUEUE_SERIAL)),
+ context_(new QueueContext(this)) {
+ RTC_DCHECK(queue_name);
+ RTC_CHECK(queue_);
+ dispatch_set_context(queue_, context_);
+ // Assign a finalizer that will delete the context when the last reference
+ // to the queue is released. This may run after the TaskQueue object has
+ // been deleted.
+ dispatch_set_finalizer_f(queue_, &QueueContext::DeleteContext);
+}
+
+TaskQueue::~TaskQueue() {
+ RTC_DCHECK(!IsCurrent());
+ // Implementation/behavioral note:
+ // Dispatch queues are reference counted via calls to dispatch_retain and
+ // dispatch_release. Pending blocks submitted to a queue also hold a
+ // reference to the queue until they have finished. Once all references to a
+ // queue have been released, the queue will be deallocated by the system.
+ // This is why we check the context before running tasks.
+
+ // Use dispatch_sync to set the context to null to guarantee that there's not
+ // a race between checking the context and using it from a task.
+ dispatch_sync_f(queue_, context_, &QueueContext::SetNotActive);
+ dispatch_release(queue_);
+}
+
+// static
+TaskQueue* TaskQueue::Current() {
+ return static_cast<TaskQueue*>(pthread_getspecific(GetQueuePtrTls()));
+}
+
+// static
+bool TaskQueue::IsCurrent(const char* queue_name) {
+ TaskQueue* current = Current();
+ return current &&
+ strcmp(queue_name, dispatch_queue_get_label(current->queue_)) == 0;
+}
+
+bool TaskQueue::IsCurrent() const {
+ RTC_DCHECK(queue_);
+ return this == Current();
+}
+
+void TaskQueue::PostTask(std::unique_ptr<QueuedTask> task) {
+ auto* context = new TaskContext(context_, std::move(task));
+ dispatch_async_f(queue_, context, &TaskContext::RunTask);
+}
+
+void TaskQueue::PostDelayedTask(std::unique_ptr<QueuedTask> task,
+ uint32_t milliseconds) {
+ auto* context = new TaskContext(context_, std::move(task));
+ dispatch_after_f(
+ dispatch_time(DISPATCH_TIME_NOW, milliseconds * NSEC_PER_MSEC), queue_,
+ context, &TaskContext::RunTask);
+}
+
+void TaskQueue::PostTaskAndReply(std::unique_ptr<QueuedTask> task,
+ std::unique_ptr<QueuedTask> reply,
+ TaskQueue* reply_queue) {
+ auto* context = new PostTaskAndReplyContext(
+ context_, std::move(task), reply_queue->context_, std::move(reply));
+ dispatch_async_f(queue_, context, &PostTaskAndReplyContext::RunTask);
+}
+
+void TaskQueue::PostTaskAndReply(std::unique_ptr<QueuedTask> task,
+ std::unique_ptr<QueuedTask> reply) {
+ return PostTaskAndReply(std::move(task), std::move(reply), Current());
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/task_queue_libevent.cc b/chromium/third_party/webrtc/base/task_queue_libevent.cc
new file mode 100644
index 00000000000..a59b450828c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/task_queue_libevent.cc
@@ -0,0 +1,318 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/task_queue.h"
+
+#include <fcntl.h>
+#include <string.h>
+#include <unistd.h>
+
+#include "base/third_party/libevent/event.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/task_queue_posix.h"
+#include "webrtc/base/timeutils.h"
+
+namespace rtc {
+using internal::GetQueuePtrTls;
+using internal::AutoSetCurrentQueuePtr;
+
+namespace {
+static const char kQuit = 1;
+static const char kRunTask = 2;
+
+struct TimerEvent {
+ explicit TimerEvent(std::unique_ptr<QueuedTask> task)
+ : task(std::move(task)) {}
+ ~TimerEvent() { event_del(&ev); }
+ event ev;
+ std::unique_ptr<QueuedTask> task;
+};
+
+bool SetNonBlocking(int fd) {
+ const int flags = fcntl(fd, F_GETFL);
+ RTC_CHECK(flags != -1);
+ return (flags & O_NONBLOCK) || fcntl(fd, F_SETFL, flags | O_NONBLOCK) != -1;
+}
+} // namespace
+
+struct TaskQueue::QueueContext {
+ explicit QueueContext(TaskQueue* q) : queue(q), is_active(true) {}
+ TaskQueue* queue;
+ bool is_active;
+ // Holds a list of events pending timers for cleanup when the loop exits.
+ std::list<TimerEvent*> pending_timers_;
+};
+
+class TaskQueue::PostAndReplyTask : public QueuedTask {
+ public:
+ PostAndReplyTask(std::unique_ptr<QueuedTask> task,
+ std::unique_ptr<QueuedTask> reply,
+ TaskQueue* reply_queue)
+ : task_(std::move(task)),
+ reply_(std::move(reply)),
+ reply_queue_(reply_queue) {
+ reply_queue->PrepareReplyTask(this);
+ }
+
+ ~PostAndReplyTask() override {
+ CritScope lock(&lock_);
+ if (reply_queue_)
+ reply_queue_->ReplyTaskDone(this);
+ }
+
+ void OnReplyQueueGone() {
+ CritScope lock(&lock_);
+ reply_queue_ = nullptr;
+ }
+
+ private:
+ bool Run() override {
+ if (!task_->Run())
+ task_.release();
+
+ CritScope lock(&lock_);
+ if (reply_queue_)
+ reply_queue_->PostTask(std::move(reply_));
+ return true;
+ }
+
+ CriticalSection lock_;
+ std::unique_ptr<QueuedTask> task_;
+ std::unique_ptr<QueuedTask> reply_;
+ TaskQueue* reply_queue_ GUARDED_BY(lock_);
+};
+
+class TaskQueue::SetTimerTask : public QueuedTask {
+ public:
+ SetTimerTask(std::unique_ptr<QueuedTask> task, uint32_t milliseconds)
+ : task_(std::move(task)),
+ milliseconds_(milliseconds),
+ posted_(Time32()) {}
+
+ private:
+ bool Run() override {
+ // Compensate for the time that has passed since construction
+ // and until we got here.
+ uint32_t post_time = Time32() - posted_;
+ TaskQueue::Current()->PostDelayedTask(
+ std::move(task_),
+ post_time > milliseconds_ ? 0 : milliseconds_ - post_time);
+ return true;
+ }
+
+ std::unique_ptr<QueuedTask> task_;
+ const uint32_t milliseconds_;
+ const uint32_t posted_;
+};
+
+TaskQueue::TaskQueue(const char* queue_name)
+ : event_base_(event_base_new()),
+ wakeup_event_(new event()),
+ thread_(&TaskQueue::ThreadMain, this, queue_name) {
+ RTC_DCHECK(queue_name);
+ int fds[2];
+ RTC_CHECK(pipe(fds) == 0);
+ SetNonBlocking(fds[0]);
+ SetNonBlocking(fds[1]);
+ wakeup_pipe_out_ = fds[0];
+ wakeup_pipe_in_ = fds[1];
+ event_set(wakeup_event_.get(), wakeup_pipe_out_, EV_READ | EV_PERSIST,
+ OnWakeup, this);
+ event_base_set(event_base_, wakeup_event_.get());
+ event_add(wakeup_event_.get(), 0);
+ thread_.Start();
+}
+
+TaskQueue::~TaskQueue() {
+ RTC_DCHECK(!IsCurrent());
+ struct timespec ts;
+ char message = kQuit;
+ while (write(wakeup_pipe_in_, &message, sizeof(message)) != sizeof(message)) {
+ // The queue is full, so we have no choice but to wait and retry.
+ RTC_CHECK_EQ(EAGAIN, errno);
+ ts.tv_sec = 0;
+ ts.tv_nsec = 1000000;
+ nanosleep(&ts, nullptr);
+ }
+
+ thread_.Stop();
+
+ event_del(wakeup_event_.get());
+ close(wakeup_pipe_in_);
+ close(wakeup_pipe_out_);
+ wakeup_pipe_in_ = -1;
+ wakeup_pipe_out_ = -1;
+
+ {
+ // Synchronize against any pending reply tasks that might be running on
+ // other queues.
+ CritScope lock(&pending_lock_);
+ for (auto* reply : pending_replies_)
+ reply->OnReplyQueueGone();
+ pending_replies_.clear();
+ }
+
+ event_base_free(event_base_);
+}
+
+// static
+TaskQueue* TaskQueue::Current() {
+ QueueContext* ctx =
+ static_cast<QueueContext*>(pthread_getspecific(GetQueuePtrTls()));
+ return ctx ? ctx->queue : nullptr;
+}
+
+// static
+bool TaskQueue::IsCurrent(const char* queue_name) {
+ TaskQueue* current = Current();
+ return current && current->thread_.name().compare(queue_name) == 0;
+}
+
+bool TaskQueue::IsCurrent() const {
+ return IsThreadRefEqual(thread_.GetThreadRef(), CurrentThreadRef());
+}
+
+void TaskQueue::PostTask(std::unique_ptr<QueuedTask> task) {
+ RTC_DCHECK(task.get());
+ // libevent isn't thread safe. This means that we can't use methods such
+ // as event_base_once to post tasks to the worker thread from a different
+ // thread. However, we can use it when posting from the worker thread itself.
+ if (IsCurrent()) {
+ if (event_base_once(event_base_, -1, EV_TIMEOUT, &TaskQueue::RunTask,
+ task.get(), nullptr) == 0) {
+ task.release();
+ }
+ } else {
+ QueuedTask* task_id = task.get(); // Only used for comparison.
+ {
+ CritScope lock(&pending_lock_);
+ pending_.push_back(std::move(task));
+ }
+ char message = kRunTask;
+ if (write(wakeup_pipe_in_, &message, sizeof(message)) != sizeof(message)) {
+ LOG(WARNING) << "Failed to queue task.";
+ CritScope lock(&pending_lock_);
+ pending_.remove_if([task_id](std::unique_ptr<QueuedTask>& t) {
+ return t.get() == task_id;
+ });
+ }
+ }
+}
+
+void TaskQueue::PostDelayedTask(std::unique_ptr<QueuedTask> task,
+ uint32_t milliseconds) {
+ if (IsCurrent()) {
+ TimerEvent* timer = new TimerEvent(std::move(task));
+ evtimer_set(&timer->ev, &TaskQueue::RunTimer, timer);
+ event_base_set(event_base_, &timer->ev);
+ QueueContext* ctx =
+ static_cast<QueueContext*>(pthread_getspecific(GetQueuePtrTls()));
+ ctx->pending_timers_.push_back(timer);
+ timeval tv = {milliseconds / 1000, (milliseconds % 1000) * 1000};
+ event_add(&timer->ev, &tv);
+ } else {
+ PostTask(std::unique_ptr<QueuedTask>(
+ new SetTimerTask(std::move(task), milliseconds)));
+ }
+}
+
+void TaskQueue::PostTaskAndReply(std::unique_ptr<QueuedTask> task,
+ std::unique_ptr<QueuedTask> reply,
+ TaskQueue* reply_queue) {
+ std::unique_ptr<QueuedTask> wrapper_task(
+ new PostAndReplyTask(std::move(task), std::move(reply), reply_queue));
+ PostTask(std::move(wrapper_task));
+}
+
+void TaskQueue::PostTaskAndReply(std::unique_ptr<QueuedTask> task,
+ std::unique_ptr<QueuedTask> reply) {
+ return PostTaskAndReply(std::move(task), std::move(reply), Current());
+}
+
+// static
+bool TaskQueue::ThreadMain(void* context) {
+ TaskQueue* me = static_cast<TaskQueue*>(context);
+
+ QueueContext queue_context(me);
+ pthread_setspecific(GetQueuePtrTls(), &queue_context);
+
+ while (queue_context.is_active)
+ event_base_loop(me->event_base_, 0);
+
+ pthread_setspecific(GetQueuePtrTls(), nullptr);
+
+ for (TimerEvent* timer : queue_context.pending_timers_)
+ delete timer;
+
+ return false;
+}
+
+// static
+void TaskQueue::OnWakeup(int socket, short flags, void* context) { // NOLINT
+ QueueContext* ctx =
+ static_cast<QueueContext*>(pthread_getspecific(GetQueuePtrTls()));
+ RTC_DCHECK(ctx->queue->wakeup_pipe_out_ == socket);
+ char buf;
+ RTC_CHECK(sizeof(buf) == read(socket, &buf, sizeof(buf)));
+ switch (buf) {
+ case kQuit:
+ ctx->is_active = false;
+ event_base_loopbreak(ctx->queue->event_base_);
+ break;
+ case kRunTask: {
+ std::unique_ptr<QueuedTask> task;
+ {
+ CritScope lock(&ctx->queue->pending_lock_);
+ RTC_DCHECK(!ctx->queue->pending_.empty());
+ task = std::move(ctx->queue->pending_.front());
+ ctx->queue->pending_.pop_front();
+ RTC_DCHECK(task.get());
+ }
+ if (!task->Run())
+ task.release();
+ break;
+ }
+ default:
+ RTC_NOTREACHED();
+ break;
+ }
+}
+
+// static
+void TaskQueue::RunTask(int fd, short flags, void* context) { // NOLINT
+ auto* task = static_cast<QueuedTask*>(context);
+ if (task->Run())
+ delete task;
+}
+
+// static
+void TaskQueue::RunTimer(int fd, short flags, void* context) { // NOLINT
+ TimerEvent* timer = static_cast<TimerEvent*>(context);
+ if (!timer->task->Run())
+ timer->task.release();
+ QueueContext* ctx =
+ static_cast<QueueContext*>(pthread_getspecific(GetQueuePtrTls()));
+ ctx->pending_timers_.remove(timer);
+ delete timer;
+}
+
+void TaskQueue::PrepareReplyTask(PostAndReplyTask* reply_task) {
+ RTC_DCHECK(reply_task);
+ CritScope lock(&pending_lock_);
+ pending_replies_.push_back(reply_task);
+}
+
+void TaskQueue::ReplyTaskDone(PostAndReplyTask* reply_task) {
+ CritScope lock(&pending_lock_);
+ pending_replies_.remove(reply_task);
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/task_queue_posix.cc b/chromium/third_party/webrtc/base/task_queue_posix.cc
new file mode 100644
index 00000000000..3b00ac8e12d
--- /dev/null
+++ b/chromium/third_party/webrtc/base/task_queue_posix.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/task_queue_posix.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/task_queue.h"
+
+namespace rtc {
+namespace internal {
+pthread_key_t g_queue_ptr_tls = 0;
+
+void InitializeTls() {
+ RTC_CHECK(pthread_key_create(&g_queue_ptr_tls, nullptr) == 0);
+}
+
+pthread_key_t GetQueuePtrTls() {
+ static pthread_once_t init_once = PTHREAD_ONCE_INIT;
+ RTC_CHECK(pthread_once(&init_once, &InitializeTls) == 0);
+ return g_queue_ptr_tls;
+}
+
+AutoSetCurrentQueuePtr::AutoSetCurrentQueuePtr(TaskQueue* q)
+ : prev_(TaskQueue::Current()) {
+ pthread_setspecific(GetQueuePtrTls(), q);
+}
+
+AutoSetCurrentQueuePtr::~AutoSetCurrentQueuePtr() {
+ pthread_setspecific(GetQueuePtrTls(), prev_);
+}
+
+} // namespace internal
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/task_queue_posix.h b/chromium/third_party/webrtc/base/task_queue_posix.h
new file mode 100644
index 00000000000..b677b78a38c
--- /dev/null
+++ b/chromium/third_party/webrtc/base/task_queue_posix.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_BASE_TASK_QUEUE_POSIX_H_
+#define WEBRTC_BASE_TASK_QUEUE_POSIX_H_
+
+#include <pthread.h>
+
+namespace rtc {
+
+class TaskQueue;
+
+namespace internal {
+
+class AutoSetCurrentQueuePtr {
+ public:
+ explicit AutoSetCurrentQueuePtr(TaskQueue* q);
+ ~AutoSetCurrentQueuePtr();
+
+ private:
+ TaskQueue* const prev_;
+};
+
+pthread_key_t GetQueuePtrTls();
+
+} // namespace internal
+} // namespace rtc
+
+#endif // WEBRTC_BASE_TASK_QUEUE_POSIX_H_
diff --git a/chromium/third_party/webrtc/base/task_queue_unittest.cc b/chromium/third_party/webrtc/base/task_queue_unittest.cc
new file mode 100644
index 00000000000..db4e6c2f7ed
--- /dev/null
+++ b/chromium/third_party/webrtc/base/task_queue_unittest.cc
@@ -0,0 +1,261 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <vector>
+
+#include "webrtc/base/bind.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/task_queue.h"
+#include "webrtc/base/timeutils.h"
+
+namespace rtc {
+
+namespace {
+void CheckCurrent(const char* expected_queue, Event* signal, TaskQueue* queue) {
+ EXPECT_TRUE(TaskQueue::IsCurrent(expected_queue));
+ EXPECT_TRUE(queue->IsCurrent());
+ if (signal)
+ signal->Set();
+}
+
+} // namespace
+
+TEST(TaskQueueTest, Construct) {
+ static const char kQueueName[] = "Construct";
+ TaskQueue queue(kQueueName);
+ EXPECT_FALSE(queue.IsCurrent());
+}
+
+TEST(TaskQueueTest, PostAndCheckCurrent) {
+ static const char kQueueName[] = "PostAndCheckCurrent";
+ TaskQueue queue(kQueueName);
+
+ // We're not running a task, so there shouldn't be a current queue.
+ EXPECT_FALSE(queue.IsCurrent());
+ EXPECT_FALSE(TaskQueue::Current());
+
+ Event event(false, false);
+ queue.PostTask(Bind(&CheckCurrent, kQueueName, &event, &queue));
+ EXPECT_TRUE(event.Wait(1000));
+}
+
+TEST(TaskQueueTest, PostCustomTask) {
+ static const char kQueueName[] = "PostCustomImplementation";
+ TaskQueue queue(kQueueName);
+
+ Event event(false, false);
+
+ class CustomTask : public QueuedTask {
+ public:
+ explicit CustomTask(Event* event) : event_(event) {}
+
+ private:
+ bool Run() override {
+ event_->Set();
+ return false; // Never allows the task to be deleted by the queue.
+ }
+
+ Event* const event_;
+ } my_task(&event);
+
+ // Please don't do this in production code! :)
+ queue.PostTask(std::unique_ptr<QueuedTask>(&my_task));
+ EXPECT_TRUE(event.Wait(1000));
+}
+
+TEST(TaskQueueTest, PostLambda) {
+ static const char kQueueName[] = "PostLambda";
+ TaskQueue queue(kQueueName);
+
+ Event event(false, false);
+ queue.PostTask([&event]() { event.Set(); });
+ EXPECT_TRUE(event.Wait(1000));
+}
+
+TEST(TaskQueueTest, PostFromQueue) {
+ static const char kQueueName[] = "PostFromQueue";
+ TaskQueue queue(kQueueName);
+
+ Event event(false, false);
+ queue.PostTask(
+ [&event, &queue]() { queue.PostTask([&event]() { event.Set(); }); });
+ EXPECT_TRUE(event.Wait(1000));
+}
+
+TEST(TaskQueueTest, PostDelayed) {
+ static const char kQueueName[] = "PostDelayed";
+ TaskQueue queue(kQueueName);
+
+ Event event(false, false);
+ uint32_t start = Time();
+ queue.PostDelayedTask(Bind(&CheckCurrent, kQueueName, &event, &queue), 100);
+ EXPECT_TRUE(event.Wait(1000));
+ uint32_t end = Time();
+ EXPECT_GE(end - start, 100u);
+ EXPECT_NEAR(end - start, 200u, 100u); // Accept 100-300.
+}
+
+TEST(TaskQueueTest, PostMultipleDelayed) {
+ static const char kQueueName[] = "PostMultipleDelayed";
+ TaskQueue queue(kQueueName);
+
+ std::vector<std::unique_ptr<Event>> events;
+ for (int i = 0; i < 10; ++i) {
+ events.push_back(std::unique_ptr<Event>(new Event(false, false)));
+ queue.PostDelayedTask(
+ Bind(&CheckCurrent, kQueueName, events.back().get(), &queue), 10);
+ }
+
+ for (const auto& e : events)
+ EXPECT_TRUE(e->Wait(100));
+}
+
+TEST(TaskQueueTest, PostDelayedAfterDestruct) {
+ static const char kQueueName[] = "PostDelayedAfterDestruct";
+ Event event(false, false);
+ {
+ TaskQueue queue(kQueueName);
+ queue.PostDelayedTask(Bind(&CheckCurrent, kQueueName, &event, &queue), 100);
+ }
+ EXPECT_FALSE(event.Wait(200)); // Task should not run.
+}
+
+TEST(TaskQueueTest, PostAndReply) {
+ static const char kPostQueue[] = "PostQueue";
+ static const char kReplyQueue[] = "ReplyQueue";
+ TaskQueue post_queue(kPostQueue);
+ TaskQueue reply_queue(kReplyQueue);
+
+ Event event(false, false);
+ post_queue.PostTaskAndReply(
+ Bind(&CheckCurrent, kPostQueue, nullptr, &post_queue),
+ Bind(&CheckCurrent, kReplyQueue, &event, &reply_queue), &reply_queue);
+ EXPECT_TRUE(event.Wait(1000));
+}
+
+TEST(TaskQueueTest, PostAndReuse) {
+ static const char kPostQueue[] = "PostQueue";
+ static const char kReplyQueue[] = "ReplyQueue";
+ TaskQueue post_queue(kPostQueue);
+ TaskQueue reply_queue(kReplyQueue);
+
+ int call_count = 0;
+
+ class ReusedTask : public QueuedTask {
+ public:
+ ReusedTask(int* counter, TaskQueue* reply_queue, Event* event)
+ : counter_(counter), reply_queue_(reply_queue), event_(event) {
+ EXPECT_EQ(0, *counter_);
+ }
+
+ private:
+ bool Run() override {
+ if (++(*counter_) == 1) {
+ std::unique_ptr<QueuedTask> myself(this);
+ reply_queue_->PostTask(std::move(myself));
+ // At this point, the object is owned by reply_queue_ and it's
+      // theoretically possible that the object has been deleted (e.g. if
+ // posting wasn't possible). So, don't touch any member variables here.
+
+ // Indicate to the current queue that ownership has been transferred.
+ return false;
+ } else {
+ EXPECT_EQ(2, *counter_);
+ EXPECT_TRUE(reply_queue_->IsCurrent());
+ event_->Set();
+ return true; // Indicate that the object should be deleted.
+ }
+ }
+
+ int* const counter_;
+ TaskQueue* const reply_queue_;
+ Event* const event_;
+ };
+
+ Event event(false, false);
+ std::unique_ptr<QueuedTask> task(
+ new ReusedTask(&call_count, &reply_queue, &event));
+
+ post_queue.PostTask(std::move(task));
+ EXPECT_TRUE(event.Wait(1000));
+}
+
+TEST(TaskQueueTest, PostAndReplyLambda) {
+ static const char kPostQueue[] = "PostQueue";
+ static const char kReplyQueue[] = "ReplyQueue";
+ TaskQueue post_queue(kPostQueue);
+ TaskQueue reply_queue(kReplyQueue);
+
+ Event event(false, false);
+ bool my_flag = false;
+ post_queue.PostTaskAndReply([&my_flag]() { my_flag = true; },
+ [&event]() { event.Set(); }, &reply_queue);
+ EXPECT_TRUE(event.Wait(1000));
+ EXPECT_TRUE(my_flag);
+}
+
+void TestPostTaskAndReply(TaskQueue* work_queue,
+ const char* work_queue_name,
+ Event* event) {
+ ASSERT_FALSE(work_queue->IsCurrent());
+ work_queue->PostTaskAndReply(
+ Bind(&CheckCurrent, work_queue_name, nullptr, work_queue),
+ NewClosure([event]() { event->Set(); }));
+}
+
+// Does a PostTaskAndReply from within a task to post and reply to the current
+// queue. All in all there will be 3 tasks posted and run.
+TEST(TaskQueueTest, PostAndReply2) {
+ static const char kQueueName[] = "PostAndReply2";
+ static const char kWorkQueueName[] = "PostAndReply2_Worker";
+ TaskQueue queue(kQueueName);
+ TaskQueue work_queue(kWorkQueueName);
+
+ Event event(false, false);
+ queue.PostTask(
+ Bind(&TestPostTaskAndReply, &work_queue, kWorkQueueName, &event));
+ EXPECT_TRUE(event.Wait(1000));
+}
+
+// Tests posting more messages than a queue can queue up.
+// In situations like that, tasks will get dropped.
+TEST(TaskQueueTest, PostALot) {
+ // To destruct the event after the queue has gone out of scope.
+ Event event(false, false);
+
+ int tasks_executed = 0;
+ int tasks_cleaned_up = 0;
+ static const int kTaskCount = 0xffff;
+
+ {
+ static const char kQueueName[] = "PostALot";
+ TaskQueue queue(kQueueName);
+
+    // On Linux, the limit of pending bytes in the pipe buffer is 0xffff.
+ // So here we post a total of 0xffff+1 messages, which triggers a failure
+ // case inside of the libevent queue implementation.
+
+ queue.PostTask([&event]() { event.Wait(Event::kForever); });
+ for (int i = 0; i < kTaskCount; ++i)
+ queue.PostTask(NewClosure([&tasks_executed]() { ++tasks_executed; },
+ [&tasks_cleaned_up]() { ++tasks_cleaned_up; }));
+ event.Set(); // Unblock the first task.
+ }
+
+ EXPECT_GE(tasks_cleaned_up, tasks_executed);
+ EXPECT_EQ(kTaskCount, tasks_cleaned_up);
+
+ LOG(INFO) << "tasks executed: " << tasks_executed
+ << ", tasks cleaned up: " << tasks_cleaned_up;
+}
+
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/task_queue_win.cc b/chromium/third_party/webrtc/base/task_queue_win.cc
new file mode 100644
index 00000000000..5ae6d9275ba
--- /dev/null
+++ b/chromium/third_party/webrtc/base/task_queue_win.cc
@@ -0,0 +1,184 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/base/task_queue.h"
+
+#include <string.h>
+#include <unordered_map>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+
+namespace rtc {
+namespace {
+#define WM_RUN_TASK WM_USER + 1
+#define WM_QUEUE_DELAYED_TASK WM_USER + 2
+
+DWORD g_queue_ptr_tls = 0;
+
+BOOL CALLBACK InitializeTls(PINIT_ONCE init_once, void* param, void** context) {
+ g_queue_ptr_tls = TlsAlloc();
+ return TRUE;
+}
+
+DWORD GetQueuePtrTls() {
+ static INIT_ONCE init_once = INIT_ONCE_STATIC_INIT;
+ InitOnceExecuteOnce(&init_once, InitializeTls, nullptr, nullptr);
+ return g_queue_ptr_tls;
+}
+
+struct ThreadStartupData {
+ Event* started;
+ void* thread_context;
+};
+
+void CALLBACK InitializeQueueThread(ULONG_PTR param) {
+ MSG msg;
+ PeekMessage(&msg, NULL, WM_USER, WM_USER, PM_NOREMOVE);
+ ThreadStartupData* data = reinterpret_cast<ThreadStartupData*>(param);
+ TlsSetValue(GetQueuePtrTls(), data->thread_context);
+ data->started->Set();
+}
+} // namespace
+
+TaskQueue::TaskQueue(const char* queue_name)
+ : thread_(&TaskQueue::ThreadMain, this, queue_name) {
+ RTC_DCHECK(queue_name);
+ thread_.Start();
+ Event event(false, false);
+ ThreadStartupData startup = {&event, this};
+ RTC_CHECK(thread_.QueueAPC(&InitializeQueueThread,
+ reinterpret_cast<ULONG_PTR>(&startup)));
+ event.Wait(Event::kForever);
+}
+
+TaskQueue::~TaskQueue() {
+ RTC_DCHECK(!IsCurrent());
+ while (!PostThreadMessage(thread_.GetThreadRef(), WM_QUIT, 0, 0)) {
+ RTC_CHECK(ERROR_NOT_ENOUGH_QUOTA == ::GetLastError());
+ Sleep(1);
+ }
+ thread_.Stop();
+}
+
+// static
+TaskQueue* TaskQueue::Current() {
+ return static_cast<TaskQueue*>(TlsGetValue(GetQueuePtrTls()));
+}
+
+// static
+bool TaskQueue::IsCurrent(const char* queue_name) {
+ TaskQueue* current = Current();
+ return current && current->thread_.name().compare(queue_name) == 0;
+}
+
+bool TaskQueue::IsCurrent() const {
+ return IsThreadRefEqual(thread_.GetThreadRef(), CurrentThreadRef());
+}
+
+void TaskQueue::PostTask(std::unique_ptr<QueuedTask> task) {
+ if (PostThreadMessage(thread_.GetThreadRef(), WM_RUN_TASK, 0,
+ reinterpret_cast<LPARAM>(task.get()))) {
+ task.release();
+ }
+}
+
+void TaskQueue::PostDelayedTask(std::unique_ptr<QueuedTask> task,
+ uint32_t milliseconds) {
+ WPARAM wparam;
+#if defined(_WIN64)
+  // GetTickCount() returns a fairly coarse tick count (resolution of about 8ms)
+ // so this compensation isn't that accurate, but since we have unused 32 bits
+ // on Win64, we might as well use them.
+ wparam = (static_cast<WPARAM>(::GetTickCount()) << 32) | milliseconds;
+#else
+ wparam = milliseconds;
+#endif
+ if (PostThreadMessage(thread_.GetThreadRef(), WM_QUEUE_DELAYED_TASK, wparam,
+ reinterpret_cast<LPARAM>(task.get()))) {
+ task.release();
+ }
+}
+
+void TaskQueue::PostTaskAndReply(std::unique_ptr<QueuedTask> task,
+ std::unique_ptr<QueuedTask> reply,
+ TaskQueue* reply_queue) {
+ QueuedTask* task_ptr = task.release();
+ QueuedTask* reply_task_ptr = reply.release();
+ DWORD reply_thread_id = reply_queue->thread_.GetThreadRef();
+ PostTask([task_ptr, reply_task_ptr, reply_thread_id]() {
+ if (task_ptr->Run())
+ delete task_ptr;
+ // If the thread's message queue is full, we can't queue the task and will
+ // have to drop it (i.e. delete).
+ if (!PostThreadMessage(reply_thread_id, WM_RUN_TASK, 0,
+ reinterpret_cast<LPARAM>(reply_task_ptr))) {
+ delete reply_task_ptr;
+ }
+ });
+}
+
+void TaskQueue::PostTaskAndReply(std::unique_ptr<QueuedTask> task,
+ std::unique_ptr<QueuedTask> reply) {
+ return PostTaskAndReply(std::move(task), std::move(reply), Current());
+}
+
+// static
+bool TaskQueue::ThreadMain(void* context) {
+ std::unordered_map<UINT_PTR, std::unique_ptr<QueuedTask>> delayed_tasks;
+
+ BOOL ret;
+ MSG msg;
+
+ while ((ret = GetMessage(&msg, nullptr, 0, 0)) != 0 && ret != -1) {
+ if (!msg.hwnd) {
+ switch (msg.message) {
+ case WM_RUN_TASK: {
+ QueuedTask* task = reinterpret_cast<QueuedTask*>(msg.lParam);
+ if (task->Run())
+ delete task;
+ break;
+ }
+ case WM_QUEUE_DELAYED_TASK: {
+ QueuedTask* task = reinterpret_cast<QueuedTask*>(msg.lParam);
+ uint32_t milliseconds = msg.wParam & 0xFFFFFFFF;
+#if defined(_WIN64)
+ // Subtract the time it took to queue the timer.
+ const DWORD now = GetTickCount();
+ DWORD post_time = now - (msg.wParam >> 32);
+ milliseconds =
+ post_time > milliseconds ? 0 : milliseconds - post_time;
+#endif
+ UINT_PTR timer_id = SetTimer(nullptr, 0, milliseconds, nullptr);
+ delayed_tasks.insert(std::make_pair(timer_id, task));
+ break;
+ }
+ case WM_TIMER: {
+ KillTimer(nullptr, msg.wParam);
+ auto found = delayed_tasks.find(msg.wParam);
+ RTC_DCHECK(found != delayed_tasks.end());
+ if (!found->second->Run())
+ found->second.release();
+ delayed_tasks.erase(found);
+ break;
+ }
+ default:
+ RTC_NOTREACHED();
+ break;
+ }
+ } else {
+ TranslateMessage(&msg);
+ DispatchMessage(&msg);
+ }
+ }
+
+ return false;
+}
+} // namespace rtc
diff --git a/chromium/third_party/webrtc/base/task_unittest.cc b/chromium/third_party/webrtc/base/task_unittest.cc
index 7492436a5d1..f3ccc24a8df 100644
--- a/chromium/third_party/webrtc/base/task_unittest.cc
+++ b/chromium/third_party/webrtc/base/task_unittest.cc
@@ -22,6 +22,7 @@
#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/task.h"
@@ -32,7 +33,7 @@
namespace rtc {
static int64_t GetCurrentTime() {
- return static_cast<int64_t>(Time()) * 10000;
+ return TimeMillis() * 10000;
}
// feel free to change these numbers. Note that '0' won't work, though
diff --git a/chromium/third_party/webrtc/base/taskparent.h b/chromium/third_party/webrtc/base/taskparent.h
index 41008fa98ee..3c0b81cf6a9 100644
--- a/chromium/third_party/webrtc/base/taskparent.h
+++ b/chromium/third_party/webrtc/base/taskparent.h
@@ -11,10 +11,11 @@
#ifndef WEBRTC_BASE_TASKPARENT_H__
#define WEBRTC_BASE_TASKPARENT_H__
+#include <memory>
#include <set>
#include "webrtc/base/basictypes.h"
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/constructormagic.h"
namespace rtc {
@@ -52,7 +53,7 @@ class TaskParent {
TaskRunner *runner_;
bool child_error_;
typedef std::set<Task *> ChildSet;
- scoped_ptr<ChildSet> children_;
+ std::unique_ptr<ChildSet> children_;
RTC_DISALLOW_COPY_AND_ASSIGN(TaskParent);
};
diff --git a/chromium/third_party/webrtc/base/taskrunner.cc b/chromium/third_party/webrtc/base/taskrunner.cc
index c50c9f833ec..73916a07194 100644
--- a/chromium/third_party/webrtc/base/taskrunner.cc
+++ b/chromium/third_party/webrtc/base/taskrunner.cc
@@ -13,7 +13,6 @@
#include "webrtc/base/taskrunner.h"
#include "webrtc/base/common.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/task.h"
#include "webrtc/base/logging.h"
diff --git a/chromium/third_party/webrtc/base/testclient.h b/chromium/third_party/webrtc/base/testclient.h
index 5d8ee98a6fb..df831fefb75 100644
--- a/chromium/third_party/webrtc/base/testclient.h
+++ b/chromium/third_party/webrtc/base/testclient.h
@@ -13,6 +13,7 @@
#include <vector>
#include "webrtc/base/asyncudpsocket.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
namespace rtc {
diff --git a/chromium/third_party/webrtc/base/testechoserver.h b/chromium/third_party/webrtc/base/testechoserver.h
index 51d7d539e4f..76d714b4f4a 100644
--- a/chromium/third_party/webrtc/base/testechoserver.h
+++ b/chromium/third_party/webrtc/base/testechoserver.h
@@ -12,7 +12,9 @@
#define WEBRTC_BASE_TESTECHOSERVER_H_
#include <list>
+#include <memory>
#include "webrtc/base/asynctcpsocket.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/socketaddress.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/thread.h"
@@ -63,7 +65,7 @@ class TestEchoServer : public sigslot::has_slots<> {
}
typedef std::list<AsyncTCPSocket*> ClientList;
- scoped_ptr<AsyncSocket> server_socket_;
+ std::unique_ptr<AsyncSocket> server_socket_;
ClientList client_sockets_;
RTC_DISALLOW_COPY_AND_ASSIGN(TestEchoServer);
};
diff --git a/chromium/third_party/webrtc/base/testutils.h b/chromium/third_party/webrtc/base/testutils.h
index 6e7e22a928c..e5e571b9bf9 100644
--- a/chromium/third_party/webrtc/base/testutils.h
+++ b/chromium/third_party/webrtc/base/testutils.h
@@ -24,6 +24,7 @@
#include <algorithm>
#include <map>
+#include <memory>
#include <vector>
#include "webrtc/base/arraysize.h"
#include "webrtc/base/asyncsocket.h"
@@ -371,7 +372,7 @@ private:
void OnCloseEvent(AsyncSocket* socket, int error) {
}
- scoped_ptr<AsyncSocket> socket_;
+ std::unique_ptr<AsyncSocket> socket_;
Buffer send_buffer_, recv_buffer_;
};
@@ -414,7 +415,7 @@ class SocketTestServer : public sigslot::has_slots<> {
clients_.push_back(new SocketTestClient(accepted));
}
- scoped_ptr<AsyncSocket> socket_;
+ std::unique_ptr<AsyncSocket> socket_;
std::vector<SocketTestClient*> clients_;
};
diff --git a/chromium/third_party/webrtc/base/thread.cc b/chromium/third_party/webrtc/base/thread.cc
index dc8ccdfd4dd..3971169ae7d 100644
--- a/chromium/third_party/webrtc/base/thread.cc
+++ b/chromium/third_party/webrtc/base/thread.cc
@@ -22,6 +22,7 @@
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
+#include "webrtc/base/nullsocketserver.h"
#include "webrtc/base/platform_thread.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/base/timeutils.h"
@@ -138,7 +139,9 @@ Thread::ScopedDisallowBlockingCalls::~ScopedDisallowBlockingCalls() {
thread_->SetAllowBlockingCalls(previous_state_);
}
-Thread::Thread(SocketServer* ss, bool init_queue)
+Thread::Thread() : Thread(SocketServer::CreateDefault()) {}
+
+Thread::Thread(SocketServer* ss)
: MessageQueue(ss, false),
running_(true, false),
#if defined(WEBRTC_WIN)
@@ -148,9 +151,20 @@ Thread::Thread(SocketServer* ss, bool init_queue)
owned_(true),
blocking_calls_allowed_(true) {
SetName("Thread", this); // default name
- if (init_queue) {
- DoInit();
- }
+ DoInit();
+}
+
+Thread::Thread(std::unique_ptr<SocketServer> ss)
+ : MessageQueue(std::move(ss), false),
+ running_(true, false),
+#if defined(WEBRTC_WIN)
+ thread_(NULL),
+ thread_id_(0),
+#endif
+ owned_(true),
+ blocking_calls_allowed_(true) {
+ SetName("Thread", this); // default name
+ DoInit();
}
Thread::~Thread() {
@@ -158,6 +172,15 @@ Thread::~Thread() {
DoDestroy();
}
+std::unique_ptr<Thread> Thread::CreateWithSocketServer() {
+ return std::unique_ptr<Thread>(new Thread(SocketServer::CreateDefault()));
+}
+
+std::unique_ptr<Thread> Thread::Create() {
+ return std::unique_ptr<Thread>(
+ new Thread(std::unique_ptr<SocketServer>(new NullSocketServer())));
+}
+
bool Thread::SleepMs(int milliseconds) {
AssertBlockingIsAllowedOnCurrentThread();
@@ -459,7 +482,7 @@ void Thread::Clear(MessageHandler* phandler,
}
bool Thread::ProcessMessages(int cmsLoop) {
- uint32_t msEnd = (kForever == cmsLoop) ? 0 : TimeAfter(cmsLoop);
+ int64_t msEnd = (kForever == cmsLoop) ? 0 : TimeAfter(cmsLoop);
int cmsNext = cmsLoop;
while (true) {
@@ -513,7 +536,7 @@ bool Thread::WrapCurrentWithThreadManager(ThreadManager* thread_manager,
return true;
}
-AutoThread::AutoThread(SocketServer* ss) : Thread(ss) {
+AutoThread::AutoThread() {
if (!ThreadManager::Instance()->CurrentThread()) {
ThreadManager::Instance()->SetCurrentThread(this);
}
diff --git a/chromium/third_party/webrtc/base/thread.h b/chromium/third_party/webrtc/base/thread.h
index df5a686952f..1a0c447264e 100644
--- a/chromium/third_party/webrtc/base/thread.h
+++ b/chromium/third_party/webrtc/base/thread.h
@@ -13,6 +13,7 @@
#include <algorithm>
#include <list>
+#include <memory>
#include <string>
#include <vector>
@@ -95,11 +96,9 @@ class Runnable {
class Thread : public MessageQueue {
public:
// Create a new Thread and optionally assign it to the passed SocketServer.
- // Subclasses that override Clear should pass false for init_queue and call
- // DoInit() from their constructor to prevent races with the
- // MessageQueueManager already using the object while the vtable is still
- // being created.
- explicit Thread(SocketServer* ss = nullptr, bool init_queue = true);
+ Thread();
+ explicit Thread(SocketServer* ss);
+ explicit Thread(std::unique_ptr<SocketServer> ss);
// NOTE: ALL SUBCLASSES OF Thread MUST CALL Stop() IN THEIR DESTRUCTORS (or
// guarantee Stop() is explicitly called before the subclass is destroyed).
@@ -107,6 +106,8 @@ class Thread : public MessageQueue {
// vtable, and the Thread::PreRun calling the virtual method Run().
~Thread() override;
+ static std::unique_ptr<Thread> CreateWithSocketServer();
+ static std::unique_ptr<Thread> Create();
static Thread* Current();
// Used to catch performance regressions. Use this to disallow blocking calls
@@ -291,7 +292,7 @@ class Thread : public MessageQueue {
class AutoThread : public Thread {
public:
- explicit AutoThread(SocketServer* ss = nullptr);
+ AutoThread();
~AutoThread() override;
private:
diff --git a/chromium/third_party/webrtc/base/thread_checker_unittest.cc b/chromium/third_party/webrtc/base/thread_checker_unittest.cc
index 338190093d0..372f6f4a77f 100644
--- a/chromium/third_party/webrtc/base/thread_checker_unittest.cc
+++ b/chromium/third_party/webrtc/base/thread_checker_unittest.cc
@@ -10,11 +10,13 @@
// Borrowed from Chromium's src/base/threading/thread_checker_unittest.cc.
+#include <memory>
+
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/thread_checker.h"
-#include "webrtc/base/scoped_ptr.h"
// Duplicated from base/threading/thread_checker.h so that we can be
// good citizens there and undef the macro.
@@ -91,7 +93,7 @@ class DeleteThreadCheckerClassOnThread : public Thread {
}
private:
- scoped_ptr<ThreadCheckerClass> thread_checker_class_;
+ std::unique_ptr<ThreadCheckerClass> thread_checker_class_;
RTC_DISALLOW_COPY_AND_ASSIGN(DeleteThreadCheckerClassOnThread);
};
@@ -99,7 +101,7 @@ class DeleteThreadCheckerClassOnThread : public Thread {
} // namespace
TEST(ThreadCheckerTest, CallsAllowedOnSameThread) {
- scoped_ptr<ThreadCheckerClass> thread_checker_class(
+ std::unique_ptr<ThreadCheckerClass> thread_checker_class(
new ThreadCheckerClass);
// Verify that DoStuff doesn't assert.
@@ -110,7 +112,7 @@ TEST(ThreadCheckerTest, CallsAllowedOnSameThread) {
}
TEST(ThreadCheckerTest, DestructorAllowedOnDifferentThread) {
- scoped_ptr<ThreadCheckerClass> thread_checker_class(
+ std::unique_ptr<ThreadCheckerClass> thread_checker_class(
new ThreadCheckerClass);
// Verify that the destructor doesn't assert
@@ -123,7 +125,7 @@ TEST(ThreadCheckerTest, DestructorAllowedOnDifferentThread) {
}
TEST(ThreadCheckerTest, DetachFromThread) {
- scoped_ptr<ThreadCheckerClass> thread_checker_class(
+ std::unique_ptr<ThreadCheckerClass> thread_checker_class(
new ThreadCheckerClass);
// Verify that DoStuff doesn't assert when called on a different thread after
@@ -138,7 +140,7 @@ TEST(ThreadCheckerTest, DetachFromThread) {
#if GTEST_HAS_DEATH_TEST || !ENABLE_THREAD_CHECKER
void ThreadCheckerClass::MethodOnDifferentThreadImpl() {
- scoped_ptr<ThreadCheckerClass> thread_checker_class(
+ std::unique_ptr<ThreadCheckerClass> thread_checker_class(
new ThreadCheckerClass);
// DoStuff should assert in debug builds only when called on a
@@ -162,7 +164,7 @@ TEST(ThreadCheckerTest, MethodAllowedOnDifferentThreadInRelease) {
#endif // ENABLE_THREAD_CHECKER
void ThreadCheckerClass::DetachThenCallFromDifferentThreadImpl() {
- scoped_ptr<ThreadCheckerClass> thread_checker_class(
+ std::unique_ptr<ThreadCheckerClass> thread_checker_class(
new ThreadCheckerClass);
// DoStuff doesn't assert when called on a different thread
diff --git a/chromium/third_party/webrtc/base/thread_unittest.cc b/chromium/third_party/webrtc/base/thread_unittest.cc
index 7889e29da8d..bf3cbd08963 100644
--- a/chromium/third_party/webrtc/base/thread_unittest.cc
+++ b/chromium/third_party/webrtc/base/thread_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/base/asyncinvoker.h"
#include "webrtc/base/asyncudpsocket.h"
#include "webrtc/base/event.h"
@@ -589,17 +591,18 @@ class GuardedAsyncInvokeTest : public testing::Test {
// Functor for creating an invoker.
struct CreateInvoker {
- CreateInvoker(scoped_ptr<GuardedAsyncInvoker>* invoker) : invoker_(invoker) {}
+ CreateInvoker(std::unique_ptr<GuardedAsyncInvoker>* invoker)
+ : invoker_(invoker) {}
void operator()() { invoker_->reset(new GuardedAsyncInvoker()); }
- scoped_ptr<GuardedAsyncInvoker>* invoker_;
+ std::unique_ptr<GuardedAsyncInvoker>* invoker_;
};
// Test that we can call AsyncInvoke<void>() after the thread died.
TEST_F(GuardedAsyncInvokeTest, KillThreadFireAndForget) {
// Create and start the thread.
- scoped_ptr<Thread> thread(new Thread());
+ std::unique_ptr<Thread> thread(new Thread());
thread->Start();
- scoped_ptr<GuardedAsyncInvoker> invoker;
+ std::unique_ptr<GuardedAsyncInvoker> invoker;
// Create the invoker on |thread|.
thread->Invoke<void>(CreateInvoker(&invoker));
// Kill |thread|.
@@ -615,9 +618,9 @@ TEST_F(GuardedAsyncInvokeTest, KillThreadFireAndForget) {
// Test that we can call AsyncInvoke with callback after the thread died.
TEST_F(GuardedAsyncInvokeTest, KillThreadWithCallback) {
// Create and start the thread.
- scoped_ptr<Thread> thread(new Thread());
+ std::unique_ptr<Thread> thread(new Thread());
thread->Start();
- scoped_ptr<GuardedAsyncInvoker> invoker;
+ std::unique_ptr<GuardedAsyncInvoker> invoker;
// Create the invoker on |thread|.
thread->Invoke<void>(CreateInvoker(&invoker));
// Kill |thread|.
diff --git a/chromium/third_party/webrtc/base/timeutils.cc b/chromium/third_party/webrtc/base/timeutils.cc
index de3e6afb28d..a9fe49d6075 100644
--- a/chromium/third_party/webrtc/base/timeutils.cc
+++ b/chromium/third_party/webrtc/base/timeutils.cc
@@ -30,8 +30,6 @@
namespace rtc {
-const uint32_t HALF = 0x80000000;
-
uint64_t TimeNanos() {
int64_t ticks = 0;
#if defined(WEBRTC_MAC)
@@ -82,7 +80,7 @@ uint32_t Time32() {
return static_cast<uint32_t>(TimeNanos() / kNumNanosecsPerMillisec);
}
-int64_t Time64() {
+int64_t TimeMillis() {
return static_cast<int64_t>(TimeNanos() / kNumNanosecsPerMillisec);
}
@@ -90,27 +88,16 @@ uint64_t TimeMicros() {
return static_cast<uint64_t>(TimeNanos() / kNumNanosecsPerMicrosec);
}
-uint32_t TimeAfter(int32_t elapsed) {
+int64_t TimeAfter(int64_t elapsed) {
RTC_DCHECK_GE(elapsed, 0);
- RTC_DCHECK_LT(static_cast<uint32_t>(elapsed), HALF);
- return Time() + elapsed;
-}
-
-bool TimeIsLaterOrEqual(uint32_t earlier, uint32_t later) {
- int32_t diff = later - earlier;
- return (diff >= 0 && static_cast<uint32_t>(diff) < HALF);
-}
-
-bool TimeIsLater(uint32_t earlier, uint32_t later) {
- int32_t diff = later - earlier;
- return (diff > 0 && static_cast<uint32_t>(diff) < HALF);
+ return TimeMillis() + elapsed;
}
-int32_t TimeDiff(uint32_t later, uint32_t earlier) {
+int32_t TimeDiff32(uint32_t later, uint32_t earlier) {
return later - earlier;
}
-int64_t TimeDiff64(int64_t later, int64_t earlier) {
+int64_t TimeDiff(int64_t later, int64_t earlier) {
return later - earlier;
}
diff --git a/chromium/third_party/webrtc/base/timeutils.h b/chromium/third_party/webrtc/base/timeutils.h
index 4af0d9538f0..222d5c26e47 100644
--- a/chromium/third_party/webrtc/base/timeutils.h
+++ b/chromium/third_party/webrtc/base/timeutils.h
@@ -29,58 +29,40 @@ static const int64_t kNumNanosecsPerMillisec =
static const int64_t kNumNanosecsPerMicrosec =
kNumNanosecsPerSec / kNumMicrosecsPerSec;
-typedef uint32_t TimeStamp;
+// TODO(honghaiz): Define a type for the time value specifically.
// Returns the current time in milliseconds in 32 bits.
uint32_t Time32();
// Returns the current time in milliseconds in 64 bits.
-int64_t Time64();
-
-// Returns the current time in milliseconds.
-// TODO(honghaiz): Returns Time64 once majority of the webrtc code migrates to
-// 64-bit timestamp.
-inline uint32_t Time() {
- return Time32();
+int64_t TimeMillis();
+// Deprecated. Do not use this in any new code.
+inline int64_t Time() {
+ return TimeMillis();
}
// Returns the current time in microseconds.
uint64_t TimeMicros();
+
// Returns the current time in nanoseconds.
uint64_t TimeNanos();
// Returns a future timestamp, 'elapsed' milliseconds from now.
-uint32_t TimeAfter(int32_t elapsed);
-
-bool TimeIsLaterOrEqual(uint32_t earlier, uint32_t later); // Inclusive
-bool TimeIsLater(uint32_t earlier, uint32_t later); // Exclusive
-
-// Returns the later of two timestamps.
-inline uint32_t TimeMax(uint32_t ts1, uint32_t ts2) {
- return TimeIsLaterOrEqual(ts1, ts2) ? ts2 : ts1;
-}
-
-// Returns the earlier of two timestamps.
-inline uint32_t TimeMin(uint32_t ts1, uint32_t ts2) {
- return TimeIsLaterOrEqual(ts1, ts2) ? ts1 : ts2;
-}
-
-// Number of milliseconds that would elapse between 'earlier' and 'later'
-// timestamps. The value is negative if 'later' occurs before 'earlier'.
-int32_t TimeDiff(uint32_t later, uint32_t earlier);
+int64_t TimeAfter(int64_t elapsed);
// Number of milliseconds that would elapse between 'earlier' and 'later'
// timestamps. The value is negative if 'later' occurs before 'earlier'.
-int64_t TimeDiff64(int64_t later, int64_t earlier);
+int64_t TimeDiff(int64_t later, int64_t earlier);
+int32_t TimeDiff32(uint32_t later, uint32_t earlier);
// The number of milliseconds that have elapsed since 'earlier'.
-inline int32_t TimeSince(uint32_t earlier) {
- return TimeDiff(Time(), earlier);
+inline int64_t TimeSince(int64_t earlier) {
+ return TimeMillis() - earlier;
}
// The number of milliseconds that will elapse between now and 'later'.
-inline int32_t TimeUntil(uint32_t later) {
- return TimeDiff(later, Time());
+inline int64_t TimeUntil(uint64_t later) {
+ return later - TimeMillis();
}
class TimestampWrapAroundHandler {
diff --git a/chromium/third_party/webrtc/base/timeutils_unittest.cc b/chromium/third_party/webrtc/base/timeutils_unittest.cc
index 61e41b7c644..0971c037fa5 100644
--- a/chromium/third_party/webrtc/base/timeutils_unittest.cc
+++ b/chromium/third_party/webrtc/base/timeutils_unittest.cc
@@ -17,59 +17,21 @@
namespace rtc {
TEST(TimeTest, TimeInMs) {
- uint32_t ts_earlier = Time();
+ int64_t ts_earlier = TimeMillis();
Thread::SleepMs(100);
- uint32_t ts_now = Time();
+ int64_t ts_now = TimeMillis();
// Allow for the thread to wakeup ~20ms early.
EXPECT_GE(ts_now, ts_earlier + 80);
// Make sure the Time is not returning in smaller unit like microseconds.
EXPECT_LT(ts_now, ts_earlier + 1000);
}
-TEST(TimeTest, Comparison) {
- // Obtain two different times, in known order
- TimeStamp ts_earlier = Time();
- Thread::SleepMs(100);
- TimeStamp ts_now = Time();
- EXPECT_NE(ts_earlier, ts_now);
-
- // Common comparisons
- EXPECT_TRUE( TimeIsLaterOrEqual(ts_earlier, ts_now));
- EXPECT_TRUE( TimeIsLater( ts_earlier, ts_now));
- EXPECT_FALSE(TimeIsLaterOrEqual(ts_now, ts_earlier));
- EXPECT_FALSE(TimeIsLater( ts_now, ts_earlier));
-
- // Edge cases
- EXPECT_TRUE( TimeIsLaterOrEqual(ts_earlier, ts_earlier));
- EXPECT_FALSE(TimeIsLater( ts_earlier, ts_earlier));
-
- // Obtain a third time
- TimeStamp ts_later = TimeAfter(100);
- EXPECT_NE(ts_now, ts_later);
- EXPECT_TRUE( TimeIsLater(ts_now, ts_later));
- EXPECT_TRUE( TimeIsLater(ts_earlier, ts_later));
-
- // Earlier of two times
- EXPECT_EQ(ts_earlier, TimeMin(ts_earlier, ts_earlier));
- EXPECT_EQ(ts_earlier, TimeMin(ts_earlier, ts_now));
- EXPECT_EQ(ts_earlier, TimeMin(ts_earlier, ts_later));
- EXPECT_EQ(ts_earlier, TimeMin(ts_now, ts_earlier));
- EXPECT_EQ(ts_earlier, TimeMin(ts_later, ts_earlier));
-
- // Later of two times
- EXPECT_EQ(ts_earlier, TimeMax(ts_earlier, ts_earlier));
- EXPECT_EQ(ts_now, TimeMax(ts_earlier, ts_now));
- EXPECT_EQ(ts_later, TimeMax(ts_earlier, ts_later));
- EXPECT_EQ(ts_now, TimeMax(ts_now, ts_earlier));
- EXPECT_EQ(ts_later, TimeMax(ts_later, ts_earlier));
-}
-
TEST(TimeTest, Intervals) {
- TimeStamp ts_earlier = Time();
- TimeStamp ts_later = TimeAfter(500);
+ int64_t ts_earlier = TimeMillis();
+ int64_t ts_later = TimeAfter(500);
// We can't depend on ts_later and ts_earlier to be exactly 500 apart
- // since time elapses between the calls to Time() and TimeAfter(500)
+ // since time elapses between the calls to TimeMillis() and TimeAfter(500)
EXPECT_LE(500, TimeDiff(ts_later, ts_earlier));
EXPECT_GE(-500, TimeDiff(ts_earlier, ts_later));
@@ -87,36 +49,9 @@ TEST(TimeTest, Intervals) {
EXPECT_LE(TimeUntil(ts_later), 500);
}
-TEST(TimeTest, BoundaryComparison) {
- // Obtain two different times, in known order
- TimeStamp ts_earlier = static_cast<TimeStamp>(-50);
- TimeStamp ts_later = ts_earlier + 100;
- EXPECT_NE(ts_earlier, ts_later);
-
- // Common comparisons
- EXPECT_TRUE( TimeIsLaterOrEqual(ts_earlier, ts_later));
- EXPECT_TRUE( TimeIsLater( ts_earlier, ts_later));
- EXPECT_FALSE(TimeIsLaterOrEqual(ts_later, ts_earlier));
- EXPECT_FALSE(TimeIsLater( ts_later, ts_earlier));
-
- // Earlier of two times
- EXPECT_EQ(ts_earlier, TimeMin(ts_earlier, ts_earlier));
- EXPECT_EQ(ts_earlier, TimeMin(ts_earlier, ts_later));
- EXPECT_EQ(ts_earlier, TimeMin(ts_later, ts_earlier));
-
- // Later of two times
- EXPECT_EQ(ts_earlier, TimeMax(ts_earlier, ts_earlier));
- EXPECT_EQ(ts_later, TimeMax(ts_earlier, ts_later));
- EXPECT_EQ(ts_later, TimeMax(ts_later, ts_earlier));
-
- // Interval
- EXPECT_EQ(100, TimeDiff(ts_later, ts_earlier));
- EXPECT_EQ(-100, TimeDiff(ts_earlier, ts_later));
-}
-
TEST(TimeTest, TestTimeDiff64) {
int64_t ts_diff = 100;
- int64_t ts_earlier = rtc::Time64();
+ int64_t ts_earlier = rtc::TimeMillis();
int64_t ts_later = ts_earlier + ts_diff;
EXPECT_EQ(ts_diff, rtc::TimeDiff(ts_later, ts_earlier));
EXPECT_EQ(-ts_diff, rtc::TimeDiff(ts_earlier, ts_later));
diff --git a/chromium/third_party/webrtc/base/virtualsocket_unittest.cc b/chromium/third_party/webrtc/base/virtualsocket_unittest.cc
index 2cd2b5e4de6..e63310423f0 100644
--- a/chromium/third_party/webrtc/base/virtualsocket_unittest.cc
+++ b/chromium/third_party/webrtc/base/virtualsocket_unittest.cc
@@ -14,6 +14,8 @@
#include <netinet/in.h>
#endif
+#include <memory>
+
#include "webrtc/base/arraysize.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/gunit.h"
@@ -33,7 +35,7 @@ struct Sender : public MessageHandler {
done(false),
rate(rt),
count(0) {
- last_send = rtc::Time();
+ last_send = rtc::TimeMillis();
thread->PostDelayed(NextDelay(), this, 1);
}
@@ -48,9 +50,9 @@ struct Sender : public MessageHandler {
if (done)
return;
- uint32_t cur_time = rtc::Time();
- uint32_t delay = cur_time - last_send;
- uint32_t size = rate * delay / 1000;
+ int64_t cur_time = rtc::TimeMillis();
+ int64_t delay = cur_time - last_send;
+ uint32_t size = static_cast<uint32_t>(rate * delay / 1000);
size = std::min<uint32_t>(size, 4096);
size = std::max<uint32_t>(size, sizeof(uint32_t));
@@ -63,12 +65,12 @@ struct Sender : public MessageHandler {
}
Thread* thread;
- scoped_ptr<AsyncUDPSocket> socket;
+ std::unique_ptr<AsyncUDPSocket> socket;
rtc::PacketOptions options;
bool done;
uint32_t rate; // bytes per second
uint32_t count;
- uint32_t last_send;
+ int64_t last_send;
char dummy[4096];
};
@@ -101,7 +103,7 @@ struct Receiver : public MessageHandler, public sigslot::has_slots<> {
sec_count += size;
uint32_t send_time = *reinterpret_cast<const uint32_t*>(data);
- uint32_t recv_time = rtc::Time();
+ uint32_t recv_time = rtc::TimeMillis();
uint32_t delay = recv_time - send_time;
sum += delay;
sum_sq += delay * delay;
@@ -123,7 +125,7 @@ struct Receiver : public MessageHandler, public sigslot::has_slots<> {
}
Thread* thread;
- scoped_ptr<AsyncUDPSocket> socket;
+ std::unique_ptr<AsyncUDPSocket> socket;
uint32_t bandwidth;
bool done;
size_t count;
@@ -345,11 +347,11 @@ class VirtualSocketServerTest : public testing::Test {
EmptySocketAddressWithFamily(initial_addr.family());
// Create client and server
- scoped_ptr<AsyncSocket> client(ss_->CreateAsyncSocket(initial_addr.family(),
- SOCK_STREAM));
+ std::unique_ptr<AsyncSocket> client(
+ ss_->CreateAsyncSocket(initial_addr.family(), SOCK_STREAM));
sink.Monitor(client.get());
- scoped_ptr<AsyncSocket> server(ss_->CreateAsyncSocket(initial_addr.family(),
- SOCK_STREAM));
+ std::unique_ptr<AsyncSocket> server(
+ ss_->CreateAsyncSocket(initial_addr.family(), SOCK_STREAM));
sink.Monitor(server.get());
// Initiate connect
@@ -406,7 +408,7 @@ class VirtualSocketServerTest : public testing::Test {
// Server accepts connection
EXPECT_TRUE(sink.Check(server.get(), testing::SSE_READ));
- scoped_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
+ std::unique_ptr<AsyncSocket> accepted(server->Accept(&accept_addr));
ASSERT_TRUE(NULL != accepted.get());
sink.Monitor(accepted.get());
@@ -435,9 +437,8 @@ class VirtualSocketServerTest : public testing::Test {
a->Bind(initial_addr);
EXPECT_EQ(a->GetLocalAddress().family(), initial_addr.family());
-
- scoped_ptr<AsyncSocket> b(ss_->CreateAsyncSocket(initial_addr.family(),
- SOCK_STREAM));
+ std::unique_ptr<AsyncSocket> b(
+ ss_->CreateAsyncSocket(initial_addr.family(), SOCK_STREAM));
sink.Monitor(b.get());
b->Bind(initial_addr);
EXPECT_EQ(b->GetLocalAddress().family(), initial_addr.family());
diff --git a/chromium/third_party/webrtc/base/virtualsocketserver.cc b/chromium/third_party/webrtc/base/virtualsocketserver.cc
index c6d402f1f3f..c76fe42f1ee 100644
--- a/chromium/third_party/webrtc/base/virtualsocketserver.cc
+++ b/chromium/third_party/webrtc/base/virtualsocketserver.cc
@@ -15,6 +15,7 @@
#include <algorithm>
#include <map>
+#include <memory>
#include <vector>
#include "webrtc/base/checks.h"
@@ -500,15 +501,25 @@ int VirtualSocket::SendTcp(const void* pv, size_t cb) {
}
VirtualSocketServer::VirtualSocketServer(SocketServer* ss)
- : server_(ss), server_owned_(false), msg_queue_(NULL), stop_on_idle_(false),
- network_delay_(Time()), next_ipv4_(kInitialNextIPv4),
- next_ipv6_(kInitialNextIPv6), next_port_(kFirstEphemeralPort),
- bindings_(new AddressMap()), connections_(new ConnectionMap()),
- bandwidth_(0), network_capacity_(kDefaultNetworkCapacity),
+ : server_(ss),
+ server_owned_(false),
+ msg_queue_(NULL),
+ stop_on_idle_(false),
+ network_delay_(TimeMillis()),
+ next_ipv4_(kInitialNextIPv4),
+ next_ipv6_(kInitialNextIPv6),
+ next_port_(kFirstEphemeralPort),
+ bindings_(new AddressMap()),
+ connections_(new ConnectionMap()),
+ bandwidth_(0),
+ network_capacity_(kDefaultNetworkCapacity),
send_buffer_capacity_(kDefaultTcpBufferSize),
recv_buffer_capacity_(kDefaultTcpBufferSize),
- delay_mean_(0), delay_stddev_(0), delay_samples_(NUM_SAMPLES),
- delay_dist_(NULL), drop_prob_(0.0) {
+ delay_mean_(0),
+ delay_stddev_(0),
+ delay_samples_(NUM_SAMPLES),
+ delay_dist_(NULL),
+ drop_prob_(0.0) {
if (!server_) {
server_ = new PhysicalSocketServer();
server_owned_ = true;
@@ -567,7 +578,9 @@ AsyncSocket* VirtualSocketServer::CreateAsyncSocket(int family, int type) {
}
VirtualSocket* VirtualSocketServer::CreateSocketInternal(int family, int type) {
- return new VirtualSocket(this, family, type, true);
+ VirtualSocket* socket = new VirtualSocket(this, family, type, true);
+ SignalSocketCreated(socket);
+ return socket;
}
void VirtualSocketServer::SetMessageQueue(MessageQueue* msg_queue) {
@@ -771,7 +784,7 @@ int VirtualSocketServer::SendUdp(VirtualSocket* socket,
VirtualSocket* recipient = LookupBinding(remote_addr);
if (!recipient) {
// Make a fake recipient for address family checking.
- scoped_ptr<VirtualSocket> dummy_socket(
+ std::unique_ptr<VirtualSocket> dummy_socket(
CreateSocketInternal(AF_INET, SOCK_DGRAM));
dummy_socket->SetLocalAddress(remote_addr);
if (!CanInteractWith(socket, dummy_socket.get())) {
@@ -791,7 +804,7 @@ int VirtualSocketServer::SendUdp(VirtualSocket* socket,
CritScope cs(&socket->crit_);
- uint32_t cur_time = Time();
+ int64_t cur_time = TimeMillis();
PurgeNetworkPackets(socket, cur_time);
// Determine whether we have enough bandwidth to accept this packet. To do
@@ -831,7 +844,7 @@ void VirtualSocketServer::SendTcp(VirtualSocket* socket) {
CritScope cs(&socket->crit_);
- uint32_t cur_time = Time();
+ int64_t cur_time = TimeMillis();
PurgeNetworkPackets(socket, cur_time);
while (true) {
@@ -866,7 +879,7 @@ void VirtualSocketServer::SendTcp(VirtualSocket* socket) {
void VirtualSocketServer::AddPacketToNetwork(VirtualSocket* sender,
VirtualSocket* recipient,
- uint32_t cur_time,
+ int64_t cur_time,
const char* data,
size_t data_size,
size_t header_size,
@@ -894,19 +907,19 @@ void VirtualSocketServer::AddPacketToNetwork(VirtualSocket* sender,
// Post the packet as a message to be delivered (on our own thread)
Packet* p = new Packet(data, data_size, sender_addr);
- uint32_t ts = TimeAfter(send_delay + transit_delay);
+ int64_t ts = TimeAfter(send_delay + transit_delay);
if (ordered) {
// Ensure that new packets arrive after previous ones
// TODO: consider ordering on a per-socket basis, since this
- // introduces artifical delay.
- ts = TimeMax(ts, network_delay_);
+ // introduces artificial delay.
+ ts = std::max(ts, network_delay_);
}
msg_queue_->PostAt(ts, recipient, MSG_ID_PACKET, p);
- network_delay_ = TimeMax(ts, network_delay_);
+ network_delay_ = std::max(ts, network_delay_);
}
void VirtualSocketServer::PurgeNetworkPackets(VirtualSocket* socket,
- uint32_t cur_time) {
+ int64_t cur_time) {
while (!socket->network_.empty() &&
(socket->network_.front().done_time <= cur_time)) {
ASSERT(socket->network_size_ >= socket->network_.front().size);
diff --git a/chromium/third_party/webrtc/base/virtualsocketserver.h b/chromium/third_party/webrtc/base/virtualsocketserver.h
index daf0145a26c..897ba9e5ebc 100644
--- a/chromium/third_party/webrtc/base/virtualsocketserver.h
+++ b/chromium/third_party/webrtc/base/virtualsocketserver.h
@@ -16,6 +16,7 @@
#include <deque>
#include <map>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/messagequeue.h"
#include "webrtc/base/socketserver.h"
@@ -121,6 +122,9 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> {
bool CloseTcpConnections(const SocketAddress& addr_local,
const SocketAddress& addr_remote);
+ // For testing purpose only. Fired when a client socket is created.
+ sigslot::signal1<VirtualSocket*> SignalSocketCreated;
+
protected:
// Returns a new IP not used before in this network.
IPAddress GetNextIP(int family);
@@ -168,14 +172,14 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> {
// Places a packet on the network.
void AddPacketToNetwork(VirtualSocket* socket,
VirtualSocket* recipient,
- uint32_t cur_time,
+ int64_t cur_time,
const char* data,
size_t data_size,
size_t header_size,
bool ordered);
// Removes stale packets from the network
- void PurgeNetworkPackets(VirtualSocket* socket, uint32_t cur_time);
+ void PurgeNetworkPackets(VirtualSocket* socket, int64_t cur_time);
// Computes the number of milliseconds required to send a packet of this size.
uint32_t SendDelay(uint32_t size);
@@ -226,7 +230,7 @@ class VirtualSocketServer : public SocketServer, public sigslot::has_slots<> {
bool server_owned_;
MessageQueue* msg_queue_;
bool stop_on_idle_;
- uint32_t network_delay_;
+ int64_t network_delay_;
in_addr next_ipv4_;
in6_addr next_ipv6_;
uint16_t next_port_;
@@ -292,7 +296,7 @@ class VirtualSocket : public AsyncSocket, public MessageHandler {
private:
struct NetworkEntry {
size_t size;
- uint32_t done_time;
+ int64_t done_time;
};
typedef std::deque<SocketAddress> ListenQueue;
diff --git a/chromium/third_party/webrtc/base/win32filesystem.cc b/chromium/third_party/webrtc/base/win32filesystem.cc
index b731974bac4..84574032e68 100644
--- a/chromium/third_party/webrtc/base/win32filesystem.cc
+++ b/chromium/third_party/webrtc/base/win32filesystem.cc
@@ -15,10 +15,11 @@
#include <shlobj.h>
#include <tchar.h>
+#include <memory>
+
#include "webrtc/base/arraysize.h"
#include "webrtc/base/fileutils.h"
#include "webrtc/base/pathutils.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/stream.h"
#include "webrtc/base/stringutils.h"
@@ -95,7 +96,7 @@ bool Win32Filesystem::CreatePrivateFile(const Pathname &filename) {
&token_user_size);
// Get the TOKEN_USER structure.
- scoped_ptr<char[]> token_user_bytes(new char[token_user_size]);
+ std::unique_ptr<char[]> token_user_bytes(new char[token_user_size]);
PTOKEN_USER token_user = reinterpret_cast<PTOKEN_USER>(
token_user_bytes.get());
memset(token_user, 0, token_user_size);
@@ -121,7 +122,7 @@ bool Win32Filesystem::CreatePrivateFile(const Pathname &filename) {
GetLengthSid(token_user->User.Sid);
// Allocate it.
- scoped_ptr<char[]> acl_bytes(new char[acl_size]);
+ std::unique_ptr<char[]> acl_bytes(new char[acl_size]);
PACL acl = reinterpret_cast<PACL>(acl_bytes.get());
memset(acl, 0, acl_size);
if (!::InitializeAcl(acl, acl_size, ACL_REVISION)) {
@@ -425,7 +426,7 @@ bool Win32Filesystem::GetDiskFreeSpace(const Pathname& path,
Pathname Win32Filesystem::GetCurrentDirectory() {
Pathname cwd;
int path_len = 0;
- scoped_ptr<wchar_t[]> path;
+ std::unique_ptr<wchar_t[]> path;
do {
int needed = ::GetCurrentDirectory(path_len, path.get());
if (needed == 0) {
diff --git a/chromium/third_party/webrtc/base/win32regkey.cc b/chromium/third_party/webrtc/base/win32regkey.cc
index c53386fffe4..447086aff5f 100644
--- a/chromium/third_party/webrtc/base/win32regkey.cc
+++ b/chromium/third_party/webrtc/base/win32regkey.cc
@@ -21,9 +21,10 @@
#include <shlwapi.h>
+#include <memory>
+
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
namespace rtc {
@@ -146,7 +147,7 @@ HRESULT RegKey::GetValue(const wchar_t* full_key_name,
byte* buffer_raw = nullptr;
HRESULT hr = GetValueStaticHelper(full_key_name, value_name,
REG_BINARY, &buffer_raw, &byte_count);
- scoped_ptr<byte[]> buffer(buffer_raw);
+ std::unique_ptr<byte[]> buffer(buffer_raw);
if (SUCCEEDED(hr)) {
ASSERT(byte_count == sizeof(*value));
if (byte_count == sizeof(*value)) {
@@ -166,7 +167,7 @@ HRESULT RegKey::GetValue(const wchar_t* full_key_name,
byte* buffer_raw = nullptr;
HRESULT hr = GetValueStaticHelper(full_key_name, value_name,
REG_BINARY, &buffer_raw, &byte_count);
- scoped_ptr<byte[]> buffer(buffer_raw);
+ std::unique_ptr<byte[]> buffer(buffer_raw);
if (SUCCEEDED(hr)) {
ASSERT(byte_count == sizeof(*value));
if (byte_count == sizeof(*value)) {
@@ -193,7 +194,7 @@ HRESULT RegKey::GetValue(const wchar_t* full_key_name,
wchar_t* buffer_raw = nullptr;
HRESULT hr = RegKey::GetValue(full_key_name, value_name, &buffer_raw);
- scoped_ptr<wchar_t[]> buffer(buffer_raw);
+ std::unique_ptr<wchar_t[]> buffer(buffer_raw);
if (SUCCEEDED(hr)) {
value->assign(buffer.get());
}
diff --git a/chromium/third_party/webrtc/base/win32socketserver.cc b/chromium/third_party/webrtc/base/win32socketserver.cc
index 72ce4ebb7c9..5423eed9bf0 100644
--- a/chromium/third_party/webrtc/base/win32socketserver.cc
+++ b/chromium/third_party/webrtc/base/win32socketserver.cc
@@ -627,7 +627,7 @@ void Win32Socket::OnSocketNotify(SOCKET socket, int event, int error) {
if (error != ERROR_SUCCESS) {
ReportWSAError("WSAAsync:connect notify", error, addr_);
#if !defined(NDEBUG)
- int32_t duration = TimeSince(connect_time_);
+ int64_t duration = TimeSince(connect_time_);
LOG(LS_INFO) << "WSAAsync:connect error (" << duration
<< " ms), faking close";
#endif
@@ -640,7 +640,7 @@ void Win32Socket::OnSocketNotify(SOCKET socket, int event, int error) {
SignalCloseEvent(this, error);
} else {
#if !defined(NDEBUG)
- int32_t duration = TimeSince(connect_time_);
+ int64_t duration = TimeSince(connect_time_);
LOG(LS_INFO) << "WSAAsync:connect (" << duration << " ms)";
#endif
state_ = CS_CONNECTED;
diff --git a/chromium/third_party/webrtc/base/win32socketserver.h b/chromium/third_party/webrtc/base/win32socketserver.h
index b468cfd9e3d..f47ed756964 100644
--- a/chromium/third_party/webrtc/base/win32socketserver.h
+++ b/chromium/third_party/webrtc/base/win32socketserver.h
@@ -159,6 +159,6 @@ class Win32Thread : public Thread {
} // namespace rtc
-#endif // WEBRTC_WIN
+#endif // WEBRTC_WIN
#endif // WEBRTC_BASE_WIN32SOCKETSERVER_H_
diff --git a/chromium/third_party/webrtc/base/windowpicker_unittest.cc b/chromium/third_party/webrtc/base/windowpicker_unittest.cc
index edd01bc0b2c..a1258327083 100644
--- a/chromium/third_party/webrtc/base/windowpicker_unittest.cc
+++ b/chromium/third_party/webrtc/base/windowpicker_unittest.cc
@@ -7,6 +7,8 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/base/gunit.h"
#include "webrtc/base/testutils.h"
#include "webrtc/base/window.h"
@@ -25,7 +27,7 @@ TEST(WindowPickerTest, GetWindowList) {
LOG(LS_INFO) << "skipping test: window capturing is not supported with "
<< "current configuration.";
}
- rtc::scoped_ptr<rtc::WindowPicker> picker(
+ std::unique_ptr<rtc::WindowPicker> picker(
rtc::WindowPickerFactory::CreateWindowPicker());
EXPECT_TRUE(picker->Init());
rtc::WindowDescriptionList descriptions;
@@ -40,7 +42,7 @@ TEST(WindowPickerTest, DISABLE_ON_MAC(GetDesktopList)) {
LOG(LS_INFO) << "skipping test: window capturing is not supported with "
<< "current configuration.";
}
- rtc::scoped_ptr<rtc::WindowPicker> picker(
+ std::unique_ptr<rtc::WindowPicker> picker(
rtc::WindowPickerFactory::CreateWindowPicker());
EXPECT_TRUE(picker->Init());
rtc::DesktopDescriptionList descriptions;
diff --git a/chromium/third_party/webrtc/base/x11windowpicker.cc b/chromium/third_party/webrtc/base/x11windowpicker.cc
index 21f71c61e3e..e588304d035 100644
--- a/chromium/third_party/webrtc/base/x11windowpicker.cc
+++ b/chromium/third_party/webrtc/base/x11windowpicker.cc
@@ -21,6 +21,7 @@
#include <X11/extensions/Xrender.h>
#include <X11/Xutil.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/logging.h"
namespace rtc {
diff --git a/chromium/third_party/webrtc/base/x11windowpicker.h b/chromium/third_party/webrtc/base/x11windowpicker.h
index 501adf5820b..d741759368b 100644
--- a/chromium/third_party/webrtc/base/x11windowpicker.h
+++ b/chromium/third_party/webrtc/base/x11windowpicker.h
@@ -11,8 +11,9 @@
#ifndef WEBRTC_BASE_LINUXWINDOWPICKER_H_
#define WEBRTC_BASE_LINUXWINDOWPICKER_H_
+#include <memory>
+
#include "webrtc/base/basictypes.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/windowpicker.h"
// Avoid include <X11/Xlib.h>.
@@ -44,7 +45,7 @@ class X11WindowPicker : public WindowPicker {
uint8_t* GetDesktopThumbnail(const DesktopId& id, int width, int height);
private:
- scoped_ptr<XWindowEnumerator> enumerator_;
+ std::unique_ptr<XWindowEnumerator> enumerator_;
};
} // namespace rtc
diff --git a/chromium/third_party/webrtc/build/apk_tests.gyp b/chromium/third_party/webrtc/build/apk_tests.gyp
index f7e9a902205..ba83acc14bb 100644
--- a/chromium/third_party/webrtc/build/apk_tests.gyp
+++ b/chromium/third_party/webrtc/build/apk_tests.gyp
@@ -17,13 +17,32 @@
'includes': [
'common.gypi',
],
+ 'variables': {
+ 'shard_timeout': 900,
+ },
'targets': [
{
+ 'target_name': 'audio_codec_speed_tests_apk',
+ 'type': 'none',
+ 'variables': {
+ 'test_suite_name': 'audio_codec_speed_tests',
+ 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)audio_codec_speed_tests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../modules/audio_codec_speed_tests.isolate',
+ },
+ 'dependencies': [
+ '<(webrtc_root)/modules/modules.gyp:audio_codec_speed_tests',
+ ],
+ 'includes': [
+ '../../build/apk_test.gypi',
+ ],
+ },
+ {
'target_name': 'audio_decoder_unittests_apk',
'type': 'none',
'variables': {
'test_suite_name': 'audio_decoder_unittests',
'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)audio_decoder_unittests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../modules/audio_decoder_unittests.isolate',
},
'dependencies': [
'<(webrtc_root)/modules/modules.gyp:audio_decoder_unittests',
@@ -38,6 +57,7 @@
'variables': {
'test_suite_name': 'common_audio_unittests',
'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)common_audio_unittests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../common_audio/common_audio_unittests.isolate',
},
'dependencies': [
'<(webrtc_root)/common_audio/common_audio.gyp:common_audio_unittests',
@@ -52,6 +72,7 @@
'variables': {
'test_suite_name': 'common_video_unittests',
'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)common_video_unittests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../common_video/common_video_unittests.isolate',
},
'dependencies': [
'<(webrtc_root)/common_video/common_video_unittests.gyp:common_video_unittests',
@@ -61,44 +82,47 @@
],
},
{
- 'target_name': 'peerconnection_unittests_apk',
+ 'target_name': 'modules_tests_apk',
'type': 'none',
'variables': {
- 'test_suite_name': 'peerconnection_unittests',
- 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)peerconnection_unittests<(SHARED_LIB_SUFFIX)',
+ 'test_suite_name': 'modules_tests',
+ 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)modules_tests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../modules/modules_tests.isolate',
},
'dependencies': [
- '<(webrtc_root)/api/api_tests.gyp:peerconnection_unittests',
- '<(webrtc_root)/api/api.gyp:libjingle_peerconnection_java',
+ '<(webrtc_root)/modules/modules.gyp:modules_tests',
],
'includes': [
'../../build/apk_test.gypi',
],
},
{
- 'target_name': 'modules_tests_apk',
+ 'target_name': 'modules_unittests_apk',
'type': 'none',
'variables': {
- 'test_suite_name': 'modules_tests',
- 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)modules_tests<(SHARED_LIB_SUFFIX)',
+ 'test_suite_name': 'modules_unittests',
+ 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)modules_unittests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../modules/modules_unittests.isolate',
},
'dependencies': [
- '<(webrtc_root)/modules/modules.gyp:modules_tests',
+ '<(webrtc_root)/modules/modules.gyp:modules_unittests',
+ 'audio_device_java',
],
'includes': [
'../../build/apk_test.gypi',
],
},
{
- 'target_name': 'modules_unittests_apk',
+ 'target_name': 'peerconnection_unittests_apk',
'type': 'none',
'variables': {
- 'test_suite_name': 'modules_unittests',
- 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)modules_unittests<(SHARED_LIB_SUFFIX)',
+ 'test_suite_name': 'peerconnection_unittests',
+ 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)peerconnection_unittests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../api/peerconnection_unittests.isolate',
},
'dependencies': [
- '<(webrtc_root)/modules/modules.gyp:modules_unittests',
- 'audio_device_java',
+ '<(webrtc_root)/api/api_tests.gyp:peerconnection_unittests',
+ '<(webrtc_root)/api/api.gyp:libjingle_peerconnection_java',
],
'includes': [
'../../build/apk_test.gypi',
@@ -110,6 +134,7 @@
'variables': {
'test_suite_name': 'rtc_unittests',
'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)rtc_unittests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../rtc_unittests.isolate',
},
'dependencies': [
'<(webrtc_root)/webrtc.gyp:rtc_unittests',
@@ -124,6 +149,7 @@
'variables': {
'test_suite_name': 'system_wrappers_unittests',
'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)system_wrappers_unittests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../system_wrappers/system_wrappers_unittests.isolate',
},
'dependencies': [
'<(webrtc_root)/system_wrappers/system_wrappers_tests.gyp:system_wrappers_unittests',
@@ -138,6 +164,7 @@
'variables': {
'test_suite_name': 'test_support_unittests',
'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)test_support_unittests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../test/test_support_unittests.isolate',
},
'dependencies': [
'<(webrtc_root)/test/test.gyp:test_support_unittests',
@@ -152,6 +179,7 @@
'variables': {
'test_suite_name': 'tools_unittests',
'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)tools_unittests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../tools/tools_unittests.isolate',
},
'dependencies': [
'<(webrtc_root)/tools/tools.gyp:tools_unittests',
@@ -166,6 +194,7 @@
'variables': {
'test_suite_name': 'video_engine_tests',
'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)video_engine_tests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../video_engine_tests.isolate',
},
'dependencies': [
'<(webrtc_root)/webrtc.gyp:video_engine_tests',
@@ -180,6 +209,7 @@
'variables': {
'test_suite_name': 'voice_engine_unittests',
'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)voice_engine_unittests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../voice_engine/voice_engine_unittests.isolate',
},
'dependencies': [
'<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine_unittests',
@@ -194,6 +224,8 @@
'variables': {
'test_suite_name': 'webrtc_perf_tests',
'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)webrtc_perf_tests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../webrtc_perf_tests.isolate',
+ 'shard_timeout': 2700,
},
'dependencies': [
'<(webrtc_root)/webrtc.gyp:webrtc_perf_tests',
@@ -208,6 +240,7 @@
'variables': {
'test_suite_name': 'webrtc_nonparallel_tests',
'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)webrtc_nonparallel_tests<(SHARED_LIB_SUFFIX)',
+ 'isolate_file': '../webrtc_nonparallel_tests.isolate',
},
'dependencies': [
'<(webrtc_root)/webrtc.gyp:webrtc_nonparallel_tests',
@@ -217,20 +250,6 @@
],
},
{
- 'target_name': 'audio_codec_speed_tests_apk',
- 'type': 'none',
- 'variables': {
- 'test_suite_name': 'audio_codec_speed_tests',
- 'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)audio_codec_speed_tests<(SHARED_LIB_SUFFIX)',
- },
- 'dependencies': [
- '<(webrtc_root)/modules/modules.gyp:audio_codec_speed_tests',
- ],
- 'includes': [
- '../../build/apk_test.gypi',
- ],
- },
- {
'target_name': 'audio_device_java',
'type': 'none',
'variables': {
diff --git a/chromium/third_party/webrtc/build/common.gypi b/chromium/third_party/webrtc/build/common.gypi
index 6ec0f1c9426..26acf4b5e27 100644
--- a/chromium/third_party/webrtc/build/common.gypi
+++ b/chromium/third_party/webrtc/build/common.gypi
@@ -41,10 +41,23 @@
'apk_tests_path%': '<(DEPTH)/webrtc/build/apk_tests.gyp',
'modules_java_gyp_path%': '<(DEPTH)/webrtc/modules/modules_java.gyp',
}],
+
+ # Controls whether we use libevent on posix platforms.
+ # TODO(phoglund): should arguably be controlled by platform #ifdefs
+ # in the code instead.
+ ['OS=="win" or OS=="mac" or OS=="ios"', {
+ 'build_libevent%': 0,
+ 'enable_libevent%': 0,
+ }, {
+ 'build_libevent%': 1,
+ 'enable_libevent%': 1,
+ }],
],
},
'build_with_chromium%': '<(build_with_chromium)',
'build_with_mozilla%': '<(build_with_mozilla)',
+ 'build_libevent%': '<(build_libevent)',
+ 'enable_libevent%': '<(enable_libevent)',
'webrtc_root%': '<(webrtc_root)',
'apk_tests_path%': '<(apk_tests_path)',
'modules_java_gyp_path%': '<(modules_java_gyp_path)',
@@ -56,8 +69,11 @@
},
'build_with_chromium%': '<(build_with_chromium)',
'build_with_mozilla%': '<(build_with_mozilla)',
+ 'build_libevent%': '<(build_libevent)',
+ 'enable_libevent%': '<(enable_libevent)',
'webrtc_root%': '<(webrtc_root)',
'apk_tests_path%': '<(apk_tests_path)',
+ 'test_runner_path': '<(DEPTH)/webrtc/build/android/test_runner.py',
'modules_java_gyp_path%': '<(modules_java_gyp_path)',
'webrtc_vp8_dir%': '<(webrtc_vp8_dir)',
'webrtc_vp9_dir%': '<(webrtc_vp9_dir)',
@@ -98,9 +114,9 @@
# Disable these to not build components which can be externally provided.
'build_expat%': 1,
'build_json%': 1,
- 'build_libjpeg%': 1,
'build_libsrtp%': 1,
'build_libvpx%': 1,
+ 'libvpx_build_vp9%': 1,
'build_libyuv%': 1,
'build_openmax_dl%': 1,
'build_opus%': 1,
@@ -202,13 +218,10 @@
'include_tests%': 1,
'restrict_webrtc_logging%': 0,
}],
- ['OS=="ios"', {
- 'build_libjpeg%': 0,
- }],
['target_arch=="arm" or target_arch=="arm64" or target_arch=="mipsel"', {
'prefer_fixed_point%': 1,
}],
- ['(target_arch=="arm" and (arm_neon==1 or arm_neon_optional==1)) or target_arch=="arm64"', {
+ ['(target_arch=="arm" and arm_neon==1) or target_arch=="arm64"', {
'build_with_neon%': 1,
}],
['OS!="ios" and (target_arch!="arm" or arm_version>=7) and target_arch!="mips64el"', {
@@ -312,10 +325,16 @@
'cflags': [
'-Wimplicit-fallthrough',
'-Wthread-safety',
+ '-Winconsistent-missing-override',
],
}],
],
}],
+ ['enable_libevent==1', {
+ 'defines': [
+ 'WEBRTC_BUILD_LIBEVENT',
+ ],
+ }],
['target_arch=="arm64"', {
'defines': [
'WEBRTC_ARCH_ARM64',
@@ -333,9 +352,6 @@
['arm_neon==1', {
'defines': ['WEBRTC_HAS_NEON',],
}],
- ['arm_neon==0 and arm_neon_optional==1', {
- 'defines': ['WEBRTC_DETECT_NEON',],
- }],
],
}],
],
@@ -371,6 +387,7 @@
['coverage==1 and OS=="linux"', {
'cflags': [ '-ftest-coverage',
'-fprofile-arcs' ],
+ 'ldflags': [ '--coverage' ],
'link_settings': { 'libraries': [ '-lgcov' ] },
}],
['os_posix==1', {
@@ -444,6 +461,11 @@
'WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE',
],
}],
+ ['libvpx_build_vp9==0', {
+ 'defines': [
+ 'RTC_DISABLE_VP9',
+ ],
+ }],
], # conditions
'direct_dependent_settings': {
'conditions': [
diff --git a/chromium/third_party/webrtc/build/gyp_webrtc b/chromium/third_party/webrtc/build/gyp_webrtc
index 2376cfbb85c..c0a9ed9da8e 100755..100644
--- a/chromium/third_party/webrtc/build/gyp_webrtc
+++ b/chromium/third_party/webrtc/build/gyp_webrtc
@@ -8,108 +8,10 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
-# This script is used to run GYP for WebRTC. It contains selected parts of the
-# main function from the src/build/gyp_chromium file.
+# Simple launcher script for gyp_webrtc.py.
+# TODO(kjellander): This should probably be shell script but for historical
+# reasons (all the python code used to live in this script without a
+# .py extension, and was often run as 'python gyp_webrtc') it is
+# currently still python.
-import glob
-import os
-import shlex
-import sys
-
-script_dir = os.path.dirname(os.path.realpath(__file__))
-checkout_root = os.path.abspath(os.path.join(script_dir, os.pardir, os.pardir))
-
-sys.path.insert(0, os.path.join(checkout_root, 'build'))
-import gyp_chromium
-import gyp_helper
-import vs_toolchain
-
-sys.path.insert(0, os.path.join(checkout_root, 'tools', 'gyp', 'pylib'))
-import gyp
-
-def GetSupplementalFiles():
- """Returns a list of the supplemental files that are included in all GYP
- sources."""
- # Can't use the one in gyp_chromium since the directory location of the root
- # is different.
- return glob.glob(os.path.join(checkout_root, '*', 'supplement.gypi'))
-
-
-if __name__ == '__main__':
- args = sys.argv[1:]
-
- if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)):
- print 'Skipping gyp_webrtc due to GYP_CHROMIUM_NO_ACTION env var.'
- sys.exit(0)
-
- if 'SKIP_WEBRTC_GYP_ENV' not in os.environ:
- # Update the environment based on webrtc.gyp_env
- gyp_env_path = os.path.join(os.path.dirname(checkout_root),
- 'webrtc.gyp_env')
- gyp_helper.apply_gyp_environment_from_file(gyp_env_path)
-
- # This could give false positives since it doesn't actually do real option
- # parsing. Oh well.
- gyp_file_specified = False
- for arg in args:
- if arg.endswith('.gyp'):
- gyp_file_specified = True
- break
-
- # If we didn't get a file, assume 'all.gyp' in the root of the checkout.
- if not gyp_file_specified:
- # Because of a bug in gyp, simply adding the abspath to all.gyp doesn't
- # work, but chdir'ing and adding the relative path does. Spooky :/
- os.chdir(checkout_root)
- args.append('all.gyp')
-
- # There shouldn't be a circular dependency relationship between .gyp files,
- args.append('--no-circular-check')
-
- # Default to ninja unless GYP_GENERATORS is set.
- if not os.environ.get('GYP_GENERATORS'):
- os.environ['GYP_GENERATORS'] = 'ninja'
-
- # Enable check for missing sources in GYP files on Windows.
- if sys.platform.startswith('win'):
- gyp_generator_flags = os.getenv('GYP_GENERATOR_FLAGS', '')
- if not 'msvs_error_on_missing_sources' in gyp_generator_flags:
- os.environ['GYP_GENERATOR_FLAGS'] = (
- gyp_generator_flags + ' msvs_error_on_missing_sources=1')
-
- vs2013_runtime_dll_dirs = None
- if int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')):
- vs2013_runtime_dll_dirs = vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
-
- # Enforce gyp syntax checking. This adds about 20% execution time.
- args.append('--check')
-
- supplemental_includes = GetSupplementalFiles()
- gyp_vars = gyp_chromium.GetGypVars(supplemental_includes)
-
- # Automatically turn on crosscompile support for platforms that need it.
- if all(('ninja' in os.environ.get('GYP_GENERATORS', ''),
- gyp_vars.get('OS') in ['android', 'ios'],
- 'GYP_CROSSCOMPILE' not in os.environ)):
- os.environ['GYP_CROSSCOMPILE'] = '1'
-
- args.extend(['-I' + i for i in
- gyp_chromium.additional_include_files(supplemental_includes,
- args)])
-
- # Set the gyp depth variable to the root of the checkout.
- args.append('--depth=' + os.path.relpath(checkout_root))
-
- print 'Updating projects from gyp files...'
- sys.stdout.flush()
-
- # Off we go...
- gyp_rc = gyp.main(args)
-
- if vs2013_runtime_dll_dirs:
- x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
- vs_toolchain.CopyVsRuntimeDlls(
- os.path.join(checkout_root, gyp_chromium.GetOutputDirectory()),
- (x86_runtime, x64_runtime))
-
- sys.exit(gyp_rc)
+execfile(__file__ + '.py')
diff --git a/chromium/third_party/webrtc/build/gyp_webrtc.py b/chromium/third_party/webrtc/build/gyp_webrtc.py
index 87d8a57ddf0..05729489952 100644..100755
--- a/chromium/third_party/webrtc/build/gyp_webrtc.py
+++ b/chromium/third_party/webrtc/build/gyp_webrtc.py
@@ -8,17 +8,123 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
-# This file is (possibly, depending on python version) imported by
-# gyp_webrtc when GYP_PARALLEL=1 and it creates sub-processes
-# through the multiprocessing library.
-
-# Importing in Python 2.6 (fixed in 2.7) on Windows doesn't search for
-# imports that don't end in .py (and aren't directories with an
-# __init__.py). This wrapper makes "import gyp_webrtc" work with
-# those old versions and makes it possible to execute gyp_webrtc.py
-# directly on Windows where the extension is useful.
+# This script is used to run GYP for WebRTC. It contains selected parts of the
+# main function from the src/build/gyp_chromium.py file while other parts are
+# reused to minimize code duplication.
+import gc
+import glob
import os
+import sys
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+checkout_root = os.path.abspath(os.path.join(script_dir, os.pardir, os.pardir))
+
+sys.path.insert(0, os.path.join(checkout_root, 'build'))
+import gyp_chromium
+import gyp_helper
+import vs_toolchain
+
+sys.path.insert(0, os.path.join(checkout_root, 'tools', 'gyp', 'pylib'))
+import gyp
+
+
+def GetSupplementalFiles():
+ """Returns a list of the supplemental files.
+
+ A supplemental file is included in all GYP sources. Such files can be used to
+ override default values.
+ """
+ # Can't use the one in gyp_chromium since the directory location of the root
+ # is different.
+ return glob.glob(os.path.join(checkout_root, '*', 'supplement.gypi'))
+
+
+def main():
+ # Disabling garbage collection saves about 5% processing time. Since this is a
+ # short-lived process it's not a problem.
+ gc.disable()
+
+ args = sys.argv[1:]
+
+ if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)):
+ print 'Skipping gyp_webrtc.py due to GYP_CHROMIUM_NO_ACTION env var.'
+ sys.exit(0)
+
+ if 'SKIP_WEBRTC_GYP_ENV' not in os.environ:
+ # Update the environment based on webrtc.gyp_env.
+ gyp_env_path = os.path.join(os.path.dirname(checkout_root),
+ 'webrtc.gyp_env')
+ gyp_helper.apply_gyp_environment_from_file(gyp_env_path)
+
+ # This could give false positives since it doesn't actually do real option
+ # parsing. Oh well.
+ gyp_file_specified = False
+ for arg in args:
+ if arg.endswith('.gyp'):
+ gyp_file_specified = True
+ break
+
+ # If we didn't get a file, assume 'all.gyp' in the root of the checkout.
+ if not gyp_file_specified:
+ # Because of a bug in gyp, simply adding the abspath to all.gyp doesn't
+ # work, but chdir'ing and adding the relative path does. Spooky :/
+ os.chdir(checkout_root)
+ args.append('all.gyp')
+
+ # There shouldn't be a circular dependency relationship between .gyp files,
+ args.append('--no-circular-check')
+
+ # Default to ninja unless GYP_GENERATORS is set.
+ if not os.environ.get('GYP_GENERATORS'):
+ os.environ['GYP_GENERATORS'] = 'ninja'
+
+ # Enable check for missing sources in GYP files on Windows.
+ if sys.platform.startswith('win'):
+ gyp_generator_flags = os.getenv('GYP_GENERATOR_FLAGS', '')
+ if not 'msvs_error_on_missing_sources' in gyp_generator_flags:
+ os.environ['GYP_GENERATOR_FLAGS'] = (
+ gyp_generator_flags + ' msvs_error_on_missing_sources=1')
+
+ vs2013_runtime_dll_dirs = None
+ if int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')):
+ vs2013_runtime_dll_dirs = vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
+
+ # Enforce gyp syntax checking. This adds about 20% execution time.
+ args.append('--check')
+
+ supplemental_includes = GetSupplementalFiles()
+ gyp_vars = gyp_chromium.GetGypVars(supplemental_includes)
+
+ # Automatically turn on crosscompile support for platforms that need it.
+ if all(('ninja' in os.environ.get('GYP_GENERATORS', ''),
+ gyp_vars.get('OS') in ['android', 'ios'],
+ 'GYP_CROSSCOMPILE' not in os.environ)):
+ os.environ['GYP_CROSSCOMPILE'] = '1'
+
+ args.extend(['-I' + i for i in
+ gyp_chromium.additional_include_files(supplemental_includes,
+ args)])
+
+ # Set the gyp depth variable to the root of the checkout.
+ args.append('--depth=' + os.path.relpath(checkout_root))
+
+ print 'Updating projects from gyp files...'
+ sys.stdout.flush()
+
+ # Off we go...
+ gyp_rc = gyp.main(args)
+
+ if vs2013_runtime_dll_dirs:
+ # pylint: disable=unpacking-non-sequence
+ x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
+ vs_toolchain.CopyVsRuntimeDlls(
+ os.path.join(checkout_root, gyp_chromium.GetOutputDirectory()),
+ (x86_runtime, x64_runtime))
+
+ sys.exit(gyp_rc)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
-path = os.path.abspath(os.path.split(__file__)[0])
-execfile(os.path.join(path, 'gyp_webrtc'))
diff --git a/chromium/third_party/webrtc/build/ios/SDK/Framework/WebRTC.xcodeproj/project.pbxproj b/chromium/third_party/webrtc/build/ios/SDK/Framework/WebRTC.xcodeproj/project.pbxproj
deleted file mode 100644
index 209dbe5be89..00000000000
--- a/chromium/third_party/webrtc/build/ios/SDK/Framework/WebRTC.xcodeproj/project.pbxproj
+++ /dev/null
@@ -1,910 +0,0 @@
-// !$*UTF8*$!
-{
- archiveVersion = 1;
- classes = {
- };
- objectVersion = 46;
- objects = {
-
-/* Begin PBXBuildFile section */
- 980224981CA243DE00295D57 /* libaudio_coding_module.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224561CA243DE00295D57 /* libaudio_coding_module.a */; };
- 980224991CA243DE00295D57 /* libaudio_conference_mixer.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224571CA243DE00295D57 /* libaudio_conference_mixer.a */; };
- 9802249A1CA243DE00295D57 /* libaudio_decoder_interface.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224581CA243DE00295D57 /* libaudio_decoder_interface.a */; };
- 9802249B1CA243DE00295D57 /* libaudio_device.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224591CA243DE00295D57 /* libaudio_device.a */; };
- 9802249C1CA243DE00295D57 /* libaudio_encoder_interface.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802245A1CA243DE00295D57 /* libaudio_encoder_interface.a */; };
- 9802249D1CA243DE00295D57 /* libaudio_processing.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802245B1CA243DE00295D57 /* libaudio_processing.a */; };
- 9802249E1CA243DE00295D57 /* libaudioproc_debug_proto.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802245C1CA243DE00295D57 /* libaudioproc_debug_proto.a */; };
- 9802249F1CA243DE00295D57 /* libbitrate_controller.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802245D1CA243DE00295D57 /* libbitrate_controller.a */; };
- 980224A01CA243DE00295D57 /* libboringssl.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802245E1CA243DE00295D57 /* libboringssl.a */; };
- 980224A11CA243DE00295D57 /* libcng.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802245F1CA243DE00295D57 /* libcng.a */; };
- 980224A21CA243DE00295D57 /* libcommon_audio.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224601CA243DE00295D57 /* libcommon_audio.a */; };
- 980224A31CA243DE00295D57 /* libcommon_video.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224611CA243DE00295D57 /* libcommon_video.a */; };
- 980224A41CA243DE00295D57 /* libcongestion_controller.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224621CA243DE00295D57 /* libcongestion_controller.a */; };
- 980224A51CA243DE00295D57 /* libexpat.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224631CA243DE00295D57 /* libexpat.a */; };
- 980224A61CA243DE00295D57 /* libfield_trial_default.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224641CA243DE00295D57 /* libfield_trial_default.a */; };
- 980224A71CA243DE00295D57 /* libg711.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224651CA243DE00295D57 /* libg711.a */; };
- 980224A81CA243DE00295D57 /* libg722.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224661CA243DE00295D57 /* libg722.a */; };
- 980224A91CA243DE00295D57 /* libilbc.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224671CA243DE00295D57 /* libilbc.a */; };
- 980224AA1CA243DE00295D57 /* libisac_common.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224681CA243DE00295D57 /* libisac_common.a */; };
- 980224AB1CA243DE00295D57 /* libisac.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224691CA243DE00295D57 /* libisac.a */; };
- 980224AC1CA243DE00295D57 /* libjingle_peerconnection.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802246A1CA243DE00295D57 /* libjingle_peerconnection.a */; };
- 980224AD1CA243DE00295D57 /* libjsoncpp.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802246B1CA243DE00295D57 /* libjsoncpp.a */; };
- 980224AE1CA243DE00295D57 /* libmedia_file.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802246C1CA243DE00295D57 /* libmedia_file.a */; };
- 980224AF1CA243DE00295D57 /* libmetrics_default.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802246D1CA243DE00295D57 /* libmetrics_default.a */; };
- 980224B01CA243DE00295D57 /* libneteq.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802246E1CA243DE00295D57 /* libneteq.a */; };
- 980224B11CA243DE00295D57 /* libopus.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802246F1CA243DE00295D57 /* libopus.a */; };
- 980224B21CA243DE00295D57 /* libpaced_sender.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224701CA243DE00295D57 /* libpaced_sender.a */; };
- 980224B31CA243DE00295D57 /* libpcm16b.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224711CA243DE00295D57 /* libpcm16b.a */; };
- 980224B41CA243DE00295D57 /* libprotobuf_lite.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224721CA243DE00295D57 /* libprotobuf_lite.a */; };
- 980224B51CA243DE00295D57 /* libred.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224731CA243DE00295D57 /* libred.a */; };
- 980224B61CA243DE00295D57 /* libremote_bitrate_estimator.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224741CA243DE00295D57 /* libremote_bitrate_estimator.a */; };
- 980224B71CA243DE00295D57 /* librent_a_codec.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224751CA243DE00295D57 /* librent_a_codec.a */; };
- 980224B91CA243DE00295D57 /* librtc_base_approved.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224771CA243DE00295D57 /* librtc_base_approved.a */; };
- 980224BB1CA243DE00295D57 /* librtc_base.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224791CA243DE00295D57 /* librtc_base.a */; };
- 980224BC1CA243DE00295D57 /* librtc_event_log_proto.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802247A1CA243DE00295D57 /* librtc_event_log_proto.a */; };
- 980224BD1CA243DE00295D57 /* librtc_event_log.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802247B1CA243DE00295D57 /* librtc_event_log.a */; };
- 980224BE1CA243DE00295D57 /* librtc_media.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802247C1CA243DE00295D57 /* librtc_media.a */; };
- 980224BF1CA243DE00295D57 /* librtc_p2p.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802247D1CA243DE00295D57 /* librtc_p2p.a */; };
- 980224C01CA243DE00295D57 /* librtc_pc.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802247E1CA243DE00295D57 /* librtc_pc.a */; };
- 980224C11CA243DE00295D57 /* librtc_xmllite.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802247F1CA243DE00295D57 /* librtc_xmllite.a */; };
- 980224C21CA243DE00295D57 /* librtc_xmpp.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224801CA243DE00295D57 /* librtc_xmpp.a */; };
- 980224C31CA243DE00295D57 /* librtp_rtcp.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224811CA243DE00295D57 /* librtp_rtcp.a */; };
- 980224C41CA243DE00295D57 /* libsrtp.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224821CA243DE00295D57 /* libsrtp.a */; };
- 980224C51CA243DE00295D57 /* libsystem_wrappers.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224831CA243DE00295D57 /* libsystem_wrappers.a */; };
- 980224C61CA243DE00295D57 /* libusrsctplib.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224841CA243DE00295D57 /* libusrsctplib.a */; };
- 980224C71CA243DE00295D57 /* libvideo_capture_module_internal_impl.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224851CA243DE00295D57 /* libvideo_capture_module_internal_impl.a */; };
- 980224C81CA243DE00295D57 /* libvideo_capture_module.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224861CA243DE00295D57 /* libvideo_capture_module.a */; };
- 980224C91CA243DE00295D57 /* libvideo_coding_utility.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224871CA243DE00295D57 /* libvideo_coding_utility.a */; };
- 980224CA1CA243DE00295D57 /* libvideo_processing.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224881CA243DE00295D57 /* libvideo_processing.a */; };
- 980224CB1CA243DE00295D57 /* libvideo_render_module_internal_impl.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224891CA243DE00295D57 /* libvideo_render_module_internal_impl.a */; };
- 980224CC1CA243DE00295D57 /* libvideo_render_module.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802248A1CA243DE00295D57 /* libvideo_render_module.a */; };
- 980224CD1CA243DE00295D57 /* libvoice_engine.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802248B1CA243DE00295D57 /* libvoice_engine.a */; };
- 980224CE1CA243DE00295D57 /* libvpx.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802248C1CA243DE00295D57 /* libvpx.a */; };
- 980224CF1CA243DE00295D57 /* libwebrtc_common.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802248D1CA243DE00295D57 /* libwebrtc_common.a */; };
- 980224D01CA243DE00295D57 /* libwebrtc_h264_video_toolbox.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802248E1CA243DE00295D57 /* libwebrtc_h264_video_toolbox.a */; };
- 980224D11CA243DE00295D57 /* libwebrtc_h264.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 9802248F1CA243DE00295D57 /* libwebrtc_h264.a */; };
- 980224D21CA243DE00295D57 /* libwebrtc_i420.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224901CA243DE00295D57 /* libwebrtc_i420.a */; };
- 980224D31CA243DE00295D57 /* libwebrtc_opus.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224911CA243DE00295D57 /* libwebrtc_opus.a */; };
- 980224D41CA243DE00295D57 /* libwebrtc_utility.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224921CA243DE00295D57 /* libwebrtc_utility.a */; };
- 980224D51CA243DE00295D57 /* libwebrtc_video_coding.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224931CA243DE00295D57 /* libwebrtc_video_coding.a */; };
- 980224D61CA243DE00295D57 /* libwebrtc_vp8.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224941CA243DE00295D57 /* libwebrtc_vp8.a */; };
- 980224D71CA243DE00295D57 /* libwebrtc_vp9.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224951CA243DE00295D57 /* libwebrtc_vp9.a */; };
- 980224D81CA243DE00295D57 /* libwebrtc.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224961CA243DE00295D57 /* libwebrtc.a */; };
- 980224D91CA243DE00295D57 /* libyuv.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 980224971CA243DE00295D57 /* libyuv.a */; };
- 9820AAC71C977D11001E5793 /* WebRTC.h in Headers */ = {isa = PBXBuildFile; fileRef = 9820AAC61C977D11001E5793 /* WebRTC.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 982C13291C98B78000895DC2 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 982C13281C98B78000895DC2 /* AVFoundation.framework */; };
- 982C132B1C98B79700895DC2 /* VideoToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 982C132A1C98B79700895DC2 /* VideoToolbox.framework */; };
- 982C132D1C98B7B400895DC2 /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 982C132C1C98B7B400895DC2 /* CoreMedia.framework */; };
- 982C132F1C98B7C100895DC2 /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 982C132E1C98B7C100895DC2 /* AudioToolbox.framework */; };
- 982C15181C99083B00895DC2 /* NSString+StdString.mm in Sources */ = {isa = PBXBuildFile; fileRef = 982C150F1C99083B00895DC2 /* NSString+StdString.mm */; };
- 982C15191C99083B00895DC2 /* RTCCameraPreviewView.m in Sources */ = {isa = PBXBuildFile; fileRef = 982C15101C99083B00895DC2 /* RTCCameraPreviewView.m */; };
- 982C151A1C99083B00895DC2 /* RTCDispatcher.m in Sources */ = {isa = PBXBuildFile; fileRef = 982C15111C99083B00895DC2 /* RTCDispatcher.m */; };
- 982C151B1C99083B00895DC2 /* RTCFieldTrials.mm in Sources */ = {isa = PBXBuildFile; fileRef = 982C15121C99083B00895DC2 /* RTCFieldTrials.mm */; };
- 982C151C1C99083B00895DC2 /* RTCFileLogger.mm in Sources */ = {isa = PBXBuildFile; fileRef = 982C15131C99083B00895DC2 /* RTCFileLogger.mm */; };
- 982C151D1C99083B00895DC2 /* RTCLogging.mm in Sources */ = {isa = PBXBuildFile; fileRef = 982C15141C99083B00895DC2 /* RTCLogging.mm */; };
- 982C151E1C99083B00895DC2 /* RTCSSLAdapter.mm in Sources */ = {isa = PBXBuildFile; fileRef = 982C15151C99083B00895DC2 /* RTCSSLAdapter.mm */; };
- 982C151F1C99083B00895DC2 /* RTCTracing.mm in Sources */ = {isa = PBXBuildFile; fileRef = 982C15161C99083B00895DC2 /* RTCTracing.mm */; };
- 982C15201C99083B00895DC2 /* RTCUIApplication.mm in Sources */ = {isa = PBXBuildFile; fileRef = 982C15171C99083B00895DC2 /* RTCUIApplication.mm */; };
- 9845CE9B1CA1AE1D00BFE57C /* RTCAudioTrack.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE7E1CA1AE1D00BFE57C /* RTCAudioTrack.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CE9C1CA1AE1D00BFE57C /* RTCAVFoundationVideoSource.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE7F1CA1AE1D00BFE57C /* RTCAVFoundationVideoSource.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CE9D1CA1AE1E00BFE57C /* RTCCameraPreviewView.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE801CA1AE1D00BFE57C /* RTCCameraPreviewView.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CE9E1CA1AE1E00BFE57C /* RTCConfiguration.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE811CA1AE1D00BFE57C /* RTCConfiguration.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CE9F1CA1AE1E00BFE57C /* RTCDataChannel.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE821CA1AE1D00BFE57C /* RTCDataChannel.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEA01CA1AE1E00BFE57C /* RTCDataChannelConfiguration.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE831CA1AE1D00BFE57C /* RTCDataChannelConfiguration.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEA11CA1AE1E00BFE57C /* RTCDispatcher.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE841CA1AE1D00BFE57C /* RTCDispatcher.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEA21CA1AE1E00BFE57C /* RTCEAGLVideoView.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE851CA1AE1D00BFE57C /* RTCEAGLVideoView.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEA31CA1AE1E00BFE57C /* RTCFieldTrials.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE861CA1AE1D00BFE57C /* RTCFieldTrials.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEA41CA1AE1E00BFE57C /* RTCFileLogger.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE871CA1AE1D00BFE57C /* RTCFileLogger.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEA51CA1AE1E00BFE57C /* RTCIceCandidate.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE881CA1AE1D00BFE57C /* RTCIceCandidate.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEA61CA1AE1E00BFE57C /* RTCIceServer.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE891CA1AE1D00BFE57C /* RTCIceServer.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEA71CA1AE1E00BFE57C /* RTCLogging.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE8A1CA1AE1D00BFE57C /* RTCLogging.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEA81CA1AE1E00BFE57C /* RTCMacros.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE8B1CA1AE1D00BFE57C /* RTCMacros.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEA91CA1AE1E00BFE57C /* RTCMediaConstraints.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE8C1CA1AE1D00BFE57C /* RTCMediaConstraints.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEAA1CA1AE1E00BFE57C /* RTCMediaStream.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE8D1CA1AE1D00BFE57C /* RTCMediaStream.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEAB1CA1AE1E00BFE57C /* RTCMediaStreamTrack.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE8E1CA1AE1D00BFE57C /* RTCMediaStreamTrack.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEAC1CA1AE1E00BFE57C /* RTCOpenGLVideoRenderer.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE8F1CA1AE1D00BFE57C /* RTCOpenGLVideoRenderer.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEAD1CA1AE1E00BFE57C /* RTCPeerConnection.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE901CA1AE1D00BFE57C /* RTCPeerConnection.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEAE1CA1AE1E00BFE57C /* RTCPeerConnectionFactory.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE911CA1AE1D00BFE57C /* RTCPeerConnectionFactory.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEAF1CA1AE1E00BFE57C /* RTCSessionDescription.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE921CA1AE1D00BFE57C /* RTCSessionDescription.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEB01CA1AE1E00BFE57C /* RTCSSLAdapter.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE931CA1AE1D00BFE57C /* RTCSSLAdapter.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEB11CA1AE1E00BFE57C /* RTCStatsReport.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE941CA1AE1D00BFE57C /* RTCStatsReport.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEB21CA1AE1E00BFE57C /* RTCTracing.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE951CA1AE1D00BFE57C /* RTCTracing.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEB31CA1AE1E00BFE57C /* RTCVideoFrame.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE961CA1AE1D00BFE57C /* RTCVideoFrame.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEB41CA1AE1E00BFE57C /* RTCVideoRenderer.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE971CA1AE1D00BFE57C /* RTCVideoRenderer.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEB51CA1AE1E00BFE57C /* RTCVideoRendererAdapter.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE981CA1AE1D00BFE57C /* RTCVideoRendererAdapter.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEB61CA1AE1E00BFE57C /* RTCVideoSource.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE991CA1AE1D00BFE57C /* RTCVideoSource.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 9845CEB71CA1AE1E00BFE57C /* RTCVideoTrack.h in Headers */ = {isa = PBXBuildFile; fileRef = 9845CE9A1CA1AE1D00BFE57C /* RTCVideoTrack.h */; settings = {ATTRIBUTES = (Public, ); }; };
- 986649D21C986B7D008EC831 /* avfoundationvideocapturer.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649BA1C986B7D008EC831 /* avfoundationvideocapturer.mm */; };
- 986649D31C986B7D008EC831 /* RTCAudioTrack.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649BB1C986B7D008EC831 /* RTCAudioTrack.mm */; };
- 986649D41C986B7D008EC831 /* RTCAVFoundationVideoSource.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649BC1C986B7D008EC831 /* RTCAVFoundationVideoSource.mm */; };
- 986649D51C986B7D008EC831 /* RTCConfiguration.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649BD1C986B7D008EC831 /* RTCConfiguration.mm */; };
- 986649D61C986B7D008EC831 /* RTCDataChannel.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649BE1C986B7D008EC831 /* RTCDataChannel.mm */; };
- 986649D71C986B7D008EC831 /* RTCDataChannelConfiguration.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649BF1C986B7D008EC831 /* RTCDataChannelConfiguration.mm */; };
- 986649D81C986B7D008EC831 /* RTCEAGLVideoView.m in Sources */ = {isa = PBXBuildFile; fileRef = 986649C01C986B7D008EC831 /* RTCEAGLVideoView.m */; };
- 986649D91C986B7D008EC831 /* RTCIceCandidate.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649C11C986B7D008EC831 /* RTCIceCandidate.mm */; };
- 986649DA1C986B7D008EC831 /* RTCIceServer.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649C21C986B7D008EC831 /* RTCIceServer.mm */; };
- 986649DB1C986B7D008EC831 /* RTCMediaConstraints.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649C31C986B7D008EC831 /* RTCMediaConstraints.mm */; };
- 986649DC1C986B7D008EC831 /* RTCMediaStream.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649C41C986B7D008EC831 /* RTCMediaStream.mm */; };
- 986649DD1C986B7D008EC831 /* RTCMediaStreamTrack.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649C51C986B7D008EC831 /* RTCMediaStreamTrack.mm */; };
- 986649DF1C986B7D008EC831 /* RTCOpenGLVideoRenderer.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649C71C986B7D008EC831 /* RTCOpenGLVideoRenderer.mm */; };
- 986649E01C986B7D008EC831 /* RTCPeerConnection.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649C81C986B7D008EC831 /* RTCPeerConnection.mm */; };
- 986649E11C986B7D008EC831 /* RTCPeerConnection+DataChannel.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649C91C986B7D008EC831 /* RTCPeerConnection+DataChannel.mm */; };
- 986649E21C986B7D008EC831 /* RTCPeerConnection+Stats.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649CA1C986B7D008EC831 /* RTCPeerConnection+Stats.mm */; };
- 986649E31C986B7D008EC831 /* RTCPeerConnectionFactory.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649CB1C986B7D008EC831 /* RTCPeerConnectionFactory.mm */; };
- 986649E41C986B7D008EC831 /* RTCSessionDescription.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649CC1C986B7D008EC831 /* RTCSessionDescription.mm */; };
- 986649E51C986B7D008EC831 /* RTCStatsReport.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649CD1C986B7D008EC831 /* RTCStatsReport.mm */; };
- 986649E61C986B7D008EC831 /* RTCVideoFrame.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649CE1C986B7D008EC831 /* RTCVideoFrame.mm */; };
- 986649E71C986B7D008EC831 /* RTCVideoRendererAdapter.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649CF1C986B7D008EC831 /* RTCVideoRendererAdapter.mm */; };
- 986649E81C986B7D008EC831 /* RTCVideoSource.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649D01C986B7D008EC831 /* RTCVideoSource.mm */; };
- 986649E91C986B7D008EC831 /* RTCVideoTrack.mm in Sources */ = {isa = PBXBuildFile; fileRef = 986649D11C986B7D008EC831 /* RTCVideoTrack.mm */; };
-/* End PBXBuildFile section */
-
-/* Begin PBXFileReference section */
- 980224561CA243DE00295D57 /* libaudio_coding_module.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libaudio_coding_module.a; path = ../../../../../out_ios_libs/lib/libaudio_coding_module.a; sourceTree = "<group>"; };
- 980224571CA243DE00295D57 /* libaudio_conference_mixer.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libaudio_conference_mixer.a; path = ../../../../../out_ios_libs/lib/libaudio_conference_mixer.a; sourceTree = "<group>"; };
- 980224581CA243DE00295D57 /* libaudio_decoder_interface.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libaudio_decoder_interface.a; path = ../../../../../out_ios_libs/lib/libaudio_decoder_interface.a; sourceTree = "<group>"; };
- 980224591CA243DE00295D57 /* libaudio_device.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libaudio_device.a; path = ../../../../../out_ios_libs/lib/libaudio_device.a; sourceTree = "<group>"; };
- 9802245A1CA243DE00295D57 /* libaudio_encoder_interface.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libaudio_encoder_interface.a; path = ../../../../../out_ios_libs/lib/libaudio_encoder_interface.a; sourceTree = "<group>"; };
- 9802245B1CA243DE00295D57 /* libaudio_processing.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libaudio_processing.a; path = ../../../../../out_ios_libs/lib/libaudio_processing.a; sourceTree = "<group>"; };
- 9802245C1CA243DE00295D57 /* libaudioproc_debug_proto.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libaudioproc_debug_proto.a; path = ../../../../../out_ios_libs/lib/libaudioproc_debug_proto.a; sourceTree = "<group>"; };
- 9802245D1CA243DE00295D57 /* libbitrate_controller.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libbitrate_controller.a; path = ../../../../../out_ios_libs/lib/libbitrate_controller.a; sourceTree = "<group>"; };
- 9802245E1CA243DE00295D57 /* libboringssl.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libboringssl.a; path = ../../../../../out_ios_libs/lib/libboringssl.a; sourceTree = "<group>"; };
- 9802245F1CA243DE00295D57 /* libcng.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libcng.a; path = ../../../../../out_ios_libs/lib/libcng.a; sourceTree = "<group>"; };
- 980224601CA243DE00295D57 /* libcommon_audio.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libcommon_audio.a; path = ../../../../../out_ios_libs/lib/libcommon_audio.a; sourceTree = "<group>"; };
- 980224611CA243DE00295D57 /* libcommon_video.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libcommon_video.a; path = ../../../../../out_ios_libs/lib/libcommon_video.a; sourceTree = "<group>"; };
- 980224621CA243DE00295D57 /* libcongestion_controller.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libcongestion_controller.a; path = ../../../../../out_ios_libs/lib/libcongestion_controller.a; sourceTree = "<group>"; };
- 980224631CA243DE00295D57 /* libexpat.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libexpat.a; path = ../../../../../out_ios_libs/lib/libexpat.a; sourceTree = "<group>"; };
- 980224641CA243DE00295D57 /* libfield_trial_default.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libfield_trial_default.a; path = ../../../../../out_ios_libs/lib/libfield_trial_default.a; sourceTree = "<group>"; };
- 980224651CA243DE00295D57 /* libg711.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libg711.a; path = ../../../../../out_ios_libs/lib/libg711.a; sourceTree = "<group>"; };
- 980224661CA243DE00295D57 /* libg722.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libg722.a; path = ../../../../../out_ios_libs/lib/libg722.a; sourceTree = "<group>"; };
- 980224671CA243DE00295D57 /* libilbc.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libilbc.a; path = ../../../../../out_ios_libs/lib/libilbc.a; sourceTree = "<group>"; };
- 980224681CA243DE00295D57 /* libisac_common.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libisac_common.a; path = ../../../../../out_ios_libs/lib/libisac_common.a; sourceTree = "<group>"; };
- 980224691CA243DE00295D57 /* libisac.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libisac.a; path = ../../../../../out_ios_libs/lib/libisac.a; sourceTree = "<group>"; };
- 9802246A1CA243DE00295D57 /* libjingle_peerconnection.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libjingle_peerconnection.a; path = ../../../../../out_ios_libs/lib/libjingle_peerconnection.a; sourceTree = "<group>"; };
- 9802246B1CA243DE00295D57 /* libjsoncpp.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libjsoncpp.a; path = ../../../../../out_ios_libs/lib/libjsoncpp.a; sourceTree = "<group>"; };
- 9802246C1CA243DE00295D57 /* libmedia_file.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libmedia_file.a; path = ../../../../../out_ios_libs/lib/libmedia_file.a; sourceTree = "<group>"; };
- 9802246D1CA243DE00295D57 /* libmetrics_default.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libmetrics_default.a; path = ../../../../../out_ios_libs/lib/libmetrics_default.a; sourceTree = "<group>"; };
- 9802246E1CA243DE00295D57 /* libneteq.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libneteq.a; path = ../../../../../out_ios_libs/lib/libneteq.a; sourceTree = "<group>"; };
- 9802246F1CA243DE00295D57 /* libopus.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libopus.a; path = ../../../../../out_ios_libs/lib/libopus.a; sourceTree = "<group>"; };
- 980224701CA243DE00295D57 /* libpaced_sender.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libpaced_sender.a; path = ../../../../../out_ios_libs/lib/libpaced_sender.a; sourceTree = "<group>"; };
- 980224711CA243DE00295D57 /* libpcm16b.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libpcm16b.a; path = ../../../../../out_ios_libs/lib/libpcm16b.a; sourceTree = "<group>"; };
- 980224721CA243DE00295D57 /* libprotobuf_lite.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libprotobuf_lite.a; path = ../../../../../out_ios_libs/lib/libprotobuf_lite.a; sourceTree = "<group>"; };
- 980224731CA243DE00295D57 /* libred.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libred.a; path = ../../../../../out_ios_libs/lib/libred.a; sourceTree = "<group>"; };
- 980224741CA243DE00295D57 /* libremote_bitrate_estimator.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libremote_bitrate_estimator.a; path = ../../../../../out_ios_libs/lib/libremote_bitrate_estimator.a; sourceTree = "<group>"; };
- 980224751CA243DE00295D57 /* librent_a_codec.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = librent_a_codec.a; path = ../../../../../out_ios_libs/lib/librent_a_codec.a; sourceTree = "<group>"; };
- 980224771CA243DE00295D57 /* librtc_base_approved.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = librtc_base_approved.a; path = ../../../../../out_ios_libs/lib/librtc_base_approved.a; sourceTree = "<group>"; };
- 980224791CA243DE00295D57 /* librtc_base.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = librtc_base.a; path = ../../../../../out_ios_libs/lib/librtc_base.a; sourceTree = "<group>"; };
- 9802247A1CA243DE00295D57 /* librtc_event_log_proto.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = librtc_event_log_proto.a; path = ../../../../../out_ios_libs/lib/librtc_event_log_proto.a; sourceTree = "<group>"; };
- 9802247B1CA243DE00295D57 /* librtc_event_log.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = librtc_event_log.a; path = ../../../../../out_ios_libs/lib/librtc_event_log.a; sourceTree = "<group>"; };
- 9802247C1CA243DE00295D57 /* librtc_media.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = librtc_media.a; path = ../../../../../out_ios_libs/lib/librtc_media.a; sourceTree = "<group>"; };
- 9802247D1CA243DE00295D57 /* librtc_p2p.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = librtc_p2p.a; path = ../../../../../out_ios_libs/lib/librtc_p2p.a; sourceTree = "<group>"; };
- 9802247E1CA243DE00295D57 /* librtc_pc.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = librtc_pc.a; path = ../../../../../out_ios_libs/lib/librtc_pc.a; sourceTree = "<group>"; };
- 9802247F1CA243DE00295D57 /* librtc_xmllite.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = librtc_xmllite.a; path = ../../../../../out_ios_libs/lib/librtc_xmllite.a; sourceTree = "<group>"; };
- 980224801CA243DE00295D57 /* librtc_xmpp.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = librtc_xmpp.a; path = ../../../../../out_ios_libs/lib/librtc_xmpp.a; sourceTree = "<group>"; };
- 980224811CA243DE00295D57 /* librtp_rtcp.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = librtp_rtcp.a; path = ../../../../../out_ios_libs/lib/librtp_rtcp.a; sourceTree = "<group>"; };
- 980224821CA243DE00295D57 /* libsrtp.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libsrtp.a; path = ../../../../../out_ios_libs/lib/libsrtp.a; sourceTree = "<group>"; };
- 980224831CA243DE00295D57 /* libsystem_wrappers.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libsystem_wrappers.a; path = ../../../../../out_ios_libs/lib/libsystem_wrappers.a; sourceTree = "<group>"; };
- 980224841CA243DE00295D57 /* libusrsctplib.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libusrsctplib.a; path = ../../../../../out_ios_libs/lib/libusrsctplib.a; sourceTree = "<group>"; };
- 980224851CA243DE00295D57 /* libvideo_capture_module_internal_impl.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libvideo_capture_module_internal_impl.a; path = ../../../../../out_ios_libs/lib/libvideo_capture_module_internal_impl.a; sourceTree = "<group>"; };
- 980224861CA243DE00295D57 /* libvideo_capture_module.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libvideo_capture_module.a; path = ../../../../../out_ios_libs/lib/libvideo_capture_module.a; sourceTree = "<group>"; };
- 980224871CA243DE00295D57 /* libvideo_coding_utility.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libvideo_coding_utility.a; path = ../../../../../out_ios_libs/lib/libvideo_coding_utility.a; sourceTree = "<group>"; };
- 980224881CA243DE00295D57 /* libvideo_processing.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libvideo_processing.a; path = ../../../../../out_ios_libs/lib/libvideo_processing.a; sourceTree = "<group>"; };
- 980224891CA243DE00295D57 /* libvideo_render_module_internal_impl.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libvideo_render_module_internal_impl.a; path = ../../../../../out_ios_libs/lib/libvideo_render_module_internal_impl.a; sourceTree = "<group>"; };
- 9802248A1CA243DE00295D57 /* libvideo_render_module.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libvideo_render_module.a; path = ../../../../../out_ios_libs/lib/libvideo_render_module.a; sourceTree = "<group>"; };
- 9802248B1CA243DE00295D57 /* libvoice_engine.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libvoice_engine.a; path = ../../../../../out_ios_libs/lib/libvoice_engine.a; sourceTree = "<group>"; };
- 9802248C1CA243DE00295D57 /* libvpx.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libvpx.a; path = ../../../../../out_ios_libs/lib/libvpx.a; sourceTree = "<group>"; };
- 9802248D1CA243DE00295D57 /* libwebrtc_common.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libwebrtc_common.a; path = ../../../../../out_ios_libs/lib/libwebrtc_common.a; sourceTree = "<group>"; };
- 9802248E1CA243DE00295D57 /* libwebrtc_h264_video_toolbox.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libwebrtc_h264_video_toolbox.a; path = ../../../../../out_ios_libs/lib/libwebrtc_h264_video_toolbox.a; sourceTree = "<group>"; };
- 9802248F1CA243DE00295D57 /* libwebrtc_h264.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libwebrtc_h264.a; path = ../../../../../out_ios_libs/lib/libwebrtc_h264.a; sourceTree = "<group>"; };
- 980224901CA243DE00295D57 /* libwebrtc_i420.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libwebrtc_i420.a; path = ../../../../../out_ios_libs/lib/libwebrtc_i420.a; sourceTree = "<group>"; };
- 980224911CA243DE00295D57 /* libwebrtc_opus.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libwebrtc_opus.a; path = ../../../../../out_ios_libs/lib/libwebrtc_opus.a; sourceTree = "<group>"; };
- 980224921CA243DE00295D57 /* libwebrtc_utility.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libwebrtc_utility.a; path = ../../../../../out_ios_libs/lib/libwebrtc_utility.a; sourceTree = "<group>"; };
- 980224931CA243DE00295D57 /* libwebrtc_video_coding.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libwebrtc_video_coding.a; path = ../../../../../out_ios_libs/lib/libwebrtc_video_coding.a; sourceTree = "<group>"; };
- 980224941CA243DE00295D57 /* libwebrtc_vp8.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libwebrtc_vp8.a; path = ../../../../../out_ios_libs/lib/libwebrtc_vp8.a; sourceTree = "<group>"; };
- 980224951CA243DE00295D57 /* libwebrtc_vp9.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libwebrtc_vp9.a; path = ../../../../../out_ios_libs/lib/libwebrtc_vp9.a; sourceTree = "<group>"; };
- 980224961CA243DE00295D57 /* libwebrtc.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libwebrtc.a; path = ../../../../../out_ios_libs/lib/libwebrtc.a; sourceTree = "<group>"; };
- 980224971CA243DE00295D57 /* libyuv.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libyuv.a; path = ../../../../../out_ios_libs/lib/libyuv.a; sourceTree = "<group>"; };
- 9820AAC31C977D11001E5793 /* WebRTC.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = WebRTC.framework; sourceTree = BUILT_PRODUCTS_DIR; };
- 9820AAC61C977D11001E5793 /* WebRTC.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WebRTC.h; sourceTree = "<group>"; };
- 9820AAC81C977D11001E5793 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
- 982C13281C98B78000895DC2 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
- 982C132A1C98B79700895DC2 /* VideoToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = VideoToolbox.framework; path = System/Library/Frameworks/VideoToolbox.framework; sourceTree = SDKROOT; };
- 982C132C1C98B7B400895DC2 /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; };
- 982C132E1C98B7C100895DC2 /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = System/Library/Frameworks/AudioToolbox.framework; sourceTree = SDKROOT; };
- 982C150F1C99083B00895DC2 /* NSString+StdString.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = "NSString+StdString.mm"; path = "../../../../base/objc/NSString+StdString.mm"; sourceTree = "<group>"; };
- 982C15101C99083B00895DC2 /* RTCCameraPreviewView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = RTCCameraPreviewView.m; path = ../../../../base/objc/RTCCameraPreviewView.m; sourceTree = "<group>"; };
- 982C15111C99083B00895DC2 /* RTCDispatcher.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = RTCDispatcher.m; path = ../../../../base/objc/RTCDispatcher.m; sourceTree = "<group>"; };
- 982C15121C99083B00895DC2 /* RTCFieldTrials.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCFieldTrials.mm; path = ../../../../base/objc/RTCFieldTrials.mm; sourceTree = "<group>"; };
- 982C15131C99083B00895DC2 /* RTCFileLogger.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCFileLogger.mm; path = ../../../../base/objc/RTCFileLogger.mm; sourceTree = "<group>"; };
- 982C15141C99083B00895DC2 /* RTCLogging.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCLogging.mm; path = ../../../../base/objc/RTCLogging.mm; sourceTree = "<group>"; };
- 982C15151C99083B00895DC2 /* RTCSSLAdapter.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCSSLAdapter.mm; path = ../../../../base/objc/RTCSSLAdapter.mm; sourceTree = "<group>"; };
- 982C15161C99083B00895DC2 /* RTCTracing.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCTracing.mm; path = ../../../../base/objc/RTCTracing.mm; sourceTree = "<group>"; };
- 982C15171C99083B00895DC2 /* RTCUIApplication.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCUIApplication.mm; path = ../../../../base/objc/RTCUIApplication.mm; sourceTree = "<group>"; };
- 9845CE7E1CA1AE1D00BFE57C /* RTCAudioTrack.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCAudioTrack.h; sourceTree = "<group>"; };
- 9845CE7F1CA1AE1D00BFE57C /* RTCAVFoundationVideoSource.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCAVFoundationVideoSource.h; sourceTree = "<group>"; };
- 9845CE801CA1AE1D00BFE57C /* RTCCameraPreviewView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCCameraPreviewView.h; sourceTree = "<group>"; };
- 9845CE811CA1AE1D00BFE57C /* RTCConfiguration.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCConfiguration.h; sourceTree = "<group>"; };
- 9845CE821CA1AE1D00BFE57C /* RTCDataChannel.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCDataChannel.h; sourceTree = "<group>"; };
- 9845CE831CA1AE1D00BFE57C /* RTCDataChannelConfiguration.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCDataChannelConfiguration.h; sourceTree = "<group>"; };
- 9845CE841CA1AE1D00BFE57C /* RTCDispatcher.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCDispatcher.h; sourceTree = "<group>"; };
- 9845CE851CA1AE1D00BFE57C /* RTCEAGLVideoView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCEAGLVideoView.h; sourceTree = "<group>"; };
- 9845CE861CA1AE1D00BFE57C /* RTCFieldTrials.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCFieldTrials.h; sourceTree = "<group>"; };
- 9845CE871CA1AE1D00BFE57C /* RTCFileLogger.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCFileLogger.h; sourceTree = "<group>"; };
- 9845CE881CA1AE1D00BFE57C /* RTCIceCandidate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCIceCandidate.h; sourceTree = "<group>"; };
- 9845CE891CA1AE1D00BFE57C /* RTCIceServer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCIceServer.h; sourceTree = "<group>"; };
- 9845CE8A1CA1AE1D00BFE57C /* RTCLogging.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCLogging.h; sourceTree = "<group>"; };
- 9845CE8B1CA1AE1D00BFE57C /* RTCMacros.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCMacros.h; sourceTree = "<group>"; };
- 9845CE8C1CA1AE1D00BFE57C /* RTCMediaConstraints.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCMediaConstraints.h; sourceTree = "<group>"; };
- 9845CE8D1CA1AE1D00BFE57C /* RTCMediaStream.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCMediaStream.h; sourceTree = "<group>"; };
- 9845CE8E1CA1AE1D00BFE57C /* RTCMediaStreamTrack.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCMediaStreamTrack.h; sourceTree = "<group>"; };
- 9845CE8F1CA1AE1D00BFE57C /* RTCOpenGLVideoRenderer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCOpenGLVideoRenderer.h; sourceTree = "<group>"; };
- 9845CE901CA1AE1D00BFE57C /* RTCPeerConnection.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCPeerConnection.h; sourceTree = "<group>"; };
- 9845CE911CA1AE1D00BFE57C /* RTCPeerConnectionFactory.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCPeerConnectionFactory.h; sourceTree = "<group>"; };
- 9845CE921CA1AE1D00BFE57C /* RTCSessionDescription.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCSessionDescription.h; sourceTree = "<group>"; };
- 9845CE931CA1AE1D00BFE57C /* RTCSSLAdapter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCSSLAdapter.h; sourceTree = "<group>"; };
- 9845CE941CA1AE1D00BFE57C /* RTCStatsReport.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCStatsReport.h; sourceTree = "<group>"; };
- 9845CE951CA1AE1D00BFE57C /* RTCTracing.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCTracing.h; sourceTree = "<group>"; };
- 9845CE961CA1AE1D00BFE57C /* RTCVideoFrame.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCVideoFrame.h; sourceTree = "<group>"; };
- 9845CE971CA1AE1D00BFE57C /* RTCVideoRenderer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCVideoRenderer.h; sourceTree = "<group>"; };
- 9845CE981CA1AE1D00BFE57C /* RTCVideoRendererAdapter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCVideoRendererAdapter.h; sourceTree = "<group>"; };
- 9845CE991CA1AE1D00BFE57C /* RTCVideoSource.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCVideoSource.h; sourceTree = "<group>"; };
- 9845CE9A1CA1AE1D00BFE57C /* RTCVideoTrack.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCVideoTrack.h; sourceTree = "<group>"; };
- 986649BA1C986B7D008EC831 /* avfoundationvideocapturer.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = avfoundationvideocapturer.mm; path = ../../../../api/objc/avfoundationvideocapturer.mm; sourceTree = "<group>"; };
- 986649BB1C986B7D008EC831 /* RTCAudioTrack.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCAudioTrack.mm; path = ../../../../api/objc/RTCAudioTrack.mm; sourceTree = "<group>"; };
- 986649BC1C986B7D008EC831 /* RTCAVFoundationVideoSource.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCAVFoundationVideoSource.mm; path = ../../../../api/objc/RTCAVFoundationVideoSource.mm; sourceTree = "<group>"; };
- 986649BD1C986B7D008EC831 /* RTCConfiguration.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCConfiguration.mm; path = ../../../../api/objc/RTCConfiguration.mm; sourceTree = "<group>"; };
- 986649BE1C986B7D008EC831 /* RTCDataChannel.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCDataChannel.mm; path = ../../../../api/objc/RTCDataChannel.mm; sourceTree = "<group>"; };
- 986649BF1C986B7D008EC831 /* RTCDataChannelConfiguration.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCDataChannelConfiguration.mm; path = ../../../../api/objc/RTCDataChannelConfiguration.mm; sourceTree = "<group>"; };
- 986649C01C986B7D008EC831 /* RTCEAGLVideoView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = RTCEAGLVideoView.m; path = ../../../../api/objc/RTCEAGLVideoView.m; sourceTree = "<group>"; };
- 986649C11C986B7D008EC831 /* RTCIceCandidate.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCIceCandidate.mm; path = ../../../../api/objc/RTCIceCandidate.mm; sourceTree = "<group>"; };
- 986649C21C986B7D008EC831 /* RTCIceServer.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCIceServer.mm; path = ../../../../api/objc/RTCIceServer.mm; sourceTree = "<group>"; };
- 986649C31C986B7D008EC831 /* RTCMediaConstraints.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCMediaConstraints.mm; path = ../../../../api/objc/RTCMediaConstraints.mm; sourceTree = "<group>"; };
- 986649C41C986B7D008EC831 /* RTCMediaStream.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCMediaStream.mm; path = ../../../../api/objc/RTCMediaStream.mm; sourceTree = "<group>"; };
- 986649C51C986B7D008EC831 /* RTCMediaStreamTrack.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCMediaStreamTrack.mm; path = ../../../../api/objc/RTCMediaStreamTrack.mm; sourceTree = "<group>"; };
- 986649C71C986B7D008EC831 /* RTCOpenGLVideoRenderer.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCOpenGLVideoRenderer.mm; path = ../../../../api/objc/RTCOpenGLVideoRenderer.mm; sourceTree = "<group>"; };
- 986649C81C986B7D008EC831 /* RTCPeerConnection.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCPeerConnection.mm; path = ../../../../api/objc/RTCPeerConnection.mm; sourceTree = "<group>"; };
- 986649C91C986B7D008EC831 /* RTCPeerConnection+DataChannel.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = "RTCPeerConnection+DataChannel.mm"; path = "../../../../api/objc/RTCPeerConnection+DataChannel.mm"; sourceTree = "<group>"; };
- 986649CA1C986B7D008EC831 /* RTCPeerConnection+Stats.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = "RTCPeerConnection+Stats.mm"; path = "../../../../api/objc/RTCPeerConnection+Stats.mm"; sourceTree = "<group>"; };
- 986649CB1C986B7D008EC831 /* RTCPeerConnectionFactory.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCPeerConnectionFactory.mm; path = ../../../../api/objc/RTCPeerConnectionFactory.mm; sourceTree = "<group>"; };
- 986649CC1C986B7D008EC831 /* RTCSessionDescription.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCSessionDescription.mm; path = ../../../../api/objc/RTCSessionDescription.mm; sourceTree = "<group>"; };
- 986649CD1C986B7D008EC831 /* RTCStatsReport.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCStatsReport.mm; path = ../../../../api/objc/RTCStatsReport.mm; sourceTree = "<group>"; };
- 986649CE1C986B7D008EC831 /* RTCVideoFrame.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCVideoFrame.mm; path = ../../../../api/objc/RTCVideoFrame.mm; sourceTree = "<group>"; };
- 986649CF1C986B7D008EC831 /* RTCVideoRendererAdapter.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCVideoRendererAdapter.mm; path = ../../../../api/objc/RTCVideoRendererAdapter.mm; sourceTree = "<group>"; };
- 986649D01C986B7D008EC831 /* RTCVideoSource.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCVideoSource.mm; path = ../../../../api/objc/RTCVideoSource.mm; sourceTree = "<group>"; };
- 986649D11C986B7D008EC831 /* RTCVideoTrack.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = RTCVideoTrack.mm; path = ../../../../api/objc/RTCVideoTrack.mm; sourceTree = "<group>"; };
-/* End PBXFileReference section */
-
-/* Begin PBXFrameworksBuildPhase section */
- 9820AABF1C977D11001E5793 /* Frameworks */ = {
- isa = PBXFrameworksBuildPhase;
- buildActionMask = 2147483647;
- files = (
- 9802249B1CA243DE00295D57 /* libaudio_device.a in Frameworks */,
- 980224BE1CA243DE00295D57 /* librtc_media.a in Frameworks */,
- 980224A61CA243DE00295D57 /* libfield_trial_default.a in Frameworks */,
- 980224D31CA243DE00295D57 /* libwebrtc_opus.a in Frameworks */,
- 980224C11CA243DE00295D57 /* librtc_xmllite.a in Frameworks */,
- 980224C51CA243DE00295D57 /* libsystem_wrappers.a in Frameworks */,
- 980224D41CA243DE00295D57 /* libwebrtc_utility.a in Frameworks */,
- 980224A81CA243DE00295D57 /* libg722.a in Frameworks */,
- 9802249D1CA243DE00295D57 /* libaudio_processing.a in Frameworks */,
- 980224D61CA243DE00295D57 /* libwebrtc_vp8.a in Frameworks */,
- 980224AE1CA243DE00295D57 /* libmedia_file.a in Frameworks */,
- 980224B91CA243DE00295D57 /* librtc_base_approved.a in Frameworks */,
- 980224C71CA243DE00295D57 /* libvideo_capture_module_internal_impl.a in Frameworks */,
- 9802249C1CA243DE00295D57 /* libaudio_encoder_interface.a in Frameworks */,
- 980224CF1CA243DE00295D57 /* libwebrtc_common.a in Frameworks */,
- 9802249A1CA243DE00295D57 /* libaudio_decoder_interface.a in Frameworks */,
- 980224BC1CA243DE00295D57 /* librtc_event_log_proto.a in Frameworks */,
- 980224AC1CA243DE00295D57 /* libjingle_peerconnection.a in Frameworks */,
- 980224C41CA243DE00295D57 /* libsrtp.a in Frameworks */,
- 982C132F1C98B7C100895DC2 /* AudioToolbox.framework in Frameworks */,
- 980224981CA243DE00295D57 /* libaudio_coding_module.a in Frameworks */,
- 980224A31CA243DE00295D57 /* libcommon_video.a in Frameworks */,
- 982C132D1C98B7B400895DC2 /* CoreMedia.framework in Frameworks */,
- 980224A01CA243DE00295D57 /* libboringssl.a in Frameworks */,
- 980224D81CA243DE00295D57 /* libwebrtc.a in Frameworks */,
- 980224C91CA243DE00295D57 /* libvideo_coding_utility.a in Frameworks */,
- 980224D01CA243DE00295D57 /* libwebrtc_h264_video_toolbox.a in Frameworks */,
- 980224BF1CA243DE00295D57 /* librtc_p2p.a in Frameworks */,
- 980224D21CA243DE00295D57 /* libwebrtc_i420.a in Frameworks */,
- 980224B21CA243DE00295D57 /* libpaced_sender.a in Frameworks */,
- 980224AF1CA243DE00295D57 /* libmetrics_default.a in Frameworks */,
- 982C132B1C98B79700895DC2 /* VideoToolbox.framework in Frameworks */,
- 980224AD1CA243DE00295D57 /* libjsoncpp.a in Frameworks */,
- 980224A71CA243DE00295D57 /* libg711.a in Frameworks */,
- 980224AA1CA243DE00295D57 /* libisac_common.a in Frameworks */,
- 980224D11CA243DE00295D57 /* libwebrtc_h264.a in Frameworks */,
- 980224CB1CA243DE00295D57 /* libvideo_render_module_internal_impl.a in Frameworks */,
- 980224A91CA243DE00295D57 /* libilbc.a in Frameworks */,
- 980224D51CA243DE00295D57 /* libwebrtc_video_coding.a in Frameworks */,
- 980224BD1CA243DE00295D57 /* librtc_event_log.a in Frameworks */,
- 980224A41CA243DE00295D57 /* libcongestion_controller.a in Frameworks */,
- 980224B01CA243DE00295D57 /* libneteq.a in Frameworks */,
- 9802249E1CA243DE00295D57 /* libaudioproc_debug_proto.a in Frameworks */,
- 980224A11CA243DE00295D57 /* libcng.a in Frameworks */,
- 980224991CA243DE00295D57 /* libaudio_conference_mixer.a in Frameworks */,
- 980224B61CA243DE00295D57 /* libremote_bitrate_estimator.a in Frameworks */,
- 980224C31CA243DE00295D57 /* librtp_rtcp.a in Frameworks */,
- 980224CC1CA243DE00295D57 /* libvideo_render_module.a in Frameworks */,
- 980224C81CA243DE00295D57 /* libvideo_capture_module.a in Frameworks */,
- 982C13291C98B78000895DC2 /* AVFoundation.framework in Frameworks */,
- 980224D71CA243DE00295D57 /* libwebrtc_vp9.a in Frameworks */,
- 980224C21CA243DE00295D57 /* librtc_xmpp.a in Frameworks */,
- 980224B31CA243DE00295D57 /* libpcm16b.a in Frameworks */,
- 980224CE1CA243DE00295D57 /* libvpx.a in Frameworks */,
- 980224BB1CA243DE00295D57 /* librtc_base.a in Frameworks */,
- 9802249F1CA243DE00295D57 /* libbitrate_controller.a in Frameworks */,
- 980224B11CA243DE00295D57 /* libopus.a in Frameworks */,
- 980224CD1CA243DE00295D57 /* libvoice_engine.a in Frameworks */,
- 980224A51CA243DE00295D57 /* libexpat.a in Frameworks */,
- 980224A21CA243DE00295D57 /* libcommon_audio.a in Frameworks */,
- 980224D91CA243DE00295D57 /* libyuv.a in Frameworks */,
- 980224B41CA243DE00295D57 /* libprotobuf_lite.a in Frameworks */,
- 980224B71CA243DE00295D57 /* librent_a_codec.a in Frameworks */,
- 980224CA1CA243DE00295D57 /* libvideo_processing.a in Frameworks */,
- 980224C01CA243DE00295D57 /* librtc_pc.a in Frameworks */,
- 980224AB1CA243DE00295D57 /* libisac.a in Frameworks */,
- 980224B51CA243DE00295D57 /* libred.a in Frameworks */,
- 980224C61CA243DE00295D57 /* libusrsctplib.a in Frameworks */,
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
-/* End PBXFrameworksBuildPhase section */
-
-/* Begin PBXGroup section */
- 9820AAB91C977D11001E5793 = {
- isa = PBXGroup;
- children = (
- 9845CE7D1CA1AE1D00BFE57C /* include */,
- 986649A51C986B19008EC831 /* API */,
- 982C150E1C99080E00895DC2 /* Base */,
- 982C13301C98BAAE00895DC2 /* Frameworks */,
- 9866491E1C985706008EC831 /* Static Libraries */,
- 9820AAC51C977D11001E5793 /* WebRTC */,
- 9820AAC41C977D11001E5793 /* Products */,
- );
- sourceTree = "<group>";
- };
- 9820AAC41C977D11001E5793 /* Products */ = {
- isa = PBXGroup;
- children = (
- 9820AAC31C977D11001E5793 /* WebRTC.framework */,
- );
- name = Products;
- sourceTree = "<group>";
- };
- 9820AAC51C977D11001E5793 /* WebRTC */ = {
- isa = PBXGroup;
- children = (
- 9820AAC61C977D11001E5793 /* WebRTC.h */,
- 9820AAC81C977D11001E5793 /* Info.plist */,
- );
- path = WebRTC;
- sourceTree = "<group>";
- };
- 982C13301C98BAAE00895DC2 /* Frameworks */ = {
- isa = PBXGroup;
- children = (
- 982C132E1C98B7C100895DC2 /* AudioToolbox.framework */,
- 982C132C1C98B7B400895DC2 /* CoreMedia.framework */,
- 982C132A1C98B79700895DC2 /* VideoToolbox.framework */,
- 982C13281C98B78000895DC2 /* AVFoundation.framework */,
- );
- name = Frameworks;
- sourceTree = "<group>";
- };
- 982C150E1C99080E00895DC2 /* Base */ = {
- isa = PBXGroup;
- children = (
- 982C150F1C99083B00895DC2 /* NSString+StdString.mm */,
- 982C15101C99083B00895DC2 /* RTCCameraPreviewView.m */,
- 982C15111C99083B00895DC2 /* RTCDispatcher.m */,
- 982C15121C99083B00895DC2 /* RTCFieldTrials.mm */,
- 982C15131C99083B00895DC2 /* RTCFileLogger.mm */,
- 982C15141C99083B00895DC2 /* RTCLogging.mm */,
- 982C15151C99083B00895DC2 /* RTCSSLAdapter.mm */,
- 982C15161C99083B00895DC2 /* RTCTracing.mm */,
- 982C15171C99083B00895DC2 /* RTCUIApplication.mm */,
- );
- name = Base;
- sourceTree = "<group>";
- };
- 9845CE7D1CA1AE1D00BFE57C /* include */ = {
- isa = PBXGroup;
- children = (
- 9845CE7E1CA1AE1D00BFE57C /* RTCAudioTrack.h */,
- 9845CE7F1CA1AE1D00BFE57C /* RTCAVFoundationVideoSource.h */,
- 9845CE801CA1AE1D00BFE57C /* RTCCameraPreviewView.h */,
- 9845CE811CA1AE1D00BFE57C /* RTCConfiguration.h */,
- 9845CE821CA1AE1D00BFE57C /* RTCDataChannel.h */,
- 9845CE831CA1AE1D00BFE57C /* RTCDataChannelConfiguration.h */,
- 9845CE841CA1AE1D00BFE57C /* RTCDispatcher.h */,
- 9845CE851CA1AE1D00BFE57C /* RTCEAGLVideoView.h */,
- 9845CE861CA1AE1D00BFE57C /* RTCFieldTrials.h */,
- 9845CE871CA1AE1D00BFE57C /* RTCFileLogger.h */,
- 9845CE881CA1AE1D00BFE57C /* RTCIceCandidate.h */,
- 9845CE891CA1AE1D00BFE57C /* RTCIceServer.h */,
- 9845CE8A1CA1AE1D00BFE57C /* RTCLogging.h */,
- 9845CE8B1CA1AE1D00BFE57C /* RTCMacros.h */,
- 9845CE8C1CA1AE1D00BFE57C /* RTCMediaConstraints.h */,
- 9845CE8D1CA1AE1D00BFE57C /* RTCMediaStream.h */,
- 9845CE8E1CA1AE1D00BFE57C /* RTCMediaStreamTrack.h */,
- 9845CE8F1CA1AE1D00BFE57C /* RTCOpenGLVideoRenderer.h */,
- 9845CE901CA1AE1D00BFE57C /* RTCPeerConnection.h */,
- 9845CE911CA1AE1D00BFE57C /* RTCPeerConnectionFactory.h */,
- 9845CE921CA1AE1D00BFE57C /* RTCSessionDescription.h */,
- 9845CE931CA1AE1D00BFE57C /* RTCSSLAdapter.h */,
- 9845CE941CA1AE1D00BFE57C /* RTCStatsReport.h */,
- 9845CE951CA1AE1D00BFE57C /* RTCTracing.h */,
- 9845CE961CA1AE1D00BFE57C /* RTCVideoFrame.h */,
- 9845CE971CA1AE1D00BFE57C /* RTCVideoRenderer.h */,
- 9845CE981CA1AE1D00BFE57C /* RTCVideoRendererAdapter.h */,
- 9845CE991CA1AE1D00BFE57C /* RTCVideoSource.h */,
- 9845CE9A1CA1AE1D00BFE57C /* RTCVideoTrack.h */,
- );
- name = include;
- path = ../../../../../out_ios_framework/include;
- sourceTree = "<group>";
- };
- 9866491E1C985706008EC831 /* Static Libraries */ = {
- isa = PBXGroup;
- children = (
- 980224561CA243DE00295D57 /* libaudio_coding_module.a */,
- 980224571CA243DE00295D57 /* libaudio_conference_mixer.a */,
- 980224581CA243DE00295D57 /* libaudio_decoder_interface.a */,
- 980224591CA243DE00295D57 /* libaudio_device.a */,
- 9802245A1CA243DE00295D57 /* libaudio_encoder_interface.a */,
- 9802245B1CA243DE00295D57 /* libaudio_processing.a */,
- 9802245C1CA243DE00295D57 /* libaudioproc_debug_proto.a */,
- 9802245D1CA243DE00295D57 /* libbitrate_controller.a */,
- 9802245E1CA243DE00295D57 /* libboringssl.a */,
- 9802245F1CA243DE00295D57 /* libcng.a */,
- 980224601CA243DE00295D57 /* libcommon_audio.a */,
- 980224611CA243DE00295D57 /* libcommon_video.a */,
- 980224621CA243DE00295D57 /* libcongestion_controller.a */,
- 980224631CA243DE00295D57 /* libexpat.a */,
- 980224641CA243DE00295D57 /* libfield_trial_default.a */,
- 980224651CA243DE00295D57 /* libg711.a */,
- 980224661CA243DE00295D57 /* libg722.a */,
- 980224671CA243DE00295D57 /* libilbc.a */,
- 980224681CA243DE00295D57 /* libisac_common.a */,
- 980224691CA243DE00295D57 /* libisac.a */,
- 9802246A1CA243DE00295D57 /* libjingle_peerconnection.a */,
- 9802246B1CA243DE00295D57 /* libjsoncpp.a */,
- 9802246C1CA243DE00295D57 /* libmedia_file.a */,
- 9802246D1CA243DE00295D57 /* libmetrics_default.a */,
- 9802246E1CA243DE00295D57 /* libneteq.a */,
- 9802246F1CA243DE00295D57 /* libopus.a */,
- 980224701CA243DE00295D57 /* libpaced_sender.a */,
- 980224711CA243DE00295D57 /* libpcm16b.a */,
- 980224721CA243DE00295D57 /* libprotobuf_lite.a */,
- 980224731CA243DE00295D57 /* libred.a */,
- 980224741CA243DE00295D57 /* libremote_bitrate_estimator.a */,
- 980224751CA243DE00295D57 /* librent_a_codec.a */,
- 980224771CA243DE00295D57 /* librtc_base_approved.a */,
- 980224791CA243DE00295D57 /* librtc_base.a */,
- 9802247A1CA243DE00295D57 /* librtc_event_log_proto.a */,
- 9802247B1CA243DE00295D57 /* librtc_event_log.a */,
- 9802247C1CA243DE00295D57 /* librtc_media.a */,
- 9802247D1CA243DE00295D57 /* librtc_p2p.a */,
- 9802247E1CA243DE00295D57 /* librtc_pc.a */,
- 9802247F1CA243DE00295D57 /* librtc_xmllite.a */,
- 980224801CA243DE00295D57 /* librtc_xmpp.a */,
- 980224811CA243DE00295D57 /* librtp_rtcp.a */,
- 980224821CA243DE00295D57 /* libsrtp.a */,
- 980224831CA243DE00295D57 /* libsystem_wrappers.a */,
- 980224841CA243DE00295D57 /* libusrsctplib.a */,
- 980224851CA243DE00295D57 /* libvideo_capture_module_internal_impl.a */,
- 980224861CA243DE00295D57 /* libvideo_capture_module.a */,
- 980224871CA243DE00295D57 /* libvideo_coding_utility.a */,
- 980224881CA243DE00295D57 /* libvideo_processing.a */,
- 980224891CA243DE00295D57 /* libvideo_render_module_internal_impl.a */,
- 9802248A1CA243DE00295D57 /* libvideo_render_module.a */,
- 9802248B1CA243DE00295D57 /* libvoice_engine.a */,
- 9802248C1CA243DE00295D57 /* libvpx.a */,
- 9802248D1CA243DE00295D57 /* libwebrtc_common.a */,
- 9802248E1CA243DE00295D57 /* libwebrtc_h264_video_toolbox.a */,
- 9802248F1CA243DE00295D57 /* libwebrtc_h264.a */,
- 980224901CA243DE00295D57 /* libwebrtc_i420.a */,
- 980224911CA243DE00295D57 /* libwebrtc_opus.a */,
- 980224921CA243DE00295D57 /* libwebrtc_utility.a */,
- 980224931CA243DE00295D57 /* libwebrtc_video_coding.a */,
- 980224941CA243DE00295D57 /* libwebrtc_vp8.a */,
- 980224951CA243DE00295D57 /* libwebrtc_vp9.a */,
- 980224961CA243DE00295D57 /* libwebrtc.a */,
- 980224971CA243DE00295D57 /* libyuv.a */,
- );
- name = "Static Libraries";
- sourceTree = "<group>";
- };
- 986649A51C986B19008EC831 /* API */ = {
- isa = PBXGroup;
- children = (
- 986649BA1C986B7D008EC831 /* avfoundationvideocapturer.mm */,
- 986649BB1C986B7D008EC831 /* RTCAudioTrack.mm */,
- 986649BC1C986B7D008EC831 /* RTCAVFoundationVideoSource.mm */,
- 986649BD1C986B7D008EC831 /* RTCConfiguration.mm */,
- 986649BE1C986B7D008EC831 /* RTCDataChannel.mm */,
- 986649BF1C986B7D008EC831 /* RTCDataChannelConfiguration.mm */,
- 986649C01C986B7D008EC831 /* RTCEAGLVideoView.m */,
- 986649C11C986B7D008EC831 /* RTCIceCandidate.mm */,
- 986649C21C986B7D008EC831 /* RTCIceServer.mm */,
- 986649C31C986B7D008EC831 /* RTCMediaConstraints.mm */,
- 986649C41C986B7D008EC831 /* RTCMediaStream.mm */,
- 986649C51C986B7D008EC831 /* RTCMediaStreamTrack.mm */,
- 986649C71C986B7D008EC831 /* RTCOpenGLVideoRenderer.mm */,
- 986649C81C986B7D008EC831 /* RTCPeerConnection.mm */,
- 986649C91C986B7D008EC831 /* RTCPeerConnection+DataChannel.mm */,
- 986649CA1C986B7D008EC831 /* RTCPeerConnection+Stats.mm */,
- 986649CB1C986B7D008EC831 /* RTCPeerConnectionFactory.mm */,
- 986649CC1C986B7D008EC831 /* RTCSessionDescription.mm */,
- 986649CD1C986B7D008EC831 /* RTCStatsReport.mm */,
- 986649CE1C986B7D008EC831 /* RTCVideoFrame.mm */,
- 986649CF1C986B7D008EC831 /* RTCVideoRendererAdapter.mm */,
- 986649D01C986B7D008EC831 /* RTCVideoSource.mm */,
- 986649D11C986B7D008EC831 /* RTCVideoTrack.mm */,
- );
- name = API;
- sourceTree = "<group>";
- };
-/* End PBXGroup section */
-
-/* Begin PBXHeadersBuildPhase section */
- 9820AAC01C977D11001E5793 /* Headers */ = {
- isa = PBXHeadersBuildPhase;
- buildActionMask = 2147483647;
- files = (
- 9820AAC71C977D11001E5793 /* WebRTC.h in Headers */,
- 9845CEB41CA1AE1E00BFE57C /* RTCVideoRenderer.h in Headers */,
- 9845CE9E1CA1AE1E00BFE57C /* RTCConfiguration.h in Headers */,
- 9845CE9F1CA1AE1E00BFE57C /* RTCDataChannel.h in Headers */,
- 9845CEA41CA1AE1E00BFE57C /* RTCFileLogger.h in Headers */,
- 9845CEA71CA1AE1E00BFE57C /* RTCLogging.h in Headers */,
- 9845CEB71CA1AE1E00BFE57C /* RTCVideoTrack.h in Headers */,
- 9845CE9B1CA1AE1D00BFE57C /* RTCAudioTrack.h in Headers */,
- 9845CEB21CA1AE1E00BFE57C /* RTCTracing.h in Headers */,
- 9845CEA01CA1AE1E00BFE57C /* RTCDataChannelConfiguration.h in Headers */,
- 9845CEAB1CA1AE1E00BFE57C /* RTCMediaStreamTrack.h in Headers */,
- 9845CEAA1CA1AE1E00BFE57C /* RTCMediaStream.h in Headers */,
- 9845CEA51CA1AE1E00BFE57C /* RTCIceCandidate.h in Headers */,
- 9845CEB11CA1AE1E00BFE57C /* RTCStatsReport.h in Headers */,
- 9845CEB31CA1AE1E00BFE57C /* RTCVideoFrame.h in Headers */,
- 9845CEA21CA1AE1E00BFE57C /* RTCEAGLVideoView.h in Headers */,
- 9845CEA31CA1AE1E00BFE57C /* RTCFieldTrials.h in Headers */,
- 9845CEB61CA1AE1E00BFE57C /* RTCVideoSource.h in Headers */,
- 9845CEA11CA1AE1E00BFE57C /* RTCDispatcher.h in Headers */,
- 9845CEA81CA1AE1E00BFE57C /* RTCMacros.h in Headers */,
- 9845CEAD1CA1AE1E00BFE57C /* RTCPeerConnection.h in Headers */,
- 9845CEB51CA1AE1E00BFE57C /* RTCVideoRendererAdapter.h in Headers */,
- 9845CEA91CA1AE1E00BFE57C /* RTCMediaConstraints.h in Headers */,
- 9845CEAF1CA1AE1E00BFE57C /* RTCSessionDescription.h in Headers */,
- 9845CEA61CA1AE1E00BFE57C /* RTCIceServer.h in Headers */,
- 9845CE9C1CA1AE1D00BFE57C /* RTCAVFoundationVideoSource.h in Headers */,
- 9845CEAE1CA1AE1E00BFE57C /* RTCPeerConnectionFactory.h in Headers */,
- 9845CEB01CA1AE1E00BFE57C /* RTCSSLAdapter.h in Headers */,
- 9845CEAC1CA1AE1E00BFE57C /* RTCOpenGLVideoRenderer.h in Headers */,
- 9845CE9D1CA1AE1E00BFE57C /* RTCCameraPreviewView.h in Headers */,
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
-/* End PBXHeadersBuildPhase section */
-
-/* Begin PBXNativeTarget section */
- 9820AAC21C977D11001E5793 /* WebRTC */ = {
- isa = PBXNativeTarget;
- buildConfigurationList = 9820AACB1C977D11001E5793 /* Build configuration list for PBXNativeTarget "WebRTC" */;
- buildPhases = (
- 9820AABE1C977D11001E5793 /* Sources */,
- 9820AABF1C977D11001E5793 /* Frameworks */,
- 9820AAC01C977D11001E5793 /* Headers */,
- 9820AAC11C977D11001E5793 /* Resources */,
- );
- buildRules = (
- );
- dependencies = (
- );
- name = WebRTC;
- productName = WebRTC;
- productReference = 9820AAC31C977D11001E5793 /* WebRTC.framework */;
- productType = "com.apple.product-type.framework";
- };
-/* End PBXNativeTarget section */
-
-/* Begin PBXProject section */
- 9820AABA1C977D11001E5793 /* Project object */ = {
- isa = PBXProject;
- attributes = {
- LastUpgradeCheck = 0720;
- ORGANIZATIONNAME = "Google Inc.";
- TargetAttributes = {
- 9820AAC21C977D11001E5793 = {
- CreatedOnToolsVersion = 7.2.1;
- };
- };
- };
- buildConfigurationList = 9820AABD1C977D11001E5793 /* Build configuration list for PBXProject "WebRTC" */;
- compatibilityVersion = "Xcode 3.2";
- developmentRegion = English;
- hasScannedForEncodings = 0;
- knownRegions = (
- en,
- );
- mainGroup = 9820AAB91C977D11001E5793;
- productRefGroup = 9820AAC41C977D11001E5793 /* Products */;
- projectDirPath = "";
- projectRoot = "";
- targets = (
- 9820AAC21C977D11001E5793 /* WebRTC */,
- );
- };
-/* End PBXProject section */
-
-/* Begin PBXResourcesBuildPhase section */
- 9820AAC11C977D11001E5793 /* Resources */ = {
- isa = PBXResourcesBuildPhase;
- buildActionMask = 2147483647;
- files = (
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
-/* End PBXResourcesBuildPhase section */
-
-/* Begin PBXSourcesBuildPhase section */
- 9820AABE1C977D11001E5793 /* Sources */ = {
- isa = PBXSourcesBuildPhase;
- buildActionMask = 2147483647;
- files = (
- 986649E01C986B7D008EC831 /* RTCPeerConnection.mm in Sources */,
- 986649D81C986B7D008EC831 /* RTCEAGLVideoView.m in Sources */,
- 982C151E1C99083B00895DC2 /* RTCSSLAdapter.mm in Sources */,
- 982C15181C99083B00895DC2 /* NSString+StdString.mm in Sources */,
- 986649E11C986B7D008EC831 /* RTCPeerConnection+DataChannel.mm in Sources */,
- 986649DA1C986B7D008EC831 /* RTCIceServer.mm in Sources */,
- 982C15191C99083B00895DC2 /* RTCCameraPreviewView.m in Sources */,
- 986649E51C986B7D008EC831 /* RTCStatsReport.mm in Sources */,
- 986649D21C986B7D008EC831 /* avfoundationvideocapturer.mm in Sources */,
- 986649E31C986B7D008EC831 /* RTCPeerConnectionFactory.mm in Sources */,
- 982C151B1C99083B00895DC2 /* RTCFieldTrials.mm in Sources */,
- 982C151F1C99083B00895DC2 /* RTCTracing.mm in Sources */,
- 986649E61C986B7D008EC831 /* RTCVideoFrame.mm in Sources */,
- 986649E81C986B7D008EC831 /* RTCVideoSource.mm in Sources */,
- 986649DF1C986B7D008EC831 /* RTCOpenGLVideoRenderer.mm in Sources */,
- 986649DC1C986B7D008EC831 /* RTCMediaStream.mm in Sources */,
- 982C151D1C99083B00895DC2 /* RTCLogging.mm in Sources */,
- 986649D51C986B7D008EC831 /* RTCConfiguration.mm in Sources */,
- 986649E91C986B7D008EC831 /* RTCVideoTrack.mm in Sources */,
- 986649DD1C986B7D008EC831 /* RTCMediaStreamTrack.mm in Sources */,
- 982C151C1C99083B00895DC2 /* RTCFileLogger.mm in Sources */,
- 986649D91C986B7D008EC831 /* RTCIceCandidate.mm in Sources */,
- 982C15201C99083B00895DC2 /* RTCUIApplication.mm in Sources */,
- 986649DB1C986B7D008EC831 /* RTCMediaConstraints.mm in Sources */,
- 986649D71C986B7D008EC831 /* RTCDataChannelConfiguration.mm in Sources */,
- 986649D31C986B7D008EC831 /* RTCAudioTrack.mm in Sources */,
- 986649E41C986B7D008EC831 /* RTCSessionDescription.mm in Sources */,
- 986649D41C986B7D008EC831 /* RTCAVFoundationVideoSource.mm in Sources */,
- 986649E21C986B7D008EC831 /* RTCPeerConnection+Stats.mm in Sources */,
- 986649E71C986B7D008EC831 /* RTCVideoRendererAdapter.mm in Sources */,
- 982C151A1C99083B00895DC2 /* RTCDispatcher.m in Sources */,
- 986649D61C986B7D008EC831 /* RTCDataChannel.mm in Sources */,
- );
- runOnlyForDeploymentPostprocessing = 0;
- };
-/* End PBXSourcesBuildPhase section */
-
-/* Begin XCBuildConfiguration section */
- 9820AAC91C977D11001E5793 /* Debug */ = {
- isa = XCBuildConfiguration;
- buildSettings = {
- ALWAYS_SEARCH_USER_PATHS = NO;
- CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
- CLANG_CXX_LIBRARY = "libc++";
- CLANG_ENABLE_MODULES = YES;
- CLANG_ENABLE_OBJC_ARC = YES;
- CLANG_WARN_BOOL_CONVERSION = YES;
- CLANG_WARN_CONSTANT_CONVERSION = YES;
- CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
- CLANG_WARN_EMPTY_BODY = YES;
- CLANG_WARN_ENUM_CONVERSION = YES;
- CLANG_WARN_INT_CONVERSION = YES;
- CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
- CLANG_WARN_UNREACHABLE_CODE = YES;
- CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
- "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
- COPY_PHASE_STRIP = NO;
- CURRENT_PROJECT_VERSION = 1;
- DEBUG_INFORMATION_FORMAT = dwarf;
- DEPLOYMENT_POSTPROCESSING = YES;
- ENABLE_STRICT_OBJC_MSGSEND = YES;
- ENABLE_TESTABILITY = YES;
- GCC_C_LANGUAGE_STANDARD = gnu99;
- GCC_DYNAMIC_NO_PIC = NO;
- GCC_NO_COMMON_BLOCKS = YES;
- GCC_OPTIMIZATION_LEVEL = 0;
- GCC_PREPROCESSOR_DEFINITIONS = (
- "DEBUG=1",
- "$(inherited)",
- );
- GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
- GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
- GCC_WARN_UNDECLARED_SELECTOR = YES;
- GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
- GCC_WARN_UNUSED_FUNCTION = YES;
- GCC_WARN_UNUSED_VARIABLE = YES;
- IPHONEOS_DEPLOYMENT_TARGET = 9.2;
- MTL_ENABLE_DEBUG_INFO = YES;
- ONLY_ACTIVE_ARCH = YES;
- SDKROOT = iphoneos;
- TARGETED_DEVICE_FAMILY = "1,2";
- VERSIONING_SYSTEM = "apple-generic";
- VERSION_INFO_PREFIX = "";
- };
- name = Debug;
- };
- 9820AACA1C977D11001E5793 /* Release */ = {
- isa = XCBuildConfiguration;
- buildSettings = {
- ALWAYS_SEARCH_USER_PATHS = NO;
- CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
- CLANG_CXX_LIBRARY = "libc++";
- CLANG_ENABLE_MODULES = YES;
- CLANG_ENABLE_OBJC_ARC = YES;
- CLANG_WARN_BOOL_CONVERSION = YES;
- CLANG_WARN_CONSTANT_CONVERSION = YES;
- CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
- CLANG_WARN_EMPTY_BODY = YES;
- CLANG_WARN_ENUM_CONVERSION = YES;
- CLANG_WARN_INT_CONVERSION = YES;
- CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
- CLANG_WARN_UNREACHABLE_CODE = YES;
- CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
- "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
- COPY_PHASE_STRIP = NO;
- CURRENT_PROJECT_VERSION = 1;
- DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
- DEPLOYMENT_POSTPROCESSING = YES;
- ENABLE_NS_ASSERTIONS = NO;
- ENABLE_STRICT_OBJC_MSGSEND = YES;
- GCC_C_LANGUAGE_STANDARD = gnu99;
- GCC_NO_COMMON_BLOCKS = YES;
- GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
- GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
- GCC_WARN_UNDECLARED_SELECTOR = YES;
- GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
- GCC_WARN_UNUSED_FUNCTION = YES;
- GCC_WARN_UNUSED_VARIABLE = YES;
- IPHONEOS_DEPLOYMENT_TARGET = 9.2;
- MTL_ENABLE_DEBUG_INFO = NO;
- SDKROOT = iphoneos;
- TARGETED_DEVICE_FAMILY = "1,2";
- VALIDATE_PRODUCT = YES;
- VERSIONING_SYSTEM = "apple-generic";
- VERSION_INFO_PREFIX = "";
- };
- name = Release;
- };
- 9820AACC1C977D11001E5793 /* Debug */ = {
- isa = XCBuildConfiguration;
- buildSettings = {
- ALTERNATE_GROUP = "";
- ALTERNATE_OWNER = "";
- DEFINES_MODULE = YES;
- DYLIB_COMPATIBILITY_VERSION = 1;
- DYLIB_CURRENT_VERSION = 1;
- DYLIB_INSTALL_NAME_BASE = "@rpath";
- ENABLE_BITCODE = NO;
- GCC_ENABLE_CPP_RTTI = NO;
- GCC_PREPROCESSOR_DEFINITIONS = (
- "DEBUG=1",
- "$(inherited)",
- V8_DEPRECATION_WARNINGS,
- "CLD_VERSION=2",
- DISABLE_NACL,
- CHROMIUM_BUILD,
- "CR_CLANG_REVISION=263324-1",
- "USE_LIBJPEG_TURBO=1",
- FIELDTRIAL_TESTING_ENABLED,
- "DISABLE_FTP_SUPPORT=1",
- V8_USE_EXTERNAL_STARTUP_DATA,
- "GTEST_HAS_POSIX_RE=0",
- "GTEST_LANG_CXX11=0",
- WEBRTC_MAC,
- WEBRTC_IOS,
- WEBRTC_POSIX,
- PROTOBUF_USE_DLLS,
- GOOGLE_PROTOBUF_NO_RTTI,
- GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER,
- "USE_LIBPCI=1",
- "USE_OPENSSL=1",
- NDEBUG,
- NVALGRIND,
- "DYNAMIC_ANNOTATIONS_ENABLED=0",
- "NS_BLOCK_ASSERTIONS=1",
- );
- HEADER_SEARCH_PATHS = "$(SRCROOT)/../../../../..";
- INFOPLIST_FILE = WebRTC/Info.plist;
- INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
- LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
- LIBRARY_SEARCH_PATHS = "$(SRCROOT)/../../../../../out_ios_libs/lib";
- OTHER_LDFLAGS = "-ObjC";
- PRODUCT_BUNDLE_IDENTIFIER = com.google.WebRTC;
- PRODUCT_NAME = "$(TARGET_NAME)";
- SKIP_INSTALL = YES;
- SYMROOT = $SRCROOT/build;
- };
- name = Debug;
- };
- 9820AACD1C977D11001E5793 /* Release */ = {
- isa = XCBuildConfiguration;
- buildSettings = {
- ALTERNATE_GROUP = "";
- ALTERNATE_OWNER = "";
- DEFINES_MODULE = YES;
- DYLIB_COMPATIBILITY_VERSION = 1;
- DYLIB_CURRENT_VERSION = 1;
- DYLIB_INSTALL_NAME_BASE = "@rpath";
- ENABLE_BITCODE = NO;
- GCC_ENABLE_CPP_RTTI = NO;
- GCC_PREPROCESSOR_DEFINITIONS = (
- V8_DEPRECATION_WARNINGS,
- "CLD_VERSION=2",
- DISABLE_NACL,
- CHROMIUM_BUILD,
- "CR_CLANG_REVISION=263324-1",
- "USE_LIBJPEG_TURBO=1",
- FIELDTRIAL_TESTING_ENABLED,
- "DISABLE_FTP_SUPPORT=1",
- V8_USE_EXTERNAL_STARTUP_DATA,
- "GTEST_HAS_POSIX_RE=0",
- "GTEST_LANG_CXX11=0",
- WEBRTC_MAC,
- WEBRTC_IOS,
- WEBRTC_POSIX,
- PROTOBUF_USE_DLLS,
- GOOGLE_PROTOBUF_NO_RTTI,
- GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER,
- "USE_LIBPCI=1",
- "USE_OPENSSL=1",
- NDEBUG,
- NVALGRIND,
- "DYNAMIC_ANNOTATIONS_ENABLED=0",
- "NS_BLOCK_ASSERTIONS=1",
- );
- HEADER_SEARCH_PATHS = "$(SRCROOT)/../../../../..";
- INFOPLIST_FILE = WebRTC/Info.plist;
- INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
- LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
- LIBRARY_SEARCH_PATHS = "$(SRCROOT)/../../../../../out_ios_libs/lib";
- OTHER_LDFLAGS = "-ObjC";
- PRODUCT_BUNDLE_IDENTIFIER = com.google.WebRTC;
- PRODUCT_NAME = "$(TARGET_NAME)";
- SKIP_INSTALL = YES;
- SYMROOT = $SRCROOT/build;
- };
- name = Release;
- };
-/* End XCBuildConfiguration section */
-
-/* Begin XCConfigurationList section */
- 9820AABD1C977D11001E5793 /* Build configuration list for PBXProject "WebRTC" */ = {
- isa = XCConfigurationList;
- buildConfigurations = (
- 9820AAC91C977D11001E5793 /* Debug */,
- 9820AACA1C977D11001E5793 /* Release */,
- );
- defaultConfigurationIsVisible = 0;
- defaultConfigurationName = Release;
- };
- 9820AACB1C977D11001E5793 /* Build configuration list for PBXNativeTarget "WebRTC" */ = {
- isa = XCConfigurationList;
- buildConfigurations = (
- 9820AACC1C977D11001E5793 /* Debug */,
- 9820AACD1C977D11001E5793 /* Release */,
- );
- defaultConfigurationIsVisible = 0;
- defaultConfigurationName = Release;
- };
-/* End XCConfigurationList section */
- };
- rootObject = 9820AABA1C977D11001E5793 /* Project object */;
-}
diff --git a/chromium/third_party/webrtc/build/ios/SDK/Framework/WebRTC.xcodeproj/xcshareddata/xcschemes/WebRTC.xcscheme b/chromium/third_party/webrtc/build/ios/SDK/Framework/WebRTC.xcodeproj/xcshareddata/xcschemes/WebRTC.xcscheme
deleted file mode 100644
index 2447c448df3..00000000000
--- a/chromium/third_party/webrtc/build/ios/SDK/Framework/WebRTC.xcodeproj/xcshareddata/xcschemes/WebRTC.xcscheme
+++ /dev/null
@@ -1,80 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Scheme
- LastUpgradeVersion = "0720"
- version = "1.3">
- <BuildAction
- parallelizeBuildables = "YES"
- buildImplicitDependencies = "YES">
- <BuildActionEntries>
- <BuildActionEntry
- buildForTesting = "YES"
- buildForRunning = "YES"
- buildForProfiling = "YES"
- buildForArchiving = "YES"
- buildForAnalyzing = "YES">
- <BuildableReference
- BuildableIdentifier = "primary"
- BlueprintIdentifier = "9820AAC21C977D11001E5793"
- BuildableName = "WebRTC.framework"
- BlueprintName = "WebRTC"
- ReferencedContainer = "container:WebRTC.xcodeproj">
- </BuildableReference>
- </BuildActionEntry>
- </BuildActionEntries>
- </BuildAction>
- <TestAction
- buildConfiguration = "Debug"
- selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
- selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
- shouldUseLaunchSchemeArgsEnv = "YES">
- <Testables>
- </Testables>
- <AdditionalOptions>
- </AdditionalOptions>
- </TestAction>
- <LaunchAction
- buildConfiguration = "Release"
- selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
- selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
- launchStyle = "0"
- useCustomWorkingDirectory = "NO"
- ignoresPersistentStateOnLaunch = "NO"
- debugDocumentVersioning = "YES"
- debugServiceExtension = "internal"
- allowLocationSimulation = "YES">
- <MacroExpansion>
- <BuildableReference
- BuildableIdentifier = "primary"
- BlueprintIdentifier = "9820AAC21C977D11001E5793"
- BuildableName = "WebRTC.framework"
- BlueprintName = "WebRTC"
- ReferencedContainer = "container:WebRTC.xcodeproj">
- </BuildableReference>
- </MacroExpansion>
- <AdditionalOptions>
- </AdditionalOptions>
- </LaunchAction>
- <ProfileAction
- buildConfiguration = "Release"
- shouldUseLaunchSchemeArgsEnv = "YES"
- savedToolIdentifier = ""
- useCustomWorkingDirectory = "NO"
- debugDocumentVersioning = "YES">
- <MacroExpansion>
- <BuildableReference
- BuildableIdentifier = "primary"
- BlueprintIdentifier = "9820AAC21C977D11001E5793"
- BuildableName = "WebRTC.framework"
- BlueprintName = "WebRTC"
- ReferencedContainer = "container:WebRTC.xcodeproj">
- </BuildableReference>
- </MacroExpansion>
- </ProfileAction>
- <AnalyzeAction
- buildConfiguration = "Debug">
- </AnalyzeAction>
- <ArchiveAction
- buildConfiguration = "Release"
- revealArchiveInOrganizer = "YES">
- </ArchiveAction>
-</Scheme>
diff --git a/chromium/third_party/webrtc/build/ios/SDK/PodTest/Podfile b/chromium/third_party/webrtc/build/ios/SDK/PodTest/Podfile
index 730afcfa185..fec22d917fa 100644
--- a/chromium/third_party/webrtc/build/ios/SDK/PodTest/Podfile
+++ b/chromium/third_party/webrtc/build/ios/SDK/PodTest/Podfile
@@ -4,6 +4,6 @@
# use_frameworks!
target 'PodTest' do
-pod "WebRTC", :path => "../../../../../out_ios_framework"
+pod "WebRTC", :path => "../../../../../out_ios_libs"
end
diff --git a/chromium/third_party/webrtc/build/ios/SDK/README b/chromium/third_party/webrtc/build/ios/SDK/README
deleted file mode 100644
index c234617bcad..00000000000
--- a/chromium/third_party/webrtc/build/ios/SDK/README
+++ /dev/null
@@ -1,50 +0,0 @@
-We want to generate a dynamic framework for a CocoaPod. Unfortunately, using gyp-xcode generation
-for a framework currently presents some issues. To get around those issues, we chose to create a
-project with Xcode for building the framework directly.
-
-The Framework directory contains the Xcode project used to build the framework. The PodTest
-directory contains a project used to confirm that the built framework installs correctly with
-CocoaPods. The build_ios_framework.sh script needs to be run first, then `pod install` should be run
-in the same directory as the Xcode project. After installing, the PodTest.xcworkspace should be used
-instead of PodTest.xcodeproj.
-
-USAGE
-To build the framework, run the build_ios_framework.sh script. This will build static libraries for
-iOS (via the build_ios_libs.sh script), collect header files and adjust import/include statements
-for use inside the framework, build the framework using the Xcode project, merge multiple
-architectures together, and collect the framework files (the .framework itself and the .dSYM) with
-the Podspec into a common directory.
-
-DETAILS OF THE XCODE PROJECT
-The Xcode project contains relative references to the built static libraries (from
-build_ios_libs.sh), with the exception of librtc_api_objc and librtc_base_objc. The iOS source files
-from webrtc/api/objc and webrtc/base/objc are also included. NOTE: This will require updating
-if/when the built static libraries change.
-
-The flattened header files for webrtc/api/objc and webrtc/base/objc were also added to the Public
-Headers of the framework target. NOTE: This will require updating as the Obj-C API changes.
-
-Preprocessor definitions were copied from a gyp-xcode generated project.
-
-RTTI was disabled due to compiler errors and based on the setting in build/common.gypi
-(https://code.google.com/p/chromium/codesearch#chromium/src/build/common.gypi&q=rtti&sq=package:chromium&type=cs&l=5069).
-
-Bitcode is disabled for the time being.
-
-The minimum number of system frameworks were linked against based on build errors (currently
-AVFoundation, AudioToolbox, CoreMedia, VideoToolbox).
-
-The Build Products Path (SYMROOT) was changed to $SRCROOT/build so the build products are in a known
-location.
-
-The created WebRTC scheme was shared so the build_ios_framework.sh script will work on any machine
-that runs it.
-
-DEPLOYMENT_POSTPROCESSING is set to "Yes" so debug symbols will be stripped (the iOS Default for
-STRIP_INSTALLED_PRODUCT is already set to "Yes").
-
-"-ObjC" is added to OTHER_LDFLAGS to ensure category methods (in particular, those from
-RTCAudioSession+Configuration.mm) are included in the framework binary.
-
-During the build process, dSYMs will be generated for each architecture alongside each framework
-bundle. These are merged together and placed alongside the final framework product.
diff --git a/chromium/third_party/webrtc/build/ios/build_ios_framework.sh b/chromium/third_party/webrtc/build/ios/build_ios_framework.sh
deleted file mode 100755
index 35ec552798c..00000000000
--- a/chromium/third_party/webrtc/build/ios/build_ios_framework.sh
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/bin/bash
-
-# Copyright 2015 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-# Generates dynamic FAT framework for iOS in out_ios_framework.
-
-# Check for Darwin.
-if [[ ! $(uname) = "Darwin" ]]; then
- echo "OS X required." >&2
-fi
-
-# Check for iOS library build script.
-SCRIPT_DIR=$(dirname $0)
-WEBRTC_BASE_DIR=${SCRIPT_DIR}/../../..
-BUILD_WEBRTC_SCRIPT=${WEBRTC_BASE_DIR}/webrtc/build/ios/build_ios_libs.sh
-if [[ ! -x ${BUILD_WEBRTC_SCRIPT} ]]; then
- echo "Failed to find iOS library build script." >&2
- exit 1
-fi
-# Check for flatten iOS headers script.
-FLATTEN_HEADERS_SCRIPT=${WEBRTC_BASE_DIR}/webrtc/build/ios/flatten_ios_headers
-if [[ ! -x ${FLATTEN_HEADERS_SCRIPT} ]]; then
- echo "Failed to find flatten iOS headers script." >&2
- exit 1
-fi
-
-pushd ${WEBRTC_BASE_DIR}
-LIB_BASE_DIR=out_ios_libs
-FRAMEWORK_BASE_DIR=out_ios_framework
-
-# Build static libraries for iOS.
-${BUILD_WEBRTC_SCRIPT}
-if [ $? -ne 0 ]; then
- echo "Failed to build iOS static libraries." >&2
- exit 1
-fi
-
-# Flatten the directory structure for iOS headers.
-${FLATTEN_HEADERS_SCRIPT} ${LIB_BASE_DIR} ${FRAMEWORK_BASE_DIR}
-if [ $? -ne 0 ]; then
- echo "Failed to flatten iOS headers." >&2
- exit 1
-fi
-
-# Replace full paths for headers with framework paths.
-SED_PATTERN='
- s/(\#import )\"webrtc\/api\/objc\/(.*)\"/\1<WebRTC\/\2>/g;
- s/(\#import )\"webrtc\/base\/objc\/(.*)\"/\1<WebRTC\/\2>/g;
- s/(\#include )\"webrtc\/base\/objc\/(.*)\"/\1<WebRTC\/\2>/g;
-'
-sed -E -i '' "$SED_PATTERN" ${FRAMEWORK_BASE_DIR}/include/*.h
-
-SDK_DIR=webrtc/build/ios/SDK
-PROJECT_DIR=${SDK_DIR}/Framework
-# Build the framework.
-pushd ${PROJECT_DIR}
-xcodebuild -project WebRTC.xcodeproj -scheme WebRTC -configuration Release \
- build CODE_SIGN_IDENTITY="" CODE_SIGNING_REQUIRED=NO
-xcodebuild -project WebRTC.xcodeproj -scheme WebRTC -configuration Release \
- build -destination 'platform=iOS Simulator,name=iPhone 6'
-popd
-
-# Copy podspec, framework, dSYM and LICENSE to FRAMEWORK_BASE_DIR
-DEVICE_BUILD_DIR=${PROJECT_DIR}/build/Release-iphoneos
-cp ${SDK_DIR}/WebRTC.podspec ${FRAMEWORK_BASE_DIR}/
-cp -R ${DEVICE_BUILD_DIR}/WebRTC.framework ${FRAMEWORK_BASE_DIR}/
-cp -R ${DEVICE_BUILD_DIR}/WebRTC.framework.dSYM ${FRAMEWORK_BASE_DIR}/
-cp -R webrtc/LICENSE ${FRAMEWORK_BASE_DIR}/
-
-# Combine multiple architectures
-SIMULATOR_BUILD_DIR=${PROJECT_DIR}/build/Release-iphonesimulator
-DYLIB_PATH=WebRTC.framework/WebRTC
-DWARF_PATH=WebRTC.framework.dSYM/Contents/Resources/DWARF/WebRTC
-lipo ${FRAMEWORK_BASE_DIR}/${DYLIB_PATH} ${SIMULATOR_BUILD_DIR}/${DYLIB_PATH} \
- -create -output ${FRAMEWORK_BASE_DIR}/${DYLIB_PATH}
-lipo ${FRAMEWORK_BASE_DIR}/${DWARF_PATH} ${SIMULATOR_BUILD_DIR}/${DWARF_PATH} \
- -create -output ${FRAMEWORK_BASE_DIR}/${DWARF_PATH}
-
-popd
diff --git a/chromium/third_party/webrtc/build/ios/build_ios_libs.sh b/chromium/third_party/webrtc/build/ios/build_ios_libs.sh
index 5d6157aab2f..772fc6824e4 100755
--- a/chromium/third_party/webrtc/build/ios/build_ios_libs.sh
+++ b/chromium/third_party/webrtc/build/ios/build_ios_libs.sh
@@ -8,79 +8,262 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
-# Generates static FAT libraries for ios in out_ios_libs.
+# Generates static or dynamic FAT libraries for ios in out_ios_libs.
-# Flag to build the new or legacy version of the API.
-USE_LEGACY_API=0
+# Exit on errors.
+set -e
-# Check for Darwin.
-if [[ ! $(uname) = "Darwin" ]]; then
- echo "OS/X required." >&2
-fi
+# Globals.
+SCRIPT_DIR=$(cd $(dirname $0) && pwd)
+WEBRTC_BASE_DIR=${SCRIPT_DIR}/../../..
+GYP_WEBRTC_SCRIPT=${WEBRTC_BASE_DIR}/webrtc/build/gyp_webrtc.py
+MERGE_SCRIPT=${SCRIPT_DIR}/merge_ios_libs.py
+LICENSE_SCRIPT=${SCRIPT_DIR}/generate_licenses.py
-# Check for libtool.
-if [[ -z $(which libtool) ]]; then
- echo "Missing libtool binary." >&2
-fi
+function check_preconditions {
+ # Check for Darwin.
+ if [[ ! $(uname) = "Darwin" ]]; then
+ echo "OS/X required." >&2
+ exit 1
+ fi
-# Check for GYP generator.
-SCRIPT_DIR=$(dirname $0)
-WEBRTC_BASE_DIR=${SCRIPT_DIR}/../../..
-GYP_WEBRTC_SCRIPT=${WEBRTC_BASE_DIR}/webrtc/build/gyp_webrtc
-if [[ ! -x ${GYP_WEBRTC_SCRIPT} ]]; then
- echo "Failed to find gyp generator." >&2
- exit 1
-fi
-# Check for export headers script.
-EXPORT_HEADERS_SCRIPT=${SCRIPT_DIR}/export_headers
-if [[ ! -x ${EXPORT_HEADERS_SCRIPT} ]]; then
- echo "Failed to find export headers script." >&2
- exit 1
-fi
-# Check for merge script.
-MERGE_SCRIPT=${SCRIPT_DIR}/merge_ios_libs
-if [[ ! -x ${MERGE_SCRIPT} ]]; then
- echo "Failed to find library merging script." >&2
- exit 1
-fi
+ # Check for libtool.
+ if [[ -z $(which libtool) ]]; then
+ echo "Missing libtool binary." >&2
+ exit 1
+ fi
-pushd ${WEBRTC_BASE_DIR}
-LIBRARY_BASE_DIR="out_ios_libs"
+ # Check for GYP generator.
+ if [[ ! -x ${GYP_WEBRTC_SCRIPT} ]]; then
+ echo "Failed to find gyp generator." >&2
+ exit 1
+ fi
+
+ # Check for merge script.
+ if [[ ! -x ${MERGE_SCRIPT} ]]; then
+ echo "Failed to find library merging script." >&2
+ exit 1
+ fi
+}
function build_webrtc {
- OUTPUT_DIR=$1
- FLAVOR=$2
- TARGET_ARCH=$3
- if [[ ${TARGET_ARCH} = 'arm' || ${TARGET_ARCH} = 'arm64' ]]; then
- FLAVOR="${FLAVOR}-iphoneos"
+ local base_output_dir=$1
+ local flavor=$2
+ local target_arch=$3
+ local build_type=$4
+
+ local ninja_output_dir=${base_output_dir}/${target_arch}_ninja
+ local library_output_dir=${base_output_dir}/${target_arch}_libs
+ if [[ ${target_arch} = 'arm' || ${target_arch} = 'arm64' ]]; then
+ flavor="${flavor}-iphoneos"
else
- FLAVOR="${FLAVOR}-iphonesimulator"
+ flavor="${flavor}-iphonesimulator"
fi
- export GYP_DEFINES="OS=ios target_arch=${TARGET_ARCH} use_objc_h264=1 \
-clang_xcode=1 ios_override_visibility=1"
- export GYP_GENERATORS="ninja"
- export GYP_GENERATOR_FLAGS="output_dir=${OUTPUT_DIR}"
- webrtc/build/gyp_webrtc webrtc/build/ios/merge_ios_libs.gyp
- if [[ ${USE_LEGACY_API} -eq 1 ]]; then
- ninja -C ${OUTPUT_DIR}/${FLAVOR} libjingle_peerconnection_objc_no_op
+ local ninja_flavor_dir=${ninja_output_dir}/${flavor}
+
+ # Compile framework by default.
+ local gyp_file=webrtc/sdk/sdk.gyp
+ local gyp_target=rtc_sdk_framework_objc
+ # Set to 1 to explicitly not hide symbols. We'll want this if we're just
+ # generating static libs.
+ local override_visibility=0
+ if [[ ${build_type} = "legacy" ]]; then
+ echo "Building legacy."
+ gyp_file=webrtc/build/ios/merge_ios_libs.gyp
+ gyp_target=libjingle_peerconnection_objc_no_op
+ override_visibility=1
+ elif [[ ${build_type} = "static_only" ]]; then
+ echo "Building static only."
+ gyp_file=webrtc/build/ios/merge_ios_libs.gyp
+ gyp_target=rtc_sdk_peerconnection_objc_no_op
+ override_visibility=1
+ elif [[ ${build_type} == "framework" ]]; then
+ echo "Building framework."
else
- ninja -C ${OUTPUT_DIR}/${FLAVOR} webrtc_api_objc_no_op
+ echo "Unexpected build type: ${build_type}"
+ exit 1
fi
- mkdir -p ${LIBRARY_BASE_DIR}/${TARGET_ARCH}
- mv ${OUTPUT_DIR}/${FLAVOR}/*.a ${LIBRARY_BASE_DIR}/${TARGET_ARCH}
+
+ export GYP_DEFINES="OS=ios target_arch=${target_arch} use_objc_h264=1 \
+clang_xcode=1 ios_deployment_target=8.0 \
+ios_override_visibility=${override_visibility}"
+ export GYP_GENERATORS="ninja"
+ export GYP_GENERATOR_FLAGS="output_dir=${ninja_output_dir}"
+
+ # GYP generation requires relative path for some reason.
+ pushd ${WEBRTC_BASE_DIR}
+ webrtc/build/gyp_webrtc.py ${gyp_file}
+ popd
+ # Compile the target we're interested in.
+ ninja -C ${ninja_flavor_dir} ${gyp_target}
+
+ if [[ ${build_type} = "framework" ]]; then
+ # Manually generate the dSYM files before stripping them. GYP does not seem
+ # to instruct ninja to generate dSYM files.
+ dsymutil --out=${ninja_flavor_dir}/WebRTC.framework.dSYM \
+ ${ninja_flavor_dir}/WebRTC.framework/WebRTC
+ fi
+
+ # Make links to the generated static archives.
+ mkdir -p ${library_output_dir}
+ for f in ${ninja_flavor_dir}/*.a
+ do
+ ln -sf "${f}" "${library_output_dir}/$(basename ${f})"
+ done
}
+function clean_artifacts {
+ local output_dir=$1
+ if [[ -d ${output_dir} ]]; then
+ rm -r ${output_dir}
+ fi
+}
+
+function usage {
+ echo "WebRTC iOS FAT libraries build script."
+ echo "Each architecture is compiled separately before being merged together."
+ echo "By default, the fat libraries will be created in out_ios_libs/fat_libs."
+ echo "The headers will be copied to out_ios_libs/include."
+ echo "Usage: $0 [-h] [-b build_type] [-c] [-o output_dir]"
+ echo " -h Print this help."
+ echo " -b The build type. Can be framework, static_only or legacy."
+ echo " Defaults to framework."
+ echo " -c Removes generated build output."
+ echo " -o Specifies a directory to output build artifacts to."
+ echo " If specified together with -c, deletes the dir."
+ echo " -r Specifies a revision number to embed if building the framework."
+ exit 0
+}
+
+check_preconditions
+
+# Set default arguments.
+# Output directory for build artifacts.
+OUTPUT_DIR=${WEBRTC_BASE_DIR}/out_ios_libs
+# The type of build to perform. Valid arguments are framework, static_only and
+# legacy.
+BUILD_TYPE="framework"
+PERFORM_CLEAN=0
+FLAVOR="Profile"
+POINT_VERSION="0"
+
+# Parse arguments.
+while getopts "hb:co:r:" opt; do
+ case "${opt}" in
+ h) usage;;
+ b) BUILD_TYPE="${OPTARG}";;
+ c) PERFORM_CLEAN=1;;
+ o) OUTPUT_DIR="${OPTARG}";;
+ r) POINT_VERSION="${OPTARG}";;
+ *)
+ usage
+ exit 1
+ ;;
+ esac
+done
+
+if [[ ${PERFORM_CLEAN} -ne 0 ]]; then
+ clean_artifacts ${OUTPUT_DIR}
+ exit 0
+fi
+
# Build all the common architectures.
-build_webrtc "out_ios_arm" "Release" "arm"
-build_webrtc "out_ios_arm64" "Release" "arm64"
-build_webrtc "out_ios_ia32" "Release" "ia32"
-build_webrtc "out_ios_x86_64" "Release" "x64"
+ARCHS=( "arm" "arm64" "ia32" "x64" )
+for ARCH in "${ARCHS[@]}"
+do
+ echo "Building WebRTC arch: ${ARCH}"
+ build_webrtc ${OUTPUT_DIR} ${FLAVOR} $ARCH ${BUILD_TYPE}
+done
+
+ARM_NINJA_DIR=${OUTPUT_DIR}/arm_ninja/${FLAVOR}-iphoneos
+ARM64_NINJA_DIR=${OUTPUT_DIR}/arm64_ninja/${FLAVOR}-iphoneos
+IA32_NINJA_DIR=${OUTPUT_DIR}/ia32_ninja/${FLAVOR}-iphonesimulator
+X64_NINJA_DIR=${OUTPUT_DIR}/x64_ninja/${FLAVOR}-iphonesimulator
+
+if [[ ${BUILD_TYPE} = "framework" ]]; then
+ # Merge the framework slices together into a FAT library by copying one arch
+ # output and merging the rest in.
+ DYLIB_PATH="WebRTC.framework/WebRTC"
+ cp -R ${ARM_NINJA_DIR}/WebRTC.framework ${OUTPUT_DIR}
+ rm ${OUTPUT_DIR}/${DYLIB_PATH}
+ echo "Merging framework slices."
+ lipo ${ARM_NINJA_DIR}/${DYLIB_PATH} \
+ ${ARM64_NINJA_DIR}/${DYLIB_PATH} \
+ ${IA32_NINJA_DIR}/${DYLIB_PATH} \
+ ${X64_NINJA_DIR}/${DYLIB_PATH} \
+ -create -output ${OUTPUT_DIR}/${DYLIB_PATH}
-popd
+ # Merge the dSYM files together in a similar fashion.
+ DSYM_PATH="WebRTC.framework.dSYM/Contents/Resources/DWARF/WebRTC"
+ cp -R ${ARM_NINJA_DIR}/WebRTC.framework.dSYM ${OUTPUT_DIR}
+ rm ${OUTPUT_DIR}/${DSYM_PATH}
+ echo "Merging dSYM slices."
+ lipo ${ARM_NINJA_DIR}/${DSYM_PATH} \
+ ${ARM64_NINJA_DIR}/${DSYM_PATH} \
+ ${IA32_NINJA_DIR}/${DSYM_PATH} \
+ ${X64_NINJA_DIR}/${DSYM_PATH} \
+ -create -output ${OUTPUT_DIR}/${DSYM_PATH}
+
+ # Strip the dynamic framework of non-global symbols.
+ # TODO(tkchin): Override chromium strip settings in supplement.gypi instead.
+ echo "Stripping non-global symbols."
+ strip -x ${OUTPUT_DIR}/${DYLIB_PATH}
+
+ # Modify the version number.
+ INFOPLIST_PATH=${OUTPUT_DIR}/WebRTC.framework/Info.plist
+ MAJOR_MINOR=$(plistbuddy -c "Print :CFBundleShortVersionString" \
+ ${INFOPLIST_PATH})
+ VERSION_NUMBER="${MAJOR_MINOR}.${POINT_VERSION}"
+ echo "Substituting revision number: ${VERSION_NUMBER}"
+ plistbuddy -c "Set :CFBundleVersion ${VERSION_NUMBER}" ${INFOPLIST_PATH}
+ plutil -convert binary1 ${INFOPLIST_PATH}
+
+ # Copy pod file.
+ FORMAT_STRING=s/\${FRAMEWORK_VERSION_NUMBER}/${VERSION_NUMBER}/g
+ sed -e ${FORMAT_STRING} ${WEBRTC_BASE_DIR}/webrtc/sdk/objc/WebRTC.podspec > \
+ ${OUTPUT_DIR}/WebRTC.podspec
+else
+ echo "Merging static library slices."
+ # Merge the static libraries together into individual FAT archives.
+ ${MERGE_SCRIPT} ${OUTPUT_DIR}
+
+ # Merge the dSYM files together.
+ TARGET_NAME="rtc_sdk_peerconnection_objc_no_op"
+ if [[ ${BUILD_TYPE} = "legacy" ]]; then
+ TARGET_NAME="libjingle_peerconnection_objc_no_op"
+ fi
+ DSYM_PATH="${TARGET_NAME}.app.dSYM/Contents/Resources/DWARF/${TARGET_NAME}"
+ cp -R ${ARM_NINJA_DIR}/${TARGET_NAME}.app.dSYM ${OUTPUT_DIR}
+ echo "Merging dSYM slices."
+ lipo ${ARM_NINJA_DIR}/${DSYM_PATH} \
+ ${ARM64_NINJA_DIR}/${DSYM_PATH} \
+ ${IA32_NINJA_DIR}/${DSYM_PATH} \
+ ${X64_NINJA_DIR}/${DSYM_PATH} \
+ -create -output ${OUTPUT_DIR}/${DSYM_PATH}
+
+ # Strip debugging symbols.
+ # TODO(tkchin): Override chromium settings in supplement.gypi instead to do
+ # stripping at build time.
+ echo "Stripping debug symbols."
+ strip -S ${OUTPUT_DIR}/fat_libs/*.a
+
+ # Symlink the headers.
+ echo "Symlinking headers."
+ INPUT_HEADER_DIR="${WEBRTC_BASE_DIR}/webrtc/sdk/objc/Framework/Headers/WebRTC"
+ OUTPUT_HEADER_DIR="${OUTPUT_DIR}/include"
+ if [[ -d ${OUTPUT_HEADER_DIR} ]]; then
+ rm -rf ${OUTPUT_HEADER_DIR}
+ fi
+ if [[ ${BUILD_TYPE} = "legacy" ]]; then
+ INPUT_HEADER_DIR="${WEBRTC_BASE_DIR}/talk/app/webrtc/objc/public"
+ ln -sf ${INPUT_HEADER_DIR} ${OUTPUT_HEADER_DIR}
+ else
+ mkdir -p ${OUTPUT_HEADER_DIR}
+ ln -sf ${INPUT_HEADER_DIR} ${OUTPUT_HEADER_DIR}/WebRTC
+ fi
+fi
-# Export header files.
-${EXPORT_HEADERS_SCRIPT} ${WEBRTC_BASE_DIR}/${LIBRARY_BASE_DIR} \
- ${USE_LEGACY_API}
+echo "Generating LICENSE.html."
+${LICENSE_SCRIPT} ${OUTPUT_DIR}/arm64_libs ${OUTPUT_DIR}
-# Merge the libraries together.
-${MERGE_SCRIPT} ${WEBRTC_BASE_DIR}/${LIBRARY_BASE_DIR}
+echo "Done!"
diff --git a/chromium/third_party/webrtc/build/ios/export_headers b/chromium/third_party/webrtc/build/ios/export_headers
deleted file mode 100755
index 84e738c0504..00000000000
--- a/chromium/third_party/webrtc/build/ios/export_headers
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2016 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-"""Script for exporting iOS header files."""
-
-import errno
-import optparse
-import os
-import re
-import shutil
-import sys
-
-LEGACY_HEADER_DIRS = ['talk/app/webrtc/objc/public', 'webrtc/base/objc/']
-HEADER_DIRS = ['webrtc/api/objc/', 'webrtc/base/objc/',
- 'webrtc/modules/audio_device/ios/objc']
-# Individual header files that should also be exported.
-LEGACY_HEADER_INCLUDES = []
-HEADER_INCLUDES = []
-# Individual header files that should not be exported.
-LEGACY_HEADER_EXCLUDES = ['talk/app/webrtc/objc/public/RTCNSGLVideoView.h']
-HEADER_EXCLUDES = [
- 'webrtc/api/objc/avfoundationvideocapturer.h',
- 'webrtc/api/objc/RTCNSGLVideoView.h',
- 'webrtc/base/objc/NSString+StdString.h',
- 'webrtc/base/objc/RTCUIApplication.h',
- 'webrtc/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.h',
-]
-
-def ExportHeaders(include_base_dir, use_legacy_headers):
- """Exports iOS header files.
-
- Creates an include directory and recreates the hierarchy for the header files
- within the include directory.
-
- Args:
- include_base_dir: directory where the include directory should be created
- """
-
- include_dir_name = 'include'
- include_path = os.path.join(include_base_dir, include_dir_name)
- # Remove existing directory first in case files change.
- if (os.path.exists(include_path)):
- shutil.rmtree(include_path)
-
- script_path = sys.path[0]
- webrtc_base_path = os.path.join(script_path, '../../..')
-
- header_dirs = HEADER_DIRS
- include_headers = HEADER_INCLUDES
- exclude_headers = HEADER_EXCLUDES
- if use_legacy_headers:
- header_dirs = LEGACY_HEADER_DIRS
- include_headers = LEGACY_HEADER_INCLUDES
- exclude_headers = LEGACY_HEADER_EXCLUDES
-
- for directory in header_dirs:
- full_dir_path = os.path.join(webrtc_base_path, directory)
- filenames = os.listdir(full_dir_path)
- for filename in filenames:
- if filename.endswith('.h') and not filename.endswith('+Private.h'):
- include_headers.append(os.path.join(directory, filename))
-
- for header in exclude_headers:
- include_headers.remove(header)
-
- for header_path in include_headers:
- output_dir = os.path.join(include_path, os.path.dirname(header_path))
- # Create hierarchy for the header file within the include directory.
- try:
- os.makedirs(output_dir)
- except OSError as exc:
- if exc.errno != errno.EEXIST:
- raise exc
- current_path = os.path.join(webrtc_base_path, header_path)
- new_path = os.path.join(include_path, header_path)
- shutil.copy(current_path, new_path)
-
-def Main():
- parser = optparse.OptionParser()
- _, args = parser.parse_args()
- if len(args) != 2:
- parser.error('Error: Exactly 2 arguments required.')
- include_base_dir = args[0]
- use_legacy_headers = False if int(args[1]) == 0 else True
- ExportHeaders(include_base_dir, use_legacy_headers)
-
-if __name__ == '__main__':
- sys.exit(Main())
diff --git a/chromium/third_party/webrtc/build/ios/flatten_ios_headers b/chromium/third_party/webrtc/build/ios/flatten_ios_headers
deleted file mode 100755
index 67c06acc91a..00000000000
--- a/chromium/third_party/webrtc/build/ios/flatten_ios_headers
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2016 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-"""Script for flattening iOS header structure."""
-
-import optparse
-import os
-import shutil
-import sys
-
-def FlattenHeaders(lib_base_dir, framework_base_dir):
- """Flattens iOS header file directory structure."""
- include_dir = 'include'
- unflattened_include_dir_path = os.path.join(lib_base_dir, include_dir)
- flattened_include_dir_path = os.path.join(framework_base_dir, include_dir)
-
- # Create output directories.
- if not os.path.exists(framework_base_dir):
- os.mkdir(framework_base_dir)
- if not os.path.exists(flattened_include_dir_path):
- os.mkdir(flattened_include_dir_path)
-
- for dirpath, _, filenames in os.walk(unflattened_include_dir_path):
- for filename in filenames:
- current_path = os.path.join(dirpath, filename)
- new_path = os.path.join(flattened_include_dir_path, filename)
- shutil.copy(current_path, new_path)
-
-def Main():
- parser = optparse.OptionParser()
- _, args = parser.parse_args()
- if len(args) != 2:
- parser.error('Error: Exactly 2 arguments required.')
- lib_base_dir = args[0]
- framework_base_dir = args[1]
- FlattenHeaders(lib_base_dir, framework_base_dir)
-
-if __name__ == '__main__':
- sys.exit(Main())
diff --git a/chromium/third_party/webrtc/build/ios/generate_licenses.py b/chromium/third_party/webrtc/build/ios/generate_licenses.py
new file mode 100755
index 00000000000..7a2db5382e3
--- /dev/null
+++ b/chromium/third_party/webrtc/build/ios/generate_licenses.py
@@ -0,0 +1,145 @@
+#!/usr/bin/python
+
+# Copyright 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""Generates license HTML for a prebuilt version of WebRTC for iOS."""
+
+import sys
+
+import argparse
+import cgi
+import fnmatch
+import os
+import re
+import textwrap
+
+
+LIB_TO_LICENSES_DICT = {
+ 'boringssl': ['third_party/boringssl/src/LICENSE'],
+ 'expat': ['third_party/expat/files/COPYING'],
+ 'jsoncpp': ['third_party/jsoncpp/LICENSE'],
+ 'opus': ['third_party/opus/src/COPYING'],
+ 'protobuf_lite': ['third_party/protobuf/LICENSE'],
+ 'srtp': ['third_party/libsrtp/srtp/LICENSE'],
+ 'usrsctplib': ['third_party/usrsctp/LICENSE'],
+ 'webrtc': ['webrtc/LICENSE', 'webrtc/LICENSE_THIRD_PARTY'],
+ 'vpx': ['third_party/libvpx/source/libvpx/LICENSE'],
+ 'yuv': ['third_party/libyuv/LICENSE'],
+}
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(sys.argv[0]))
+CHECKOUT_ROOT = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir,
+ os.pardir))
+TALK_ROOT = os.path.join(CHECKOUT_ROOT, 'talk')
+WEBRTC_ROOT = os.path.join(CHECKOUT_ROOT, 'webrtc')
+
+
+def GetWebRTCGypFilePaths():
+ gyp_filepaths = []
+ search_roots = [TALK_ROOT, WEBRTC_ROOT]
+ for search_root in search_roots:
+ for root, _, filenames in os.walk(search_root):
+ for filename in fnmatch.filter(filenames, '*.gyp*'):
+ gyp_filepaths.append(os.path.join(root, filename))
+ return gyp_filepaths
+
+
+def GetWebRTCTargetNames():
+ gyp_filepaths = GetWebRTCGypFilePaths()
+ target_names = []
+ for gyp_filepath in gyp_filepaths:
+ with open(gyp_filepath, 'r') as gyp_file:
+ for line in gyp_file:
+ match = re.search(r'\'target_name\'.*\'(\w+)\'', line)
+ if match:
+ target_name = match.group(1)
+ target_names.append(target_name)
+ return target_names
+
+
+class LicenseBuilder(object):
+
+ def __init__(self):
+ self.webrtc_target_names = GetWebRTCTargetNames()
+
+ def IsWebRTCLib(self, lib_name):
+ alternate_lib_name = 'lib' + lib_name
+ return (lib_name in self.webrtc_target_names or
+ alternate_lib_name in self.webrtc_target_names)
+
+ def GenerateLicenseText(self, static_lib_dir, output_dir):
+ # Get a list of libs from the files without their prefix and extension.
+ static_libs = []
+ for static_lib in os.listdir(static_lib_dir):
+ # Skip non libraries.
+ if not (static_lib.endswith('.a') and static_lib.startswith('lib')):
+ continue
+ # Extract library name.
+ static_libs.append(static_lib[3:-2])
+
+ # Generate amalgamated list of libraries. Mostly this just collapses the
+ # various WebRTC libs names into just 'webrtc'. Will exit with error if a
+ # lib is unrecognized.
+ license_libs = set()
+ for static_lib in static_libs:
+ license_lib = 'webrtc' if self.IsWebRTCLib(static_lib) else static_lib
+ license_path = LIB_TO_LICENSES_DICT.get(license_lib)
+ if license_path is None:
+ print 'Missing license path for lib: %s' % license_lib
+ return 1
+ license_libs.add(license_lib)
+
+ # Put webrtc at the front of the list.
+ assert 'webrtc' in license_libs
+ license_libs.remove('webrtc')
+ license_libs = sorted(license_libs)
+ license_libs.insert(0, 'webrtc')
+
+ # Generate HTML.
+ output_license_file = open(os.path.join(output_dir, 'LICENSE.html'), 'w+')
+ output_license_file.write('<!DOCTYPE html>\n')
+ output_license_file.write('<html>\n<head>\n')
+ output_license_file.write('<meta charset="UTF-8">\n')
+ output_license_file.write('<title>Licenses</title>\n')
+ style_tag = textwrap.dedent('''\
+ <style>
+ body { margin: 0; font-family: sans-serif; }
+ pre { background-color: #eeeeee; padding: 1em; white-space: pre-wrap; }
+ p { margin: 1em; white-space: nowrap; }
+ </style>
+ ''')
+ output_license_file.write(style_tag)
+ output_license_file.write('</head>\n')
+
+ for license_lib in license_libs:
+ output_license_file.write('<p>%s<br/></p>\n' % license_lib)
+ output_license_file.write('<pre>\n')
+ for path in LIB_TO_LICENSES_DICT[license_lib]:
+ license_path = os.path.join(CHECKOUT_ROOT, path)
+ with open(license_path, 'r') as license_file:
+ license_text = cgi.escape(license_file.read(), quote=True)
+ output_license_file.write(license_text)
+ output_license_file.write('\n')
+ output_license_file.write('</pre>\n')
+
+ output_license_file.write('</body>\n')
+ output_license_file.write('</html>')
+ output_license_file.close()
+ return 0
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='Generate WebRTC LICENSE.html')
+ parser.add_argument('static_lib_dir',
+ help='Directory with built static libraries.')
+ parser.add_argument('output_dir',
+ help='Directory to output LICENSE.html to.')
+ args = parser.parse_args()
+ builder = LicenseBuilder()
+ sys.exit(builder.GenerateLicenseText(args.static_lib_dir, args.output_dir))
diff --git a/chromium/third_party/webrtc/build/ios/merge_ios_libs.gyp b/chromium/third_party/webrtc/build/ios/merge_ios_libs.gyp
index cc24d6530ef..5b0a9cbbb09 100644
--- a/chromium/third_party/webrtc/build/ios/merge_ios_libs.gyp
+++ b/chromium/third_party/webrtc/build/ios/merge_ios_libs.gyp
@@ -21,11 +21,11 @@
'sources': ['no_op.cc',],
},
{
- 'target_name': 'webrtc_api_objc_no_op',
+ 'target_name': 'rtc_sdk_peerconnection_objc_no_op',
'includes': [ 'objc_app.gypi' ],
'type': 'executable',
'dependencies': [
- '<(webrtc_root)/api/api.gyp:rtc_api_objc',
+ '<(webrtc_root)/sdk/sdk.gyp:rtc_sdk_peerconnection_objc',
],
'sources': ['no_op.cc',],
},
diff --git a/chromium/third_party/webrtc/build/ios/merge_ios_libs b/chromium/third_party/webrtc/build/ios/merge_ios_libs.py
index d96d5e3c0c6..651024eb6ac 100755
--- a/chromium/third_party/webrtc/build/ios/merge_ios_libs
+++ b/chromium/third_party/webrtc/build/ios/merge_ios_libs.py
@@ -10,11 +10,15 @@
"""Script for merging generated iOS libraries."""
-import optparse
+import sys
+
+import argparse
import os
import re
import subprocess
-import sys
+
+# Valid arch subdir names.
+VALID_ARCHS = ['arm_libs', 'arm64_libs', 'ia32_libs', 'x64_libs']
def MergeLibs(lib_base_dir):
@@ -29,24 +33,22 @@ def MergeLibs(lib_base_dir):
Returns:
Exit code of libtool.
"""
- include_dir_name = 'include'
- output_dir_name = 'lib'
+ output_dir_name = 'fat_libs'
archs = [arch for arch in os.listdir(lib_base_dir)
- if arch[:1] != '.' and arch != output_dir_name
- and arch != include_dir_name]
+ if arch in VALID_ARCHS]
# For each arch, find (library name, libary path) for arch. We will merge
# all libraries with the same name.
libs = {}
- for dirpath, _, filenames in os.walk(lib_base_dir):
- if dirpath.endswith(output_dir_name):
+ for lib_dir in [os.path.join(lib_base_dir, arch) for arch in VALID_ARCHS]:
+ if not os.path.exists(lib_dir):
continue
- for filename in filenames:
- if not filename.endswith('.a'):
- continue
- entry = libs.get(filename, [])
- entry.append(os.path.join(dirpath, filename))
- libs[filename] = entry
-
+ for dirpath, _, filenames in os.walk(lib_dir):
+ for filename in filenames:
+ if not filename.endswith('.a'):
+ continue
+ entry = libs.get(filename, [])
+ entry.append(os.path.join(dirpath, filename))
+ libs[filename] = entry
orphaned_libs = {}
valid_libs = {}
for library, paths in libs.items():
@@ -69,7 +71,6 @@ def MergeLibs(lib_base_dir):
if not found:
base_prefix = library[:-2].split('_')[0]
for valid_lib, valid_paths in valid_libs.items():
- prefix = '_'.join(components)
if valid_lib[:len(base_prefix)] == base_prefix:
valid_paths.extend(paths)
found = True
@@ -89,6 +90,7 @@ def MergeLibs(lib_base_dir):
libtool_re = re.compile(r'^.*libtool:.*file: .* has no symbols$')
# Merge libraries using libtool.
+ libtool_returncode = 0
for library, paths in valid_libs.items():
cmd_list = ['libtool', '-static', '-v', '-o',
os.path.join(output_dir_path, library)] + paths
@@ -99,22 +101,23 @@ def MergeLibs(lib_base_dir):
print >>sys.stderr, line
# Unconditionally touch the output .a file on the command line if present
# and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
+ libtool_returncode = libtoolout.returncode
+ if not libtool_returncode:
for i in range(len(cmd_list) - 1):
if cmd_list[i] == '-o' and cmd_list[i+1].endswith('.a'):
os.utime(cmd_list[i+1], None)
break
- else:
- return libtoolout.returncode
- return libtoolout.returncode
+ return libtool_returncode
def Main():
- parser = optparse.OptionParser()
- _, args = parser.parse_args()
- if len(args) != 1:
- parser.error('Error: Exactly 1 argument required.')
- lib_base_dir = args[0]
+ parser_description = 'Merge WebRTC libraries.'
+ parser = argparse.ArgumentParser(description=parser_description)
+ parser.add_argument('lib_base_dir',
+ help='Directory with built libraries. ',
+ type=str)
+ args = parser.parse_args()
+ lib_base_dir = args.lib_base_dir
MergeLibs(lib_base_dir)
if __name__ == '__main__':
diff --git a/chromium/third_party/webrtc/build/isolate.gypi b/chromium/third_party/webrtc/build/isolate.gypi
index ea44e2cc7f3..2d4ea778697 100644
--- a/chromium/third_party/webrtc/build/isolate.gypi
+++ b/chromium/third_party/webrtc/build/isolate.gypi
@@ -107,7 +107,6 @@
'--config-variable', 'use_instrumented_libraries=<(use_instrumented_libraries)',
'--config-variable',
'use_prebuilt_instrumented_libraries=<(use_prebuilt_instrumented_libraries)',
- '--config-variable', 'use_openssl=<(use_openssl)',
'--config-variable', 'use_ozone=<(use_ozone)',
'--config-variable', 'use_x11=<(use_x11)',
'--config-variable', 'v8_use_external_startup_data=<(v8_use_external_startup_data)',
diff --git a/chromium/third_party/webrtc/build/objc_common.gypi b/chromium/third_party/webrtc/build/objc_common.gypi
index 086210ee757..ff908cfe629 100644
--- a/chromium/third_party/webrtc/build/objc_common.gypi
+++ b/chromium/third_party/webrtc/build/objc_common.gypi
@@ -10,7 +10,7 @@
{
'variables': {
- 'objc_prefix_file': './WebRTC-Prefix.pch',
+ 'objc_prefix_file': '../sdk/objc/WebRTC-Prefix.pch',
},
'xcode_settings': {
'CLANG_ENABLE_OBJC_ARC': 'YES',
diff --git a/chromium/third_party/webrtc/build/protoc.gypi b/chromium/third_party/webrtc/build/protoc.gypi
index 682bc22cc5c..6e9af457aaf 100644
--- a/chromium/third_party/webrtc/build/protoc.gypi
+++ b/chromium/third_party/webrtc/build/protoc.gypi
@@ -12,6 +12,11 @@
# build/common.gypi is different for the standalone and Chromium builds. Gyp
# doesn't permit conditional inclusion or variable expansion in include paths.
# http://code.google.com/p/gyp/wiki/InputFormatReference#Including_Other_Files
+#
+# Local changes:
+# * Removed <(DEPTH) from include_dir due to difficulties with generated
+# downstream code.
+
# This file is meant to be included into a target to provide a rule
# to invoke protoc in a consistent manner. For Java-targets, see
@@ -111,12 +116,10 @@
],
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/protoc_out',
- '<(DEPTH)',
],
'direct_dependent_settings': {
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/protoc_out',
- '<(DEPTH)',
]
},
# This target exports a hard dependency because it generates header
diff --git a/chromium/third_party/webrtc/build/sanitizers/tsan_suppressions_webrtc.cc b/chromium/third_party/webrtc/build/sanitizers/tsan_suppressions_webrtc.cc
index 30a9a4572eb..fc7ddbf23b7 100644
--- a/chromium/third_party/webrtc/build/sanitizers/tsan_suppressions_webrtc.cc
+++ b/chromium/third_party/webrtc/build/sanitizers/tsan_suppressions_webrtc.cc
@@ -76,9 +76,7 @@ char kTSanDefaultSuppressions[] =
// Potential deadlocks detected after roll in r6516.
// https://code.google.com/p/webrtc/issues/detail?id=3509
-"deadlock:webrtc::RTCPReceiver::SetSsrcs\n"
"deadlock:webrtc::test::UdpSocketManagerPosixImpl::RemoveSocket\n"
-"deadlock:webrtc::vcm::VideoReceiver::RegisterPacketRequestCallback\n"
// TODO(pbos): Trace events are racy due to lack of proper POD atomics.
// https://code.google.com/p/webrtc/issues/detail?id=2497
diff --git a/chromium/third_party/webrtc/build/webrtc.gni b/chromium/third_party/webrtc/build/webrtc.gni
index 8e1b9521f74..788e60824c5 100644
--- a/chromium/third_party/webrtc/build/webrtc.gni
+++ b/chromium/third_party/webrtc/build/webrtc.gni
@@ -36,7 +36,6 @@ declare_args() {
# Disable these to not build components which can be externally provided.
rtc_build_expat = true
rtc_build_json = true
- rtc_build_libjpeg = true
rtc_build_libvpx = true
rtc_build_libyuv = true
rtc_build_openmax_dl = true
@@ -59,8 +58,16 @@ declare_args() {
rtc_include_tests = false
rtc_restrict_logging = true
+ # Enable libevent task queues on platforms that support it.
+ if (is_win || is_mac || is_ios || is_nacl) {
+ rtc_enable_libevent = false
+ rtc_build_libevent = false
+ } else {
+ rtc_enable_libevent = true
+ rtc_build_libevent = true
+ }
+
if (is_ios) {
- rtc_build_libjpeg = false
rtc_enable_protobuf = false
}
@@ -78,8 +85,7 @@ declare_args() {
# Determines whether NEON code will be built.
rtc_build_with_neon =
- (current_cpu == "arm" && (arm_use_neon || arm_optionally_use_neon)) ||
- current_cpu == "arm64"
+ (current_cpu == "arm" && arm_use_neon) || current_cpu == "arm64"
# Enable this to use HW H.264 encoder/decoder on iOS PeerConnections.
# Enabling this may break interop with Android clients that support H264.
diff --git a/chromium/third_party/webrtc/call.h b/chromium/third_party/webrtc/call.h
index 3ba473fec07..80134fa27d4 100644
--- a/chromium/third_party/webrtc/call.h
+++ b/chromium/third_party/webrtc/call.h
@@ -17,6 +17,7 @@
#include "webrtc/audio_receive_stream.h"
#include "webrtc/audio_send_stream.h"
#include "webrtc/audio_state.h"
+#include "webrtc/base/networkroute.h"
#include "webrtc/base/socket.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_send_stream.h"
@@ -140,6 +141,10 @@ class Call {
virtual void SignalChannelNetworkState(MediaType media,
NetworkState state) = 0;
+ virtual void OnNetworkRouteChanged(
+ const std::string& transport_name,
+ const rtc::NetworkRoute& network_route) = 0;
+
virtual void OnSentPacket(const rtc::SentPacket& sent_packet) = 0;
virtual ~Call() {}
diff --git a/chromium/third_party/webrtc/call/bitrate_allocator.cc b/chromium/third_party/webrtc/call/bitrate_allocator.cc
index 097378f02a1..3672ef520ca 100644
--- a/chromium/third_party/webrtc/call/bitrate_allocator.cc
+++ b/chromium/third_party/webrtc/call/bitrate_allocator.cc
@@ -14,6 +14,7 @@
#include <algorithm>
#include <utility>
+#include "webrtc/base/checks.h"
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
namespace webrtc {
@@ -24,10 +25,10 @@ const int kTransmissionMaxBitrateMultiplier = 2;
const int kDefaultBitrateBps = 300000;
BitrateAllocator::BitrateAllocator()
- : bitrate_observers_(),
- bitrate_observers_modified_(false),
+ : bitrate_observer_configs_(),
enforce_min_bitrate_(true),
last_bitrate_bps_(kDefaultBitrateBps),
+ last_non_zero_bitrate_bps_(kDefaultBitrateBps),
last_fraction_loss_(0),
last_rtt_(0) {}
@@ -36,10 +37,13 @@ uint32_t BitrateAllocator::OnNetworkChanged(uint32_t bitrate,
int64_t rtt) {
rtc::CritScope lock(&crit_sect_);
last_bitrate_bps_ = bitrate;
+ last_non_zero_bitrate_bps_ =
+ bitrate > 0 ? bitrate : last_non_zero_bitrate_bps_;
last_fraction_loss_ = fraction_loss;
last_rtt_ = rtt;
+
uint32_t allocated_bitrate_bps = 0;
- ObserverBitrateMap allocation = AllocateBitrates();
+ ObserverAllocation allocation = AllocateBitrates(bitrate);
for (const auto& kv : allocation) {
kv.first->OnBitrateUpdated(kv.second, last_fraction_loss_, last_rtt_);
allocated_bitrate_bps += kv.second;
@@ -47,121 +51,123 @@ uint32_t BitrateAllocator::OnNetworkChanged(uint32_t bitrate,
return allocated_bitrate_bps;
}
-BitrateAllocator::ObserverBitrateMap BitrateAllocator::AllocateBitrates() {
- if (bitrate_observers_.empty())
- return ObserverBitrateMap();
-
- uint32_t sum_min_bitrates = 0;
- for (const auto& observer : bitrate_observers_)
- sum_min_bitrates += observer.second.min_bitrate;
- if (last_bitrate_bps_ <= sum_min_bitrates)
- return LowRateAllocation(last_bitrate_bps_);
- else
- return NormalRateAllocation(last_bitrate_bps_, sum_min_bitrates);
-}
-
int BitrateAllocator::AddObserver(BitrateAllocatorObserver* observer,
uint32_t min_bitrate_bps,
- uint32_t max_bitrate_bps) {
+ uint32_t max_bitrate_bps,
+ bool enforce_min_bitrate) {
rtc::CritScope lock(&crit_sect_);
+ // TODO(mflodman): Enforce this per observer.
+ EnforceMinBitrate(enforce_min_bitrate);
- BitrateObserverConfList::iterator it =
- FindObserverConfigurationPair(observer);
+ auto it = FindObserverConfig(observer);
// Allow the max bitrate to be exceeded for FEC and retransmissions.
// TODO(holmer): We have to get rid of this hack as it makes it difficult to
// properly allocate bitrate. The allocator should instead distribute any
// extra bitrate after all streams have maxed out.
max_bitrate_bps *= kTransmissionMaxBitrateMultiplier;
- if (it != bitrate_observers_.end()) {
+ if (it != bitrate_observer_configs_.end()) {
// Update current configuration.
- it->second.min_bitrate = min_bitrate_bps;
- it->second.max_bitrate = max_bitrate_bps;
+ it->min_bitrate_bps = min_bitrate_bps;
+ it->max_bitrate_bps = max_bitrate_bps;
} else {
// Add new settings.
- bitrate_observers_.push_back(BitrateObserverConfiguration(
- observer, BitrateConfiguration(min_bitrate_bps, max_bitrate_bps)));
- bitrate_observers_modified_ = true;
+ bitrate_observer_configs_.push_back(ObserverConfig(
+ observer, min_bitrate_bps, max_bitrate_bps, enforce_min_bitrate));
}
- ObserverBitrateMap allocation = AllocateBitrates();
int new_observer_bitrate_bps = 0;
- for (auto& kv : allocation) {
- kv.first->OnBitrateUpdated(kv.second, last_fraction_loss_, last_rtt_);
- if (kv.first == observer)
- new_observer_bitrate_bps = kv.second;
+ if (last_bitrate_bps_ > 0) { // We have a bitrate to allocate.
+ ObserverAllocation allocation = AllocateBitrates(last_bitrate_bps_);
+ for (auto& kv : allocation) {
+ // Update all observers with the new allocation.
+ kv.first->OnBitrateUpdated(kv.second, last_fraction_loss_, last_rtt_);
+ if (kv.first == observer)
+ new_observer_bitrate_bps = kv.second;
+ }
+ } else {
+ // Currently, an encoder is not allowed to produce frames.
+ // But we still have to return the initial config bitrate + let the
+ // observer know that it can not produce frames.
+ ObserverAllocation allocation =
+ AllocateBitrates(last_non_zero_bitrate_bps_);
+ observer->OnBitrateUpdated(0, last_fraction_loss_, last_rtt_);
+ new_observer_bitrate_bps = allocation[observer];
}
return new_observer_bitrate_bps;
}
void BitrateAllocator::RemoveObserver(BitrateAllocatorObserver* observer) {
rtc::CritScope lock(&crit_sect_);
- BitrateObserverConfList::iterator it =
- FindObserverConfigurationPair(observer);
- if (it != bitrate_observers_.end()) {
- bitrate_observers_.erase(it);
- bitrate_observers_modified_ = true;
+ auto it = FindObserverConfig(observer);
+ if (it != bitrate_observer_configs_.end()) {
+ bitrate_observer_configs_.erase(it);
}
}
-void BitrateAllocator::GetMinMaxBitrateSumBps(int* min_bitrate_sum_bps,
- int* max_bitrate_sum_bps) const {
- *min_bitrate_sum_bps = 0;
- *max_bitrate_sum_bps = 0;
-
- rtc::CritScope lock(&crit_sect_);
- for (const auto& observer : bitrate_observers_) {
- *min_bitrate_sum_bps += observer.second.min_bitrate;
- *max_bitrate_sum_bps += observer.second.max_bitrate;
- }
+void BitrateAllocator::EnforceMinBitrate(bool enforce_min_bitrate) {
+ enforce_min_bitrate_ = enforce_min_bitrate;
}
-BitrateAllocator::BitrateObserverConfList::iterator
-BitrateAllocator::FindObserverConfigurationPair(
+BitrateAllocator::ObserverConfigList::iterator
+BitrateAllocator::FindObserverConfig(
const BitrateAllocatorObserver* observer) {
- for (auto it = bitrate_observers_.begin(); it != bitrate_observers_.end();
- ++it) {
- if (it->first == observer)
+ for (auto it = bitrate_observer_configs_.begin();
+ it != bitrate_observer_configs_.end(); ++it) {
+ if (it->observer == observer)
return it;
}
- return bitrate_observers_.end();
+ return bitrate_observer_configs_.end();
}
-void BitrateAllocator::EnforceMinBitrate(bool enforce_min_bitrate) {
- rtc::CritScope lock(&crit_sect_);
- enforce_min_bitrate_ = enforce_min_bitrate;
+BitrateAllocator::ObserverAllocation BitrateAllocator::AllocateBitrates(
+ uint32_t bitrate) {
+ if (bitrate_observer_configs_.empty())
+ return ObserverAllocation();
+
+ if (bitrate == 0)
+ return ZeroRateAllocation();
+
+ uint32_t sum_min_bitrates = 0;
+ for (const auto& observer_config : bitrate_observer_configs_)
+ sum_min_bitrates += observer_config.min_bitrate_bps;
+ if (bitrate <= sum_min_bitrates)
+ return LowRateAllocation(bitrate);
+
+ return NormalRateAllocation(bitrate, sum_min_bitrates);
}
-BitrateAllocator::ObserverBitrateMap BitrateAllocator::NormalRateAllocation(
+BitrateAllocator::ObserverAllocation BitrateAllocator::NormalRateAllocation(
uint32_t bitrate,
uint32_t sum_min_bitrates) {
- uint32_t number_of_observers =
- static_cast<uint32_t>(bitrate_observers_.size());
+ uint32_t num_remaining_observers =
+ static_cast<uint32_t>(bitrate_observer_configs_.size());
+ RTC_DCHECK_GT(num_remaining_observers, 0u);
+
uint32_t bitrate_per_observer =
- (bitrate - sum_min_bitrates) / number_of_observers;
+ (bitrate - sum_min_bitrates) / num_remaining_observers;
// Use map to sort list based on max bitrate.
ObserverSortingMap list_max_bitrates;
- for (const auto& observer : bitrate_observers_) {
- list_max_bitrates.insert(std::pair<uint32_t, ObserverConfiguration>(
- observer.second.max_bitrate,
- ObserverConfiguration(observer.first, observer.second.min_bitrate)));
+ for (const auto& config : bitrate_observer_configs_) {
+ list_max_bitrates.insert(std::pair<uint32_t, const ObserverConfig*>(
+ config.max_bitrate_bps, &config));
}
- ObserverBitrateMap allocation;
+
+ ObserverAllocation allocation;
ObserverSortingMap::iterator max_it = list_max_bitrates.begin();
while (max_it != list_max_bitrates.end()) {
- number_of_observers--;
+ num_remaining_observers--;
uint32_t observer_allowance =
- max_it->second.min_bitrate + bitrate_per_observer;
+ max_it->second->min_bitrate_bps + bitrate_per_observer;
if (max_it->first < observer_allowance) {
// We have more than enough for this observer.
// Carry the remainder forward.
uint32_t remainder = observer_allowance - max_it->first;
- if (number_of_observers != 0) {
- bitrate_per_observer += remainder / number_of_observers;
- }
- allocation[max_it->second.observer] = max_it->first;
+ if (num_remaining_observers != 0)
+ bitrate_per_observer += remainder / num_remaining_observers;
+ allocation[max_it->second->observer] = max_it->first;
} else {
- allocation[max_it->second.observer] = observer_allowance;
+ allocation[max_it->second->observer] = observer_allowance;
}
list_max_bitrates.erase(max_it);
// Prepare next iteration.
@@ -170,21 +176,29 @@ BitrateAllocator::ObserverBitrateMap BitrateAllocator::NormalRateAllocation(
return allocation;
}
-BitrateAllocator::ObserverBitrateMap BitrateAllocator::LowRateAllocation(
+BitrateAllocator::ObserverAllocation BitrateAllocator::ZeroRateAllocation() {
+ ObserverAllocation allocation;
+ // Zero bitrate to all observers.
+ for (const auto& observer_config : bitrate_observer_configs_)
+ allocation[observer_config.observer] = 0;
+ return allocation;
+}
+
+BitrateAllocator::ObserverAllocation BitrateAllocator::LowRateAllocation(
uint32_t bitrate) {
- ObserverBitrateMap allocation;
+ ObserverAllocation allocation;
if (enforce_min_bitrate_) {
// Min bitrate to all observers.
- for (const auto& observer : bitrate_observers_)
- allocation[observer.first] = observer.second.min_bitrate;
+ for (const auto& observer_config : bitrate_observer_configs_)
+ allocation[observer_config.observer] = observer_config.min_bitrate_bps;
} else {
- // Allocate up to |min_bitrate| to one observer at a time, until
+ // Allocate up to |min_bitrate_bps| to one observer at a time, until
// |bitrate| is depleted.
uint32_t remainder = bitrate;
- for (const auto& observer : bitrate_observers_) {
+ for (const auto& observer_config : bitrate_observer_configs_) {
uint32_t allocated_bitrate =
- std::min(remainder, observer.second.min_bitrate);
- allocation[observer.first] = allocated_bitrate;
+ std::min(remainder, observer_config.min_bitrate_bps);
+ allocation[observer_config.observer] = allocated_bitrate;
remainder -= allocated_bitrate;
}
}
diff --git a/chromium/third_party/webrtc/call/bitrate_allocator.h b/chromium/third_party/webrtc/call/bitrate_allocator.h
index 404a312dad3..3bafcd756be 100644
--- a/chromium/third_party/webrtc/call/bitrate_allocator.h
+++ b/chromium/third_party/webrtc/call/bitrate_allocator.h
@@ -53,59 +53,69 @@ class BitrateAllocator {
// |observer| updates bitrates if already in use.
// |min_bitrate_bps| = 0 equals no min bitrate.
// |max_bitrate_bps| = 0 equals no max bitrate.
- // Returns bitrate allocated for the bitrate observer.
+ // |enforce_min_bitrate| = 'true' will allocate at least |min_bitrate_bps| for
+ // this observer, even if the BWE is too low, 'false' will allocate 0 to
+ // the observer if BWE doesn't allow |min_bitrate_bps|.
+ // Returns initial bitrate allocated for |observer|.
+ // Note that |observer|->OnBitrateUpdated() will be called within the scope of
+ // this method with the current rtt, fraction_loss and available bitrate and
+ // that the bitrate in OnBitrateUpdated will be zero if the |observer| is
+ // currently not allowed to send data.
int AddObserver(BitrateAllocatorObserver* observer,
uint32_t min_bitrate_bps,
- uint32_t max_bitrate_bps);
+ uint32_t max_bitrate_bps,
+ bool enforce_min_bitrate);
void RemoveObserver(BitrateAllocatorObserver* observer);
- void GetMinMaxBitrateSumBps(int* min_bitrate_sum_bps,
- int* max_bitrate_sum_bps) const;
+ private:
+ struct ObserverConfig {
+ ObserverConfig(BitrateAllocatorObserver* observer,
+ uint32_t min_bitrate_bps,
+ uint32_t max_bitrate_bps,
+ bool enforce_min_bitrate)
+ : observer(observer),
+ min_bitrate_bps(min_bitrate_bps),
+ max_bitrate_bps(max_bitrate_bps),
+ enforce_min_bitrate(enforce_min_bitrate) {}
+ BitrateAllocatorObserver* const observer;
+ uint32_t min_bitrate_bps;
+ uint32_t max_bitrate_bps;
+ bool enforce_min_bitrate;
+ };
// This method controls the behavior when the available bitrate is lower than
// the minimum bitrate, or the sum of minimum bitrates.
// When true, the bitrate will never be set lower than the minimum bitrate(s).
// When false, the bitrate observers will be allocated rates up to their
// respective minimum bitrate, satisfying one observer after the other.
- void EnforceMinBitrate(bool enforce_min_bitrate);
-
- private:
- struct BitrateConfiguration {
- BitrateConfiguration(uint32_t min_bitrate, uint32_t max_bitrate)
- : min_bitrate(min_bitrate), max_bitrate(max_bitrate) {}
- uint32_t min_bitrate;
- uint32_t max_bitrate;
- };
- struct ObserverConfiguration {
- ObserverConfiguration(BitrateAllocatorObserver* observer, uint32_t bitrate)
- : observer(observer), min_bitrate(bitrate) {}
- BitrateAllocatorObserver* const observer;
- uint32_t min_bitrate;
- };
- typedef std::pair<BitrateAllocatorObserver*, BitrateConfiguration>
- BitrateObserverConfiguration;
- typedef std::list<BitrateObserverConfiguration> BitrateObserverConfList;
- typedef std::multimap<uint32_t, ObserverConfiguration> ObserverSortingMap;
- typedef std::map<BitrateAllocatorObserver*, int> ObserverBitrateMap;
+ void EnforceMinBitrate(bool enforce_min_bitrate)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- BitrateObserverConfList::iterator FindObserverConfigurationPair(
+ typedef std::list<ObserverConfig> ObserverConfigList;
+ ObserverConfigList::iterator FindObserverConfig(
const BitrateAllocatorObserver* observer)
EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- ObserverBitrateMap AllocateBitrates() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- ObserverBitrateMap NormalRateAllocation(uint32_t bitrate,
+
+ typedef std::multimap<uint32_t, const ObserverConfig*> ObserverSortingMap;
+ typedef std::map<BitrateAllocatorObserver*, int> ObserverAllocation;
+
+ ObserverAllocation AllocateBitrates(uint32_t bitrate)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ ObserverAllocation NormalRateAllocation(uint32_t bitrate,
uint32_t sum_min_bitrates)
EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- ObserverBitrateMap LowRateAllocation(uint32_t bitrate)
+ ObserverAllocation ZeroRateAllocation() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+ ObserverAllocation LowRateAllocation(uint32_t bitrate)
EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
rtc::CriticalSection crit_sect_;
// Stored in a list to keep track of the insertion order.
- BitrateObserverConfList bitrate_observers_ GUARDED_BY(crit_sect_);
- bool bitrate_observers_modified_ GUARDED_BY(crit_sect_);
+ ObserverConfigList bitrate_observer_configs_;
bool enforce_min_bitrate_ GUARDED_BY(crit_sect_);
uint32_t last_bitrate_bps_ GUARDED_BY(crit_sect_);
+ uint32_t last_non_zero_bitrate_bps_ GUARDED_BY(crit_sect_);
uint8_t last_fraction_loss_ GUARDED_BY(crit_sect_);
int64_t last_rtt_ GUARDED_BY(crit_sect_);
};
diff --git a/chromium/third_party/webrtc/call/bitrate_allocator_unittest.cc b/chromium/third_party/webrtc/call/bitrate_allocator_unittest.cc
index 6e0cdd4d781..4017645c1d6 100644
--- a/chromium/third_party/webrtc/call/bitrate_allocator_unittest.cc
+++ b/chromium/third_party/webrtc/call/bitrate_allocator_unittest.cc
@@ -48,7 +48,7 @@ class BitrateAllocatorTest : public ::testing::Test {
TEST_F(BitrateAllocatorTest, UpdatingBitrateObserver) {
TestBitrateObserver bitrate_observer;
int start_bitrate =
- allocator_->AddObserver(&bitrate_observer, 100000, 1500000);
+ allocator_->AddObserver(&bitrate_observer, 100000, 1500000, true);
EXPECT_EQ(300000, start_bitrate);
allocator_->OnNetworkChanged(200000, 0, 0);
EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
@@ -57,10 +57,12 @@ TEST_F(BitrateAllocatorTest, UpdatingBitrateObserver) {
// bitrate for FEC/retransmissions (see todo in BitrateAllocator).
allocator_->OnNetworkChanged(4000000, 0, 0);
EXPECT_EQ(3000000u, bitrate_observer.last_bitrate_);
- start_bitrate = allocator_->AddObserver(&bitrate_observer, 100000, 4000000);
+ start_bitrate =
+ allocator_->AddObserver(&bitrate_observer, 100000, 4000000, true);
EXPECT_EQ(4000000, start_bitrate);
- start_bitrate = allocator_->AddObserver(&bitrate_observer, 100000, 1500000);
+ start_bitrate =
+ allocator_->AddObserver(&bitrate_observer, 100000, 1500000, true);
EXPECT_EQ(3000000, start_bitrate);
EXPECT_EQ(3000000u, bitrate_observer.last_bitrate_);
allocator_->OnNetworkChanged(1500000, 0, 0);
@@ -71,9 +73,10 @@ TEST_F(BitrateAllocatorTest, TwoBitrateObserversOneRtcpObserver) {
TestBitrateObserver bitrate_observer_1;
TestBitrateObserver bitrate_observer_2;
int start_bitrate =
- allocator_->AddObserver(&bitrate_observer_1, 100000, 300000);
+ allocator_->AddObserver(&bitrate_observer_1, 100000, 300000, true);
EXPECT_EQ(300000, start_bitrate);
- start_bitrate = allocator_->AddObserver(&bitrate_observer_2, 200000, 300000);
+ start_bitrate =
+ allocator_->AddObserver(&bitrate_observer_2, 200000, 300000, true);
EXPECT_EQ(200000, start_bitrate);
// Test too low start bitrate, hence lower than sum of min. Min bitrates will
@@ -96,12 +99,17 @@ TEST_F(BitrateAllocatorTest, TwoBitrateObserversOneRtcpObserver) {
allocator_->OnNetworkChanged(1500000, 0, 50);
EXPECT_EQ(600000u, bitrate_observer_1.last_bitrate_);
EXPECT_EQ(600000u, bitrate_observer_2.last_bitrate_);
+
+ // Verify that if the bandwidth estimate is set to zero, the allocated rate is
+ // zero.
+ allocator_->OnNetworkChanged(0, 0, 50);
+ EXPECT_EQ(0u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(0u, bitrate_observer_2.last_bitrate_);
}
class BitrateAllocatorTestNoEnforceMin : public ::testing::Test {
protected:
BitrateAllocatorTestNoEnforceMin() : allocator_(new BitrateAllocator()) {
- allocator_->EnforceMinBitrate(false);
allocator_->OnNetworkChanged(300000u, 0, 0);
}
~BitrateAllocatorTestNoEnforceMin() {}
@@ -114,7 +122,7 @@ class BitrateAllocatorTestNoEnforceMin : public ::testing::Test {
TEST_F(BitrateAllocatorTestNoEnforceMin, OneBitrateObserver) {
TestBitrateObserver bitrate_observer_1;
int start_bitrate =
- allocator_->AddObserver(&bitrate_observer_1, 100000, 400000);
+ allocator_->AddObserver(&bitrate_observer_1, 100000, 400000, false);
EXPECT_EQ(300000, start_bitrate);
// High REMB.
@@ -134,14 +142,16 @@ TEST_F(BitrateAllocatorTestNoEnforceMin, ThreeBitrateObservers) {
TestBitrateObserver bitrate_observer_3;
// Set up the observers with min bitrates at 100000, 200000, and 300000.
int start_bitrate =
- allocator_->AddObserver(&bitrate_observer_1, 100000, 400000);
+ allocator_->AddObserver(&bitrate_observer_1, 100000, 400000, false);
EXPECT_EQ(300000, start_bitrate);
- start_bitrate = allocator_->AddObserver(&bitrate_observer_2, 200000, 400000);
+ start_bitrate =
+ allocator_->AddObserver(&bitrate_observer_2, 200000, 400000, false);
EXPECT_EQ(200000, start_bitrate);
EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
- start_bitrate = allocator_->AddObserver(&bitrate_observer_3, 300000, 400000);
+ start_bitrate =
+ allocator_->AddObserver(&bitrate_observer_3, 300000, 400000, false);
EXPECT_EQ(0, start_bitrate);
EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_);
@@ -171,6 +181,13 @@ TEST_F(BitrateAllocatorTestNoEnforceMin, ThreeBitrateObservers) {
EXPECT_EQ(0u, bitrate_observer_2.last_bitrate_);
EXPECT_EQ(0u, bitrate_observer_3.last_bitrate_);
+ allocator_->OnNetworkChanged(0, 0, 0);
+ // Verify that zero estimated bandwidth, means that that all gets zero,
+ // regardless of set min bitrate.
+ EXPECT_EQ(0u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(0u, bitrate_observer_2.last_bitrate_);
+ EXPECT_EQ(0u, bitrate_observer_3.last_bitrate_);
+
allocator_->RemoveObserver(&bitrate_observer_1);
allocator_->RemoveObserver(&bitrate_observer_2);
allocator_->RemoveObserver(&bitrate_observer_3);
@@ -181,14 +198,16 @@ TEST_F(BitrateAllocatorTest, ThreeBitrateObserversLowRembEnforceMin) {
TestBitrateObserver bitrate_observer_2;
TestBitrateObserver bitrate_observer_3;
int start_bitrate =
- allocator_->AddObserver(&bitrate_observer_1, 100000, 400000);
+ allocator_->AddObserver(&bitrate_observer_1, 100000, 400000, true);
EXPECT_EQ(300000, start_bitrate);
- start_bitrate = allocator_->AddObserver(&bitrate_observer_2, 200000, 400000);
+ start_bitrate =
+ allocator_->AddObserver(&bitrate_observer_2, 200000, 400000, true);
EXPECT_EQ(200000, start_bitrate);
EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
- start_bitrate = allocator_->AddObserver(&bitrate_observer_3, 300000, 400000);
+ start_bitrate =
+ allocator_->AddObserver(&bitrate_observer_3, 300000, 400000, true);
EXPECT_EQ(300000, start_bitrate);
EXPECT_EQ(100000, static_cast<int>(bitrate_observer_1.last_bitrate_));
EXPECT_EQ(200000, static_cast<int>(bitrate_observer_2.last_bitrate_));
@@ -203,4 +222,32 @@ TEST_F(BitrateAllocatorTest, ThreeBitrateObserversLowRembEnforceMin) {
allocator_->RemoveObserver(&bitrate_observer_2);
allocator_->RemoveObserver(&bitrate_observer_3);
}
+
+TEST_F(BitrateAllocatorTest, AddObserverWhileNetworkDown) {
+ TestBitrateObserver bitrate_observer_1;
+ int start_bitrate =
+ allocator_->AddObserver(&bitrate_observer_1, 50000, 400000, true);
+ EXPECT_EQ(300000, start_bitrate);
+
+ // Set network down, ie, no available bitrate.
+ allocator_->OnNetworkChanged(0, 0, 0);
+
+ EXPECT_EQ(0u, bitrate_observer_1.last_bitrate_);
+
+ TestBitrateObserver bitrate_observer_2;
+ start_bitrate =
+ allocator_->AddObserver(&bitrate_observer_2, 50000, 400000, true);
+
+ // Expect the start_bitrate to be set as if the network was still up but that
+ // the new observer have been notified that the network is down.
+ EXPECT_EQ(300000 / 2, start_bitrate);
+ EXPECT_EQ(0u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(0u, bitrate_observer_2.last_bitrate_);
+
+ // Set network back up.
+ allocator_->OnNetworkChanged(1500000, 0, 50);
+ EXPECT_EQ(750000u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(750000u, bitrate_observer_2.last_bitrate_);
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/bitrate_estimator_tests.cc b/chromium/third_party/webrtc/call/bitrate_estimator_tests.cc
index c63d45dc6e6..122e5bb19b6 100644
--- a/chromium/third_party/webrtc/call/bitrate_estimator_tests.cc
+++ b/chromium/third_party/webrtc/call/bitrate_estimator_tests.cc
@@ -302,7 +302,8 @@ TEST_F(BitrateEstimatorTest, SwitchesToASTForVideo) {
EXPECT_TRUE(receiver_log_.Wait());
}
-TEST_F(BitrateEstimatorTest, SwitchesToASTThenBackToTOFForVideo) {
+// This test is flaky. See webrtc:5790.
+TEST_F(BitrateEstimatorTest, DISABLED_SwitchesToASTThenBackToTOFForVideo) {
video_send_config_.rtp.extensions.push_back(
RtpExtension(RtpExtension::kTOffset, kTOFExtensionId));
receiver_log_.PushExpectedLogLine(kSingleStreamLog);
diff --git a/chromium/third_party/webrtc/call/call.cc b/chromium/third_party/webrtc/call/call.cc
index 91c27c80ad9..f7c66db4b21 100644
--- a/chromium/third_party/webrtc/call/call.cc
+++ b/chromium/third_party/webrtc/call/call.cc
@@ -19,6 +19,7 @@
#include "webrtc/audio/audio_state.h"
#include "webrtc/audio/scoped_voe_interface.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/base/thread_checker.h"
@@ -39,6 +40,7 @@
#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/video/call_stats.h"
+#include "webrtc/video/send_delay_stats.h"
#include "webrtc/video/video_receive_stream.h"
#include "webrtc/video/video_send_stream.h"
#include "webrtc/video/vie_remb.h"
@@ -50,8 +52,9 @@ const int Call::Config::kDefaultStartBitrateBps = 300000;
namespace internal {
-class Call : public webrtc::Call, public PacketReceiver,
- public BitrateObserver {
+class Call : public webrtc::Call,
+ public PacketReceiver,
+ public CongestionController::Observer {
public:
explicit Call(const Call::Config& config);
virtual ~Call();
@@ -89,6 +92,9 @@ class Call : public webrtc::Call, public PacketReceiver,
void SignalChannelNetworkState(MediaType media, NetworkState state) override;
+ void OnNetworkRouteChanged(const std::string& transport_name,
+ const rtc::NetworkRoute& network_route) override;
+
void OnSentPacket(const rtc::SentPacket& sent_packet) override;
// Implements BitrateObserver.
@@ -102,7 +108,6 @@ class Call : public webrtc::Call, public PacketReceiver,
const uint8_t* packet,
size_t length,
const PacketTime& packet_time);
-
void ConfigureSync(const std::string& sync_group)
EXCLUSIVE_LOCKS_REQUIRED(receive_crit_);
@@ -170,8 +175,11 @@ class Call : public webrtc::Call, public PacketReceiver,
int64_t pacer_bitrate_sum_kbits_ GUARDED_BY(&bitrate_crit_);
int64_t num_bitrate_updates_ GUARDED_BY(&bitrate_crit_);
+ std::map<std::string, rtc::NetworkRoute> network_routes_;
+
VieRemb remb_;
const std::unique_ptr<CongestionController> congestion_controller_;
+ const std::unique_ptr<SendDelayStats> video_send_delay_stats_;
RTC_DISALLOW_COPY_AND_ASSIGN(Call);
};
@@ -186,9 +194,8 @@ namespace internal {
Call::Call(const Call::Config& config)
: clock_(Clock::GetRealTimeClock()),
num_cpu_cores_(CpuInfo::DetectNumberOfCores()),
- module_process_thread_(
- rtc::ScopedToUnique(ProcessThread::Create("ModuleProcessThread"))),
- pacer_thread_(rtc::ScopedToUnique(ProcessThread::Create("PacerThread"))),
+ module_process_thread_(ProcessThread::Create("ModuleProcessThread")),
+ pacer_thread_(ProcessThread::Create("PacerThread")),
call_stats_(new CallStats(clock_)),
bitrate_allocator_(new BitrateAllocator()),
config_(config),
@@ -206,7 +213,8 @@ Call::Call(const Call::Config& config)
pacer_bitrate_sum_kbits_(0),
num_bitrate_updates_(0),
remb_(clock_),
- congestion_controller_(new CongestionController(clock_, this, &remb_)) {
+ congestion_controller_(new CongestionController(clock_, this, &remb_)),
+ video_send_delay_stats_(new SendDelayStats(clock_)) {
RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread());
RTC_DCHECK_GE(config.bitrate_config.min_bitrate_bps, 0);
RTC_DCHECK_GE(config.bitrate_config.start_bitrate_bps,
@@ -399,12 +407,14 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream(
TRACE_EVENT0("webrtc", "Call::CreateVideoSendStream");
RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread());
+ video_send_delay_stats_->AddSsrcs(config);
// TODO(mflodman): Base the start bitrate on a current bandwidth estimate, if
// the call has already started.
VideoSendStream* send_stream = new VideoSendStream(
num_cpu_cores_, module_process_thread_.get(), call_stats_.get(),
- congestion_controller_.get(), bitrate_allocator_.get(), &remb_, config,
- encoder_config, suspended_video_send_ssrcs_);
+ congestion_controller_.get(), bitrate_allocator_.get(),
+ video_send_delay_stats_.get(), &remb_, config, encoder_config,
+ suspended_video_send_ssrcs_);
{
WriteLockScoped write_lock(*send_crit_);
for (uint32_t ssrc : config.rtp.ssrcs) {
@@ -591,6 +601,37 @@ void Call::SignalChannelNetworkState(MediaType media, NetworkState state) {
}
}
+// TODO(honghaiz): Add tests for this method.
+void Call::OnNetworkRouteChanged(const std::string& transport_name,
+ const rtc::NetworkRoute& network_route) {
+ RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread());
+ // Check if the network route is connected.
+ if (!network_route.connected) {
+ LOG(LS_INFO) << "Transport " << transport_name << " is disconnected";
+ // TODO(honghaiz): Perhaps handle this in SignalChannelNetworkState and
+ // consider merging these two methods.
+ return;
+ }
+
+ // Check whether the network route has changed on each transport.
+ auto result =
+ network_routes_.insert(std::make_pair(transport_name, network_route));
+ auto kv = result.first;
+ bool inserted = result.second;
+ if (inserted) {
+ // No need to reset BWE if this is the first time the network connects.
+ return;
+ }
+ if (kv->second != network_route) {
+ kv->second = network_route;
+ LOG(LS_INFO) << "Network route changed on transport " << transport_name
+ << ": new local network id " << network_route.local_network_id
+ << " new remote network id "
+ << network_route.remote_network_id;
+ // TODO(holmer): Update the BWE bitrates.
+ }
+}
+
void Call::UpdateAggregateNetworkState() {
RTC_DCHECK(configuration_thread_checker_.CalledOnValidThread());
@@ -626,6 +667,8 @@ void Call::UpdateAggregateNetworkState() {
void Call::OnSentPacket(const rtc::SentPacket& sent_packet) {
if (first_packet_sent_ms_ == -1)
first_packet_sent_ms_ = clock_->TimeInMilliseconds();
+ video_send_delay_stats_->OnSentPacket(sent_packet.packet_id,
+ clock_->TimeInMilliseconds());
congestion_controller_->OnSentPacket(sent_packet);
}
@@ -657,10 +700,8 @@ void Call::OnNetworkChanged(uint32_t target_bitrate_bps, uint8_t fraction_loss,
pacer_bitrate_sum_kbits_ += pacer_bitrate_bps / 1000;
++num_bitrate_updates_;
}
- congestion_controller_->UpdatePacerBitrate(
- target_bitrate_bps / 1000,
- PacedSender::kDefaultPaceMultiplier * pacer_bitrate_bps / 1000,
- pad_up_to_bitrate_bps / 1000);
+ congestion_controller_->SetAllocatedSendBitrate(allocated_bitrate_bps,
+ pad_up_to_bitrate_bps);
}
void Call::ConfigureSync(const std::string& sync_group) {
@@ -715,8 +756,7 @@ PacketReceiver::DeliveryStatus Call::DeliverRtcp(MediaType media_type,
const uint8_t* packet,
size_t length) {
TRACE_EVENT0("webrtc", "Call::DeliverRtcp");
- // TODO(pbos): Figure out what channel needs it actually.
- // Do NOT broadcast! Also make sure it's a valid packet.
+ // TODO(pbos): Make sure it's a valid packet.
// Return DELIVERY_UNKNOWN_SSRC if it can be determined that
// there's no receiver of the packet.
received_rtcp_bytes_ += length;
@@ -724,25 +764,35 @@ PacketReceiver::DeliveryStatus Call::DeliverRtcp(MediaType media_type,
if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) {
ReadLockScoped read_lock(*receive_crit_);
for (VideoReceiveStream* stream : video_receive_streams_) {
- if (stream->DeliverRtcp(packet, length)) {
+ if (stream->DeliverRtcp(packet, length))
+ rtcp_delivered = true;
+ }
+ }
+ if (media_type == MediaType::ANY || media_type == MediaType::AUDIO) {
+ ReadLockScoped read_lock(*receive_crit_);
+ for (auto& kv : audio_receive_ssrcs_) {
+ if (kv.second->DeliverRtcp(packet, length))
rtcp_delivered = true;
- if (event_log_)
- event_log_->LogRtcpPacket(kIncomingPacket, media_type, packet,
- length);
- }
}
}
if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) {
ReadLockScoped read_lock(*send_crit_);
for (VideoSendStream* stream : video_send_streams_) {
- if (stream->DeliverRtcp(packet, length)) {
+ if (stream->DeliverRtcp(packet, length))
+ rtcp_delivered = true;
+ }
+ }
+ if (media_type == MediaType::ANY || media_type == MediaType::AUDIO) {
+ ReadLockScoped read_lock(*send_crit_);
+ for (auto& kv : audio_send_ssrcs_) {
+ if (kv.second->DeliverRtcp(packet, length))
rtcp_delivered = true;
- if (event_log_)
- event_log_->LogRtcpPacket(kIncomingPacket, media_type, packet,
- length);
- }
}
}
+
+ if (event_log_ && rtcp_delivered)
+ event_log_->LogRtcpPacket(kIncomingPacket, media_type, packet, length);
+
return rtcp_delivered ? DELIVERY_OK : DELIVERY_PACKET_ERROR;
}
diff --git a/chromium/third_party/webrtc/call/call_perf_tests.cc b/chromium/third_party/webrtc/call/call_perf_tests.cc
index 9aa50d0ad97..329c1f25b61 100644
--- a/chromium/third_party/webrtc/call/call_perf_tests.cc
+++ b/chromium/third_party/webrtc/call/call_perf_tests.cc
@@ -17,6 +17,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/call.h"
#include "webrtc/call/transport_adapter.h"
@@ -41,7 +42,6 @@
#include "webrtc/test/testsupport/perf_test.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
-#include "webrtc/voice_engine/include/voe_network.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/include/voe_video_sync.h"
@@ -149,39 +149,11 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec,
const char* kSyncGroup = "av_sync";
const uint32_t kAudioSendSsrc = 1234;
const uint32_t kAudioRecvSsrc = 5678;
- class AudioPacketReceiver : public PacketReceiver {
- public:
- AudioPacketReceiver(int channel, VoENetwork* voe_network)
- : channel_(channel),
- voe_network_(voe_network),
- parser_(RtpHeaderParser::Create()) {}
- DeliveryStatus DeliverPacket(MediaType media_type,
- const uint8_t* packet,
- size_t length,
- const PacketTime& packet_time) override {
- EXPECT_TRUE(media_type == MediaType::ANY ||
- media_type == MediaType::AUDIO);
- int ret;
- if (parser_->IsRtcp(packet, length)) {
- ret = voe_network_->ReceivedRTCPPacket(channel_, packet, length);
- } else {
- ret = voe_network_->ReceivedRTPPacket(channel_, packet, length,
- PacketTime());
- }
- return ret == 0 ? DELIVERY_OK : DELIVERY_PACKET_ERROR;
- }
-
- private:
- int channel_;
- VoENetwork* voe_network_;
- std::unique_ptr<RtpHeaderParser> parser_;
- };
test::ClearHistograms();
VoiceEngine* voice_engine = VoiceEngine::Create();
VoEBase* voe_base = VoEBase::GetInterface(voice_engine);
VoECodec* voe_codec = VoECodec::GetInterface(voice_engine);
- VoENetwork* voe_network = VoENetwork::GetInterface(voice_engine);
const std::string audio_filename =
test::ResourcePath("voice_engine/audio_long16", "pcm");
ASSERT_STRNE("", audio_filename.c_str());
@@ -201,44 +173,56 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec,
receiver_config.audio_state = sender_config.audio_state;
CreateCalls(sender_config, receiver_config);
- AudioPacketReceiver voe_send_packet_receiver(send_channel_id, voe_network);
- AudioPacketReceiver voe_recv_packet_receiver(recv_channel_id, voe_network);
VideoRtcpAndSyncObserver observer(Clock::GetRealTimeClock());
- FakeNetworkPipe::Config net_config;
- net_config.queue_delay_ms = 500;
- net_config.loss_percent = 5;
- test::PacketTransport audio_send_transport(
- nullptr, &observer, test::PacketTransport::kSender, net_config);
- audio_send_transport.SetReceiver(&voe_recv_packet_receiver);
- test::PacketTransport audio_receive_transport(
- nullptr, &observer, test::PacketTransport::kReceiver, net_config);
- audio_receive_transport.SetReceiver(&voe_send_packet_receiver);
-
- internal::TransportAdapter send_transport_adapter(&audio_send_transport);
- send_transport_adapter.Enable();
- EXPECT_EQ(0, voe_network->RegisterExternalTransport(send_channel_id,
- send_transport_adapter));
-
- internal::TransportAdapter recv_transport_adapter(&audio_receive_transport);
- recv_transport_adapter.Enable();
- EXPECT_EQ(0, voe_network->RegisterExternalTransport(recv_channel_id,
- recv_transport_adapter));
-
- test::PacketTransport sync_send_transport(sender_call_.get(), &observer,
- test::PacketTransport::kSender,
- FakeNetworkPipe::Config());
- sync_send_transport.SetReceiver(receiver_call_->Receiver());
- test::PacketTransport sync_receive_transport(receiver_call_.get(), &observer,
- test::PacketTransport::kReceiver,
- FakeNetworkPipe::Config());
- sync_receive_transport.SetReceiver(sender_call_->Receiver());
+ // Helper class to ensure we deliver correct media_type to the receiving call.
+ class MediaTypePacketReceiver : public PacketReceiver {
+ public:
+ MediaTypePacketReceiver(PacketReceiver* packet_receiver,
+ MediaType media_type)
+ : packet_receiver_(packet_receiver), media_type_(media_type) {}
+
+ DeliveryStatus DeliverPacket(MediaType media_type,
+ const uint8_t* packet,
+ size_t length,
+ const PacketTime& packet_time) override {
+ return packet_receiver_->DeliverPacket(media_type_, packet, length,
+ packet_time);
+ }
+ private:
+ PacketReceiver* packet_receiver_;
+ const MediaType media_type_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(MediaTypePacketReceiver);
+ };
+
+ FakeNetworkPipe::Config audio_net_config;
+ audio_net_config.queue_delay_ms = 500;
+ audio_net_config.loss_percent = 5;
+ test::PacketTransport audio_send_transport(sender_call_.get(), &observer,
+ test::PacketTransport::kSender,
+ audio_net_config);
+ MediaTypePacketReceiver audio_receiver(receiver_call_->Receiver(),
+ MediaType::AUDIO);
+ audio_send_transport.SetReceiver(&audio_receiver);
+
+ test::PacketTransport video_send_transport(sender_call_.get(), &observer,
+ test::PacketTransport::kSender,
+ FakeNetworkPipe::Config());
+ MediaTypePacketReceiver video_receiver(receiver_call_->Receiver(),
+ MediaType::VIDEO);
+ video_send_transport.SetReceiver(&video_receiver);
+
+ test::PacketTransport receive_transport(
+ receiver_call_.get(), &observer, test::PacketTransport::kReceiver,
+ FakeNetworkPipe::Config());
+ receive_transport.SetReceiver(sender_call_->Receiver());
test::FakeDecoder fake_decoder;
- CreateSendConfig(1, 0, &sync_send_transport);
- CreateMatchingReceiveConfigs(&sync_receive_transport);
+ CreateSendConfig(1, 0, &video_send_transport);
+ CreateMatchingReceiveConfigs(&receive_transport);
AudioSendStream::Config audio_send_config(&audio_send_transport);
audio_send_config.voe_channel_id = send_channel_id;
@@ -298,10 +282,9 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec,
fake_audio_device.Stop();
Stop();
- sync_send_transport.StopSending();
- sync_receive_transport.StopSending();
+ video_send_transport.StopSending();
audio_send_transport.StopSending();
- audio_receive_transport.StopSending();
+ receive_transport.StopSending();
DestroyStreams();
@@ -312,7 +295,6 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec,
voe_base->DeleteChannel(recv_channel_id);
voe_base->Release();
voe_codec->Release();
- voe_network->Release();
DestroyCalls();
@@ -413,7 +395,7 @@ void CallPerfTest::TestCaptureNtpTime(const FakeNetworkPipe::Config& net_config,
EXPECT_TRUE(std::abs(time_offset_ms) < threshold_ms_);
}
- virtual Action OnSendRtp(const uint8_t* packet, size_t length) {
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
rtc::CritScope lock(&crit_);
RTPHeader header;
EXPECT_TRUE(parser_->Parse(packet, length, &header));
diff --git a/chromium/third_party/webrtc/call/mock/mock_rtc_event_log.h b/chromium/third_party/webrtc/call/mock/mock_rtc_event_log.h
index f523105d0e4..8ca73a3dc7d 100644
--- a/chromium/third_party/webrtc/call/mock/mock_rtc_event_log.h
+++ b/chromium/third_party/webrtc/call/mock/mock_rtc_event_log.h
@@ -21,12 +21,11 @@ namespace webrtc {
class MockRtcEventLog : public RtcEventLog {
public:
- MOCK_METHOD1(SetBufferDuration, void(int64_t buffer_duration_us));
-
MOCK_METHOD2(StartLogging,
- void(const std::string& file_name, int duration_ms));
+ bool(const std::string& file_name, int64_t max_size_bytes));
- MOCK_METHOD1(StartLogging, bool(rtc::PlatformFile log_file));
+ MOCK_METHOD2(StartLogging,
+ bool(rtc::PlatformFile log_file, int64_t max_size_bytes));
MOCK_METHOD0(StopLogging, void());
diff --git a/chromium/third_party/webrtc/call/ringbuffer.h b/chromium/third_party/webrtc/call/ringbuffer.h
new file mode 100644
index 00000000000..fa5e4227ff7
--- /dev/null
+++ b/chromium/third_party/webrtc/call/ringbuffer.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_CALL_RINGBUFFER_H_
+#define WEBRTC_CALL_RINGBUFFER_H_
+
+#include <memory>
+#include <utility>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
+
+namespace webrtc {
+
+// A RingBuffer works like a fixed size queue which starts discarding
+// the oldest elements when it becomes full.
+template <typename T>
+class RingBuffer {
+ public:
+ // Creates a RingBuffer with space for |capacity| elements.
+ explicit RingBuffer(size_t capacity)
+ : // We allocate space for one extra sentinel element.
+ data_(new T[capacity + 1]) {
+ RTC_DCHECK(capacity > 0);
+ end_ = data_.get() + (capacity + 1);
+ front_ = data_.get();
+ back_ = data_.get();
+ }
+
+ ~RingBuffer() {
+ // The unique_ptr will free the memory.
+ }
+
+ // Removes an element from the front of the queue.
+ void pop_front() {
+ RTC_DCHECK(!empty());
+ ++front_;
+ if (front_ == end_) {
+ front_ = data_.get();
+ }
+ }
+
+ // Appends an element to the back of the queue (and removes an
+ // element from the front if there is no space at the back of the queue).
+ void push_back(const T& elem) {
+ *back_ = elem;
+ ++back_;
+ if (back_ == end_) {
+ back_ = data_.get();
+ }
+ if (back_ == front_) {
+ ++front_;
+ }
+ if (front_ == end_) {
+ front_ = data_.get();
+ }
+ }
+
+ // Appends an element to the back of the queue (and removes an
+ // element from the front if there is no space at the back of the queue).
+ void push_back(T&& elem) {
+ *back_ = std::move(elem);
+ ++back_;
+ if (back_ == end_) {
+ back_ = data_.get();
+ }
+ if (back_ == front_) {
+ ++front_;
+ }
+ if (front_ == end_) {
+ front_ = data_.get();
+ }
+ }
+
+ T& front() { return *front_; }
+
+ const T& front() const { return *front_; }
+
+ bool empty() const { return (front_ == back_); }
+
+ private:
+ std::unique_ptr<T[]> data_;
+ T* end_;
+ T* front_;
+ T* back_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RingBuffer);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_CALL_RINGBUFFER_H_
diff --git a/chromium/third_party/webrtc/call/ringbuffer_unittest.cc b/chromium/third_party/webrtc/call/ringbuffer_unittest.cc
new file mode 100644
index 00000000000..e2b5a41461a
--- /dev/null
+++ b/chromium/third_party/webrtc/call/ringbuffer_unittest.cc
@@ -0,0 +1,170 @@
+/*
+ * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#include <list>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/random.h"
+#include "webrtc/call/ringbuffer.h"
+
+namespace {
+template <typename T>
+class MovableType {
+ public:
+ MovableType() : value_(), moved_from_(false), moved_to_(false) {}
+ explicit MovableType(T value)
+ : value_(value), moved_from_(false), moved_to_(false) {}
+ MovableType(const MovableType<T>& other)
+ : value_(other.value_), moved_from_(false), moved_to_(false) {}
+ MovableType(MovableType<T>&& other)
+ : value_(other.value_), moved_from_(false), moved_to_(true) {
+ other.moved_from_ = true;
+ }
+
+ MovableType& operator=(const MovableType<T>& other) {
+ value_ = other.value_;
+ moved_from_ = false;
+ moved_to_ = false;
+ return *this;
+ }
+
+ MovableType& operator=(MovableType<T>&& other) {
+ value_ = other.value_;
+ moved_from_ = false;
+ moved_to_ = true;
+ other.moved_from_ = true;
+ return *this;
+ }
+
+ T Value() { return value_; }
+ bool IsMovedFrom() { return moved_from_; }
+ bool IsMovedTo() { return moved_to_; }
+
+ private:
+ T value_;
+ bool moved_from_;
+ bool moved_to_;
+};
+
+} // namespace
+
+namespace webrtc {
+
+// Verify that the ring buffer works as a simple queue.
+TEST(RingBufferTest, SimpleQueue) {
+ size_t capacity = 100;
+ RingBuffer<size_t> q(capacity);
+ EXPECT_TRUE(q.empty());
+ for (size_t i = 0; i < capacity; i++) {
+ q.push_back(i);
+ EXPECT_FALSE(q.empty());
+ }
+
+ for (size_t i = 0; i < capacity; i++) {
+ EXPECT_FALSE(q.empty());
+ EXPECT_EQ(i, q.front());
+ q.pop_front();
+ }
+ EXPECT_TRUE(q.empty());
+}
+
+// Do a "random" sequence of queue operations and verify that the
+// result is consistent with the same operation performed on a std::list.
+TEST(RingBufferTest, ConsistentWithStdList) {
+ Random prng(987654321ull);
+ size_t capacity = 10;
+ RingBuffer<int> q(capacity);
+ std::list<int> l;
+ EXPECT_TRUE(q.empty());
+ for (size_t i = 0; i < 100 * capacity; i++) {
+ bool insert = prng.Rand<bool>();
+ if ((insert && l.size() < capacity) || l.size() == 0) {
+ int x = prng.Rand<int>();
+ l.push_back(x);
+ q.push_back(x);
+ EXPECT_FALSE(q.empty());
+ } else {
+ EXPECT_FALSE(q.empty());
+ EXPECT_EQ(l.front(), q.front());
+ l.pop_front();
+ q.pop_front();
+ }
+ }
+ while (!l.empty()) {
+ EXPECT_FALSE(q.empty());
+ EXPECT_EQ(l.front(), q.front());
+ l.pop_front();
+ q.pop_front();
+ }
+ EXPECT_TRUE(q.empty());
+}
+
+// Test that the ringbuffer starts reusing elements from the front
+// when the queue becomes full.
+TEST(RingBufferTest, OverwriteOldElements) {
+ size_t capacity = 100;
+ size_t insertions = 3 * capacity + 25;
+ RingBuffer<size_t> q(capacity);
+ EXPECT_TRUE(q.empty());
+ for (size_t i = 0; i < insertions; i++) {
+ q.push_back(i);
+ EXPECT_FALSE(q.empty());
+ }
+
+ for (size_t i = insertions - capacity; i < insertions; i++) {
+ EXPECT_FALSE(q.empty());
+ EXPECT_EQ(i, q.front());
+ q.pop_front();
+ }
+ EXPECT_TRUE(q.empty());
+}
+
+// Test that the ringbuffer uses std::move when pushing an rvalue reference.
+TEST(RingBufferTest, MoveSemanticsForPushBack) {
+ size_t capacity = 100;
+ size_t insertions = 3 * capacity + 25;
+ RingBuffer<MovableType<size_t>> q(capacity);
+ EXPECT_TRUE(q.empty());
+ for (size_t i = 0; i < insertions; i++) {
+ MovableType<size_t> tmp(i);
+ EXPECT_FALSE(tmp.IsMovedFrom());
+ EXPECT_FALSE(tmp.IsMovedTo());
+ q.push_back(std::move(tmp));
+ EXPECT_TRUE(tmp.IsMovedFrom());
+ EXPECT_FALSE(tmp.IsMovedTo());
+ EXPECT_FALSE(q.empty());
+ }
+
+ for (size_t i = insertions - capacity; i < insertions; i++) {
+ EXPECT_FALSE(q.empty());
+ EXPECT_EQ(i, q.front().Value());
+ EXPECT_FALSE(q.front().IsMovedFrom());
+ EXPECT_TRUE(q.front().IsMovedTo());
+ q.pop_front();
+ }
+ EXPECT_TRUE(q.empty());
+}
+
+TEST(RingBufferTest, SmallCapacity) {
+ size_t capacity = 1;
+ RingBuffer<int> q(capacity);
+ EXPECT_TRUE(q.empty());
+ q.push_back(4711);
+ EXPECT_FALSE(q.empty());
+ EXPECT_EQ(4711, q.front());
+ q.push_back(1024);
+ EXPECT_FALSE(q.empty());
+ EXPECT_EQ(1024, q.front());
+ q.pop_front();
+ EXPECT_TRUE(q.empty());
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/rtc_event_log.cc b/chromium/third_party/webrtc/call/rtc_event_log.cc
index ce4d6ef39f4..6dd02c808eb 100644
--- a/chromium/third_party/webrtc/call/rtc_event_log.cc
+++ b/chromium/third_party/webrtc/call/rtc_event_log.cc
@@ -10,18 +10,22 @@
#include "webrtc/call/rtc_event_log.h"
-#include <deque>
+#include <limits>
#include <vector>
#include "webrtc/base/checks.h"
-#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/thread_annotations.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/swap_queue.h"
+#include "webrtc/base/thread_checker.h"
#include "webrtc/call.h"
+#include "webrtc/call/rtc_event_log_helper_thread.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
+#include "webrtc/system_wrappers/include/logging.h"
#ifdef ENABLE_RTC_EVENT_LOG
// Files generated at build-time by the protobuf compiler.
@@ -37,12 +41,17 @@ namespace webrtc {
#ifndef ENABLE_RTC_EVENT_LOG
// No-op implementation if flag is not set.
-class RtcEventLogImpl final : public RtcEventLog {
+class RtcEventLogNullImpl final : public RtcEventLog {
public:
- void SetBufferDuration(int64_t buffer_duration_us) override {}
- void StartLogging(const std::string& file_name, int duration_ms) override {}
- bool StartLogging(rtc::PlatformFile log_file) override { return false; }
- void StopLogging(void) override {}
+ bool StartLogging(const std::string& file_name,
+ int64_t max_size_bytes) override {
+ return false;
+ }
+ bool StartLogging(rtc::PlatformFile platform_file,
+ int64_t max_size_bytes) override {
+ return false;
+ }
+ void StopLogging() override {}
void LogVideoReceiveStreamConfig(
const VideoReceiveStream::Config& config) override {}
void LogVideoSendStreamConfig(
@@ -65,11 +74,13 @@ class RtcEventLogImpl final : public RtcEventLog {
class RtcEventLogImpl final : public RtcEventLog {
public:
- RtcEventLogImpl();
+ explicit RtcEventLogImpl(const Clock* clock);
+ ~RtcEventLogImpl() override;
- void SetBufferDuration(int64_t buffer_duration_us) override;
- void StartLogging(const std::string& file_name, int duration_ms) override;
- bool StartLogging(rtc::PlatformFile log_file) override;
+ bool StartLogging(const std::string& file_name,
+ int64_t max_size_bytes) override;
+ bool StartLogging(rtc::PlatformFile platform_file,
+ int64_t max_size_bytes) override;
void StopLogging() override;
void LogVideoReceiveStreamConfig(
const VideoReceiveStream::Config& config) override;
@@ -88,37 +99,21 @@ class RtcEventLogImpl final : public RtcEventLog {
int32_t total_packets) override;
private:
- // Starts logging. This function assumes the file_ has been opened succesfully
- // and that the start_time_us_ and _duration_us_ have been set.
- void StartLoggingLocked() EXCLUSIVE_LOCKS_REQUIRED(crit_);
- // Stops logging and clears the stored data and buffers.
- void StopLoggingLocked() EXCLUSIVE_LOCKS_REQUIRED(crit_);
- // Adds a new event to the logfile if logging is active, or adds it to the
- // list of recent log events otherwise.
- void HandleEvent(rtclog::Event* event) EXCLUSIVE_LOCKS_REQUIRED(crit_);
- // Writes the event to the file. Note that this will destroy the state of the
- // input argument.
- void StoreToFile(rtclog::Event* event) EXCLUSIVE_LOCKS_REQUIRED(crit_);
- // Adds the event to the list of recent events, and removes any events that
- // are too old and no longer fall in the time window.
- void AddRecentEvent(const rtclog::Event& event)
- EXCLUSIVE_LOCKS_REQUIRED(crit_);
-
- rtc::CriticalSection crit_;
- std::unique_ptr<FileWrapper> file_ GUARDED_BY(crit_) =
- std::unique_ptr<FileWrapper>(FileWrapper::Create());
- rtc::PlatformFile platform_file_ GUARDED_BY(crit_) =
- rtc::kInvalidPlatformFileValue;
- rtclog::EventStream stream_ GUARDED_BY(crit_);
- std::deque<rtclog::Event> recent_log_events_ GUARDED_BY(crit_);
- std::vector<rtclog::Event> config_events_ GUARDED_BY(crit_);
-
- // Microseconds to record log events, before starting the actual log.
- int64_t buffer_duration_us_ GUARDED_BY(crit_);
- bool currently_logging_ GUARDED_BY(crit_);
- int64_t start_time_us_ GUARDED_BY(crit_);
- int64_t duration_us_ GUARDED_BY(crit_);
+ // Message queue for passing control messages to the logging thread.
+ SwapQueue<RtcEventLogHelperThread::ControlMessage> message_queue_;
+
+ // Message queue for passing events to the logging thread.
+ SwapQueue<std::unique_ptr<rtclog::Event> > event_queue_;
+
+ rtc::Event wake_up_;
+ rtc::Event stopped_;
+
const Clock* const clock_;
+
+ RtcEventLogHelperThread helper_thread_;
+ rtc::ThreadChecker thread_checker_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RtcEventLogImpl);
};
namespace {
@@ -126,10 +121,6 @@ namespace {
// that the rest of the WebRtc project can use, to the corresponding
// serialized enum which is defined by the protobuf.
-// Do not add default return values to the conversion functions in this
-// unnamed namespace. The intention is to make the compiler warn if anyone
-// adds unhandled new events/modes/etc.
-
rtclog::VideoReceiveConfig_RtcpMode ConvertRtcpMode(RtcpMode rtcp_mode) {
switch (rtcp_mode) {
case RtcpMode::kCompound:
@@ -159,114 +150,108 @@ rtclog::MediaType ConvertMediaType(MediaType media_type) {
return rtclog::ANY;
}
-} // namespace
-
-namespace {
-bool IsConfigEvent(const rtclog::Event& event) {
- rtclog::Event_EventType event_type = event.type();
- return event_type == rtclog::Event::VIDEO_RECEIVER_CONFIG_EVENT ||
- event_type == rtclog::Event::VIDEO_SENDER_CONFIG_EVENT ||
- event_type == rtclog::Event::AUDIO_RECEIVER_CONFIG_EVENT ||
- event_type == rtclog::Event::AUDIO_SENDER_CONFIG_EVENT;
-}
+// The RTP and RTCP buffers reserve space for twice the expected number of
+// sent packets because they also contain received packets.
+static const int kEventsPerSecond = 1000;
+static const int kControlMessagesPerSecond = 10;
} // namespace
// RtcEventLogImpl member functions.
-RtcEventLogImpl::RtcEventLogImpl()
- : file_(FileWrapper::Create()),
- stream_(),
- buffer_duration_us_(10000000),
- currently_logging_(false),
- start_time_us_(0),
- duration_us_(0),
- clock_(Clock::GetRealTimeClock()) {
+RtcEventLogImpl::RtcEventLogImpl(const Clock* clock)
+ // Allocate buffers for roughly one second of history.
+ : message_queue_(kControlMessagesPerSecond),
+ event_queue_(kEventsPerSecond),
+ wake_up_(false, false),
+ stopped_(false, false),
+ clock_(clock),
+ helper_thread_(&message_queue_,
+ &event_queue_,
+ &wake_up_,
+ &stopped_,
+ clock),
+ thread_checker_() {
+ thread_checker_.DetachFromThread();
}
-void RtcEventLogImpl::SetBufferDuration(int64_t buffer_duration_us) {
- rtc::CritScope lock(&crit_);
- buffer_duration_us_ = buffer_duration_us;
+RtcEventLogImpl::~RtcEventLogImpl() {
+ // The RtcEventLogHelperThread destructor closes the file
+ // and waits for the thread to terminate.
}
-void RtcEventLogImpl::StartLogging(const std::string& file_name,
- int duration_ms) {
- rtc::CritScope lock(&crit_);
- if (currently_logging_) {
- StopLoggingLocked();
+bool RtcEventLogImpl::StartLogging(const std::string& file_name,
+ int64_t max_size_bytes) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RtcEventLogHelperThread::ControlMessage message;
+ message.message_type = RtcEventLogHelperThread::ControlMessage::START_FILE;
+ message.max_size_bytes = max_size_bytes <= 0
+ ? std::numeric_limits<int64_t>::max()
+ : max_size_bytes;
+ message.start_time = clock_->TimeInMicroseconds();
+ message.stop_time = std::numeric_limits<int64_t>::max();
+ message.file.reset(FileWrapper::Create());
+ if (message.file->OpenFile(file_name.c_str(), false) != 0) {
+ return false;
}
- if (file_->OpenFile(file_name.c_str(), false) != 0) {
- return;
+ if (!message_queue_.Insert(&message)) {
+ LOG(LS_WARNING) << "Message queue full. Can't start logging.";
+ return false;
}
- start_time_us_ = clock_->TimeInMicroseconds();
- duration_us_ = static_cast<int64_t>(duration_ms) * 1000;
- StartLoggingLocked();
+ return true;
}
-bool RtcEventLogImpl::StartLogging(rtc::PlatformFile log_file) {
- rtc::CritScope lock(&crit_);
-
- if (currently_logging_) {
- StopLoggingLocked();
+bool RtcEventLogImpl::StartLogging(rtc::PlatformFile platform_file,
+ int64_t max_size_bytes) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RtcEventLogHelperThread::ControlMessage message;
+ message.message_type = RtcEventLogHelperThread::ControlMessage::START_FILE;
+ message.max_size_bytes = max_size_bytes <= 0
+ ? std::numeric_limits<int64_t>::max()
+ : max_size_bytes;
+ message.start_time = clock_->TimeInMicroseconds();
+ message.stop_time = std::numeric_limits<int64_t>::max();
+ message.file.reset(FileWrapper::Create());
+ FILE* file_handle = rtc::FdopenPlatformFileForWriting(platform_file);
+ if (!file_handle) {
+ return false;
}
- RTC_DCHECK(platform_file_ == rtc::kInvalidPlatformFileValue);
-
- FILE* file_stream = rtc::FdopenPlatformFileForWriting(log_file);
- if (!file_stream) {
- rtc::ClosePlatformFile(log_file);
+ if (message.file->OpenFromFileHandle(file_handle, true, false) != 0) {
return false;
}
-
- if (file_->OpenFromFileHandle(file_stream, true, false) != 0) {
- rtc::ClosePlatformFile(log_file);
+ if (!message_queue_.Insert(&message)) {
+ LOG(LS_WARNING) << "Message queue full. Can't start logging.";
return false;
}
- platform_file_ = log_file;
- // Set the start time and duration to keep logging for 10 minutes.
- start_time_us_ = clock_->TimeInMicroseconds();
- duration_us_ = 10 * 60 * 1000000;
- StartLoggingLocked();
return true;
}
-void RtcEventLogImpl::StartLoggingLocked() {
- currently_logging_ = true;
-
- // Write all old configuration events to the log file.
- for (auto& event : config_events_) {
- StoreToFile(&event);
- }
- // Write all recent configuration events to the log file, and
- // write all other recent events to the log file, ignoring any old events.
- for (auto& event : recent_log_events_) {
- if (IsConfigEvent(event)) {
- StoreToFile(&event);
- config_events_.push_back(event);
- } else if (event.timestamp_us() >= start_time_us_ - buffer_duration_us_) {
- StoreToFile(&event);
- }
- }
- recent_log_events_.clear();
- // Write a LOG_START event to the file.
- rtclog::Event start_event;
- start_event.set_timestamp_us(start_time_us_);
- start_event.set_type(rtclog::Event::LOG_START);
- StoreToFile(&start_event);
-}
-
void RtcEventLogImpl::StopLogging() {
- rtc::CritScope lock(&crit_);
- StopLoggingLocked();
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RtcEventLogHelperThread::ControlMessage message;
+ message.message_type = RtcEventLogHelperThread::ControlMessage::STOP_FILE;
+ message.stop_time = clock_->TimeInMicroseconds();
+ while (!message_queue_.Insert(&message)) {
+ // TODO(terelius): We would like to have a blocking Insert function in the
+ // SwapQueue, but for the time being we will just clear any previous
+ // messages.
+ // Since StopLogging waits for the thread, it is essential that we don't
+ // clear any STOP_FILE messages. To ensure that there is only one call at a
+ // time, we require that all calls to StopLogging are made on the same
+ // thread.
+ LOG(LS_WARNING) << "Message queue full. Clearing queue to stop logging.";
+ message_queue_.Clear();
+ }
+ wake_up_.Set(); // Request the output thread to wake up.
+ stopped_.Wait(rtc::Event::kForever); // Wait for the log to stop.
}
void RtcEventLogImpl::LogVideoReceiveStreamConfig(
const VideoReceiveStream::Config& config) {
- rtc::CritScope lock(&crit_);
-
- rtclog::Event event;
- event.set_timestamp_us(clock_->TimeInMicroseconds());
- event.set_type(rtclog::Event::VIDEO_RECEIVER_CONFIG_EVENT);
+ std::unique_ptr<rtclog::Event> event(new rtclog::Event());
+ event->set_timestamp_us(clock_->TimeInMicroseconds());
+ event->set_type(rtclog::Event::VIDEO_RECEIVER_CONFIG_EVENT);
rtclog::VideoReceiveConfig* receiver_config =
- event.mutable_video_receiver_config();
+ event->mutable_video_receiver_config();
receiver_config->set_remote_ssrc(config.rtp.remote_ssrc);
receiver_config->set_local_ssrc(config.rtp.local_ssrc);
@@ -292,18 +277,18 @@ void RtcEventLogImpl::LogVideoReceiveStreamConfig(
decoder->set_name(d.payload_name);
decoder->set_payload_type(d.payload_type);
}
- HandleEvent(&event);
+ if (!event_queue_.Insert(&event)) {
+ LOG(LS_WARNING) << "Config queue full. Not logging config event.";
+ }
}
void RtcEventLogImpl::LogVideoSendStreamConfig(
const VideoSendStream::Config& config) {
- rtc::CritScope lock(&crit_);
+ std::unique_ptr<rtclog::Event> event(new rtclog::Event());
+ event->set_timestamp_us(clock_->TimeInMicroseconds());
+ event->set_type(rtclog::Event::VIDEO_SENDER_CONFIG_EVENT);
- rtclog::Event event;
- event.set_timestamp_us(clock_->TimeInMicroseconds());
- event.set_type(rtclog::Event::VIDEO_SENDER_CONFIG_EVENT);
-
- rtclog::VideoSendConfig* sender_config = event.mutable_video_sender_config();
+ rtclog::VideoSendConfig* sender_config = event->mutable_video_sender_config();
for (const auto& ssrc : config.rtp.ssrcs) {
sender_config->add_ssrcs(ssrc);
@@ -324,7 +309,9 @@ void RtcEventLogImpl::LogVideoSendStreamConfig(
rtclog::EncoderConfig* encoder = sender_config->mutable_encoder();
encoder->set_name(config.encoder_settings.payload_name);
encoder->set_payload_type(config.encoder_settings.payload_type);
- HandleEvent(&event);
+ if (!event_queue_.Insert(&event)) {
+ LOG(LS_WARNING) << "Config queue full. Not logging config event.";
+ }
}
void RtcEventLogImpl::LogRtpHeader(PacketDirection direction,
@@ -347,27 +334,27 @@ void RtcEventLogImpl::LogRtpHeader(PacketDirection direction,
header_length += (x_len + 1) * 4;
}
- rtc::CritScope lock(&crit_);
- rtclog::Event rtp_event;
- rtp_event.set_timestamp_us(clock_->TimeInMicroseconds());
- rtp_event.set_type(rtclog::Event::RTP_EVENT);
- rtp_event.mutable_rtp_packet()->set_incoming(direction == kIncomingPacket);
- rtp_event.mutable_rtp_packet()->set_type(ConvertMediaType(media_type));
- rtp_event.mutable_rtp_packet()->set_packet_length(packet_length);
- rtp_event.mutable_rtp_packet()->set_header(header, header_length);
- HandleEvent(&rtp_event);
+ std::unique_ptr<rtclog::Event> rtp_event(new rtclog::Event());
+ rtp_event->set_timestamp_us(clock_->TimeInMicroseconds());
+ rtp_event->set_type(rtclog::Event::RTP_EVENT);
+ rtp_event->mutable_rtp_packet()->set_incoming(direction == kIncomingPacket);
+ rtp_event->mutable_rtp_packet()->set_type(ConvertMediaType(media_type));
+ rtp_event->mutable_rtp_packet()->set_packet_length(packet_length);
+ rtp_event->mutable_rtp_packet()->set_header(header, header_length);
+ if (!event_queue_.Insert(&rtp_event)) {
+ LOG(LS_WARNING) << "RTP queue full. Not logging RTP packet.";
+ }
}
void RtcEventLogImpl::LogRtcpPacket(PacketDirection direction,
MediaType media_type,
const uint8_t* packet,
size_t length) {
- rtc::CritScope lock(&crit_);
- rtclog::Event rtcp_event;
- rtcp_event.set_timestamp_us(clock_->TimeInMicroseconds());
- rtcp_event.set_type(rtclog::Event::RTCP_EVENT);
- rtcp_event.mutable_rtcp_packet()->set_incoming(direction == kIncomingPacket);
- rtcp_event.mutable_rtcp_packet()->set_type(ConvertMediaType(media_type));
+ std::unique_ptr<rtclog::Event> rtcp_event(new rtclog::Event());
+ rtcp_event->set_timestamp_us(clock_->TimeInMicroseconds());
+ rtcp_event->set_type(rtclog::Event::RTCP_EVENT);
+ rtcp_event->mutable_rtcp_packet()->set_incoming(direction == kIncomingPacket);
+ rtcp_event->mutable_rtcp_packet()->set_type(ConvertMediaType(media_type));
RTCPUtility::RtcpCommonHeader header;
const uint8_t* block_begin = packet;
@@ -413,87 +400,35 @@ void RtcEventLogImpl::LogRtcpPacket(PacketDirection direction,
block_begin += block_size;
}
- rtcp_event.mutable_rtcp_packet()->set_packet_data(buffer, buffer_length);
- HandleEvent(&rtcp_event);
+ rtcp_event->mutable_rtcp_packet()->set_packet_data(buffer, buffer_length);
+ if (!event_queue_.Insert(&rtcp_event)) {
+ LOG(LS_WARNING) << "RTCP queue full. Not logging RTCP packet.";
+ }
}
void RtcEventLogImpl::LogAudioPlayout(uint32_t ssrc) {
- rtc::CritScope lock(&crit_);
- rtclog::Event event;
- event.set_timestamp_us(clock_->TimeInMicroseconds());
- event.set_type(rtclog::Event::AUDIO_PLAYOUT_EVENT);
- auto playout_event = event.mutable_audio_playout_event();
+ std::unique_ptr<rtclog::Event> event(new rtclog::Event());
+ event->set_timestamp_us(clock_->TimeInMicroseconds());
+ event->set_type(rtclog::Event::AUDIO_PLAYOUT_EVENT);
+ auto playout_event = event->mutable_audio_playout_event();
playout_event->set_local_ssrc(ssrc);
- HandleEvent(&event);
+ if (!event_queue_.Insert(&event)) {
+ LOG(LS_WARNING) << "Playout queue full. Not logging ACM playout.";
+ }
}
void RtcEventLogImpl::LogBwePacketLossEvent(int32_t bitrate,
uint8_t fraction_loss,
int32_t total_packets) {
- rtc::CritScope lock(&crit_);
- rtclog::Event event;
- event.set_timestamp_us(clock_->TimeInMicroseconds());
- event.set_type(rtclog::Event::BWE_PACKET_LOSS_EVENT);
- auto bwe_event = event.mutable_bwe_packet_loss_event();
+ std::unique_ptr<rtclog::Event> event(new rtclog::Event());
+ event->set_timestamp_us(clock_->TimeInMicroseconds());
+ event->set_type(rtclog::Event::BWE_PACKET_LOSS_EVENT);
+ auto bwe_event = event->mutable_bwe_packet_loss_event();
bwe_event->set_bitrate(bitrate);
bwe_event->set_fraction_loss(fraction_loss);
bwe_event->set_total_packets(total_packets);
- HandleEvent(&event);
-}
-
-void RtcEventLogImpl::StopLoggingLocked() {
- if (currently_logging_) {
- currently_logging_ = false;
- // Create a LogEnd event
- rtclog::Event event;
- event.set_timestamp_us(clock_->TimeInMicroseconds());
- event.set_type(rtclog::Event::LOG_END);
- // Store the event and close the file
- RTC_DCHECK(file_->Open());
- StoreToFile(&event);
- file_->CloseFile();
- if (platform_file_ != rtc::kInvalidPlatformFileValue) {
- rtc::ClosePlatformFile(platform_file_);
- platform_file_ = rtc::kInvalidPlatformFileValue;
- }
- }
- RTC_DCHECK(!file_->Open());
- stream_.Clear();
-}
-
-void RtcEventLogImpl::HandleEvent(rtclog::Event* event) {
- if (currently_logging_) {
- if (clock_->TimeInMicroseconds() < start_time_us_ + duration_us_) {
- StoreToFile(event);
- return;
- }
- StopLoggingLocked();
- }
- AddRecentEvent(*event);
-}
-
-void RtcEventLogImpl::StoreToFile(rtclog::Event* event) {
- // Reuse the same object at every log event.
- if (stream_.stream_size() < 1) {
- stream_.add_stream();
- }
- RTC_DCHECK_EQ(stream_.stream_size(), 1);
- stream_.mutable_stream(0)->Swap(event);
- // TODO(terelius): Doesn't this create a new EventStream per event?
- // Is this guaranteed to work e.g. in future versions of protobuf?
- std::string dump_buffer;
- stream_.SerializeToString(&dump_buffer);
- file_->Write(dump_buffer.data(), dump_buffer.size());
-}
-
-void RtcEventLogImpl::AddRecentEvent(const rtclog::Event& event) {
- recent_log_events_.push_back(event);
- while (recent_log_events_.front().timestamp_us() <
- event.timestamp_us() - buffer_duration_us_) {
- if (IsConfigEvent(recent_log_events_.front())) {
- config_events_.push_back(recent_log_events_.front());
- }
- recent_log_events_.pop_front();
+ if (!event_queue_.Insert(&event)) {
+ LOG(LS_WARNING) << "BWE loss queue full. Not logging BWE update.";
}
}
@@ -516,8 +451,12 @@ bool RtcEventLog::ParseRtcEventLog(const std::string& file_name,
#endif // ENABLE_RTC_EVENT_LOG
// RtcEventLog member functions.
-std::unique_ptr<RtcEventLog> RtcEventLog::Create() {
- return std::unique_ptr<RtcEventLog>(new RtcEventLogImpl());
+std::unique_ptr<RtcEventLog> RtcEventLog::Create(const Clock* clock) {
+#ifdef ENABLE_RTC_EVENT_LOG
+ return std::unique_ptr<RtcEventLog>(new RtcEventLogImpl(clock));
+#else
+ return std::unique_ptr<RtcEventLog>(new RtcEventLogNullImpl());
+#endif // ENABLE_RTC_EVENT_LOG
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/rtc_event_log.h b/chromium/third_party/webrtc/call/rtc_event_log.h
index 518308bf2d2..bea57b01cd1 100644
--- a/chromium/third_party/webrtc/call/rtc_event_log.h
+++ b/chromium/third_party/webrtc/call/rtc_event_log.h
@@ -26,6 +26,7 @@ namespace rtclog {
class EventStream;
} // namespace rtclog
+class Clock;
class RtcEventLogImpl;
enum class MediaType;
@@ -36,30 +37,40 @@ class RtcEventLog {
public:
virtual ~RtcEventLog() {}
- static std::unique_ptr<RtcEventLog> Create();
+ // Factory method to create an RtcEventLog object.
+ static std::unique_ptr<RtcEventLog> Create(const Clock* clock);
- // Sets the time that events are stored in the internal event buffer
- // before the user calls StartLogging. The default is 10 000 000 us = 10 s
- virtual void SetBufferDuration(int64_t buffer_duration_us) = 0;
-
- // Starts logging for the specified duration to the specified file.
- // The logging will stop automatically after the specified duration.
+ // Starts logging a maximum of max_size_bytes bytes to the specified file.
// If the file already exists it will be overwritten.
- // If the file cannot be opened, the RtcEventLog will not start logging.
- virtual void StartLogging(const std::string& file_name, int duration_ms) = 0;
-
- // Starts logging until either the 10 minute timer runs out or the StopLogging
- // function is called. The RtcEventLog takes ownership of the supplied
- // rtc::PlatformFile.
- virtual bool StartLogging(rtc::PlatformFile log_file) = 0;
-
+ // If max_size_bytes <= 0, logging will be active until StopLogging is called.
+ // The function has no effect and returns false if we can't start a new log
+ // e.g. because we are already logging or the file cannot be opened.
+ virtual bool StartLogging(const std::string& file_name,
+ int64_t max_size_bytes) = 0;
+
+ // Same as above. The RtcEventLog takes ownership of the file if the call
+ // is successful, i.e. if it returns true.
+ virtual bool StartLogging(rtc::PlatformFile platform_file,
+ int64_t max_size_bytes) = 0;
+
+ // Deprecated. Pass an explicit file size limit.
+ bool StartLogging(const std::string& file_name) {
+ return StartLogging(file_name, 10000000);
+ }
+
+ // Deprecated. Pass an explicit file size limit.
+ bool StartLogging(rtc::PlatformFile platform_file) {
+ return StartLogging(platform_file, 10000000);
+ }
+
+ // Stops logging to file and waits until the thread has finished.
virtual void StopLogging() = 0;
- // Logs configuration information for webrtc::VideoReceiveStream
+ // Logs configuration information for webrtc::VideoReceiveStream.
virtual void LogVideoReceiveStreamConfig(
const webrtc::VideoReceiveStream::Config& config) = 0;
- // Logs configuration information for webrtc::VideoSendStream
+ // Logs configuration information for webrtc::VideoSendStream.
virtual void LogVideoSendStreamConfig(
const webrtc::VideoSendStream::Config& config) = 0;
@@ -76,7 +87,7 @@ class RtcEventLog {
const uint8_t* packet,
size_t length) = 0;
- // Logs an audio playout event
+ // Logs an audio playout event.
virtual void LogAudioPlayout(uint32_t ssrc) = 0;
// Logs a bitrate update from the bandwidth estimator based on packet loss.
@@ -86,6 +97,11 @@ class RtcEventLog {
// Reads an RtcEventLog file and returns true when reading was successful.
// The result is stored in the given EventStream object.
+ // The order of the events in the EventStream is implementation defined.
+ // The current implementation writes a LOG_START event, then the old
+ // configurations, then the remaining events in timestamp order and finally
+ // a LOG_END event. However, this might change without further notice.
+ // TODO(terelius): Change result type to a vector?
static bool ParseRtcEventLog(const std::string& file_name,
rtclog::EventStream* result);
};
diff --git a/chromium/third_party/webrtc/call/rtc_event_log2rtp_dump.cc b/chromium/third_party/webrtc/call/rtc_event_log2rtp_dump.cc
index ef0be9a1b73..5733cfa31d2 100644
--- a/chromium/third_party/webrtc/call/rtc_event_log2rtp_dump.cc
+++ b/chromium/third_party/webrtc/call/rtc_event_log2rtp_dump.cc
@@ -15,17 +15,12 @@
#include "gflags/gflags.h"
#include "webrtc/base/checks.h"
+#include "webrtc/call.h"
#include "webrtc/call/rtc_event_log.h"
+#include "webrtc/call/rtc_event_log_parser.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/test/rtp_file_writer.h"
-// Files generated at build-time by the protobuf compiler.
-#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
-#include "external/webrtc/webrtc/call/rtc_event_log.pb.h"
-#else
-#include "webrtc/call/rtc_event_log.pb.h"
-#endif
-
namespace {
DEFINE_bool(noaudio,
@@ -94,8 +89,8 @@ int main(int argc, char* argv[]) {
RTC_CHECK(ParseSsrc(FLAGS_ssrc, &ssrc_filter))
<< "Flag verification has failed.";
- webrtc::rtclog::EventStream event_stream;
- if (!webrtc::RtcEventLog::ParseRtcEventLog(input_file, &event_stream)) {
+ webrtc::ParsedRtcEventLog parsed_stream;
+ if (!parsed_stream.ParseFile(input_file)) {
std::cerr << "Error while parsing input file: " << input_file << std::endl;
return -1;
}
@@ -110,94 +105,78 @@ int main(int argc, char* argv[]) {
return -1;
}
- std::cout << "Found " << event_stream.stream_size()
+ std::cout << "Found " << parsed_stream.GetNumberOfEvents()
<< " events in the input file." << std::endl;
int rtp_counter = 0, rtcp_counter = 0;
bool header_only = false;
- // TODO(ivoc): This can be refactored once the packet interpretation
- // functions are finished.
- for (int i = 0; i < event_stream.stream_size(); i++) {
- const webrtc::rtclog::Event& event = event_stream.stream(i);
- if (!FLAGS_nortp && event.has_type() && event.type() == event.RTP_EVENT) {
- if (event.has_timestamp_us() && event.has_rtp_packet() &&
- event.rtp_packet().has_header() &&
- event.rtp_packet().header().size() >= 12 &&
- event.rtp_packet().has_packet_length() &&
- event.rtp_packet().has_type()) {
- const webrtc::rtclog::RtpPacket& rtp_packet = event.rtp_packet();
- if (FLAGS_noaudio && rtp_packet.type() == webrtc::rtclog::AUDIO)
- continue;
- if (FLAGS_novideo && rtp_packet.type() == webrtc::rtclog::VIDEO)
- continue;
- if (FLAGS_nodata && rtp_packet.type() == webrtc::rtclog::DATA)
+ for (size_t i = 0; i < parsed_stream.GetNumberOfEvents(); i++) {
+ // The parsed_stream will assert if the protobuf event is missing
+ // some required fields and we attempt to access them. We could consider
+ // a softer failure option, but it does not seem useful to generate
+ // RTP dumps based on broken event logs.
+ if (!FLAGS_nortp &&
+ parsed_stream.GetEventType(i) == webrtc::ParsedRtcEventLog::RTP_EVENT) {
+ webrtc::test::RtpPacket packet;
+ webrtc::PacketDirection direction;
+ webrtc::MediaType media_type;
+ parsed_stream.GetRtpHeader(i, &direction, &media_type, packet.data,
+ &packet.length, &packet.original_length);
+ if (packet.original_length > packet.length)
+ header_only = true;
+ packet.time_ms = parsed_stream.GetTimestamp(i) / 1000;
+
+ // TODO(terelius): Maybe add a flag to dump outgoing traffic instead?
+ if (direction == webrtc::kOutgoingPacket)
+ continue;
+ if (FLAGS_noaudio && media_type == webrtc::MediaType::AUDIO)
+ continue;
+ if (FLAGS_novideo && media_type == webrtc::MediaType::VIDEO)
+ continue;
+ if (FLAGS_nodata && media_type == webrtc::MediaType::DATA)
+ continue;
+ if (!FLAGS_ssrc.empty()) {
+ const uint32_t packet_ssrc =
+ webrtc::ByteReader<uint32_t>::ReadBigEndian(
+ reinterpret_cast<const uint8_t*>(packet.data + 8));
+ if (packet_ssrc != ssrc_filter)
continue;
- if (!FLAGS_ssrc.empty()) {
- const uint32_t packet_ssrc =
- webrtc::ByteReader<uint32_t>::ReadBigEndian(
- reinterpret_cast<const uint8_t*>(rtp_packet.header().data() +
- 8));
- if (packet_ssrc != ssrc_filter)
- continue;
- }
-
- webrtc::test::RtpPacket packet;
- packet.length = rtp_packet.header().size();
- if (packet.length > packet.kMaxPacketBufferSize) {
- std::cout << "Skipping packet with size " << packet.length
- << ", the maximum supported size is "
- << packet.kMaxPacketBufferSize << std::endl;
- continue;
- }
- packet.original_length = rtp_packet.packet_length();
- if (packet.original_length > packet.length)
- header_only = true;
- packet.time_ms = event.timestamp_us() / 1000;
- memcpy(packet.data, rtp_packet.header().data(), packet.length);
- rtp_writer->WritePacket(&packet);
- rtp_counter++;
- } else {
- std::cout << "Skipping malformed event." << std::endl;
}
+
+ rtp_writer->WritePacket(&packet);
+ rtp_counter++;
}
- if (!FLAGS_nortcp && event.has_type() && event.type() == event.RTCP_EVENT) {
- if (event.has_timestamp_us() && event.has_rtcp_packet() &&
- event.rtcp_packet().has_type() &&
- event.rtcp_packet().has_packet_data() &&
- event.rtcp_packet().packet_data().size() > 0) {
- const webrtc::rtclog::RtcpPacket& rtcp_packet = event.rtcp_packet();
- if (FLAGS_noaudio && rtcp_packet.type() == webrtc::rtclog::AUDIO)
+ if (!FLAGS_nortcp &&
+ parsed_stream.GetEventType(i) ==
+ webrtc::ParsedRtcEventLog::RTCP_EVENT) {
+ webrtc::test::RtpPacket packet;
+ webrtc::PacketDirection direction;
+ webrtc::MediaType media_type;
+ parsed_stream.GetRtcpPacket(i, &direction, &media_type, packet.data,
+ &packet.length);
+ // For RTCP packets the original_length should be set to 0 in the
+ // RTPdump format.
+ packet.original_length = 0;
+ packet.time_ms = parsed_stream.GetTimestamp(i) / 1000;
+
+ // TODO(terelius): Maybe add a flag to dump outgoing traffic instead?
+ if (direction == webrtc::kOutgoingPacket)
+ continue;
+ if (FLAGS_noaudio && media_type == webrtc::MediaType::AUDIO)
+ continue;
+ if (FLAGS_novideo && media_type == webrtc::MediaType::VIDEO)
+ continue;
+ if (FLAGS_nodata && media_type == webrtc::MediaType::DATA)
+ continue;
+ if (!FLAGS_ssrc.empty()) {
+ const uint32_t packet_ssrc =
+ webrtc::ByteReader<uint32_t>::ReadBigEndian(
+ reinterpret_cast<const uint8_t*>(packet.data + 4));
+ if (packet_ssrc != ssrc_filter)
continue;
- if (FLAGS_novideo && rtcp_packet.type() == webrtc::rtclog::VIDEO)
- continue;
- if (FLAGS_nodata && rtcp_packet.type() == webrtc::rtclog::DATA)
- continue;
- if (!FLAGS_ssrc.empty()) {
- const uint32_t packet_ssrc =
- webrtc::ByteReader<uint32_t>::ReadBigEndian(
- reinterpret_cast<const uint8_t*>(
- rtcp_packet.packet_data().data() + 4));
- if (packet_ssrc != ssrc_filter)
- continue;
- }
-
- webrtc::test::RtpPacket packet;
- packet.length = rtcp_packet.packet_data().size();
- if (packet.length > packet.kMaxPacketBufferSize) {
- std::cout << "Skipping packet with size " << packet.length
- << ", the maximum supported size is "
- << packet.kMaxPacketBufferSize << std::endl;
- continue;
- }
- // For RTCP packets the original_length should be set to 0 in the
- // RTPdump format.
- packet.original_length = 0;
- packet.time_ms = event.timestamp_us() / 1000;
- memcpy(packet.data, rtcp_packet.packet_data().data(), packet.length);
- rtp_writer->WritePacket(&packet);
- rtcp_counter++;
- } else {
- std::cout << "Skipping malformed event." << std::endl;
}
+
+ rtp_writer->WritePacket(&packet);
+ rtcp_counter++;
}
}
std::cout << "Wrote " << rtp_counter << (header_only ? " header-only" : "")
diff --git a/chromium/third_party/webrtc/call/rtc_event_log_helper_thread.cc b/chromium/third_party/webrtc/call/rtc_event_log_helper_thread.cc
new file mode 100644
index 00000000000..a9aa85144f0
--- /dev/null
+++ b/chromium/third_party/webrtc/call/rtc_event_log_helper_thread.cc
@@ -0,0 +1,285 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/call/rtc_event_log_helper_thread.h"
+
+#include <algorithm>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/system_wrappers/include/logging.h"
+
+#ifdef ENABLE_RTC_EVENT_LOG
+
+namespace webrtc {
+
+namespace {
+const int kEventsInHistory = 10000;
+
+bool IsConfigEvent(const rtclog::Event& event) {
+ rtclog::Event_EventType event_type = event.type();
+ return event_type == rtclog::Event::VIDEO_RECEIVER_CONFIG_EVENT ||
+ event_type == rtclog::Event::VIDEO_SENDER_CONFIG_EVENT ||
+ event_type == rtclog::Event::AUDIO_RECEIVER_CONFIG_EVENT ||
+ event_type == rtclog::Event::AUDIO_SENDER_CONFIG_EVENT;
+}
+} // namespace
+
+// RtcEventLogImpl member functions.
+RtcEventLogHelperThread::RtcEventLogHelperThread(
+ SwapQueue<ControlMessage>* message_queue,
+ SwapQueue<std::unique_ptr<rtclog::Event>>* event_queue,
+ rtc::Event* wake_up,
+ rtc::Event* stopped,
+ const Clock* const clock)
+ : message_queue_(message_queue),
+ event_queue_(event_queue),
+ history_(kEventsInHistory),
+ config_history_(),
+ file_(FileWrapper::Create()),
+ thread_(&ThreadOutputFunction, this, "RtcEventLog thread"),
+ max_size_bytes_(std::numeric_limits<int64_t>::max()),
+ written_bytes_(0),
+ start_time_(0),
+ stop_time_(std::numeric_limits<int64_t>::max()),
+ has_recent_event_(false),
+ most_recent_event_(),
+ output_string_(),
+ wake_up_(wake_up),
+ stopped_(stopped),
+ clock_(clock) {
+ RTC_DCHECK(message_queue_);
+ RTC_DCHECK(event_queue_);
+ RTC_DCHECK(wake_up_);
+ RTC_DCHECK(stopped_);
+ RTC_DCHECK(clock_);
+ thread_.Start();
+}
+
+RtcEventLogHelperThread::~RtcEventLogHelperThread() {
+ ControlMessage message;
+ message.message_type = ControlMessage::TERMINATE_THREAD;
+ message.stop_time = clock_->TimeInMicroseconds();
+ while (!message_queue_->Insert(&message)) {
+ // We can't destroy the event log until we have stopped the thread,
+ // so clear the message queue and try again. Note that if we clear
+ // any STOP_FILE events, then the threads calling StopLogging would likely
+ // wait indefinitely. However, there should not be any such calls as we
+ // are executing the destructor.
+ LOG(LS_WARNING) << "Clearing message queue to terminate thread.";
+ message_queue_->Clear();
+ }
+ wake_up_->Set(); // Wake up the output thread.
+ thread_.Stop(); // Wait for the thread to terminate.
+}
+
+bool RtcEventLogHelperThread::AppendEventToString(rtclog::Event* event) {
+ rtclog::EventStream event_stream;
+ event_stream.add_stream();
+ event_stream.mutable_stream(0)->Swap(event);
+ // We create a new event stream per event but because of the way protobufs
+ // are encoded, events can be merged by concatenating them. Therefore,
+ // it will look like a single stream when we read it back from file.
+ bool stop = true;
+ if (written_bytes_ + static_cast<int64_t>(output_string_.size()) +
+ event_stream.ByteSize() <=
+ max_size_bytes_) {
+ event_stream.AppendToString(&output_string_);
+ stop = false;
+ }
+ // Swap the event back so that we don't mix event types in the queues.
+ event_stream.mutable_stream(0)->Swap(event);
+ return stop;
+}
+
+void RtcEventLogHelperThread::LogToMemory() {
+ RTC_DCHECK(!file_->Open());
+
+ // Process each event earlier than the current time and append it to the
+ // appropriate history_.
+ int64_t current_time = clock_->TimeInMicroseconds();
+ if (!has_recent_event_) {
+ has_recent_event_ = event_queue_->Remove(&most_recent_event_);
+ }
+ while (has_recent_event_ &&
+ most_recent_event_->timestamp_us() <= current_time) {
+ if (IsConfigEvent(*most_recent_event_)) {
+ config_history_.push_back(std::move(most_recent_event_));
+ } else {
+ history_.push_back(std::move(most_recent_event_));
+ }
+ has_recent_event_ = event_queue_->Remove(&most_recent_event_);
+ }
+}
+
+void RtcEventLogHelperThread::StartLogFile() {
+ RTC_DCHECK(file_->Open());
+ bool stop = false;
+ output_string_.clear();
+
+ // Create and serialize the LOG_START event.
+ rtclog::Event start_event;
+ start_event.set_timestamp_us(start_time_);
+ start_event.set_type(rtclog::Event::LOG_START);
+ AppendEventToString(&start_event);
+
+ // Serialize the config information for all old streams.
+ for (auto& event : config_history_) {
+ AppendEventToString(event.get());
+ }
+
+ // Serialize the events in the event queue.
+ while (!history_.empty() && !stop) {
+ stop = AppendEventToString(history_.front().get());
+ if (!stop) {
+ history_.pop_front();
+ }
+ }
+
+ // Write to file.
+ file_->Write(output_string_.data(), output_string_.size());
+ written_bytes_ += output_string_.size();
+
+ // Free the allocated memory since we probably won't need this amount of
+ // space again.
+ output_string_.clear();
+ output_string_.shrink_to_fit();
+
+ if (stop) {
+ RTC_DCHECK(file_->Open());
+ StopLogFile();
+ }
+}
+
+void RtcEventLogHelperThread::LogToFile() {
+ RTC_DCHECK(file_->Open());
+ output_string_.clear();
+
+ // Append each event older than both the current time and the stop time
+ // to the output_string_.
+ int64_t current_time = clock_->TimeInMicroseconds();
+ int64_t time_limit = std::min(current_time, stop_time_);
+ if (!has_recent_event_) {
+ has_recent_event_ = event_queue_->Remove(&most_recent_event_);
+ }
+ bool stop = false;
+ while (!stop && has_recent_event_ &&
+ most_recent_event_->timestamp_us() <= time_limit) {
+ stop = AppendEventToString(most_recent_event_.get());
+ if (!stop) {
+ if (IsConfigEvent(*most_recent_event_)) {
+ config_history_.push_back(std::move(most_recent_event_));
+ }
+ has_recent_event_ = event_queue_->Remove(&most_recent_event_);
+ }
+ }
+
+ // Write string to file.
+ file_->Write(output_string_.data(), output_string_.size());
+ written_bytes_ += output_string_.size();
+
+ if (!file_->Open()) {
+ LOG(LS_WARNING) << "WebRTC event log file closed by FileWrapper.";
+ }
+
+ // We want to stop logging if we have reached the file size limit. We also
+ // want to stop logging if the remaining events are more recent than the
+ // time limit, or in other words if we have terminated the loop despite
+ // having more events in the queue.
+ if ((has_recent_event_ && most_recent_event_->timestamp_us() > stop_time_) ||
+ stop) {
+ RTC_DCHECK(file_->Open());
+ StopLogFile();
+ }
+}
+
+void RtcEventLogHelperThread::StopLogFile() {
+ RTC_DCHECK(file_->Open());
+ output_string_.clear();
+
+ rtclog::Event end_event;
+ end_event.set_timestamp_us(stop_time_);
+ end_event.set_type(rtclog::Event::LOG_END);
+ AppendEventToString(&end_event);
+
+ if (written_bytes_ + static_cast<int64_t>(output_string_.size()) <=
+ max_size_bytes_) {
+ file_->Write(output_string_.data(), output_string_.size());
+ written_bytes_ += output_string_.size();
+ }
+
+ max_size_bytes_ = std::numeric_limits<int64_t>::max();
+ written_bytes_ = 0;
+ start_time_ = 0;
+ stop_time_ = std::numeric_limits<int64_t>::max();
+ output_string_.clear();
+ file_->CloseFile();
+ RTC_DCHECK(!file_->Open());
+}
+
+void RtcEventLogHelperThread::WriteLog() {
+ ControlMessage message;
+
+ while (true) {
+ // Process control messages.
+ while (message_queue_->Remove(&message)) {
+ switch (message.message_type) {
+ case ControlMessage::START_FILE:
+ if (!file_->Open()) {
+ max_size_bytes_ = message.max_size_bytes;
+ start_time_ = message.start_time;
+ stop_time_ = message.stop_time;
+ file_.swap(message.file);
+ StartLogFile();
+ } else {
+ // Already started. Ignore message and close file handle.
+ message.file->CloseFile();
+ }
+ break;
+ case ControlMessage::STOP_FILE:
+ if (file_->Open()) {
+ stop_time_ = message.stop_time;
+ LogToFile(); // Log remaining events from message queues.
+ }
+ // LogToFile might stop on it's own so we need to recheck the state.
+ if (file_->Open()) {
+ StopLogFile();
+ }
+ stopped_->Set();
+ break;
+ case ControlMessage::TERMINATE_THREAD:
+ if (file_->Open()) {
+ StopLogFile();
+ }
+ return;
+ }
+ }
+
+ // Write events to file or memory
+ if (file_->Open()) {
+ LogToFile();
+ } else {
+ LogToMemory();
+ }
+
+ // Accumulate a new batch of events instead of processing them one at a
+ // time.
+ wake_up_->Wait(50);
+ }
+}
+
+bool RtcEventLogHelperThread::ThreadOutputFunction(void* obj) {
+ RtcEventLogHelperThread* helper = static_cast<RtcEventLogHelperThread*>(obj);
+ helper->WriteLog();
+ return false;
+}
+
+} // namespace webrtc
+
+#endif // ENABLE_RTC_EVENT_LOG
diff --git a/chromium/third_party/webrtc/call/rtc_event_log_helper_thread.h b/chromium/third_party/webrtc/call/rtc_event_log_helper_thread.h
new file mode 100644
index 00000000000..60ed912b651
--- /dev/null
+++ b/chromium/third_party/webrtc/call/rtc_event_log_helper_thread.h
@@ -0,0 +1,123 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_CALL_RTC_EVENT_LOG_HELPER_THREAD_H_
+#define WEBRTC_CALL_RTC_EVENT_LOG_HELPER_THREAD_H_
+
+#include <limits>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/swap_queue.h"
+#include "webrtc/call/ringbuffer.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/system_wrappers/include/file_wrapper.h"
+
+#ifdef ENABLE_RTC_EVENT_LOG
+// Files generated at build-time by the protobuf compiler.
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
+#include "external/webrtc/webrtc/call/rtc_event_log.pb.h"
+#else
+#include "webrtc/call/rtc_event_log.pb.h"
+#endif
+#endif
+
+#ifdef ENABLE_RTC_EVENT_LOG
+
+namespace webrtc {
+
+class RtcEventLogHelperThread final {
+ public:
+ struct ControlMessage {
+ ControlMessage()
+ : message_type(STOP_FILE),
+ file(nullptr),
+ max_size_bytes(0),
+ start_time(0),
+ stop_time(0) {}
+ enum { START_FILE, STOP_FILE, TERMINATE_THREAD } message_type;
+
+ std::unique_ptr<FileWrapper> file; // Only used with START_FILE.
+ int64_t max_size_bytes; // Only used with START_FILE.
+ int64_t start_time; // Only used with START_FILE.
+ int64_t stop_time; // Used with all 3 message types.
+
+ friend void swap(ControlMessage& lhs, ControlMessage& rhs) {
+ using std::swap;
+ swap(lhs.message_type, rhs.message_type);
+ lhs.file.swap(rhs.file);
+ swap(lhs.max_size_bytes, rhs.max_size_bytes);
+ swap(lhs.start_time, rhs.start_time);
+ swap(lhs.stop_time, rhs.stop_time);
+ }
+ };
+
+ RtcEventLogHelperThread(
+ SwapQueue<ControlMessage>* message_queue,
+ SwapQueue<std::unique_ptr<rtclog::Event>>* event_queue,
+ rtc::Event* wake_up,
+ rtc::Event* file_finished,
+ const Clock* const clock);
+ ~RtcEventLogHelperThread();
+
+ private:
+ static bool ThreadOutputFunction(void* obj);
+
+ void TerminateThread();
+ bool AppendEventToString(rtclog::Event* event);
+ void AppendEventToHistory(const rtclog::Event& event);
+ void LogToMemory();
+ void StartLogFile();
+ void LogToFile();
+ void StopLogFile();
+ void WriteLog();
+
+ // Message queues for passing events to the logging thread.
+ SwapQueue<ControlMessage>* message_queue_;
+ SwapQueue<std::unique_ptr<rtclog::Event>>* event_queue_;
+
+ // History containing the most recent events (~ 10 s).
+ RingBuffer<std::unique_ptr<rtclog::Event>> history_;
+
+ // History containing all past configuration events.
+ std::vector<std::unique_ptr<rtclog::Event>> config_history_;
+
+ std::unique_ptr<FileWrapper> file_;
+ rtc::PlatformThread thread_;
+
+ int64_t max_size_bytes_;
+ int64_t written_bytes_;
+ int64_t start_time_;
+ int64_t stop_time_;
+
+ bool has_recent_event_;
+ std::unique_ptr<rtclog::Event> most_recent_event_;
+
+ // Temporary space for serializing profobuf data.
+ std::string output_string_;
+
+ rtc::Event* wake_up_;
+ rtc::Event* stopped_;
+
+ const Clock* const clock_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RtcEventLogHelperThread);
+};
+
+} // namespace webrtc
+
+#endif // ENABLE_RTC_EVENT_LOG
+
+#endif // WEBRTC_CALL_RTC_EVENT_LOG_HELPER_THREAD_H_
diff --git a/chromium/third_party/webrtc/call/rtc_event_log_parser.cc b/chromium/third_party/webrtc/call/rtc_event_log_parser.cc
new file mode 100644
index 00000000000..c49b9b47b43
--- /dev/null
+++ b/chromium/third_party/webrtc/call/rtc_event_log_parser.cc
@@ -0,0 +1,394 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/call/rtc_event_log_parser.h"
+
+#include <string.h>
+
+#include <fstream>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/call.h"
+#include "webrtc/call/rtc_event_log.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/include/file_wrapper.h"
+
+namespace webrtc {
+
+namespace {
+MediaType GetRuntimeMediaType(rtclog::MediaType media_type) {
+ switch (media_type) {
+ case rtclog::MediaType::ANY:
+ return MediaType::ANY;
+ case rtclog::MediaType::AUDIO:
+ return MediaType::AUDIO;
+ case rtclog::MediaType::VIDEO:
+ return MediaType::VIDEO;
+ case rtclog::MediaType::DATA:
+ return MediaType::DATA;
+ }
+ RTC_NOTREACHED();
+ return MediaType::ANY;
+}
+
+RtcpMode GetRuntimeRtcpMode(rtclog::VideoReceiveConfig::RtcpMode rtcp_mode) {
+ switch (rtcp_mode) {
+ case rtclog::VideoReceiveConfig::RTCP_COMPOUND:
+ return RtcpMode::kCompound;
+ case rtclog::VideoReceiveConfig::RTCP_REDUCEDSIZE:
+ return RtcpMode::kReducedSize;
+ }
+ RTC_NOTREACHED();
+ return RtcpMode::kOff;
+}
+
+ParsedRtcEventLog::EventType GetRuntimeEventType(
+ rtclog::Event::EventType event_type) {
+ switch (event_type) {
+ case rtclog::Event::UNKNOWN_EVENT:
+ return ParsedRtcEventLog::EventType::UNKNOWN_EVENT;
+ case rtclog::Event::LOG_START:
+ return ParsedRtcEventLog::EventType::LOG_START;
+ case rtclog::Event::LOG_END:
+ return ParsedRtcEventLog::EventType::LOG_END;
+ case rtclog::Event::RTP_EVENT:
+ return ParsedRtcEventLog::EventType::RTP_EVENT;
+ case rtclog::Event::RTCP_EVENT:
+ return ParsedRtcEventLog::EventType::RTCP_EVENT;
+ case rtclog::Event::AUDIO_PLAYOUT_EVENT:
+ return ParsedRtcEventLog::EventType::AUDIO_PLAYOUT_EVENT;
+ case rtclog::Event::BWE_PACKET_LOSS_EVENT:
+ return ParsedRtcEventLog::EventType::BWE_PACKET_LOSS_EVENT;
+ case rtclog::Event::BWE_PACKET_DELAY_EVENT:
+ return ParsedRtcEventLog::EventType::BWE_PACKET_DELAY_EVENT;
+ case rtclog::Event::VIDEO_RECEIVER_CONFIG_EVENT:
+ return ParsedRtcEventLog::EventType::VIDEO_RECEIVER_CONFIG_EVENT;
+ case rtclog::Event::VIDEO_SENDER_CONFIG_EVENT:
+ return ParsedRtcEventLog::EventType::VIDEO_SENDER_CONFIG_EVENT;
+ case rtclog::Event::AUDIO_RECEIVER_CONFIG_EVENT:
+ return ParsedRtcEventLog::EventType::AUDIO_RECEIVER_CONFIG_EVENT;
+ case rtclog::Event::AUDIO_SENDER_CONFIG_EVENT:
+ return ParsedRtcEventLog::EventType::AUDIO_SENDER_CONFIG_EVENT;
+ }
+ RTC_NOTREACHED();
+ return ParsedRtcEventLog::EventType::UNKNOWN_EVENT;
+}
+
+bool ParseVarInt(std::FILE* file, uint64_t* varint, size_t* bytes_read) {
+ uint8_t byte;
+ *varint = 0;
+ for (*bytes_read = 0; *bytes_read < 10 && fread(&byte, 1, 1, file) == 1;
+ ++(*bytes_read)) {
+ // The most significant bit of each byte is 0 if it is the last byte in
+ // the varint and 1 otherwise. Thus, we take the 7 least significant bits
+ // of each byte and shift them 7 bits for each byte read previously to get
+ // the (unsigned) integer.
+ *varint |= static_cast<uint64_t>(byte & 0x7F) << (7 * *bytes_read);
+ if ((byte & 0x80) == 0) {
+ return true;
+ }
+ }
+ return false;
+}
+
+} // namespace
+
+bool ParsedRtcEventLog::ParseFile(const std::string& filename) {
+ stream_.clear();
+ const size_t kMaxEventSize = (1u << 16) - 1;
+ char tmp_buffer[kMaxEventSize];
+
+ std::FILE* file = fopen(filename.c_str(), "rb");
+ if (!file) {
+ LOG(LS_WARNING) << "Could not open file for reading.";
+ return false;
+ }
+
+ while (1) {
+ // Peek at the next message tag. The tag number is defined as
+ // (fieldnumber << 3) | wire_type. In our case, the field number is
+ // supposed to be 1 and the wire type for an length-delimited field is 2.
+ const uint64_t kExpectedTag = (1 << 3) | 2;
+ uint64_t tag;
+ size_t bytes_read;
+ if (!ParseVarInt(file, &tag, &bytes_read) || tag != kExpectedTag) {
+ fclose(file);
+ if (bytes_read == 0) {
+ return true; // Reached end of file.
+ }
+ LOG(LS_WARNING) << "Missing field tag from beginning of protobuf event.";
+ return false;
+ }
+
+ // Peek at the length field.
+ uint64_t message_length;
+ if (!ParseVarInt(file, &message_length, &bytes_read)) {
+ LOG(LS_WARNING) << "Missing message length after protobuf field tag.";
+ fclose(file);
+ return false;
+ } else if (message_length > kMaxEventSize) {
+ LOG(LS_WARNING) << "Protobuf message length is too large.";
+ fclose(file);
+ return false;
+ }
+
+ if (fread(tmp_buffer, 1, message_length, file) != message_length) {
+ LOG(LS_WARNING) << "Failed to read protobuf message from file.";
+ fclose(file);
+ return false;
+ }
+
+ rtclog::Event event;
+ if (!event.ParseFromArray(tmp_buffer, message_length)) {
+ LOG(LS_WARNING) << "Failed to parse protobuf message.";
+ fclose(file);
+ return false;
+ }
+ stream_.push_back(event);
+ }
+}
+
+size_t ParsedRtcEventLog::GetNumberOfEvents() const {
+ return stream_.size();
+}
+
+int64_t ParsedRtcEventLog::GetTimestamp(size_t index) const {
+ RTC_CHECK_LT(index, GetNumberOfEvents());
+ const rtclog::Event& event = stream_[index];
+ RTC_CHECK(event.has_timestamp_us());
+ return event.timestamp_us();
+}
+
+ParsedRtcEventLog::EventType ParsedRtcEventLog::GetEventType(
+ size_t index) const {
+ RTC_CHECK_LT(index, GetNumberOfEvents());
+ const rtclog::Event& event = stream_[index];
+ RTC_CHECK(event.has_type());
+ return GetRuntimeEventType(event.type());
+}
+
+// The header must have space for at least IP_PACKET_SIZE bytes.
+void ParsedRtcEventLog::GetRtpHeader(size_t index,
+ PacketDirection* incoming,
+ MediaType* media_type,
+ uint8_t* header,
+ size_t* header_length,
+ size_t* total_length) const {
+ RTC_CHECK_LT(index, GetNumberOfEvents());
+ const rtclog::Event& event = stream_[index];
+ RTC_CHECK(event.has_type());
+ RTC_CHECK_EQ(event.type(), rtclog::Event::RTP_EVENT);
+ RTC_CHECK(event.has_rtp_packet());
+ const rtclog::RtpPacket& rtp_packet = event.rtp_packet();
+ // Get direction of packet.
+ RTC_CHECK(rtp_packet.has_incoming());
+ if (incoming != nullptr) {
+ *incoming = rtp_packet.incoming() ? kIncomingPacket : kOutgoingPacket;
+ }
+ // Get media type.
+ RTC_CHECK(rtp_packet.has_type());
+ if (media_type != nullptr) {
+ *media_type = GetRuntimeMediaType(rtp_packet.type());
+ }
+ // Get packet length.
+ RTC_CHECK(rtp_packet.has_packet_length());
+ if (total_length != nullptr) {
+ *total_length = rtp_packet.packet_length();
+ }
+ // Get header length.
+ RTC_CHECK(rtp_packet.has_header());
+ if (header_length != nullptr) {
+ *header_length = rtp_packet.header().size();
+ }
+ // Get header contents.
+ if (header != nullptr) {
+ const size_t kMinRtpHeaderSize = 12;
+ RTC_CHECK_GE(rtp_packet.header().size(), kMinRtpHeaderSize);
+ RTC_CHECK_LE(rtp_packet.header().size(),
+ static_cast<size_t>(IP_PACKET_SIZE));
+ memcpy(header, rtp_packet.header().data(), rtp_packet.header().size());
+ }
+}
+
+// The packet must have space for at least IP_PACKET_SIZE bytes.
+void ParsedRtcEventLog::GetRtcpPacket(size_t index,
+ PacketDirection* incoming,
+ MediaType* media_type,
+ uint8_t* packet,
+ size_t* length) const {
+ RTC_CHECK_LT(index, GetNumberOfEvents());
+ const rtclog::Event& event = stream_[index];
+ RTC_CHECK(event.has_type());
+ RTC_CHECK_EQ(event.type(), rtclog::Event::RTCP_EVENT);
+ RTC_CHECK(event.has_rtcp_packet());
+ const rtclog::RtcpPacket& rtcp_packet = event.rtcp_packet();
+ // Get direction of packet.
+ RTC_CHECK(rtcp_packet.has_incoming());
+ if (incoming != nullptr) {
+ *incoming = rtcp_packet.incoming() ? kIncomingPacket : kOutgoingPacket;
+ }
+ // Get media type.
+ RTC_CHECK(rtcp_packet.has_type());
+ if (media_type != nullptr) {
+ *media_type = GetRuntimeMediaType(rtcp_packet.type());
+ }
+ // Get packet length.
+ RTC_CHECK(rtcp_packet.has_packet_data());
+ if (length != nullptr) {
+ *length = rtcp_packet.packet_data().size();
+ }
+ // Get packet contents.
+ if (packet != nullptr) {
+ RTC_CHECK_LE(rtcp_packet.packet_data().size(),
+ static_cast<unsigned>(IP_PACKET_SIZE));
+ memcpy(packet, rtcp_packet.packet_data().data(),
+ rtcp_packet.packet_data().size());
+ }
+}
+
+void ParsedRtcEventLog::GetVideoReceiveConfig(
+ size_t index,
+ VideoReceiveStream::Config* config) const {
+ RTC_CHECK_LT(index, GetNumberOfEvents());
+ const rtclog::Event& event = stream_[index];
+ RTC_CHECK(config != nullptr);
+ RTC_CHECK(event.has_type());
+ RTC_CHECK_EQ(event.type(), rtclog::Event::VIDEO_RECEIVER_CONFIG_EVENT);
+ RTC_CHECK(event.has_video_receiver_config());
+ const rtclog::VideoReceiveConfig& receiver_config =
+ event.video_receiver_config();
+ // Get SSRCs.
+ RTC_CHECK(receiver_config.has_remote_ssrc());
+ config->rtp.remote_ssrc = receiver_config.remote_ssrc();
+ RTC_CHECK(receiver_config.has_local_ssrc());
+ config->rtp.local_ssrc = receiver_config.local_ssrc();
+ // Get RTCP settings.
+ RTC_CHECK(receiver_config.has_rtcp_mode());
+ config->rtp.rtcp_mode = GetRuntimeRtcpMode(receiver_config.rtcp_mode());
+ RTC_CHECK(receiver_config.has_remb());
+ config->rtp.remb = receiver_config.remb();
+ // Get RTX map.
+ config->rtp.rtx.clear();
+ for (int i = 0; i < receiver_config.rtx_map_size(); i++) {
+ const rtclog::RtxMap& map = receiver_config.rtx_map(i);
+ RTC_CHECK(map.has_payload_type());
+ RTC_CHECK(map.has_config());
+ RTC_CHECK(map.config().has_rtx_ssrc());
+ RTC_CHECK(map.config().has_rtx_payload_type());
+ webrtc::VideoReceiveStream::Config::Rtp::Rtx rtx_pair;
+ rtx_pair.ssrc = map.config().rtx_ssrc();
+ rtx_pair.payload_type = map.config().rtx_payload_type();
+ config->rtp.rtx.insert(std::make_pair(map.payload_type(), rtx_pair));
+ }
+ // Get header extensions.
+ config->rtp.extensions.clear();
+ for (int i = 0; i < receiver_config.header_extensions_size(); i++) {
+ RTC_CHECK(receiver_config.header_extensions(i).has_name());
+ RTC_CHECK(receiver_config.header_extensions(i).has_id());
+ const std::string& name = receiver_config.header_extensions(i).name();
+ int id = receiver_config.header_extensions(i).id();
+ config->rtp.extensions.push_back(RtpExtension(name, id));
+ }
+ // Get decoders.
+ config->decoders.clear();
+ for (int i = 0; i < receiver_config.decoders_size(); i++) {
+ RTC_CHECK(receiver_config.decoders(i).has_name());
+ RTC_CHECK(receiver_config.decoders(i).has_payload_type());
+ VideoReceiveStream::Decoder decoder;
+ decoder.payload_name = receiver_config.decoders(i).name();
+ decoder.payload_type = receiver_config.decoders(i).payload_type();
+ config->decoders.push_back(decoder);
+ }
+}
+
+void ParsedRtcEventLog::GetVideoSendConfig(
+ size_t index,
+ VideoSendStream::Config* config) const {
+ RTC_CHECK_LT(index, GetNumberOfEvents());
+ const rtclog::Event& event = stream_[index];
+ RTC_CHECK(config != nullptr);
+ RTC_CHECK(event.has_type());
+ RTC_CHECK_EQ(event.type(), rtclog::Event::VIDEO_SENDER_CONFIG_EVENT);
+ RTC_CHECK(event.has_video_sender_config());
+ const rtclog::VideoSendConfig& sender_config = event.video_sender_config();
+ // Get SSRCs.
+ config->rtp.ssrcs.clear();
+ for (int i = 0; i < sender_config.ssrcs_size(); i++) {
+ config->rtp.ssrcs.push_back(sender_config.ssrcs(i));
+ }
+ // Get header extensions.
+ config->rtp.extensions.clear();
+ for (int i = 0; i < sender_config.header_extensions_size(); i++) {
+ RTC_CHECK(sender_config.header_extensions(i).has_name());
+ RTC_CHECK(sender_config.header_extensions(i).has_id());
+ const std::string& name = sender_config.header_extensions(i).name();
+ int id = sender_config.header_extensions(i).id();
+ config->rtp.extensions.push_back(RtpExtension(name, id));
+ }
+ // Get RTX settings.
+ config->rtp.rtx.ssrcs.clear();
+ for (int i = 0; i < sender_config.rtx_ssrcs_size(); i++) {
+ config->rtp.rtx.ssrcs.push_back(sender_config.rtx_ssrcs(i));
+ }
+ if (sender_config.rtx_ssrcs_size() > 0) {
+ RTC_CHECK(sender_config.has_rtx_payload_type());
+ config->rtp.rtx.payload_type = sender_config.rtx_payload_type();
+ } else {
+ // Reset RTX payload type default value if no RTX SSRCs are used.
+ config->rtp.rtx.payload_type = -1;
+ }
+ // Get encoder.
+ RTC_CHECK(sender_config.has_encoder());
+ RTC_CHECK(sender_config.encoder().has_name());
+ RTC_CHECK(sender_config.encoder().has_payload_type());
+ config->encoder_settings.payload_name = sender_config.encoder().name();
+ config->encoder_settings.payload_type =
+ sender_config.encoder().payload_type();
+}
+
+void ParsedRtcEventLog::GetAudioPlayout(size_t index, uint32_t* ssrc) const {
+ RTC_CHECK_LT(index, GetNumberOfEvents());
+ const rtclog::Event& event = stream_[index];
+ RTC_CHECK(event.has_type());
+ RTC_CHECK_EQ(event.type(), rtclog::Event::AUDIO_PLAYOUT_EVENT);
+ RTC_CHECK(event.has_audio_playout_event());
+ const rtclog::AudioPlayoutEvent& loss_event = event.audio_playout_event();
+ RTC_CHECK(loss_event.has_local_ssrc());
+ if (ssrc != nullptr) {
+ *ssrc = loss_event.local_ssrc();
+ }
+}
+
+void ParsedRtcEventLog::GetBwePacketLossEvent(size_t index,
+ int32_t* bitrate,
+ uint8_t* fraction_loss,
+ int32_t* total_packets) const {
+ RTC_CHECK_LT(index, GetNumberOfEvents());
+ const rtclog::Event& event = stream_[index];
+ RTC_CHECK(event.has_type());
+ RTC_CHECK_EQ(event.type(), rtclog::Event::BWE_PACKET_LOSS_EVENT);
+ RTC_CHECK(event.has_bwe_packet_loss_event());
+ const rtclog::BwePacketLossEvent& loss_event = event.bwe_packet_loss_event();
+ RTC_CHECK(loss_event.has_bitrate());
+ if (bitrate != nullptr) {
+ *bitrate = loss_event.bitrate();
+ }
+ RTC_CHECK(loss_event.has_fraction_loss());
+ if (fraction_loss != nullptr) {
+ *fraction_loss = loss_event.fraction_loss();
+ }
+ RTC_CHECK(loss_event.has_total_packets());
+ if (total_packets != nullptr) {
+ *total_packets = loss_event.total_packets();
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/call/rtc_event_log_parser.h b/chromium/third_party/webrtc/call/rtc_event_log_parser.h
new file mode 100644
index 00000000000..acdfa77da17
--- /dev/null
+++ b/chromium/third_party/webrtc/call/rtc_event_log_parser.h
@@ -0,0 +1,114 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_CALL_RTC_EVENT_LOG_PARSER_H_
+#define WEBRTC_CALL_RTC_EVENT_LOG_PARSER_H_
+
+#include <string>
+#include <vector>
+
+#include "webrtc/call/rtc_event_log.h"
+#include "webrtc/video_receive_stream.h"
+#include "webrtc/video_send_stream.h"
+
+// Files generated at build-time by the protobuf compiler.
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
+#include "external/webrtc/webrtc/call/rtc_event_log.pb.h"
+#else
+#include "webrtc/call/rtc_event_log.pb.h"
+#endif
+
+namespace webrtc {
+
+enum class MediaType;
+
+class ParsedRtcEventLog {
+ friend class RtcEventLogTestHelper;
+
+ public:
+ enum EventType {
+ UNKNOWN_EVENT = 0,
+ LOG_START = 1,
+ LOG_END = 2,
+ RTP_EVENT = 3,
+ RTCP_EVENT = 4,
+ AUDIO_PLAYOUT_EVENT = 5,
+ BWE_PACKET_LOSS_EVENT = 6,
+ BWE_PACKET_DELAY_EVENT = 7,
+ VIDEO_RECEIVER_CONFIG_EVENT = 8,
+ VIDEO_SENDER_CONFIG_EVENT = 9,
+ AUDIO_RECEIVER_CONFIG_EVENT = 10,
+ AUDIO_SENDER_CONFIG_EVENT = 11
+ };
+
+ // Reads an RtcEventLog file and returns true if parsing was successful.
+ bool ParseFile(const std::string& file_name);
+
+ // Returns the number of events in an EventStream.
+ size_t GetNumberOfEvents() const;
+
+ // Reads the arrival timestamp (in microseconds) from a rtclog::Event.
+ int64_t GetTimestamp(size_t index) const;
+
+ // Reads the event type of the rtclog::Event at |index|.
+ EventType GetEventType(size_t index) const;
+
+ // Reads the header, direction, media type, header length and packet length
+ // from the RTP event at |index|, and stores the values in the corresponding
+ // output parameters. The output parameters can be set to nullptr if those
+ // values aren't needed.
+ // NB: The header must have space for at least IP_PACKET_SIZE bytes.
+ void GetRtpHeader(size_t index,
+ PacketDirection* incoming,
+ MediaType* media_type,
+ uint8_t* header,
+ size_t* header_length,
+ size_t* total_length) const;
+
+ // Reads packet, direction, media type and packet length from the RTCP event
+ // at |index|, and stores the values in the corresponding output parameters.
+ // The output parameters can be set to nullptr if those values aren't needed.
+ // NB: The packet must have space for at least IP_PACKET_SIZE bytes.
+ void GetRtcpPacket(size_t index,
+ PacketDirection* incoming,
+ MediaType* media_type,
+ uint8_t* packet,
+ size_t* length) const;
+
+ // Reads a config event to a (non-NULL) VideoReceiveStream::Config struct.
+ // Only the fields that are stored in the protobuf will be written.
+ void GetVideoReceiveConfig(size_t index,
+ VideoReceiveStream::Config* config) const;
+
+ // Reads a config event to a (non-NULL) VideoSendStream::Config struct.
+ // Only the fields that are stored in the protobuf will be written.
+ void GetVideoSendConfig(size_t index, VideoSendStream::Config* config) const;
+
+ // Reads the SSRC from the audio playout event at |index|. The SSRC is stored
+ // in the output parameter ssrc. The output parameter can be set to nullptr
+ // and in that case the function only asserts that the event is well formed.
+ void GetAudioPlayout(size_t index, uint32_t* ssrc) const;
+
+ // Reads bitrate, fraction loss (as defined in RFC 1889) and total number of
+ // expected packets from the BWE event at |index| and stores the values in
+ // the corresponding output parameters. The output parameters can be set to
+ // nullptr if those values aren't needed.
+ // NB: The packet must have space for at least IP_PACKET_SIZE bytes.
+ void GetBwePacketLossEvent(size_t index,
+ int32_t* bitrate,
+ uint8_t* fraction_loss,
+ int32_t* total_packets) const;
+
+ private:
+ std::vector<rtclog::Event> stream_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_CALL_RTC_EVENT_LOG_PARSER_H_
diff --git a/chromium/third_party/webrtc/call/rtc_event_log_unittest.cc b/chromium/third_party/webrtc/call/rtc_event_log_unittest.cc
index e3104591d9a..067c44123b9 100644
--- a/chromium/third_party/webrtc/call/rtc_event_log_unittest.cc
+++ b/chromium/third_party/webrtc/call/rtc_event_log_unittest.cc
@@ -10,6 +10,7 @@
#ifdef ENABLE_RTC_EVENT_LOG
+#include <map>
#include <memory>
#include <string>
#include <utility>
@@ -19,9 +20,10 @@
#include "webrtc/base/buffer.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/random.h"
-#include "webrtc/base/thread.h"
#include "webrtc/call.h"
#include "webrtc/call/rtc_event_log.h"
+#include "webrtc/call/rtc_event_log_parser.h"
+#include "webrtc/call/rtc_event_log_unittest_helper.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/sender_report.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"
@@ -53,244 +55,50 @@ const char* kExtensionNames[] = {RtpExtension::kTOffset,
RtpExtension::kTransportSequenceNumber};
const size_t kNumExtensions = 5;
-} // namespace
-
-// TODO(terelius): Place this definition with other parsing functions?
-MediaType GetRuntimeMediaType(rtclog::MediaType media_type) {
- switch (media_type) {
- case rtclog::MediaType::ANY:
- return MediaType::ANY;
- case rtclog::MediaType::AUDIO:
- return MediaType::AUDIO;
- case rtclog::MediaType::VIDEO:
- return MediaType::VIDEO;
- case rtclog::MediaType::DATA:
- return MediaType::DATA;
- }
- RTC_NOTREACHED();
- return MediaType::ANY;
-}
-
-// Checks that the event has a timestamp, a type and exactly the data field
-// corresponding to the type.
-::testing::AssertionResult IsValidBasicEvent(const rtclog::Event& event) {
- if (!event.has_timestamp_us())
- return ::testing::AssertionFailure() << "Event has no timestamp";
- if (!event.has_type())
- return ::testing::AssertionFailure() << "Event has no event type";
- rtclog::Event_EventType type = event.type();
- if ((type == rtclog::Event::RTP_EVENT) != event.has_rtp_packet())
- return ::testing::AssertionFailure()
- << "Event of type " << type << " has "
- << (event.has_rtp_packet() ? "" : "no ") << "RTP packet";
- if ((type == rtclog::Event::RTCP_EVENT) != event.has_rtcp_packet())
- return ::testing::AssertionFailure()
- << "Event of type " << type << " has "
- << (event.has_rtcp_packet() ? "" : "no ") << "RTCP packet";
- if ((type == rtclog::Event::AUDIO_PLAYOUT_EVENT) !=
- event.has_audio_playout_event())
- return ::testing::AssertionFailure()
- << "Event of type " << type << " has "
- << (event.has_audio_playout_event() ? "" : "no ")
- << "audio_playout event";
- if ((type == rtclog::Event::VIDEO_RECEIVER_CONFIG_EVENT) !=
- event.has_video_receiver_config())
- return ::testing::AssertionFailure()
- << "Event of type " << type << " has "
- << (event.has_video_receiver_config() ? "" : "no ")
- << "receiver config";
- if ((type == rtclog::Event::VIDEO_SENDER_CONFIG_EVENT) !=
- event.has_video_sender_config())
- return ::testing::AssertionFailure()
- << "Event of type " << type << " has "
- << (event.has_video_sender_config() ? "" : "no ") << "sender config";
- if ((type == rtclog::Event::AUDIO_RECEIVER_CONFIG_EVENT) !=
- event.has_audio_receiver_config()) {
- return ::testing::AssertionFailure()
- << "Event of type " << type << " has "
- << (event.has_audio_receiver_config() ? "" : "no ")
- << "audio receiver config";
- }
- if ((type == rtclog::Event::AUDIO_SENDER_CONFIG_EVENT) !=
- event.has_audio_sender_config()) {
- return ::testing::AssertionFailure()
- << "Event of type " << type << " has "
- << (event.has_audio_sender_config() ? "" : "no ")
- << "audio sender config";
- }
- return ::testing::AssertionSuccess();
-}
-
-void VerifyReceiveStreamConfig(const rtclog::Event& event,
- const VideoReceiveStream::Config& config) {
- ASSERT_TRUE(IsValidBasicEvent(event));
- ASSERT_EQ(rtclog::Event::VIDEO_RECEIVER_CONFIG_EVENT, event.type());
- const rtclog::VideoReceiveConfig& receiver_config =
- event.video_receiver_config();
- // Check SSRCs.
- ASSERT_TRUE(receiver_config.has_remote_ssrc());
- EXPECT_EQ(config.rtp.remote_ssrc, receiver_config.remote_ssrc());
- ASSERT_TRUE(receiver_config.has_local_ssrc());
- EXPECT_EQ(config.rtp.local_ssrc, receiver_config.local_ssrc());
- // Check RTCP settings.
- ASSERT_TRUE(receiver_config.has_rtcp_mode());
- if (config.rtp.rtcp_mode == RtcpMode::kCompound)
- EXPECT_EQ(rtclog::VideoReceiveConfig::RTCP_COMPOUND,
- receiver_config.rtcp_mode());
- else
- EXPECT_EQ(rtclog::VideoReceiveConfig::RTCP_REDUCEDSIZE,
- receiver_config.rtcp_mode());
- ASSERT_TRUE(receiver_config.has_remb());
- EXPECT_EQ(config.rtp.remb, receiver_config.remb());
- // Check RTX map.
- ASSERT_EQ(static_cast<int>(config.rtp.rtx.size()),
- receiver_config.rtx_map_size());
- for (const rtclog::RtxMap& rtx_map : receiver_config.rtx_map()) {
- ASSERT_TRUE(rtx_map.has_payload_type());
- ASSERT_TRUE(rtx_map.has_config());
- EXPECT_EQ(1u, config.rtp.rtx.count(rtx_map.payload_type()));
- const rtclog::RtxConfig& rtx_config = rtx_map.config();
- const VideoReceiveStream::Config::Rtp::Rtx& rtx =
- config.rtp.rtx.at(rtx_map.payload_type());
- ASSERT_TRUE(rtx_config.has_rtx_ssrc());
- ASSERT_TRUE(rtx_config.has_rtx_payload_type());
- EXPECT_EQ(rtx.ssrc, rtx_config.rtx_ssrc());
- EXPECT_EQ(rtx.payload_type, rtx_config.rtx_payload_type());
- }
- // Check header extensions.
- ASSERT_EQ(static_cast<int>(config.rtp.extensions.size()),
- receiver_config.header_extensions_size());
- for (int i = 0; i < receiver_config.header_extensions_size(); i++) {
- ASSERT_TRUE(receiver_config.header_extensions(i).has_name());
- ASSERT_TRUE(receiver_config.header_extensions(i).has_id());
- const std::string& name = receiver_config.header_extensions(i).name();
- int id = receiver_config.header_extensions(i).id();
- EXPECT_EQ(config.rtp.extensions[i].id, id);
- EXPECT_EQ(config.rtp.extensions[i].name, name);
- }
- // Check decoders.
- ASSERT_EQ(static_cast<int>(config.decoders.size()),
- receiver_config.decoders_size());
- for (int i = 0; i < receiver_config.decoders_size(); i++) {
- ASSERT_TRUE(receiver_config.decoders(i).has_name());
- ASSERT_TRUE(receiver_config.decoders(i).has_payload_type());
- const std::string& decoder_name = receiver_config.decoders(i).name();
- int decoder_type = receiver_config.decoders(i).payload_type();
- EXPECT_EQ(config.decoders[i].payload_name, decoder_name);
- EXPECT_EQ(config.decoders[i].payload_type, decoder_type);
- }
-}
-
-void VerifySendStreamConfig(const rtclog::Event& event,
- const VideoSendStream::Config& config) {
- ASSERT_TRUE(IsValidBasicEvent(event));
- ASSERT_EQ(rtclog::Event::VIDEO_SENDER_CONFIG_EVENT, event.type());
- const rtclog::VideoSendConfig& sender_config = event.video_sender_config();
- // Check SSRCs.
- ASSERT_EQ(static_cast<int>(config.rtp.ssrcs.size()),
- sender_config.ssrcs_size());
- for (int i = 0; i < sender_config.ssrcs_size(); i++) {
- EXPECT_EQ(config.rtp.ssrcs[i], sender_config.ssrcs(i));
+void PrintActualEvents(const ParsedRtcEventLog& parsed_log) {
+ std::map<int, size_t> actual_event_counts;
+ for (size_t i = 0; i < parsed_log.GetNumberOfEvents(); i++) {
+ actual_event_counts[parsed_log.GetEventType(i)]++;
}
- // Check header extensions.
- ASSERT_EQ(static_cast<int>(config.rtp.extensions.size()),
- sender_config.header_extensions_size());
- for (int i = 0; i < sender_config.header_extensions_size(); i++) {
- ASSERT_TRUE(sender_config.header_extensions(i).has_name());
- ASSERT_TRUE(sender_config.header_extensions(i).has_id());
- const std::string& name = sender_config.header_extensions(i).name();
- int id = sender_config.header_extensions(i).id();
- EXPECT_EQ(config.rtp.extensions[i].id, id);
- EXPECT_EQ(config.rtp.extensions[i].name, name);
+ printf("Actual events: ");
+ for (auto kv : actual_event_counts) {
+ printf("%d_count = %zu, ", kv.first, kv.second);
}
- // Check RTX settings.
- ASSERT_EQ(static_cast<int>(config.rtp.rtx.ssrcs.size()),
- sender_config.rtx_ssrcs_size());
- for (int i = 0; i < sender_config.rtx_ssrcs_size(); i++) {
- EXPECT_EQ(config.rtp.rtx.ssrcs[i], sender_config.rtx_ssrcs(i));
+ printf("\n");
+ for (size_t i = 0; i < parsed_log.GetNumberOfEvents(); i++) {
+ printf("%4d ", parsed_log.GetEventType(i));
}
- if (sender_config.rtx_ssrcs_size() > 0) {
- ASSERT_TRUE(sender_config.has_rtx_payload_type());
- EXPECT_EQ(config.rtp.rtx.payload_type, sender_config.rtx_payload_type());
- }
- // Check encoder.
- ASSERT_TRUE(sender_config.has_encoder());
- ASSERT_TRUE(sender_config.encoder().has_name());
- ASSERT_TRUE(sender_config.encoder().has_payload_type());
- EXPECT_EQ(config.encoder_settings.payload_name,
- sender_config.encoder().name());
- EXPECT_EQ(config.encoder_settings.payload_type,
- sender_config.encoder().payload_type());
+ printf("\n");
}
-void VerifyRtpEvent(const rtclog::Event& event,
- bool incoming,
- MediaType media_type,
- const uint8_t* header,
- size_t header_size,
- size_t total_size) {
- ASSERT_TRUE(IsValidBasicEvent(event));
- ASSERT_EQ(rtclog::Event::RTP_EVENT, event.type());
- const rtclog::RtpPacket& rtp_packet = event.rtp_packet();
- ASSERT_TRUE(rtp_packet.has_incoming());
- EXPECT_EQ(incoming, rtp_packet.incoming());
- ASSERT_TRUE(rtp_packet.has_type());
- EXPECT_EQ(media_type, GetRuntimeMediaType(rtp_packet.type()));
- ASSERT_TRUE(rtp_packet.has_packet_length());
- EXPECT_EQ(total_size, rtp_packet.packet_length());
- ASSERT_TRUE(rtp_packet.has_header());
- ASSERT_EQ(header_size, rtp_packet.header().size());
- for (size_t i = 0; i < header_size; i++) {
- EXPECT_EQ(header[i], static_cast<uint8_t>(rtp_packet.header()[i]));
- }
-}
-
-void VerifyRtcpEvent(const rtclog::Event& event,
- bool incoming,
- MediaType media_type,
- const uint8_t* packet,
- size_t total_size) {
- ASSERT_TRUE(IsValidBasicEvent(event));
- ASSERT_EQ(rtclog::Event::RTCP_EVENT, event.type());
- const rtclog::RtcpPacket& rtcp_packet = event.rtcp_packet();
- ASSERT_TRUE(rtcp_packet.has_incoming());
- EXPECT_EQ(incoming, rtcp_packet.incoming());
- ASSERT_TRUE(rtcp_packet.has_type());
- EXPECT_EQ(media_type, GetRuntimeMediaType(rtcp_packet.type()));
- ASSERT_TRUE(rtcp_packet.has_packet_data());
- ASSERT_EQ(total_size, rtcp_packet.packet_data().size());
- for (size_t i = 0; i < total_size; i++) {
- EXPECT_EQ(packet[i], static_cast<uint8_t>(rtcp_packet.packet_data()[i]));
+void PrintExpectedEvents(size_t rtp_count,
+ size_t rtcp_count,
+ size_t playout_count,
+ size_t bwe_loss_count) {
+ printf(
+ "Expected events: rtp_count = %zu, rtcp_count = %zu,"
+ "playout_count = %zu, bwe_loss_count = %zu\n",
+ rtp_count, rtcp_count, playout_count, bwe_loss_count);
+ size_t rtcp_index = 1, playout_index = 1, bwe_loss_index = 1;
+ printf("strt cfg cfg ");
+ for (size_t i = 1; i <= rtp_count; i++) {
+ printf(" rtp ");
+ if (i * rtcp_count >= rtcp_index * rtp_count) {
+ printf("rtcp ");
+ rtcp_index++;
+ }
+ if (i * playout_count >= playout_index * rtp_count) {
+ printf("play ");
+ playout_index++;
+ }
+ if (i * bwe_loss_count >= bwe_loss_index * rtp_count) {
+ printf("loss ");
+ bwe_loss_index++;
+ }
}
+ printf("end \n");
}
-
-void VerifyPlayoutEvent(const rtclog::Event& event, uint32_t ssrc) {
- ASSERT_TRUE(IsValidBasicEvent(event));
- ASSERT_EQ(rtclog::Event::AUDIO_PLAYOUT_EVENT, event.type());
- const rtclog::AudioPlayoutEvent& playout_event = event.audio_playout_event();
- ASSERT_TRUE(playout_event.has_local_ssrc());
- EXPECT_EQ(ssrc, playout_event.local_ssrc());
-}
-
-void VerifyBweLossEvent(const rtclog::Event& event,
- int32_t bitrate,
- uint8_t fraction_loss,
- int32_t total_packets) {
- ASSERT_TRUE(IsValidBasicEvent(event));
- ASSERT_EQ(rtclog::Event::BWE_PACKET_LOSS_EVENT, event.type());
- const rtclog::BwePacketLossEvent& bwe_event = event.bwe_packet_loss_event();
- ASSERT_TRUE(bwe_event.has_bitrate());
- EXPECT_EQ(bitrate, bwe_event.bitrate());
- ASSERT_TRUE(bwe_event.has_fraction_loss());
- EXPECT_EQ(fraction_loss, bwe_event.fraction_loss());
- ASSERT_TRUE(bwe_event.has_total_packets());
- EXPECT_EQ(total_packets, bwe_event.total_packets());
-}
-
-void VerifyLogStartEvent(const rtclog::Event& event) {
- ASSERT_TRUE(IsValidBasicEvent(event));
- EXPECT_EQ(rtclog::Event::LOG_START, event.type());
-}
+} // namespace
/*
* Bit number i of extension_bitvector is set to indicate the
@@ -314,7 +122,8 @@ size_t GenerateRtpPacket(uint32_t extensions_bitvector,
nullptr, // BitrateStatisticsObserver*
nullptr, // FrameCountObserver*
nullptr, // SendSideDelayObserver*
- nullptr); // RtcEventLog*
+ nullptr, // RtcEventLog*
+ nullptr); // SendPacketObserver*
std::vector<uint32_t> csrcs;
for (unsigned i = 0; i < csrcs_count; i++) {
@@ -472,9 +281,12 @@ void LogSessionAndReadBack(size_t rtp_count,
// When log_dumper goes out of scope, it causes the log file to be flushed
// to disk.
{
- std::unique_ptr<RtcEventLog> log_dumper(RtcEventLog::Create());
+ SimulatedClock fake_clock(prng.Rand<uint32_t>());
+ std::unique_ptr<RtcEventLog> log_dumper(RtcEventLog::Create(&fake_clock));
log_dumper->LogVideoReceiveStreamConfig(receiver_config);
+ fake_clock.AdvanceTimeMicroseconds(prng.Rand(1, 1000));
log_dumper->LogVideoSendStreamConfig(sender_config);
+ fake_clock.AdvanceTimeMicroseconds(prng.Rand(1, 1000));
size_t rtcp_index = 1;
size_t playout_index = 1;
size_t bwe_loss_index = 1;
@@ -483,6 +295,7 @@ void LogSessionAndReadBack(size_t rtp_count,
(i % 2 == 0) ? kIncomingPacket : kOutgoingPacket,
(i % 3 == 0) ? MediaType::AUDIO : MediaType::VIDEO,
rtp_packets[i - 1].data(), rtp_packets[i - 1].size());
+ fake_clock.AdvanceTimeMicroseconds(prng.Rand(1, 1000));
if (i * rtcp_count >= rtcp_index * rtp_count) {
log_dumper->LogRtcpPacket(
(rtcp_index % 2 == 0) ? kIncomingPacket : kOutgoingPacket,
@@ -490,73 +303,84 @@ void LogSessionAndReadBack(size_t rtp_count,
rtcp_packets[rtcp_index - 1].data(),
rtcp_packets[rtcp_index - 1].size());
rtcp_index++;
+ fake_clock.AdvanceTimeMicroseconds(prng.Rand(1, 1000));
}
if (i * playout_count >= playout_index * rtp_count) {
log_dumper->LogAudioPlayout(playout_ssrcs[playout_index - 1]);
playout_index++;
+ fake_clock.AdvanceTimeMicroseconds(prng.Rand(1, 1000));
}
if (i * bwe_loss_count >= bwe_loss_index * rtp_count) {
log_dumper->LogBwePacketLossEvent(
bwe_loss_updates[bwe_loss_index - 1].first,
bwe_loss_updates[bwe_loss_index - 1].second, i);
bwe_loss_index++;
+ fake_clock.AdvanceTimeMicroseconds(prng.Rand(1, 1000));
}
if (i == rtp_count / 2) {
log_dumper->StartLogging(temp_filename, 10000000);
+ fake_clock.AdvanceTimeMicroseconds(prng.Rand(1, 1000));
}
}
+ log_dumper->StopLogging();
}
// Read the generated file from disk.
- rtclog::EventStream parsed_stream;
+ ParsedRtcEventLog parsed_log;
- ASSERT_TRUE(RtcEventLog::ParseRtcEventLog(temp_filename, &parsed_stream));
+ ASSERT_TRUE(parsed_log.ParseFile(temp_filename));
// Verify that what we read back from the event log is the same as
// what we wrote down. For RTCP we log the full packets, but for
// RTP we should only log the header.
- const int event_count = config_count + playout_count + bwe_loss_count +
- rtcp_count + rtp_count + 1;
- EXPECT_EQ(event_count, parsed_stream.stream_size());
- VerifyReceiveStreamConfig(parsed_stream.stream(0), receiver_config);
- VerifySendStreamConfig(parsed_stream.stream(1), sender_config);
- size_t event_index = config_count;
+ const size_t event_count = config_count + playout_count + bwe_loss_count +
+ rtcp_count + rtp_count + 2;
+ EXPECT_GE(1000u, event_count); // The events must fit in the message queue.
+ EXPECT_EQ(event_count, parsed_log.GetNumberOfEvents());
+ if (event_count != parsed_log.GetNumberOfEvents()) {
+ // Print the expected and actual event types for easier debugging.
+ PrintActualEvents(parsed_log);
+ PrintExpectedEvents(rtp_count, rtcp_count, playout_count, bwe_loss_count);
+ }
+ RtcEventLogTestHelper::VerifyLogStartEvent(parsed_log, 0);
+ RtcEventLogTestHelper::VerifyReceiveStreamConfig(parsed_log, 1,
+ receiver_config);
+ RtcEventLogTestHelper::VerifySendStreamConfig(parsed_log, 2, sender_config);
+ size_t event_index = config_count + 1;
size_t rtcp_index = 1;
size_t playout_index = 1;
size_t bwe_loss_index = 1;
for (size_t i = 1; i <= rtp_count; i++) {
- VerifyRtpEvent(parsed_stream.stream(event_index),
- (i % 2 == 0), // Every second packet is incoming.
- (i % 3 == 0) ? MediaType::AUDIO : MediaType::VIDEO,
- rtp_packets[i - 1].data(), rtp_header_sizes[i - 1],
- rtp_packets[i - 1].size());
+ RtcEventLogTestHelper::VerifyRtpEvent(
+ parsed_log, event_index,
+ (i % 2 == 0) ? kIncomingPacket : kOutgoingPacket,
+ (i % 3 == 0) ? MediaType::AUDIO : MediaType::VIDEO,
+ rtp_packets[i - 1].data(), rtp_header_sizes[i - 1],
+ rtp_packets[i - 1].size());
event_index++;
if (i * rtcp_count >= rtcp_index * rtp_count) {
- VerifyRtcpEvent(parsed_stream.stream(event_index),
- rtcp_index % 2 == 0, // Every second packet is incoming.
- rtcp_index % 3 == 0 ? MediaType::AUDIO : MediaType::VIDEO,
- rtcp_packets[rtcp_index - 1].data(),
- rtcp_packets[rtcp_index - 1].size());
+ RtcEventLogTestHelper::VerifyRtcpEvent(
+ parsed_log, event_index,
+ rtcp_index % 2 == 0 ? kIncomingPacket : kOutgoingPacket,
+ rtcp_index % 3 == 0 ? MediaType::AUDIO : MediaType::VIDEO,
+ rtcp_packets[rtcp_index - 1].data(),
+ rtcp_packets[rtcp_index - 1].size());
event_index++;
rtcp_index++;
}
if (i * playout_count >= playout_index * rtp_count) {
- VerifyPlayoutEvent(parsed_stream.stream(event_index),
- playout_ssrcs[playout_index - 1]);
+ RtcEventLogTestHelper::VerifyPlayoutEvent(
+ parsed_log, event_index, playout_ssrcs[playout_index - 1]);
event_index++;
playout_index++;
}
if (i * bwe_loss_count >= bwe_loss_index * rtp_count) {
- VerifyBweLossEvent(parsed_stream.stream(event_index),
- bwe_loss_updates[bwe_loss_index - 1].first,
- bwe_loss_updates[bwe_loss_index - 1].second, i);
+ RtcEventLogTestHelper::VerifyBweLossEvent(
+ parsed_log, event_index, bwe_loss_updates[bwe_loss_index - 1].first,
+ bwe_loss_updates[bwe_loss_index - 1].second, i);
event_index++;
bwe_loss_index++;
}
- if (i == rtp_count / 2) {
- VerifyLogStartEvent(parsed_stream.stream(event_index));
- event_index++;
- }
}
// Clean up temporary file - can be pretty slow.
@@ -596,39 +420,15 @@ TEST(RtcEventLogTest, LogSessionAndReadBack) {
}
}
-// Tests that the event queue works correctly, i.e. drops old RTP, RTCP and
-// debug events, but keeps config events even if they are older than the limit.
-void DropOldEvents(uint32_t extensions_bitvector,
- uint32_t csrcs_count,
- unsigned int random_seed) {
- rtc::Buffer old_rtp_packet;
- rtc::Buffer recent_rtp_packet;
- rtc::Buffer old_rtcp_packet;
- rtc::Buffer recent_rtcp_packet;
-
- VideoReceiveStream::Config receiver_config(nullptr);
- VideoSendStream::Config sender_config(nullptr);
-
- Random prng(random_seed);
+TEST(RtcEventLogTest, LogEventAndReadBack) {
+ Random prng(987654321);
- // Create two RTP packets containing random data.
+ // Create one RTP and one RTCP packet containing random data.
size_t packet_size = prng.Rand(1000, 1100);
- old_rtp_packet.SetSize(packet_size);
- GenerateRtpPacket(extensions_bitvector, csrcs_count, old_rtp_packet.data(),
- packet_size, &prng);
- packet_size = prng.Rand(1000, 1100);
- recent_rtp_packet.SetSize(packet_size);
- size_t recent_header_size =
- GenerateRtpPacket(extensions_bitvector, csrcs_count,
- recent_rtp_packet.data(), packet_size, &prng);
-
- // Create two RTCP packets containing random data.
- old_rtcp_packet = GenerateRtcpPacket(&prng);
- recent_rtcp_packet = GenerateRtcpPacket(&prng);
-
- // Create configurations for the video streams.
- GenerateVideoReceiveConfig(extensions_bitvector, &receiver_config, &prng);
- GenerateVideoSendConfig(extensions_bitvector, &sender_config, &prng);
+ rtc::Buffer rtp_packet(packet_size);
+ size_t header_size =
+ GenerateRtpPacket(0, 0, rtp_packet.data(), packet_size, &prng);
+ rtc::Buffer rtcp_packet = GenerateRtcpPacket(&prng);
// Find the name of the current test, in order to use it as a temporary
// filename.
@@ -636,58 +436,46 @@ void DropOldEvents(uint32_t extensions_bitvector,
const std::string temp_filename =
test::OutputPath() + test_info->test_case_name() + test_info->name();
- // The log file will be flushed to disk when the log_dumper goes out of scope.
- {
- std::unique_ptr<RtcEventLog> log_dumper(RtcEventLog::Create());
- // Reduce the time old events are stored to 50 ms.
- log_dumper->SetBufferDuration(50000);
- log_dumper->LogVideoReceiveStreamConfig(receiver_config);
- log_dumper->LogVideoSendStreamConfig(sender_config);
- log_dumper->LogRtpHeader(kOutgoingPacket, MediaType::AUDIO,
- old_rtp_packet.data(), old_rtp_packet.size());
- log_dumper->LogRtcpPacket(kIncomingPacket, MediaType::AUDIO,
- old_rtcp_packet.data(),
- old_rtcp_packet.size());
- // Sleep 55 ms to let old events be removed from the queue.
- rtc::Thread::SleepMs(55);
- log_dumper->StartLogging(temp_filename, 10000000);
- log_dumper->LogRtpHeader(kIncomingPacket, MediaType::VIDEO,
- recent_rtp_packet.data(),
- recent_rtp_packet.size());
- log_dumper->LogRtcpPacket(kOutgoingPacket, MediaType::VIDEO,
- recent_rtcp_packet.data(),
- recent_rtcp_packet.size());
- }
+ // Add RTP, start logging, add RTCP and then stop logging
+ SimulatedClock fake_clock(prng.Rand<uint32_t>());
+ std::unique_ptr<RtcEventLog> log_dumper(RtcEventLog::Create(&fake_clock));
+
+ log_dumper->LogRtpHeader(kIncomingPacket, MediaType::VIDEO, rtp_packet.data(),
+ rtp_packet.size());
+ fake_clock.AdvanceTimeMicroseconds(prng.Rand(1, 1000));
+
+ log_dumper->StartLogging(temp_filename, 10000000);
+ fake_clock.AdvanceTimeMicroseconds(prng.Rand(1, 1000));
+
+ log_dumper->LogRtcpPacket(kOutgoingPacket, MediaType::VIDEO,
+ rtcp_packet.data(), rtcp_packet.size());
+ fake_clock.AdvanceTimeMicroseconds(prng.Rand(1, 1000));
+
+ log_dumper->StopLogging();
// Read the generated file from disk.
- rtclog::EventStream parsed_stream;
- ASSERT_TRUE(RtcEventLog::ParseRtcEventLog(temp_filename, &parsed_stream));
+ ParsedRtcEventLog parsed_log;
+ ASSERT_TRUE(parsed_log.ParseFile(temp_filename));
// Verify that what we read back from the event log is the same as
- // what we wrote. Old RTP and RTCP events should have been discarded,
- // but old configuration events should still be available.
- EXPECT_EQ(5, parsed_stream.stream_size());
- VerifyReceiveStreamConfig(parsed_stream.stream(0), receiver_config);
- VerifySendStreamConfig(parsed_stream.stream(1), sender_config);
- VerifyLogStartEvent(parsed_stream.stream(2));
- VerifyRtpEvent(parsed_stream.stream(3), true, MediaType::VIDEO,
- recent_rtp_packet.data(), recent_header_size,
- recent_rtp_packet.size());
- VerifyRtcpEvent(parsed_stream.stream(4), false, MediaType::VIDEO,
- recent_rtcp_packet.data(), recent_rtcp_packet.size());
+ // what we wrote down.
+ EXPECT_EQ(4u, parsed_log.GetNumberOfEvents());
+
+ RtcEventLogTestHelper::VerifyLogStartEvent(parsed_log, 0);
+
+ RtcEventLogTestHelper::VerifyRtpEvent(parsed_log, 1, kIncomingPacket,
+ MediaType::VIDEO, rtp_packet.data(),
+ header_size, rtp_packet.size());
+
+ RtcEventLogTestHelper::VerifyRtcpEvent(parsed_log, 2, kOutgoingPacket,
+ MediaType::VIDEO, rtcp_packet.data(),
+ rtcp_packet.size());
+
+ RtcEventLogTestHelper::VerifyLogEndEvent(parsed_log, 3);
// Clean up temporary file - can be pretty slow.
remove(temp_filename.c_str());
}
-
-TEST(RtcEventLogTest, DropOldEvents) {
- // Enable all header extensions
- uint32_t extensions = (1u << kNumExtensions) - 1;
- uint32_t csrcs_count = 2;
- DropOldEvents(extensions, csrcs_count, 141421356);
- DropOldEvents(extensions, csrcs_count, 173205080);
-}
-
} // namespace webrtc
#endif // ENABLE_RTC_EVENT_LOG
diff --git a/chromium/third_party/webrtc/call/rtc_event_log_unittest_helper.cc b/chromium/third_party/webrtc/call/rtc_event_log_unittest_helper.cc
new file mode 100644
index 00000000000..5a06d97170e
--- /dev/null
+++ b/chromium/third_party/webrtc/call/rtc_event_log_unittest_helper.cc
@@ -0,0 +1,409 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifdef ENABLE_RTC_EVENT_LOG
+
+#include "webrtc/call/rtc_event_log_unittest_helper.h"
+
+#include <string.h>
+
+#include <string>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/test/test_suite.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+// Files generated at build-time by the protobuf compiler.
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
+#include "external/webrtc/webrtc/call/rtc_event_log.pb.h"
+#else
+#include "webrtc/call/rtc_event_log.pb.h"
+#endif
+
+namespace webrtc {
+
+namespace {
+MediaType GetRuntimeMediaType(rtclog::MediaType media_type) {
+ switch (media_type) {
+ case rtclog::MediaType::ANY:
+ return MediaType::ANY;
+ case rtclog::MediaType::AUDIO:
+ return MediaType::AUDIO;
+ case rtclog::MediaType::VIDEO:
+ return MediaType::VIDEO;
+ case rtclog::MediaType::DATA:
+ return MediaType::DATA;
+ }
+ RTC_NOTREACHED();
+ return MediaType::ANY;
+}
+} // namespace
+
+// Checks that the event has a timestamp, a type and exactly the data field
+// corresponding to the type.
+::testing::AssertionResult IsValidBasicEvent(const rtclog::Event& event) {
+ if (!event.has_timestamp_us()) {
+ return ::testing::AssertionFailure() << "Event has no timestamp";
+ }
+ if (!event.has_type()) {
+ return ::testing::AssertionFailure() << "Event has no event type";
+ }
+ rtclog::Event_EventType type = event.type();
+ if ((type == rtclog::Event::RTP_EVENT) != event.has_rtp_packet()) {
+ return ::testing::AssertionFailure()
+ << "Event of type " << type << " has "
+ << (event.has_rtp_packet() ? "" : "no ") << "RTP packet";
+ }
+ if ((type == rtclog::Event::RTCP_EVENT) != event.has_rtcp_packet()) {
+ return ::testing::AssertionFailure()
+ << "Event of type " << type << " has "
+ << (event.has_rtcp_packet() ? "" : "no ") << "RTCP packet";
+ }
+ if ((type == rtclog::Event::AUDIO_PLAYOUT_EVENT) !=
+ event.has_audio_playout_event()) {
+ return ::testing::AssertionFailure()
+ << "Event of type " << type << " has "
+ << (event.has_audio_playout_event() ? "" : "no ")
+ << "audio_playout event";
+ }
+ if ((type == rtclog::Event::VIDEO_RECEIVER_CONFIG_EVENT) !=
+ event.has_video_receiver_config()) {
+ return ::testing::AssertionFailure()
+ << "Event of type " << type << " has "
+ << (event.has_video_receiver_config() ? "" : "no ")
+ << "receiver config";
+ }
+ if ((type == rtclog::Event::VIDEO_SENDER_CONFIG_EVENT) !=
+ event.has_video_sender_config()) {
+ return ::testing::AssertionFailure()
+ << "Event of type " << type << " has "
+ << (event.has_video_sender_config() ? "" : "no ") << "sender config";
+ }
+ if ((type == rtclog::Event::AUDIO_RECEIVER_CONFIG_EVENT) !=
+ event.has_audio_receiver_config()) {
+ return ::testing::AssertionFailure()
+ << "Event of type " << type << " has "
+ << (event.has_audio_receiver_config() ? "" : "no ")
+ << "audio receiver config";
+ }
+ if ((type == rtclog::Event::AUDIO_SENDER_CONFIG_EVENT) !=
+ event.has_audio_sender_config()) {
+ return ::testing::AssertionFailure()
+ << "Event of type " << type << " has "
+ << (event.has_audio_sender_config() ? "" : "no ")
+ << "audio sender config";
+ }
+ return ::testing::AssertionSuccess();
+}
+
+void RtcEventLogTestHelper::VerifyReceiveStreamConfig(
+ const ParsedRtcEventLog& parsed_log,
+ size_t index,
+ const VideoReceiveStream::Config& config) {
+ const rtclog::Event& event = parsed_log.stream_[index];
+ ASSERT_TRUE(IsValidBasicEvent(event));
+ ASSERT_EQ(rtclog::Event::VIDEO_RECEIVER_CONFIG_EVENT, event.type());
+ const rtclog::VideoReceiveConfig& receiver_config =
+ event.video_receiver_config();
+ // Check SSRCs.
+ ASSERT_TRUE(receiver_config.has_remote_ssrc());
+ EXPECT_EQ(config.rtp.remote_ssrc, receiver_config.remote_ssrc());
+ ASSERT_TRUE(receiver_config.has_local_ssrc());
+ EXPECT_EQ(config.rtp.local_ssrc, receiver_config.local_ssrc());
+ // Check RTCP settings.
+ ASSERT_TRUE(receiver_config.has_rtcp_mode());
+ if (config.rtp.rtcp_mode == RtcpMode::kCompound) {
+ EXPECT_EQ(rtclog::VideoReceiveConfig::RTCP_COMPOUND,
+ receiver_config.rtcp_mode());
+ } else {
+ EXPECT_EQ(rtclog::VideoReceiveConfig::RTCP_REDUCEDSIZE,
+ receiver_config.rtcp_mode());
+ }
+ ASSERT_TRUE(receiver_config.has_remb());
+ EXPECT_EQ(config.rtp.remb, receiver_config.remb());
+ // Check RTX map.
+ ASSERT_EQ(static_cast<int>(config.rtp.rtx.size()),
+ receiver_config.rtx_map_size());
+ for (const rtclog::RtxMap& rtx_map : receiver_config.rtx_map()) {
+ ASSERT_TRUE(rtx_map.has_payload_type());
+ ASSERT_TRUE(rtx_map.has_config());
+ EXPECT_EQ(1u, config.rtp.rtx.count(rtx_map.payload_type()));
+ const rtclog::RtxConfig& rtx_config = rtx_map.config();
+ const VideoReceiveStream::Config::Rtp::Rtx& rtx =
+ config.rtp.rtx.at(rtx_map.payload_type());
+ ASSERT_TRUE(rtx_config.has_rtx_ssrc());
+ ASSERT_TRUE(rtx_config.has_rtx_payload_type());
+ EXPECT_EQ(rtx.ssrc, rtx_config.rtx_ssrc());
+ EXPECT_EQ(rtx.payload_type, rtx_config.rtx_payload_type());
+ }
+ // Check header extensions.
+ ASSERT_EQ(static_cast<int>(config.rtp.extensions.size()),
+ receiver_config.header_extensions_size());
+ for (int i = 0; i < receiver_config.header_extensions_size(); i++) {
+ ASSERT_TRUE(receiver_config.header_extensions(i).has_name());
+ ASSERT_TRUE(receiver_config.header_extensions(i).has_id());
+ const std::string& name = receiver_config.header_extensions(i).name();
+ int id = receiver_config.header_extensions(i).id();
+ EXPECT_EQ(config.rtp.extensions[i].id, id);
+ EXPECT_EQ(config.rtp.extensions[i].name, name);
+ }
+ // Check decoders.
+ ASSERT_EQ(static_cast<int>(config.decoders.size()),
+ receiver_config.decoders_size());
+ for (int i = 0; i < receiver_config.decoders_size(); i++) {
+ ASSERT_TRUE(receiver_config.decoders(i).has_name());
+ ASSERT_TRUE(receiver_config.decoders(i).has_payload_type());
+ const std::string& decoder_name = receiver_config.decoders(i).name();
+ int decoder_type = receiver_config.decoders(i).payload_type();
+ EXPECT_EQ(config.decoders[i].payload_name, decoder_name);
+ EXPECT_EQ(config.decoders[i].payload_type, decoder_type);
+ }
+
+ // Check consistency of the parser.
+ VideoReceiveStream::Config parsed_config(nullptr);
+ parsed_log.GetVideoReceiveConfig(index, &parsed_config);
+ EXPECT_EQ(config.rtp.remote_ssrc, parsed_config.rtp.remote_ssrc);
+ EXPECT_EQ(config.rtp.local_ssrc, parsed_config.rtp.local_ssrc);
+ // Check RTCP settings.
+ EXPECT_EQ(config.rtp.rtcp_mode, parsed_config.rtp.rtcp_mode);
+ EXPECT_EQ(config.rtp.remb, parsed_config.rtp.remb);
+ // Check RTX map.
+ EXPECT_EQ(config.rtp.rtx.size(), parsed_config.rtp.rtx.size());
+ for (const auto& kv : config.rtp.rtx) {
+ auto parsed_kv = parsed_config.rtp.rtx.find(kv.first);
+ EXPECT_EQ(kv.first, parsed_kv->first);
+ EXPECT_EQ(kv.second.ssrc, parsed_kv->second.ssrc);
+ EXPECT_EQ(kv.second.payload_type, parsed_kv->second.payload_type);
+ }
+ // Check header extensions.
+ EXPECT_EQ(config.rtp.extensions.size(), parsed_config.rtp.extensions.size());
+ for (size_t i = 0; i < parsed_config.rtp.extensions.size(); i++) {
+ EXPECT_EQ(config.rtp.extensions[i].name,
+ parsed_config.rtp.extensions[i].name);
+ EXPECT_EQ(config.rtp.extensions[i].id, parsed_config.rtp.extensions[i].id);
+ }
+ // Check decoders.
+ EXPECT_EQ(config.decoders.size(), parsed_config.decoders.size());
+ for (size_t i = 0; i < parsed_config.decoders.size(); i++) {
+ EXPECT_EQ(config.decoders[i].payload_name,
+ parsed_config.decoders[i].payload_name);
+ EXPECT_EQ(config.decoders[i].payload_type,
+ parsed_config.decoders[i].payload_type);
+ }
+}
+
+void RtcEventLogTestHelper::VerifySendStreamConfig(
+ const ParsedRtcEventLog& parsed_log,
+ size_t index,
+ const VideoSendStream::Config& config) {
+ const rtclog::Event& event = parsed_log.stream_[index];
+ ASSERT_TRUE(IsValidBasicEvent(event));
+ ASSERT_EQ(rtclog::Event::VIDEO_SENDER_CONFIG_EVENT, event.type());
+ const rtclog::VideoSendConfig& sender_config = event.video_sender_config();
+ // Check SSRCs.
+ ASSERT_EQ(static_cast<int>(config.rtp.ssrcs.size()),
+ sender_config.ssrcs_size());
+ for (int i = 0; i < sender_config.ssrcs_size(); i++) {
+ EXPECT_EQ(config.rtp.ssrcs[i], sender_config.ssrcs(i));
+ }
+ // Check header extensions.
+ ASSERT_EQ(static_cast<int>(config.rtp.extensions.size()),
+ sender_config.header_extensions_size());
+ for (int i = 0; i < sender_config.header_extensions_size(); i++) {
+ ASSERT_TRUE(sender_config.header_extensions(i).has_name());
+ ASSERT_TRUE(sender_config.header_extensions(i).has_id());
+ const std::string& name = sender_config.header_extensions(i).name();
+ int id = sender_config.header_extensions(i).id();
+ EXPECT_EQ(config.rtp.extensions[i].id, id);
+ EXPECT_EQ(config.rtp.extensions[i].name, name);
+ }
+ // Check RTX settings.
+ ASSERT_EQ(static_cast<int>(config.rtp.rtx.ssrcs.size()),
+ sender_config.rtx_ssrcs_size());
+ for (int i = 0; i < sender_config.rtx_ssrcs_size(); i++) {
+ EXPECT_EQ(config.rtp.rtx.ssrcs[i], sender_config.rtx_ssrcs(i));
+ }
+ if (sender_config.rtx_ssrcs_size() > 0) {
+ ASSERT_TRUE(sender_config.has_rtx_payload_type());
+ EXPECT_EQ(config.rtp.rtx.payload_type, sender_config.rtx_payload_type());
+ }
+ // Check encoder.
+ ASSERT_TRUE(sender_config.has_encoder());
+ ASSERT_TRUE(sender_config.encoder().has_name());
+ ASSERT_TRUE(sender_config.encoder().has_payload_type());
+ EXPECT_EQ(config.encoder_settings.payload_name,
+ sender_config.encoder().name());
+ EXPECT_EQ(config.encoder_settings.payload_type,
+ sender_config.encoder().payload_type());
+
+ // Check consistency of the parser.
+ VideoSendStream::Config parsed_config(nullptr);
+ parsed_log.GetVideoSendConfig(index, &parsed_config);
+ // Check SSRCs
+ EXPECT_EQ(config.rtp.ssrcs.size(), parsed_config.rtp.ssrcs.size());
+ for (size_t i = 0; i < config.rtp.ssrcs.size(); i++) {
+ EXPECT_EQ(config.rtp.ssrcs[i], parsed_config.rtp.ssrcs[i]);
+ }
+ // Check header extensions.
+ EXPECT_EQ(config.rtp.extensions.size(), parsed_config.rtp.extensions.size());
+ for (size_t i = 0; i < parsed_config.rtp.extensions.size(); i++) {
+ EXPECT_EQ(config.rtp.extensions[i].name,
+ parsed_config.rtp.extensions[i].name);
+ EXPECT_EQ(config.rtp.extensions[i].id, parsed_config.rtp.extensions[i].id);
+ }
+ // Check RTX settings.
+ EXPECT_EQ(config.rtp.rtx.ssrcs.size(), parsed_config.rtp.rtx.ssrcs.size());
+ for (size_t i = 0; i < config.rtp.rtx.ssrcs.size(); i++) {
+ EXPECT_EQ(config.rtp.rtx.ssrcs[i], parsed_config.rtp.rtx.ssrcs[i]);
+ }
+ EXPECT_EQ(config.rtp.rtx.payload_type, parsed_config.rtp.rtx.payload_type);
+ // Check encoder.
+ EXPECT_EQ(config.encoder_settings.payload_name,
+ parsed_config.encoder_settings.payload_name);
+ EXPECT_EQ(config.encoder_settings.payload_type,
+ parsed_config.encoder_settings.payload_type);
+}
+
+void RtcEventLogTestHelper::VerifyRtpEvent(const ParsedRtcEventLog& parsed_log,
+ size_t index,
+ PacketDirection direction,
+ MediaType media_type,
+ const uint8_t* header,
+ size_t header_size,
+ size_t total_size) {
+ const rtclog::Event& event = parsed_log.stream_[index];
+ ASSERT_TRUE(IsValidBasicEvent(event));
+ ASSERT_EQ(rtclog::Event::RTP_EVENT, event.type());
+ const rtclog::RtpPacket& rtp_packet = event.rtp_packet();
+ ASSERT_TRUE(rtp_packet.has_incoming());
+ EXPECT_EQ(direction == kIncomingPacket, rtp_packet.incoming());
+ ASSERT_TRUE(rtp_packet.has_type());
+ EXPECT_EQ(media_type, GetRuntimeMediaType(rtp_packet.type()));
+ ASSERT_TRUE(rtp_packet.has_packet_length());
+ EXPECT_EQ(total_size, rtp_packet.packet_length());
+ ASSERT_TRUE(rtp_packet.has_header());
+ ASSERT_EQ(header_size, rtp_packet.header().size());
+ for (size_t i = 0; i < header_size; i++) {
+ EXPECT_EQ(header[i], static_cast<uint8_t>(rtp_packet.header()[i]));
+ }
+
+ // Check consistency of the parser.
+ PacketDirection parsed_direction;
+ MediaType parsed_media_type;
+ uint8_t parsed_header[1500];
+ size_t parsed_header_size, parsed_total_size;
+ parsed_log.GetRtpHeader(index, &parsed_direction, &parsed_media_type,
+ parsed_header, &parsed_header_size,
+ &parsed_total_size);
+ EXPECT_EQ(direction, parsed_direction);
+ EXPECT_EQ(media_type, parsed_media_type);
+ ASSERT_EQ(header_size, parsed_header_size);
+ EXPECT_EQ(0, std::memcmp(header, parsed_header, header_size));
+ EXPECT_EQ(total_size, parsed_total_size);
+}
+
+void RtcEventLogTestHelper::VerifyRtcpEvent(const ParsedRtcEventLog& parsed_log,
+ size_t index,
+ PacketDirection direction,
+ MediaType media_type,
+ const uint8_t* packet,
+ size_t total_size) {
+ const rtclog::Event& event = parsed_log.stream_[index];
+ ASSERT_TRUE(IsValidBasicEvent(event));
+ ASSERT_EQ(rtclog::Event::RTCP_EVENT, event.type());
+ const rtclog::RtcpPacket& rtcp_packet = event.rtcp_packet();
+ ASSERT_TRUE(rtcp_packet.has_incoming());
+ EXPECT_EQ(direction == kIncomingPacket, rtcp_packet.incoming());
+ ASSERT_TRUE(rtcp_packet.has_type());
+ EXPECT_EQ(media_type, GetRuntimeMediaType(rtcp_packet.type()));
+ ASSERT_TRUE(rtcp_packet.has_packet_data());
+ ASSERT_EQ(total_size, rtcp_packet.packet_data().size());
+ for (size_t i = 0; i < total_size; i++) {
+ EXPECT_EQ(packet[i], static_cast<uint8_t>(rtcp_packet.packet_data()[i]));
+ }
+
+ // Check consistency of the parser.
+ PacketDirection parsed_direction;
+ MediaType parsed_media_type;
+ uint8_t parsed_packet[1500];
+ size_t parsed_total_size;
+ parsed_log.GetRtcpPacket(index, &parsed_direction, &parsed_media_type,
+ parsed_packet, &parsed_total_size);
+ EXPECT_EQ(direction, parsed_direction);
+ EXPECT_EQ(media_type, parsed_media_type);
+ ASSERT_EQ(total_size, parsed_total_size);
+ EXPECT_EQ(0, std::memcmp(packet, parsed_packet, total_size));
+}
+
+void RtcEventLogTestHelper::VerifyPlayoutEvent(
+ const ParsedRtcEventLog& parsed_log,
+ size_t index,
+ uint32_t ssrc) {
+ const rtclog::Event& event = parsed_log.stream_[index];
+ ASSERT_TRUE(IsValidBasicEvent(event));
+ ASSERT_EQ(rtclog::Event::AUDIO_PLAYOUT_EVENT, event.type());
+ const rtclog::AudioPlayoutEvent& playout_event = event.audio_playout_event();
+ ASSERT_TRUE(playout_event.has_local_ssrc());
+ EXPECT_EQ(ssrc, playout_event.local_ssrc());
+
+ // Check consistency of the parser.
+ uint32_t parsed_ssrc;
+ parsed_log.GetAudioPlayout(index, &parsed_ssrc);
+ EXPECT_EQ(ssrc, parsed_ssrc);
+}
+
+void RtcEventLogTestHelper::VerifyBweLossEvent(
+ const ParsedRtcEventLog& parsed_log,
+ size_t index,
+ int32_t bitrate,
+ uint8_t fraction_loss,
+ int32_t total_packets) {
+ const rtclog::Event& event = parsed_log.stream_[index];
+ ASSERT_TRUE(IsValidBasicEvent(event));
+ ASSERT_EQ(rtclog::Event::BWE_PACKET_LOSS_EVENT, event.type());
+ const rtclog::BwePacketLossEvent& bwe_event = event.bwe_packet_loss_event();
+ ASSERT_TRUE(bwe_event.has_bitrate());
+ EXPECT_EQ(bitrate, bwe_event.bitrate());
+ ASSERT_TRUE(bwe_event.has_fraction_loss());
+ EXPECT_EQ(fraction_loss, bwe_event.fraction_loss());
+ ASSERT_TRUE(bwe_event.has_total_packets());
+ EXPECT_EQ(total_packets, bwe_event.total_packets());
+
+ // Check consistency of the parser.
+ int32_t parsed_bitrate;
+ uint8_t parsed_fraction_loss;
+ int32_t parsed_total_packets;
+ parsed_log.GetBwePacketLossEvent(
+ index, &parsed_bitrate, &parsed_fraction_loss, &parsed_total_packets);
+ EXPECT_EQ(bitrate, parsed_bitrate);
+ EXPECT_EQ(fraction_loss, parsed_fraction_loss);
+ EXPECT_EQ(total_packets, parsed_total_packets);
+}
+
+void RtcEventLogTestHelper::VerifyLogStartEvent(
+ const ParsedRtcEventLog& parsed_log,
+ size_t index) {
+ const rtclog::Event& event = parsed_log.stream_[index];
+ ASSERT_TRUE(IsValidBasicEvent(event));
+ EXPECT_EQ(rtclog::Event::LOG_START, event.type());
+}
+
+void RtcEventLogTestHelper::VerifyLogEndEvent(
+ const ParsedRtcEventLog& parsed_log,
+ size_t index) {
+ const rtclog::Event& event = parsed_log.stream_[index];
+ ASSERT_TRUE(IsValidBasicEvent(event));
+ EXPECT_EQ(rtclog::Event::LOG_END, event.type());
+}
+
+} // namespace webrtc
+
+#endif // ENABLE_RTC_EVENT_LOG
diff --git a/chromium/third_party/webrtc/call/rtc_event_log_unittest_helper.h b/chromium/third_party/webrtc/call/rtc_event_log_unittest_helper.h
new file mode 100644
index 00000000000..b662c3ccc36
--- /dev/null
+++ b/chromium/third_party/webrtc/call/rtc_event_log_unittest_helper.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_CALL_RTC_EVENT_LOG_UNITTEST_HELPER_H_
+#define WEBRTC_CALL_RTC_EVENT_LOG_UNITTEST_HELPER_H_
+
+#include "webrtc/call.h"
+#include "webrtc/call/rtc_event_log_parser.h"
+
+namespace webrtc {
+
+class RtcEventLogTestHelper {
+ public:
+ static void VerifyReceiveStreamConfig(
+ const ParsedRtcEventLog& parsed_log,
+ size_t index,
+ const VideoReceiveStream::Config& config);
+ static void VerifySendStreamConfig(const ParsedRtcEventLog& parsed_log,
+ size_t index,
+ const VideoSendStream::Config& config);
+ static void VerifyRtpEvent(const ParsedRtcEventLog& parsed_log,
+ size_t index,
+ PacketDirection direction,
+ MediaType media_type,
+ const uint8_t* header,
+ size_t header_size,
+ size_t total_size);
+ static void VerifyRtcpEvent(const ParsedRtcEventLog& parsed_log,
+ size_t index,
+ PacketDirection direction,
+ MediaType media_type,
+ const uint8_t* packet,
+ size_t total_size);
+ static void VerifyPlayoutEvent(const ParsedRtcEventLog& parsed_log,
+ size_t index,
+ uint32_t ssrc);
+ static void VerifyBweLossEvent(const ParsedRtcEventLog& parsed_log,
+ size_t index,
+ int32_t bitrate,
+ uint8_t fraction_loss,
+ int32_t total_packets);
+
+ static void VerifyLogStartEvent(const ParsedRtcEventLog& parsed_log,
+ size_t index);
+ static void VerifyLogEndEvent(const ParsedRtcEventLog& parsed_log,
+ size_t index);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_CALL_RTC_EVENT_LOG_UNITTEST_HELPER_H_
diff --git a/chromium/third_party/webrtc/common.h b/chromium/third_party/webrtc/common.h
index f2a868241d2..3aeea814c6b 100644
--- a/chromium/third_party/webrtc/common.h
+++ b/chromium/third_party/webrtc/common.h
@@ -14,6 +14,7 @@
#include <map>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/common_audio/audio_ring_buffer.h b/chromium/third_party/webrtc/common_audio/audio_ring_buffer.h
index ae825a3cd0b..6bf3a195314 100644
--- a/chromium/third_party/webrtc/common_audio/audio_ring_buffer.h
+++ b/chromium/third_party/webrtc/common_audio/audio_ring_buffer.h
@@ -11,6 +11,8 @@
#define WEBRTC_COMMON_AUDIO_AUDIO_RING_BUFFER_H_
#include <stddef.h>
+
+#include <memory>
#include <vector>
struct RingBuffer;
diff --git a/chromium/third_party/webrtc/common_audio/common_audio.gyp b/chromium/third_party/webrtc/common_audio/common_audio.gyp
index 57d9f1ca645..30ba322dfe8 100644
--- a/chromium/third_party/webrtc/common_audio/common_audio.gyp
+++ b/chromium/third_party/webrtc/common_audio/common_audio.gyp
@@ -294,7 +294,28 @@
],
},
],
- }],
+ 'conditions': [
+ ['test_isolation_mode != "noop"',
+ {
+ 'targets': [
+ {
+ 'target_name': 'common_audio_unittests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):common_audio_unittests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'common_audio_unittests_apk.isolate',
+ ],
+ },
+ ],
+ },
+ ],
+ ],
+ }], # OS=="android"
['test_isolation_mode != "noop"', {
'targets': [
{
diff --git a/chromium/third_party/webrtc/common_audio/common_audio_unittests_apk.isolate b/chromium/third_party/webrtc/common_audio/common_audio_unittests_apk.isolate
new file mode 100644
index 00000000000..d426732fe9e
--- /dev/null
+++ b/chromium/third_party/webrtc/common_audio/common_audio_unittests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'common_audio_unittests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_common_audio_unittests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/common_audio_unittests_apk/',
+ '<(PRODUCT_DIR)/bin/run_common_audio_unittests',
+ 'common_audio_unittests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/common_audio/fir_filter.cc b/chromium/third_party/webrtc/common_audio/fir_filter.cc
index 13a2237dae1..ed1f1bbfb81 100644
--- a/chromium/third_party/webrtc/common_audio/fir_filter.cc
+++ b/chromium/third_party/webrtc/common_audio/fir_filter.cc
@@ -61,13 +61,6 @@ FIRFilter* FIRFilter::Create(const float* coefficients,
#elif defined(WEBRTC_HAS_NEON)
filter =
new FIRFilterNEON(coefficients, coefficients_length, max_input_length);
-#elif defined(WEBRTC_DETECT_NEON)
- if (WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) {
- filter =
- new FIRFilterNEON(coefficients, coefficients_length, max_input_length);
- } else {
- filter = new FIRFilterC(coefficients, coefficients_length);
- }
#else
filter = new FIRFilterC(coefficients, coefficients_length);
#endif
diff --git a/chromium/third_party/webrtc/common_audio/lapped_transform.cc b/chromium/third_party/webrtc/common_audio/lapped_transform.cc
index 0edf586d783..5ab1db1b258 100644
--- a/chromium/third_party/webrtc/common_audio/lapped_transform.cc
+++ b/chromium/third_party/webrtc/common_audio/lapped_transform.cc
@@ -72,8 +72,7 @@ LappedTransform::LappedTransform(size_t num_in_channels,
window,
shift_amount,
&blocker_callback_),
- fft_(rtc::ScopedToUnique(
- RealFourier::Create(RealFourier::FftOrder(block_length_)))),
+ fft_(RealFourier::Create(RealFourier::FftOrder(block_length_))),
cplx_length_(RealFourier::ComplexLength(fft_->order())),
real_buf_(num_in_channels,
block_length_,
diff --git a/chromium/third_party/webrtc/common_audio/real_fourier.cc b/chromium/third_party/webrtc/common_audio/real_fourier.cc
index 55ec49cba2c..67f942d560a 100644
--- a/chromium/third_party/webrtc/common_audio/real_fourier.cc
+++ b/chromium/third_party/webrtc/common_audio/real_fourier.cc
@@ -21,11 +21,11 @@ using std::complex;
const size_t RealFourier::kFftBufferAlignment = 32;
-rtc::scoped_ptr<RealFourier> RealFourier::Create(int fft_order) {
+std::unique_ptr<RealFourier> RealFourier::Create(int fft_order) {
#if defined(RTC_USE_OPENMAX_DL)
- return rtc::scoped_ptr<RealFourier>(new RealFourierOpenmax(fft_order));
+ return std::unique_ptr<RealFourier>(new RealFourierOpenmax(fft_order));
#else
- return rtc::scoped_ptr<RealFourier>(new RealFourierOoura(fft_order));
+ return std::unique_ptr<RealFourier>(new RealFourierOoura(fft_order));
#endif
}
diff --git a/chromium/third_party/webrtc/common_audio/real_fourier.h b/chromium/third_party/webrtc/common_audio/real_fourier.h
index 0be56a58b0e..5e83e37f70e 100644
--- a/chromium/third_party/webrtc/common_audio/real_fourier.h
+++ b/chromium/third_party/webrtc/common_audio/real_fourier.h
@@ -12,8 +12,8 @@
#define WEBRTC_COMMON_AUDIO_REAL_FOURIER_H_
#include <complex>
+#include <memory>
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/system_wrappers/include/aligned_malloc.h"
// Uniform interface class for the real DFT and its inverse, for power-of-2
@@ -25,8 +25,8 @@ namespace webrtc {
class RealFourier {
public:
// Shorthand typenames for the scopers used by the buffer allocation helpers.
- typedef rtc::scoped_ptr<float[], AlignedFreeDeleter> fft_real_scoper;
- typedef rtc::scoped_ptr<std::complex<float>[], AlignedFreeDeleter>
+ typedef std::unique_ptr<float[], AlignedFreeDeleter> fft_real_scoper;
+ typedef std::unique_ptr<std::complex<float>[], AlignedFreeDeleter>
fft_cplx_scoper;
// The alignment required for all input and output buffers, in bytes.
@@ -34,7 +34,7 @@ class RealFourier {
// Construct a wrapper instance for the given input order, which must be
// between 1 and kMaxFftOrder, inclusively.
- static rtc::scoped_ptr<RealFourier> Create(int fft_order);
+ static std::unique_ptr<RealFourier> Create(int fft_order);
virtual ~RealFourier() {};
// Helper to compute the smallest FFT order (a power of 2) which will contain
diff --git a/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler_unittest.cc b/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler_unittest.cc
index afb0963c3fd..aca73b2d4bc 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler_unittest.cc
+++ b/chromium/third_party/webrtc/common_audio/resampler/push_sinc_resampler_unittest.cc
@@ -14,10 +14,10 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common_audio/include/audio_util.h"
#include "webrtc/common_audio/resampler/push_sinc_resampler.h"
#include "webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -86,16 +86,17 @@ void PushSincResamplerTest::ResampleBenchmarkTest(bool int_format) {
const double io_ratio = input_rate_ / static_cast<double>(output_rate_);
SincResampler sinc_resampler(io_ratio, SincResampler::kDefaultRequestSize,
&resampler_source);
- TickTime start = TickTime::Now();
+ int64_t start = rtc::TimeNanos();
for (int i = 0; i < kResampleIterations; ++i) {
sinc_resampler.Resample(output_samples, resampled_destination.get());
}
- double total_time_sinc_us = (TickTime::Now() - start).Microseconds();
+ double total_time_sinc_us =
+ (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec;
printf("SincResampler took %.2f us per frame.\n",
total_time_sinc_us / kResampleIterations);
PushSincResampler resampler(input_samples, output_samples);
- start = TickTime::Now();
+ start = rtc::TimeNanos();
if (int_format) {
for (int i = 0; i < kResampleIterations; ++i) {
EXPECT_EQ(output_samples,
@@ -113,7 +114,8 @@ void PushSincResamplerTest::ResampleBenchmarkTest(bool int_format) {
output_samples));
}
}
- double total_time_us = (TickTime::Now() - start).Microseconds();
+ double total_time_us =
+ (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec;
printf("PushSincResampler took %.2f us per frame; which is a %.1f%% overhead "
"on SincResampler.\n\n", total_time_us / kResampleIterations,
(total_time_us - total_time_sinc_us) / total_time_sinc_us * 100);
diff --git a/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.cc b/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.cc
index 69ac2208cfb..c8bc15a362f 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.cc
+++ b/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.cc
@@ -136,12 +136,6 @@ void SincResampler::InitializeCPUSpecificFeatures() {
#elif defined(WEBRTC_HAS_NEON)
#define CONVOLVE_FUNC Convolve_NEON
void SincResampler::InitializeCPUSpecificFeatures() {}
-#elif defined(WEBRTC_DETECT_NEON)
-#define CONVOLVE_FUNC convolve_proc_
-void SincResampler::InitializeCPUSpecificFeatures() {
- convolve_proc_ = WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON ?
- Convolve_NEON : Convolve_C;
-}
#else
// Unknown architecture.
#define CONVOLVE_FUNC Convolve_C
diff --git a/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.h b/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.h
index d8ea6df72f7..ecfd64c0a64 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.h
+++ b/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler.h
@@ -107,7 +107,7 @@ class SincResampler {
static float Convolve_SSE(const float* input_ptr, const float* k1,
const float* k2,
double kernel_interpolation_factor);
-#elif defined(WEBRTC_DETECT_NEON) || defined(WEBRTC_HAS_NEON)
+#elif defined(WEBRTC_HAS_NEON)
static float Convolve_NEON(const float* input_ptr, const float* k1,
const float* k2,
double kernel_interpolation_factor);
diff --git a/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc b/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc
index 42172ebdda7..d3b0dcd428a 100644
--- a/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc
+++ b/chromium/third_party/webrtc/common_audio/resampler/sinc_resampler_unittest.cc
@@ -20,11 +20,11 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common_audio/resampler/sinc_resampler.h"
#include "webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h"
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
#include "webrtc/system_wrappers/include/stringize_macros.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/test_suite.h"
using testing::_;
@@ -107,10 +107,11 @@ TEST(SincResamplerTest, DISABLED_SetRatioBench) {
SincResampler resampler(kSampleRateRatio, SincResampler::kDefaultRequestSize,
&mock_source);
- TickTime start = TickTime::Now();
+ int64_t start = rtc::TimeNanos();
for (int i = 1; i < 10000; ++i)
resampler.SetRatio(1.0 / i);
- double total_time_c_us = (TickTime::Now() - start).Microseconds();
+ double total_time_c_us =
+ (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec;
printf("SetRatio() took %.2fms.\n", total_time_c_us / 1000);
}
@@ -179,13 +180,14 @@ TEST(SincResamplerTest, ConvolveBenchmark) {
printf("Benchmarking %d iterations:\n", kConvolveIterations);
// Benchmark Convolve_C().
- TickTime start = TickTime::Now();
+ int64_t start = rtc::TimeNanos();
for (int i = 0; i < kConvolveIterations; ++i) {
resampler.Convolve_C(
resampler.kernel_storage_.get(), resampler.kernel_storage_.get(),
resampler.kernel_storage_.get(), kKernelInterpolationFactor);
}
- double total_time_c_us = (TickTime::Now() - start).Microseconds();
+ double total_time_c_us =
+ (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec;
printf("Convolve_C took %.2fms.\n", total_time_c_us / 1000);
#if defined(CONVOLVE_FUNC)
@@ -196,27 +198,27 @@ TEST(SincResamplerTest, ConvolveBenchmark) {
#endif
// Benchmark with unaligned input pointer.
- start = TickTime::Now();
+ start = rtc::TimeNanos();
for (int j = 0; j < kConvolveIterations; ++j) {
resampler.CONVOLVE_FUNC(
resampler.kernel_storage_.get() + 1, resampler.kernel_storage_.get(),
resampler.kernel_storage_.get(), kKernelInterpolationFactor);
}
double total_time_optimized_unaligned_us =
- (TickTime::Now() - start).Microseconds();
+ (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec;
printf(STRINGIZE(CONVOLVE_FUNC) "(unaligned) took %.2fms; which is %.2fx "
"faster than Convolve_C.\n", total_time_optimized_unaligned_us / 1000,
total_time_c_us / total_time_optimized_unaligned_us);
// Benchmark with aligned input pointer.
- start = TickTime::Now();
+ start = rtc::TimeNanos();
for (int j = 0; j < kConvolveIterations; ++j) {
resampler.CONVOLVE_FUNC(
resampler.kernel_storage_.get(), resampler.kernel_storage_.get(),
resampler.kernel_storage_.get(), kKernelInterpolationFactor);
}
double total_time_optimized_aligned_us =
- (TickTime::Now() - start).Microseconds();
+ (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec;
printf(STRINGIZE(CONVOLVE_FUNC) " (aligned) took %.2fms; which is %.2fx "
"faster than Convolve_C and %.2fx faster than "
STRINGIZE(CONVOLVE_FUNC) " (unaligned).\n",
diff --git a/chromium/third_party/webrtc/common_audio/ring_buffer.c b/chromium/third_party/webrtc/common_audio/ring_buffer.c
index 60fb5dff20d..5fc653bd57e 100644
--- a/chromium/third_party/webrtc/common_audio/ring_buffer.c
+++ b/chromium/third_party/webrtc/common_audio/ring_buffer.c
@@ -17,20 +17,6 @@
#include <stdlib.h>
#include <string.h>
-enum Wrap {
- SAME_WRAP,
- DIFF_WRAP
-};
-
-struct RingBuffer {
- size_t read_pos;
- size_t write_pos;
- size_t element_count;
- size_t element_size;
- enum Wrap rw_wrap;
- char* data;
-};
-
// Get address of region(s) from which we can read data.
// If the region is contiguous, |data_ptr_bytes_2| will be zero.
// If non-contiguous, |data_ptr_bytes_2| will be the size in bytes of the second
diff --git a/chromium/third_party/webrtc/common_audio/ring_buffer.h b/chromium/third_party/webrtc/common_audio/ring_buffer.h
index 4125c48d011..74951a8b2da 100644
--- a/chromium/third_party/webrtc/common_audio/ring_buffer.h
+++ b/chromium/third_party/webrtc/common_audio/ring_buffer.h
@@ -20,7 +20,16 @@ extern "C" {
#include <stddef.h> // size_t
-typedef struct RingBuffer RingBuffer;
+enum Wrap { SAME_WRAP, DIFF_WRAP };
+
+typedef struct RingBuffer {
+ size_t read_pos;
+ size_t write_pos;
+ size_t element_count;
+ size_t element_size;
+ enum Wrap rw_wrap;
+ char* data;
+} RingBuffer;
// Creates and initializes the buffer. Returns NULL on failure.
RingBuffer* WebRtc_CreateBuffer(size_t element_count, size_t element_size);
diff --git a/chromium/third_party/webrtc/common_audio/signal_processing/include/signal_processing_library.h b/chromium/third_party/webrtc/common_audio/signal_processing/include/signal_processing_library.h
index 2e96883e6de..19379bf084f 100644
--- a/chromium/third_party/webrtc/common_audio/signal_processing/include/signal_processing_library.h
+++ b/chromium/third_party/webrtc/common_audio/signal_processing/include/signal_processing_library.h
@@ -106,10 +106,8 @@ extern "C" {
// Initialize SPL. Currently it contains only function pointer initialization.
// If the underlying platform is known to be ARM-Neon (WEBRTC_HAS_NEON defined),
-// the pointers will be assigned to code optimized for Neon; otherwise
-// if run-time Neon detection (WEBRTC_DETECT_NEON) is enabled, the pointers
-// will be assigned to either Neon code or generic C code; otherwise, generic C
-// code will be assigned.
+// the pointers will be assigned to code optimized for Neon; otherwise, generic
+// C code will be assigned.
// Note that this function MUST be called in any application that uses SPL
// functions.
void WebRtcSpl_Init();
@@ -153,7 +151,7 @@ void WebRtcSpl_ZerosArrayW32(int32_t* vector,
typedef int16_t (*MaxAbsValueW16)(const int16_t* vector, size_t length);
extern MaxAbsValueW16 WebRtcSpl_MaxAbsValueW16;
int16_t WebRtcSpl_MaxAbsValueW16C(const int16_t* vector, size_t length);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
int16_t WebRtcSpl_MaxAbsValueW16Neon(const int16_t* vector, size_t length);
#endif
#if defined(MIPS32_LE)
@@ -170,7 +168,7 @@ int16_t WebRtcSpl_MaxAbsValueW16_mips(const int16_t* vector, size_t length);
typedef int32_t (*MaxAbsValueW32)(const int32_t* vector, size_t length);
extern MaxAbsValueW32 WebRtcSpl_MaxAbsValueW32;
int32_t WebRtcSpl_MaxAbsValueW32C(const int32_t* vector, size_t length);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
int32_t WebRtcSpl_MaxAbsValueW32Neon(const int32_t* vector, size_t length);
#endif
#if defined(MIPS_DSP_R1_LE)
@@ -187,7 +185,7 @@ int32_t WebRtcSpl_MaxAbsValueW32_mips(const int32_t* vector, size_t length);
typedef int16_t (*MaxValueW16)(const int16_t* vector, size_t length);
extern MaxValueW16 WebRtcSpl_MaxValueW16;
int16_t WebRtcSpl_MaxValueW16C(const int16_t* vector, size_t length);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
int16_t WebRtcSpl_MaxValueW16Neon(const int16_t* vector, size_t length);
#endif
#if defined(MIPS32_LE)
@@ -204,7 +202,7 @@ int16_t WebRtcSpl_MaxValueW16_mips(const int16_t* vector, size_t length);
typedef int32_t (*MaxValueW32)(const int32_t* vector, size_t length);
extern MaxValueW32 WebRtcSpl_MaxValueW32;
int32_t WebRtcSpl_MaxValueW32C(const int32_t* vector, size_t length);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
int32_t WebRtcSpl_MaxValueW32Neon(const int32_t* vector, size_t length);
#endif
#if defined(MIPS32_LE)
@@ -221,7 +219,7 @@ int32_t WebRtcSpl_MaxValueW32_mips(const int32_t* vector, size_t length);
typedef int16_t (*MinValueW16)(const int16_t* vector, size_t length);
extern MinValueW16 WebRtcSpl_MinValueW16;
int16_t WebRtcSpl_MinValueW16C(const int16_t* vector, size_t length);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
int16_t WebRtcSpl_MinValueW16Neon(const int16_t* vector, size_t length);
#endif
#if defined(MIPS32_LE)
@@ -238,7 +236,7 @@ int16_t WebRtcSpl_MinValueW16_mips(const int16_t* vector, size_t length);
typedef int32_t (*MinValueW32)(const int32_t* vector, size_t length);
extern MinValueW32 WebRtcSpl_MinValueW32;
int32_t WebRtcSpl_MinValueW32C(const int32_t* vector, size_t length);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
int32_t WebRtcSpl_MinValueW32Neon(const int32_t* vector, size_t length);
#endif
#if defined(MIPS32_LE)
@@ -531,7 +529,7 @@ void WebRtcSpl_CrossCorrelationC(int32_t* cross_correlation,
size_t dim_cross_correlation,
int right_shifts,
int step_seq2);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
void WebRtcSpl_CrossCorrelationNeon(int32_t* cross_correlation,
const int16_t* seq1,
const int16_t* seq2,
@@ -698,7 +696,7 @@ int WebRtcSpl_DownsampleFastC(const int16_t* data_in,
size_t coefficients_length,
int factor,
size_t delay);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
int WebRtcSpl_DownsampleFastNeon(const int16_t* data_in,
size_t data_in_length,
int16_t* data_out,
diff --git a/chromium/third_party/webrtc/common_audio/signal_processing/spl_init.c b/chromium/third_party/webrtc/common_audio/signal_processing/spl_init.c
index fdab0383997..c9c4e659cf3 100644
--- a/chromium/third_party/webrtc/common_audio/signal_processing/spl_init.c
+++ b/chromium/third_party/webrtc/common_audio/signal_processing/spl_init.c
@@ -28,8 +28,7 @@ CrossCorrelation WebRtcSpl_CrossCorrelation;
DownsampleFast WebRtcSpl_DownsampleFast;
ScaleAndAddVectorsWithRound WebRtcSpl_ScaleAndAddVectorsWithRound;
-#if (defined(WEBRTC_DETECT_NEON) || !defined(WEBRTC_HAS_NEON)) && \
- !defined(MIPS32_LE)
+#if (!defined(WEBRTC_HAS_NEON)) && !defined(MIPS32_LE)
/* Initialize function pointers to the generic C version. */
static void InitPointersToC() {
WebRtcSpl_MaxAbsValueW16 = WebRtcSpl_MaxAbsValueW16C;
@@ -45,7 +44,7 @@ static void InitPointersToC() {
}
#endif
-#if defined(WEBRTC_DETECT_NEON) || defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
/* Initialize function pointers to the Neon version. */
static void InitPointersToNeon() {
WebRtcSpl_MaxAbsValueW16 = WebRtcSpl_MaxAbsValueW16Neon;
@@ -84,19 +83,13 @@ static void InitPointersToMIPS() {
#endif
static void InitFunctionPointers(void) {
-#if defined(WEBRTC_DETECT_NEON)
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- InitPointersToNeon();
- } else {
- InitPointersToC();
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
InitPointersToNeon();
#elif defined(MIPS32_LE)
InitPointersToMIPS();
#else
InitPointersToC();
-#endif /* WEBRTC_DETECT_NEON */
+#endif /* WEBRTC_HAS_NEON */
}
#if defined(WEBRTC_POSIX)
diff --git a/chromium/third_party/webrtc/common_audio/vad/vad.cc b/chromium/third_party/webrtc/common_audio/vad/vad.cc
index 99d6ffeee66..77de5166db3 100644
--- a/chromium/third_party/webrtc/common_audio/vad/vad.cc
+++ b/chromium/third_party/webrtc/common_audio/vad/vad.cc
@@ -10,6 +10,8 @@
#include "webrtc/common_audio/vad/include/vad.h"
+#include <memory>
+
#include "webrtc/base/checks.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/common_types.h b/chromium/third_party/webrtc/common_types.h
index e171e1bd8af..587b90d753c 100644
--- a/chromium/third_party/webrtc/common_types.h
+++ b/chromium/third_party/webrtc/common_types.h
@@ -296,6 +296,18 @@ class SendSideDelayObserver {
uint32_t ssrc) = 0;
};
+// Callback, used to notify an observer whenever a packet is sent to the
+// transport.
+// TODO(asapersson): This class will remove the need for SendSideDelayObserver.
+// Remove SendSideDelayObserver once possible.
+class SendPacketObserver {
+ public:
+ virtual ~SendPacketObserver() {}
+ virtual void OnSendPacket(uint16_t packet_id,
+ int64_t capture_time_ms,
+ uint32_t ssrc) = 0;
+};
+
// ==================================================================
// Voice specific types
// ==================================================================
@@ -880,6 +892,11 @@ class StreamDataCountersCallback {
// RTCP mode is described by RFC 5506.
enum class RtcpMode { kOff, kCompound, kReducedSize };
+enum NetworkState {
+ kNetworkUp,
+ kNetworkDown,
+};
+
} // namespace webrtc
#endif // WEBRTC_COMMON_TYPES_H_
diff --git a/chromium/third_party/webrtc/common_video/BUILD.gn b/chromium/third_party/webrtc/common_video/BUILD.gn
index ed877bd344a..7474974c528 100644
--- a/chromium/third_party/webrtc/common_video/BUILD.gn
+++ b/chromium/third_party/webrtc/common_video/BUILD.gn
@@ -17,7 +17,10 @@ config("common_video_config") {
source_set("common_video") {
sources = [
+ "bitrate_adjuster.cc",
"i420_buffer_pool.cc",
+ "include/bitrate_adjuster.h",
+ "include/frame_callback.h",
"include/i420_buffer_pool.h",
"include/incoming_video_stream.h",
"include/video_frame_buffer.h",
diff --git a/chromium/third_party/webrtc/common_video/DEPS b/chromium/third_party/webrtc/common_video/DEPS
index 28059580703..ad03e661693 100644
--- a/chromium/third_party/webrtc/common_video/DEPS
+++ b/chromium/third_party/webrtc/common_video/DEPS
@@ -1,4 +1,5 @@
include_rules = [
"+webrtc/base",
+ "+webrtc/media/base",
"+webrtc/system_wrappers",
]
diff --git a/chromium/third_party/webrtc/modules/video_coding/bitrate_adjuster.cc b/chromium/third_party/webrtc/common_video/bitrate_adjuster.cc
index b6828ee6e1e..ada6c5db4b7 100644
--- a/chromium/third_party/webrtc/modules/video_coding/bitrate_adjuster.cc
+++ b/chromium/third_party/webrtc/common_video/bitrate_adjuster.cc
@@ -8,8 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/video_coding/include/bitrate_adjuster.h"
+#include "webrtc/common_video/include/bitrate_adjuster.h"
+#include <algorithm>
#include <cmath>
#include "webrtc/base/checks.h"
diff --git a/chromium/third_party/webrtc/modules/video_coding/bitrate_adjuster_unittest.cc b/chromium/third_party/webrtc/common_video/bitrate_adjuster_unittest.cc
index 1d14ee31606..23b278731ef 100644
--- a/chromium/third_party/webrtc/modules/video_coding/bitrate_adjuster_unittest.cc
+++ b/chromium/third_party/webrtc/common_video/bitrate_adjuster_unittest.cc
@@ -10,7 +10,7 @@
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/video_coding/include/bitrate_adjuster.h"
+#include "webrtc/common_video/include/bitrate_adjuster.h"
#include "webrtc/system_wrappers/include/clock.h"
namespace webrtc {
@@ -86,7 +86,7 @@ TEST_F(BitrateAdjusterTest, VaryingBitrates) {
SimulateBitrateBps(actual_bitrate_bps);
VerifyAdjustment();
adjusted_bitrate_bps = adjuster_.GetAdjustedBitrateBps();
- EXPECT_LT(adjusted_bitrate_bps, last_adjusted_bitrate_bps);
+ EXPECT_LE(adjusted_bitrate_bps, last_adjusted_bitrate_bps);
last_adjusted_bitrate_bps = adjusted_bitrate_bps;
// After two cycles we should've stabilized and hit the lower bound.
EXPECT_EQ(GetTargetBitrateBpsPct(kMinAdjustedBitratePct),
diff --git a/chromium/third_party/webrtc/common_video/common_video.gyp b/chromium/third_party/webrtc/common_video/common_video.gyp
index b392bd2aa4d..47ba3e59d71 100644
--- a/chromium/third_party/webrtc/common_video/common_video.gyp
+++ b/chromium/third_party/webrtc/common_video/common_video.gyp
@@ -52,9 +52,12 @@
}],
],
'sources': [
+ 'bitrate_adjuster.cc',
'i420_buffer_pool.cc',
'video_frame.cc',
'incoming_video_stream.cc',
+ 'include/bitrate_adjuster.h',
+ 'include/frame_callback.h',
'include/i420_buffer_pool.h',
'include/incoming_video_stream.h',
'include/video_frame_buffer.h',
diff --git a/chromium/third_party/webrtc/common_video/common_video_unittests.gyp b/chromium/third_party/webrtc/common_video/common_video_unittests.gyp
index 545bfa46fce..8372c0b4a97 100644
--- a/chromium/third_party/webrtc/common_video/common_video_unittests.gyp
+++ b/chromium/third_party/webrtc/common_video/common_video_unittests.gyp
@@ -20,6 +20,7 @@
'<(webrtc_root)/test/test.gyp:video_test_common',
],
'sources': [
+ 'bitrate_adjuster_unittest.cc',
'i420_buffer_pool_unittest.cc',
'i420_video_frame_unittest.cc',
'libyuv/libyuv_unittest.cc',
@@ -54,7 +55,28 @@
],
},
],
- }],
+ 'conditions': [
+ ['test_isolation_mode != "noop"',
+ {
+ 'targets': [
+ {
+ 'target_name': 'common_video_unittests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):common_video_unittests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'common_video_unittests_apk.isolate',
+ ],
+ },
+ ],
+ },
+ ],
+ ],
+ }], # OS=="android"
['test_isolation_mode != "noop"', {
'targets': [
{
diff --git a/chromium/third_party/webrtc/common_video/common_video_unittests_apk.isolate b/chromium/third_party/webrtc/common_video/common_video_unittests_apk.isolate
new file mode 100644
index 00000000000..36514cdb61c
--- /dev/null
+++ b/chromium/third_party/webrtc/common_video/common_video_unittests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'common_video_unittests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_common_video_unittests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/common_video_unittests_apk/',
+ '<(PRODUCT_DIR)/bin/run_common_video_unittests',
+ 'common_video_unittests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/common_video/i420_buffer_pool.cc b/chromium/third_party/webrtc/common_video/i420_buffer_pool.cc
index 82a10797b35..8896260fc0e 100644
--- a/chromium/third_party/webrtc/common_video/i420_buffer_pool.cc
+++ b/chromium/third_party/webrtc/common_video/i420_buffer_pool.cc
@@ -26,18 +26,28 @@ class PooledI420Buffer : public webrtc::VideoFrameBuffer {
int width() const override { return buffer_->width(); }
int height() const override { return buffer_->height(); }
- const uint8_t* data(webrtc::PlaneType type) const override {
- return buffer_->data(type);
+ const uint8_t* DataY() const override { return buffer_->DataY(); }
+ const uint8_t* DataU() const override { return buffer_->DataU(); }
+ const uint8_t* DataV() const override { return buffer_->DataV(); }
+
+ bool IsMutable() override { return HasOneRef(); }
+ // Make the IsMutable() check here instead of in |buffer_|, because the pool
+ // also has a reference to |buffer_|.
+ uint8_t* MutableDataY() override {
+ RTC_DCHECK(IsMutable());
+ return const_cast<uint8_t*>(buffer_->DataY());
}
- uint8_t* MutableData(webrtc::PlaneType type) override {
- // Make the HasOneRef() check here instead of in |buffer_|, because the pool
- // also has a reference to |buffer_|.
- RTC_DCHECK(HasOneRef());
- return const_cast<uint8_t*>(buffer_->data(type));
+ uint8_t* MutableDataU() override {
+ RTC_DCHECK(IsMutable());
+ return const_cast<uint8_t*>(buffer_->DataU());
}
- int stride(webrtc::PlaneType type) const override {
- return buffer_->stride(type);
+ uint8_t* MutableDataV() override {
+ RTC_DCHECK(IsMutable());
+ return const_cast<uint8_t*>(buffer_->DataV());
}
+ int StrideY() const override { return buffer_->StrideY(); }
+ int StrideU() const override { return buffer_->StrideU(); }
+ int StrideV() const override { return buffer_->StrideV(); }
void* native_handle() const override { return nullptr; }
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override {
@@ -80,7 +90,7 @@ rtc::scoped_refptr<VideoFrameBuffer> I420BufferPool::CreateBuffer(int width,
// CreateBuffer that has not been released yet. If the ref count is 1
// (HasOneRef), then the list we are looping over holds the only reference
// and it's safe to reuse.
- if (buffer->HasOneRef())
+ if (buffer->IsMutable())
return new rtc::RefCountedObject<PooledI420Buffer>(buffer);
}
// Allocate new buffer.
diff --git a/chromium/third_party/webrtc/common_video/i420_buffer_pool_unittest.cc b/chromium/third_party/webrtc/common_video/i420_buffer_pool_unittest.cc
index b030ee774aa..2110066a928 100644
--- a/chromium/third_party/webrtc/common_video/i420_buffer_pool_unittest.cc
+++ b/chromium/third_party/webrtc/common_video/i420_buffer_pool_unittest.cc
@@ -21,16 +21,16 @@ TEST(TestI420BufferPool, SimpleFrameReuse) {
EXPECT_EQ(16, buffer->width());
EXPECT_EQ(16, buffer->height());
// Extract non-refcounted pointers for testing.
- const uint8_t* y_ptr = buffer->data(kYPlane);
- const uint8_t* u_ptr = buffer->data(kUPlane);
- const uint8_t* v_ptr = buffer->data(kVPlane);
+ const uint8_t* y_ptr = buffer->DataY();
+ const uint8_t* u_ptr = buffer->DataU();
+ const uint8_t* v_ptr = buffer->DataV();
// Release buffer so that it is returned to the pool.
buffer = nullptr;
// Check that the memory is resued.
buffer = pool.CreateBuffer(16, 16);
- EXPECT_EQ(y_ptr, buffer->data(kYPlane));
- EXPECT_EQ(u_ptr, buffer->data(kUPlane));
- EXPECT_EQ(v_ptr, buffer->data(kVPlane));
+ EXPECT_EQ(y_ptr, buffer->DataY());
+ EXPECT_EQ(u_ptr, buffer->DataU());
+ EXPECT_EQ(v_ptr, buffer->DataV());
EXPECT_EQ(16, buffer->width());
EXPECT_EQ(16, buffer->height());
}
@@ -39,23 +39,23 @@ TEST(TestI420BufferPool, FailToReuse) {
I420BufferPool pool;
rtc::scoped_refptr<VideoFrameBuffer> buffer = pool.CreateBuffer(16, 16);
// Extract non-refcounted pointers for testing.
- const uint8_t* u_ptr = buffer->data(kUPlane);
- const uint8_t* v_ptr = buffer->data(kVPlane);
+ const uint8_t* u_ptr = buffer->DataU();
+ const uint8_t* v_ptr = buffer->DataV();
// Release buffer so that it is returned to the pool.
buffer = nullptr;
// Check that the pool doesn't try to reuse buffers of incorrect size.
buffer = pool.CreateBuffer(32, 16);
EXPECT_EQ(32, buffer->width());
EXPECT_EQ(16, buffer->height());
- EXPECT_NE(u_ptr, buffer->data(kUPlane));
- EXPECT_NE(v_ptr, buffer->data(kVPlane));
+ EXPECT_NE(u_ptr, buffer->DataU());
+ EXPECT_NE(v_ptr, buffer->DataV());
}
TEST(TestI420BufferPool, ExclusiveOwner) {
// Check that created buffers are exclusive so that they can be written to.
I420BufferPool pool;
rtc::scoped_refptr<VideoFrameBuffer> buffer = pool.CreateBuffer(16, 16);
- EXPECT_TRUE(buffer->HasOneRef());
+ EXPECT_TRUE(buffer->IsMutable());
}
TEST(TestI420BufferPool, FrameValidAfterPoolDestruction) {
@@ -64,11 +64,11 @@ TEST(TestI420BufferPool, FrameValidAfterPoolDestruction) {
I420BufferPool pool;
buffer = pool.CreateBuffer(16, 16);
}
- EXPECT_TRUE(buffer->HasOneRef());
+ EXPECT_TRUE(buffer->IsMutable());
EXPECT_EQ(16, buffer->width());
EXPECT_EQ(16, buffer->height());
// Try to trigger use-after-free errors by writing to y-plane.
- memset(buffer->MutableData(kYPlane), 0xA5, 16 * buffer->stride(kYPlane));
+ memset(buffer->MutableDataY(), 0xA5, 16 * buffer->StrideY());
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/common_video/i420_video_frame_unittest.cc b/chromium/third_party/webrtc/common_video/i420_video_frame_unittest.cc
index 9b2bdd762dc..c942e4a8009 100644
--- a/chromium/third_party/webrtc/common_video/i420_video_frame_unittest.cc
+++ b/chromium/third_party/webrtc/common_video/i420_video_frame_unittest.cc
@@ -51,9 +51,9 @@ TEST(TestVideoFrame, SizeAllocation) {
VideoFrame frame;
frame. CreateEmptyFrame(10, 10, 12, 14, 220);
int height = frame.height();
- int stride_y = frame.stride(kYPlane);
- int stride_u = frame.stride(kUPlane);
- int stride_v = frame.stride(kVPlane);
+ int stride_y = frame.video_frame_buffer()->StrideY();
+ int stride_u = frame.video_frame_buffer()->StrideU();
+ int stride_v = frame.video_frame_buffer()->StrideV();
// Verify that allocated size was computed correctly.
EXPECT_EQ(ExpectedSize(stride_y, height, kYPlane),
frame.allocated_size(kYPlane));
@@ -101,9 +101,12 @@ TEST(TestVideoFrame, CopyFrame) {
// Frame of larger dimensions.
small_frame.CreateEmptyFrame(width, height,
stride_y, stride_u, stride_v);
- memset(small_frame.buffer(kYPlane), 1, small_frame.allocated_size(kYPlane));
- memset(small_frame.buffer(kUPlane), 2, small_frame.allocated_size(kUPlane));
- memset(small_frame.buffer(kVPlane), 3, small_frame.allocated_size(kVPlane));
+ memset(small_frame.video_frame_buffer()->MutableDataY(), 1,
+ small_frame.allocated_size(kYPlane));
+ memset(small_frame.video_frame_buffer()->MutableDataU(), 2,
+ small_frame.allocated_size(kUPlane));
+ memset(small_frame.video_frame_buffer()->MutableDataV(), 3,
+ small_frame.allocated_size(kVPlane));
big_frame.CopyFrame(small_frame);
EXPECT_TRUE(test::FramesEqual(small_frame, big_frame));
}
@@ -141,12 +144,12 @@ TEST(TestVideoFrame, ShallowCopy) {
const VideoFrame* const_frame1_ptr = &frame1;
const VideoFrame* const_frame2_ptr = &frame2;
- EXPECT_TRUE(const_frame1_ptr->buffer(kYPlane) ==
- const_frame2_ptr->buffer(kYPlane));
- EXPECT_TRUE(const_frame1_ptr->buffer(kUPlane) ==
- const_frame2_ptr->buffer(kUPlane));
- EXPECT_TRUE(const_frame1_ptr->buffer(kVPlane) ==
- const_frame2_ptr->buffer(kVPlane));
+ EXPECT_TRUE(const_frame1_ptr->video_frame_buffer()->DataY() ==
+ const_frame2_ptr->video_frame_buffer()->DataY());
+ EXPECT_TRUE(const_frame1_ptr->video_frame_buffer()->DataU() ==
+ const_frame2_ptr->video_frame_buffer()->DataU());
+ EXPECT_TRUE(const_frame1_ptr->video_frame_buffer()->DataV() ==
+ const_frame2_ptr->video_frame_buffer()->DataV());
EXPECT_EQ(frame2.timestamp(), frame1.timestamp());
EXPECT_EQ(frame2.ntp_time_ms(), frame1.ntp_time_ms());
@@ -164,21 +167,6 @@ TEST(TestVideoFrame, ShallowCopy) {
EXPECT_NE(frame2.rotation(), frame1.rotation());
}
-TEST(TestVideoFrame, Reset) {
- VideoFrame frame;
- frame.CreateEmptyFrame(5, 5, 5, 5, 5);
- frame.set_ntp_time_ms(1);
- frame.set_timestamp(2);
- frame.set_render_time_ms(3);
- ASSERT_TRUE(frame.video_frame_buffer() != NULL);
-
- frame.Reset();
- EXPECT_EQ(0u, frame.ntp_time_ms());
- EXPECT_EQ(0u, frame.render_time_ms());
- EXPECT_EQ(0u, frame.timestamp());
- EXPECT_TRUE(frame.video_frame_buffer() == NULL);
-}
-
TEST(TestVideoFrame, CopyBuffer) {
VideoFrame frame1, frame2;
int width = 15;
@@ -199,12 +187,12 @@ TEST(TestVideoFrame, CopyBuffer) {
width, height, stride_y, stride_uv, stride_uv,
kVideoRotation_0);
// Expect exactly the same pixel data.
- EXPECT_TRUE(
- test::EqualPlane(buffer_y, frame2.buffer(kYPlane), stride_y, 15, 15));
- EXPECT_TRUE(
- test::EqualPlane(buffer_u, frame2.buffer(kUPlane), stride_uv, 8, 8));
- EXPECT_TRUE(
- test::EqualPlane(buffer_v, frame2.buffer(kVPlane), stride_uv, 8, 8));
+ EXPECT_TRUE(test::EqualPlane(buffer_y, frame2.video_frame_buffer()->DataY(),
+ stride_y, 15, 15));
+ EXPECT_TRUE(test::EqualPlane(buffer_u, frame2.video_frame_buffer()->DataU(),
+ stride_uv, 8, 8));
+ EXPECT_TRUE(test::EqualPlane(buffer_v, frame2.video_frame_buffer()->DataV(),
+ stride_uv, 8, 8));
// Compare size.
EXPECT_LE(kSizeY, frame2.allocated_size(kYPlane));
@@ -215,27 +203,27 @@ TEST(TestVideoFrame, CopyBuffer) {
TEST(TestVideoFrame, ReuseAllocation) {
VideoFrame frame;
frame.CreateEmptyFrame(640, 320, 640, 320, 320);
- const uint8_t* y = frame.buffer(kYPlane);
- const uint8_t* u = frame.buffer(kUPlane);
- const uint8_t* v = frame.buffer(kVPlane);
+ const uint8_t* y = frame.video_frame_buffer()->DataY();
+ const uint8_t* u = frame.video_frame_buffer()->DataU();
+ const uint8_t* v = frame.video_frame_buffer()->DataV();
frame.CreateEmptyFrame(640, 320, 640, 320, 320);
- EXPECT_EQ(y, frame.buffer(kYPlane));
- EXPECT_EQ(u, frame.buffer(kUPlane));
- EXPECT_EQ(v, frame.buffer(kVPlane));
+ EXPECT_EQ(y, frame.video_frame_buffer()->DataY());
+ EXPECT_EQ(u, frame.video_frame_buffer()->DataU());
+ EXPECT_EQ(v, frame.video_frame_buffer()->DataV());
}
TEST(TestVideoFrame, FailToReuseAllocation) {
VideoFrame frame1;
frame1.CreateEmptyFrame(640, 320, 640, 320, 320);
- const uint8_t* y = frame1.buffer(kYPlane);
- const uint8_t* u = frame1.buffer(kUPlane);
- const uint8_t* v = frame1.buffer(kVPlane);
+ const uint8_t* y = frame1.video_frame_buffer()->DataY();
+ const uint8_t* u = frame1.video_frame_buffer()->DataU();
+ const uint8_t* v = frame1.video_frame_buffer()->DataV();
// Make a shallow copy of |frame1|.
VideoFrame frame2(frame1.video_frame_buffer(), 0, 0, kVideoRotation_0);
frame1.CreateEmptyFrame(640, 320, 640, 320, 320);
- EXPECT_NE(y, frame1.buffer(kYPlane));
- EXPECT_NE(u, frame1.buffer(kUPlane));
- EXPECT_NE(v, frame1.buffer(kVPlane));
+ EXPECT_NE(y, frame1.video_frame_buffer()->DataY());
+ EXPECT_NE(u, frame1.video_frame_buffer()->DataU());
+ EXPECT_NE(v, frame1.video_frame_buffer()->DataV());
}
TEST(TestVideoFrame, TextureInitialValues) {
@@ -246,7 +234,8 @@ TEST(TestVideoFrame, TextureInitialValues) {
EXPECT_EQ(480, frame.height());
EXPECT_EQ(100u, frame.timestamp());
EXPECT_EQ(10, frame.render_time_ms());
- EXPECT_EQ(handle, frame.native_handle());
+ ASSERT_TRUE(frame.video_frame_buffer() != nullptr);
+ EXPECT_EQ(handle, frame.video_frame_buffer()->native_handle());
frame.set_timestamp(200);
EXPECT_EQ(200u, frame.timestamp());
@@ -257,9 +246,9 @@ TEST(TestVideoFrame, TextureInitialValues) {
TEST(TestI420FrameBuffer, Copy) {
rtc::scoped_refptr<I420Buffer> buf1(
new rtc::RefCountedObject<I420Buffer>(20, 10));
- memset(buf1->MutableData(kYPlane), 1, 200);
- memset(buf1->MutableData(kUPlane), 2, 50);
- memset(buf1->MutableData(kVPlane), 3, 50);
+ memset(buf1->MutableDataY(), 1, 200);
+ memset(buf1->MutableDataU(), 2, 50);
+ memset(buf1->MutableDataV(), 3, 50);
rtc::scoped_refptr<I420Buffer> buf2 = I420Buffer::Copy(buf1);
EXPECT_TRUE(test::FrameBufsEqual(buf1, buf2));
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/include/bitrate_adjuster.h b/chromium/third_party/webrtc/common_video/include/bitrate_adjuster.h
index ec58445ae27..1f2474f88b1 100644
--- a/chromium/third_party/webrtc/modules/video_coding/include/bitrate_adjuster.h
+++ b/chromium/third_party/webrtc/common_video/include/bitrate_adjuster.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_VIDEO_CODING_INCLUDE_BITRATE_ADJUSTER_H_
-#define WEBRTC_MODULES_VIDEO_CODING_INCLUDE_BITRATE_ADJUSTER_H_
+#ifndef WEBRTC_COMMON_VIDEO_INCLUDE_BITRATE_ADJUSTER_H_
+#define WEBRTC_COMMON_VIDEO_INCLUDE_BITRATE_ADJUSTER_H_
#include <functional>
@@ -86,4 +86,4 @@ class BitrateAdjuster {
} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CODING_INCLUDE_BITRATE_ADJUSTER_H_
+#endif // WEBRTC_COMMON_VIDEO_INCLUDE_BITRATE_ADJUSTER_H_
diff --git a/chromium/third_party/webrtc/frame_callback.h b/chromium/third_party/webrtc/common_video/include/frame_callback.h
index 2bae25036d9..81737c6b962 100644
--- a/chromium/third_party/webrtc/frame_callback.h
+++ b/chromium/third_party/webrtc/common_video/include/frame_callback.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_FRAME_CALLBACK_H_
-#define WEBRTC_FRAME_CALLBACK_H_
+#ifndef WEBRTC_COMMON_VIDEO_INCLUDE_FRAME_CALLBACK_H_
+#define WEBRTC_COMMON_VIDEO_INCLUDE_FRAME_CALLBACK_H_
#include <stddef.h>
#include <stdint.h>
@@ -52,4 +52,4 @@ class EncodedFrameObserver {
} // namespace webrtc
-#endif // WEBRTC_FRAME_CALLBACK_H_
+#endif // WEBRTC_COMMON_VIDEO_INCLUDE_FRAME_CALLBACK_H_
diff --git a/chromium/third_party/webrtc/common_video/include/incoming_video_stream.h b/chromium/third_party/webrtc/common_video/include/incoming_video_stream.h
index b6ab917bc18..32e3184ce97 100644
--- a/chromium/third_party/webrtc/common_video/include/incoming_video_stream.h
+++ b/chromium/third_party/webrtc/common_video/include/incoming_video_stream.h
@@ -17,34 +17,25 @@
#include "webrtc/base/platform_thread.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_video/video_render_frames.h"
+#include "webrtc/media/base/videosinkinterface.h"
namespace webrtc {
class EventTimerWrapper;
-class VideoRenderCallback {
- public:
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame) = 0;
-
- protected:
- virtual ~VideoRenderCallback() {}
-};
-class IncomingVideoStream : public VideoRenderCallback {
+class IncomingVideoStream : public rtc::VideoSinkInterface<VideoFrame> {
public:
- IncomingVideoStream(uint32_t stream_id, bool disable_prerenderer_smoothing);
+ explicit IncomingVideoStream(bool disable_prerenderer_smoothing);
~IncomingVideoStream();
- // Get callback to deliver frames to the module.
- VideoRenderCallback* ModuleCallback();
- virtual int32_t RenderFrame(const uint32_t stream_id,
- const VideoFrame& video_frame);
+ // Overrides VideoSinkInterface
+ void OnFrame(const VideoFrame& video_frame) override;
// Set callback to the platform dependent code.
- void SetRenderCallback(VideoRenderCallback* render_callback);
+ void SetRenderCallback(rtc::VideoSinkInterface<VideoFrame>* render_callback);
// Callback for file recording, snapshot, ...
- void SetExternalCallback(VideoRenderCallback* render_object);
+ void SetExternalCallback(rtc::VideoSinkInterface<VideoFrame>* render_object);
// Start/Stop.
int32_t Start();
@@ -54,14 +45,8 @@ class IncomingVideoStream : public VideoRenderCallback {
int32_t Reset();
// Properties.
- uint32_t StreamId() const;
uint32_t IncomingRate() const;
- void SetStartImage(const VideoFrame& video_frame);
-
- void SetTimeoutImage(const VideoFrame& video_frame,
- const uint32_t timeout);
-
int32_t SetExpectedRenderDelay(int32_t delay_ms);
protected:
@@ -75,7 +60,6 @@ class IncomingVideoStream : public VideoRenderCallback {
void DeliverFrame(const VideoFrame& video_frame);
- uint32_t const stream_id_;
const bool disable_prerenderer_smoothing_;
// Critsects in allowed to enter order.
rtc::CriticalSection stream_critsect_;
@@ -88,19 +72,16 @@ class IncomingVideoStream : public VideoRenderCallback {
std::unique_ptr<EventTimerWrapper> deliver_buffer_event_;
bool running_ GUARDED_BY(stream_critsect_);
- VideoRenderCallback* external_callback_ GUARDED_BY(thread_critsect_);
- VideoRenderCallback* render_callback_ GUARDED_BY(thread_critsect_);
+ rtc::VideoSinkInterface<VideoFrame>* external_callback_
+ GUARDED_BY(thread_critsect_);
+ rtc::VideoSinkInterface<VideoFrame>* render_callback_
+ GUARDED_BY(thread_critsect_);
const std::unique_ptr<VideoRenderFrames> render_buffers_
GUARDED_BY(buffer_critsect_);
uint32_t incoming_rate_ GUARDED_BY(stream_critsect_);
int64_t last_rate_calculation_time_ms_ GUARDED_BY(stream_critsect_);
uint16_t num_frames_since_last_calculation_ GUARDED_BY(stream_critsect_);
- int64_t last_render_time_ms_ GUARDED_BY(thread_critsect_);
- VideoFrame temp_frame_ GUARDED_BY(thread_critsect_);
- VideoFrame start_image_ GUARDED_BY(thread_critsect_);
- VideoFrame timeout_image_ GUARDED_BY(thread_critsect_);
- uint32_t timeout_time_ GUARDED_BY(thread_critsect_);
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/common_video/include/video_frame_buffer.h b/chromium/third_party/webrtc/common_video/include/video_frame_buffer.h
index 9cf57a43591..6f082dee9ba 100644
--- a/chromium/third_party/webrtc/common_video/include/video_frame_buffer.h
+++ b/chromium/third_party/webrtc/common_video/include/video_frame_buffer.h
@@ -33,7 +33,16 @@ enum PlaneType {
// not contain any frame metadata such as rotation, timestamp, pixel_width, etc.
class VideoFrameBuffer : public rtc::RefCountInterface {
public:
- // Returns true if this buffer has a single exclusive owner.
+ // Returns true if the caller is exclusive owner, and allowed to
+ // call MutableData.
+
+ // TODO(nisse): Delete default implementation when subclasses in
+ // Chrome are updated.
+ virtual bool IsMutable() { return false; }
+
+ // Underlying refcount access, used to implement IsMutable.
+ // TODO(nisse): Demote to protected, as soon as Chrome is changed to
+ // use IsMutable.
virtual bool HasOneRef() const = 0;
// The resolution of the frame in pixels. For formats where some planes are
@@ -41,16 +50,36 @@ class VideoFrameBuffer : public rtc::RefCountInterface {
virtual int width() const = 0;
virtual int height() const = 0;
+ // TODO(nisse): For the transition, we use default implementations
+ // of the stride and data methods where the new methods calls the
+ // old method, and the old method calls the new methods. Subclasses
+ // must override either the new methods or the old method, to break
+ // infinite recursion. And similarly for the strides. When
+ // applications, in particular Chrome, are updated, delete the old
+ // method and delete the default implementation of the new methods.
+
// Returns pointer to the pixel data for a given plane. The memory is owned by
// the VideoFrameBuffer object and must not be freed by the caller.
- virtual const uint8_t* data(PlaneType type) const = 0;
-
- // Non-const data access is disallowed by default. You need to make sure you
- // have exclusive access and a writable buffer before calling this function.
+ virtual const uint8_t* DataY() const;
+ virtual const uint8_t* DataU() const;
+ virtual const uint8_t* DataV() const;
+ // Deprecated method.
+ // TODO(nisse): Delete after all users are updated.
+ virtual const uint8_t* data(PlaneType type) const;
+
+ // Non-const data access is allowed only if HasOneRef() is true.
+ virtual uint8_t* MutableDataY();
+ virtual uint8_t* MutableDataU();
+ virtual uint8_t* MutableDataV();
+ // Deprecated method. TODO(nisse): Delete after all users are updated.
virtual uint8_t* MutableData(PlaneType type);
// Returns the number of bytes between successive rows for a given plane.
- virtual int stride(PlaneType type) const = 0;
+ virtual int StrideY() const;
+ virtual int StrideU() const;
+ virtual int StrideV() const;
+ // Deprecated method. TODO(nisse): Delete after all users are updated.
+ virtual int stride(PlaneType type) const;
// Return the handle of the underlying video frame. This is used when the
// frame is backed by a texture.
@@ -73,11 +102,19 @@ class I420Buffer : public VideoFrameBuffer {
int width() const override;
int height() const override;
- const uint8_t* data(PlaneType type) const override;
- // Non-const data access is only allowed if HasOneRef() is true to protect
+ const uint8_t* DataY() const override;
+ const uint8_t* DataU() const override;
+ const uint8_t* DataV() const override;
+ // Non-const data access is only allowed if IsMutable() is true, to protect
// against unexpected overwrites.
- uint8_t* MutableData(PlaneType type) override;
- int stride(PlaneType type) const override;
+ bool IsMutable() override;
+ uint8_t* MutableDataY() override;
+ uint8_t* MutableDataU() override;
+ uint8_t* MutableDataV() override;
+ int StrideY() const override;
+ int StrideU() const override;
+ int StrideV() const override;
+
void* native_handle() const override;
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
@@ -107,9 +144,15 @@ class NativeHandleBuffer : public VideoFrameBuffer {
int width() const override;
int height() const override;
- const uint8_t* data(PlaneType type) const override;
- int stride(PlaneType type) const override;
+ const uint8_t* DataY() const override;
+ const uint8_t* DataU() const override;
+ const uint8_t* DataV() const override;
+ int StrideY() const override;
+ int StrideU() const override;
+ int StrideV() const override;
+
void* native_handle() const override;
+ bool IsMutable() override;
protected:
void* native_handle_;
@@ -131,9 +174,15 @@ class WrappedI420Buffer : public webrtc::VideoFrameBuffer {
int width() const override;
int height() const override;
- const uint8_t* data(PlaneType type) const override;
+ bool IsMutable() override;
+
+ const uint8_t* DataY() const override;
+ const uint8_t* DataU() const override;
+ const uint8_t* DataV() const override;
+ int StrideY() const override;
+ int StrideU() const override;
+ int StrideV() const override;
- int stride(PlaneType type) const override;
void* native_handle() const override;
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
diff --git a/chromium/third_party/webrtc/common_video/incoming_video_stream.cc b/chromium/third_party/webrtc/common_video/incoming_video_stream.cc
index 0e048a6cc00..5fc8eae0bd5 100644
--- a/chromium/third_party/webrtc/common_video/incoming_video_stream.cc
+++ b/chromium/third_party/webrtc/common_video/incoming_video_stream.cc
@@ -22,19 +22,17 @@
#endif
#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/common_video/video_render_frames.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
-IncomingVideoStream::IncomingVideoStream(uint32_t stream_id,
- bool disable_prerenderer_smoothing)
- : stream_id_(stream_id),
- disable_prerenderer_smoothing_(disable_prerenderer_smoothing),
+IncomingVideoStream::IncomingVideoStream(bool disable_prerenderer_smoothing)
+ : disable_prerenderer_smoothing_(disable_prerenderer_smoothing),
incoming_render_thread_(),
deliver_buffer_event_(EventTimerWrapper::Create()),
running_(false),
@@ -43,32 +41,22 @@ IncomingVideoStream::IncomingVideoStream(uint32_t stream_id,
render_buffers_(new VideoRenderFrames()),
incoming_rate_(0),
last_rate_calculation_time_ms_(0),
- num_frames_since_last_calculation_(0),
- last_render_time_ms_(0),
- temp_frame_(),
- start_image_(),
- timeout_image_(),
- timeout_time_() {}
+ num_frames_since_last_calculation_(0) {}
IncomingVideoStream::~IncomingVideoStream() {
Stop();
}
-VideoRenderCallback* IncomingVideoStream::ModuleCallback() {
- return this;
-}
-
-int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id,
- const VideoFrame& video_frame) {
+void IncomingVideoStream::OnFrame(const VideoFrame& video_frame) {
rtc::CritScope csS(&stream_critsect_);
if (!running_) {
- return -1;
+ return;
}
// Rate statistics.
num_frames_since_last_calculation_++;
- int64_t now_ms = TickTime::MillisecondTimestamp();
+ int64_t now_ms = rtc::TimeMillis();
if (now_ms >= last_rate_calculation_time_ms_ + kFrameRatePeriodMs) {
incoming_rate_ =
static_cast<uint32_t>(1000 * num_frames_since_last_calculation_ /
@@ -86,23 +74,10 @@ int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id,
deliver_buffer_event_->Set();
}
}
- return 0;
-}
-
-void IncomingVideoStream::SetStartImage(const VideoFrame& video_frame) {
- rtc::CritScope csS(&thread_critsect_);
- start_image_.CopyFrame(video_frame);
-}
-
-void IncomingVideoStream::SetTimeoutImage(const VideoFrame& video_frame,
- const uint32_t timeout) {
- rtc::CritScope csS(&thread_critsect_);
- timeout_time_ = timeout;
- timeout_image_.CopyFrame(video_frame);
}
void IncomingVideoStream::SetRenderCallback(
- VideoRenderCallback* render_callback) {
+ rtc::VideoSinkInterface<VideoFrame>* render_callback) {
rtc::CritScope cs(&thread_critsect_);
render_callback_ = render_callback;
}
@@ -118,7 +93,7 @@ int32_t IncomingVideoStream::SetExpectedRenderDelay(
}
void IncomingVideoStream::SetExternalCallback(
- VideoRenderCallback* external_callback) {
+ rtc::VideoSinkInterface<VideoFrame>* external_callback) {
rtc::CritScope cs(&thread_critsect_);
external_callback_ = external_callback;
}
@@ -182,10 +157,6 @@ int32_t IncomingVideoStream::Reset() {
return 0;
}
-uint32_t IncomingVideoStream::StreamId() const {
- return stream_id_;
-}
-
uint32_t IncomingVideoStream::IncomingRate() const {
rtc::CritScope cs(&stream_critsect_);
return incoming_rate_;
@@ -226,33 +197,16 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
void IncomingVideoStream::DeliverFrame(const VideoFrame& video_frame) {
rtc::CritScope cs(&thread_critsect_);
if (video_frame.IsZeroSize()) {
- if (render_callback_) {
- if (last_render_time_ms_ == 0 && !start_image_.IsZeroSize()) {
- // We have not rendered anything and have a start image.
- temp_frame_.CopyFrame(start_image_);
- render_callback_->RenderFrame(stream_id_, temp_frame_);
- } else if (!timeout_image_.IsZeroSize() &&
- last_render_time_ms_ + timeout_time_ <
- TickTime::MillisecondTimestamp()) {
- // Render a timeout image.
- temp_frame_.CopyFrame(timeout_image_);
- render_callback_->RenderFrame(stream_id_, temp_frame_);
- }
- }
-
// No frame.
return;
}
// Send frame for rendering.
if (external_callback_) {
- external_callback_->RenderFrame(stream_id_, video_frame);
+ external_callback_->OnFrame(video_frame);
} else if (render_callback_) {
- render_callback_->RenderFrame(stream_id_, video_frame);
+ render_callback_->OnFrame(video_frame);
}
-
- // We're done with this frame.
- last_render_time_ms_ = video_frame.render_time_ms();
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/common_video/libyuv/libyuv_unittest.cc b/chromium/third_party/webrtc/common_video/libyuv/libyuv_unittest.cc
index 9f92b8bb15c..e45b5334704 100644
--- a/chromium/third_party/webrtc/common_video/libyuv/libyuv_unittest.cc
+++ b/chromium/third_party/webrtc/common_video/libyuv/libyuv_unittest.cc
@@ -15,67 +15,11 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/video_frame.h"
namespace webrtc {
-int PrintBuffer(const uint8_t* buffer, int width, int height, int stride) {
- if (buffer == NULL)
- return -1;
- int k;
- const uint8_t* tmp_buffer = buffer;
- for (int i = 0; i < height; i++) {
- k = 0;
- for (int j = 0; j < width; j++) {
- printf("%d ", tmp_buffer[k++]);
- }
- tmp_buffer += stride;
- printf(" \n");
- }
- printf(" \n");
- return 0;
-}
-
-int PrintFrame(const VideoFrame* frame, const char* str) {
- if (frame == NULL)
- return -1;
- printf("%s %dx%d \n", str, frame->width(), frame->height());
-
- int ret = 0;
- for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) {
- PlaneType plane_type = static_cast<PlaneType>(plane_num);
- int width = (plane_num ? (frame->width() + 1) / 2 : frame->width());
- int height = (plane_num ? (frame->height() + 1) / 2 : frame->height());
- ret += PrintBuffer(frame->buffer(plane_type), width, height,
- frame->stride(plane_type));
- }
- return ret;
-}
-
-
-// Create an image from on a YUV frame. Every plane value starts with a start
-// value, and will be set to increasing values.
-void CreateImage(VideoFrame* frame, int plane_offset[kNumOfPlanes]) {
- if (frame == NULL)
- return;
- for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) {
- int width = (plane_num != kYPlane ? (frame->width() + 1) / 2 :
- frame->width());
- int height = (plane_num != kYPlane ? (frame->height() + 1) / 2 :
- frame->height());
- PlaneType plane_type = static_cast<PlaneType>(plane_num);
- uint8_t *data = frame->buffer(plane_type);
- for (int i = 0; i < height; i++) {
- for (int j = 0; j < width; j++) {
- data[j] = static_cast<uint8_t>(i + plane_offset[plane_num] + j);
- }
- data += frame->stride(plane_type);
- }
- }
-}
-
class TestLibYuv : public ::testing::Test {
protected:
TestLibYuv();
diff --git a/chromium/third_party/webrtc/common_video/libyuv/scaler.cc b/chromium/third_party/webrtc/common_video/libyuv/scaler.cc
index c6adbf95078..6e683c003b4 100644
--- a/chromium/third_party/webrtc/common_video/libyuv/scaler.cc
+++ b/chromium/third_party/webrtc/common_video/libyuv/scaler.cc
@@ -47,6 +47,7 @@ int Scaler::Set(int src_width, int src_height,
return 0;
}
+// TODO(nisse): Should work with VideoFrameBuffer instead.
int Scaler::Scale(const VideoFrame& src_frame, VideoFrame* dst_frame) {
assert(dst_frame);
if (src_frame.IsZeroSize())
@@ -69,31 +70,35 @@ int Scaler::Scale(const VideoFrame& src_frame, VideoFrame* dst_frame) {
const int src_offset_x = ((src_width_ - cropped_src_width) / 2) & ~1;
const int src_offset_y = ((src_height_ - cropped_src_height) / 2) & ~1;
- const uint8_t* y_ptr = src_frame.buffer(kYPlane) +
- src_offset_y * src_frame.stride(kYPlane) +
- src_offset_x;
- const uint8_t* u_ptr = src_frame.buffer(kUPlane) +
- src_offset_y / 2 * src_frame.stride(kUPlane) +
- src_offset_x / 2;
- const uint8_t* v_ptr = src_frame.buffer(kVPlane) +
- src_offset_y / 2 * src_frame.stride(kVPlane) +
- src_offset_x / 2;
-
- return libyuv::I420Scale(y_ptr,
- src_frame.stride(kYPlane),
- u_ptr,
- src_frame.stride(kUPlane),
- v_ptr,
- src_frame.stride(kVPlane),
- cropped_src_width, cropped_src_height,
- dst_frame->buffer(kYPlane),
- dst_frame->stride(kYPlane),
- dst_frame->buffer(kUPlane),
- dst_frame->stride(kUPlane),
- dst_frame->buffer(kVPlane),
- dst_frame->stride(kVPlane),
- dst_width_, dst_height_,
- libyuv::FilterMode(method_));
+ const uint8_t* y_ptr =
+ src_frame.video_frame_buffer()->DataY() +
+ src_offset_y * src_frame.video_frame_buffer()->StrideY() +
+ src_offset_x;
+ const uint8_t* u_ptr =
+ src_frame.video_frame_buffer()->DataU() +
+ src_offset_y / 2 * src_frame.video_frame_buffer()->StrideU() +
+ src_offset_x / 2;
+ const uint8_t* v_ptr =
+ src_frame.video_frame_buffer()->DataV() +
+ src_offset_y / 2 * src_frame.video_frame_buffer()->StrideV() +
+ src_offset_x / 2;
+
+ return libyuv::I420Scale(
+ y_ptr,
+ src_frame.video_frame_buffer()->StrideY(),
+ u_ptr,
+ src_frame.video_frame_buffer()->StrideU(),
+ v_ptr,
+ src_frame.video_frame_buffer()->StrideV(),
+ cropped_src_width, cropped_src_height,
+ dst_frame->video_frame_buffer()->MutableDataY(),
+ dst_frame->video_frame_buffer()->StrideY(),
+ dst_frame->video_frame_buffer()->MutableDataU(),
+ dst_frame->video_frame_buffer()->StrideU(),
+ dst_frame->video_frame_buffer()->MutableDataV(),
+ dst_frame->video_frame_buffer()->StrideV(),
+ dst_width_, dst_height_,
+ libyuv::FilterMode(method_));
}
bool Scaler::SupportedVideoType(VideoType src_video_type,
diff --git a/chromium/third_party/webrtc/common_video/libyuv/scaler_unittest.cc b/chromium/third_party/webrtc/common_video/libyuv/scaler_unittest.cc
index 9ba1b9d94f3..29a0a7403d6 100644
--- a/chromium/third_party/webrtc/common_video/libyuv/scaler_unittest.cc
+++ b/chromium/third_party/webrtc/common_video/libyuv/scaler_unittest.cc
@@ -14,8 +14,8 @@
#include <memory>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/scaler.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
namespace webrtc {
@@ -378,9 +378,9 @@ void TestScaler::ScaleSequence(ScaleMethod method,
(src_width + 1) / 2,
kVideoRotation_0);
- start_clock = TickTime::MillisecondTimestamp();
+ start_clock = rtc::TimeMillis();
EXPECT_EQ(0, test_scaler_.Scale(input_frame, &output_frame));
- total_clock += TickTime::MillisecondTimestamp() - start_clock;
+ total_clock += rtc::TimeMillis() - start_clock;
if (PrintVideoFrame(output_frame, output_file) < 0) {
return;
}
diff --git a/chromium/third_party/webrtc/common_video/libyuv/webrtc_libyuv.cc b/chromium/third_party/webrtc/common_video/libyuv/webrtc_libyuv.cc
index 48f5c2036ba..7f5e3300c13 100644
--- a/chromium/third_party/webrtc/common_video/libyuv/webrtc_libyuv.cc
+++ b/chromium/third_party/webrtc/common_video/libyuv/webrtc_libyuv.cc
@@ -102,23 +102,42 @@ size_t CalcBufferSize(VideoType type, int width, int height) {
return buffer_size;
}
+static int PrintPlane(const uint8_t* buf,
+ int width,
+ int height,
+ int stride,
+ FILE* file) {
+ for (int i = 0; i < height; i++, buf += stride) {
+ if (fwrite(buf, 1, width, file) != static_cast<unsigned int>(width))
+ return -1;
+ }
+ return 0;
+}
+
+// TODO(nisse): Belongs with the test code?
int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
if (file == NULL)
return -1;
if (frame.IsZeroSize())
return -1;
- for (int planeNum = 0; planeNum < kNumOfPlanes; ++planeNum) {
- int width = (planeNum ? (frame.width() + 1) / 2 : frame.width());
- int height = (planeNum ? (frame.height() + 1) / 2 : frame.height());
- PlaneType plane_type = static_cast<PlaneType>(planeNum);
- const uint8_t* plane_buffer = frame.buffer(plane_type);
- for (int y = 0; y < height; y++) {
- if (fwrite(plane_buffer, 1, width, file) !=
- static_cast<unsigned int>(width)) {
- return -1;
- }
- plane_buffer += frame.stride(plane_type);
- }
+ int width = frame.video_frame_buffer()->width();
+ int height = frame.video_frame_buffer()->height();
+ int chroma_width = (width + 1) / 2;
+ int chroma_height = (height + 1) / 2;
+
+ if (PrintPlane(frame.video_frame_buffer()->DataY(), width, height,
+ frame.video_frame_buffer()->StrideY(), file) < 0) {
+ return -1;
+ }
+ if (PrintPlane(frame.video_frame_buffer()->DataU(),
+ chroma_width, chroma_height,
+ frame.video_frame_buffer()->StrideU(), file) < 0) {
+ return -1;
+ }
+ if (PrintPlane(frame.video_frame_buffer()->DataV(),
+ chroma_width, chroma_height,
+ frame.video_frame_buffer()->StrideV(), file) < 0) {
+ return -1;
}
return 0;
}
@@ -133,22 +152,23 @@ int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer) {
return -1;
}
- int pos = 0;
- uint8_t* buffer_ptr = buffer;
+ int width = input_frame.video_frame_buffer()->width();
+ int height = input_frame.video_frame_buffer()->height();
+ int chroma_width = (width + 1) / 2;
+ int chroma_height = (height + 1) / 2;
+
+ libyuv::I420Copy(input_frame.video_frame_buffer()->DataY(),
+ input_frame.video_frame_buffer()->StrideY(),
+ input_frame.video_frame_buffer()->DataU(),
+ input_frame.video_frame_buffer()->StrideU(),
+ input_frame.video_frame_buffer()->DataV(),
+ input_frame.video_frame_buffer()->StrideV(),
+ buffer, width,
+ buffer + width*height, chroma_width,
+ buffer + width*height + chroma_width*chroma_height,
+ chroma_width,
+ width, height);
- for (int plane = 0; plane < kNumOfPlanes; ++plane) {
- int width = (plane ? (input_frame.width() + 1) / 2 :
- input_frame.width());
- int height = (plane ? (input_frame.height() + 1) / 2 :
- input_frame.height());
- const uint8_t* plane_ptr = input_frame.buffer(
- static_cast<PlaneType>(plane));
- for (int y = 0; y < height; y++) {
- memcpy(&buffer_ptr[pos], plane_ptr, width);
- pos += width;
- plane_ptr += input_frame.stride(static_cast<PlaneType>(plane));
- }
- }
return static_cast<int>(length);
}
@@ -228,6 +248,7 @@ int ConvertVideoType(VideoType video_type) {
return libyuv::FOURCC_ANY;
}
+// TODO(nisse): Delete this wrapper, let callers use libyuv directly.
int ConvertToI420(VideoType src_video_type,
const uint8_t* src_frame,
int crop_x,
@@ -245,33 +266,35 @@ int ConvertToI420(VideoType src_video_type,
dst_width = dst_frame->height();
dst_height = dst_frame->width();
}
- return libyuv::ConvertToI420(src_frame, sample_size,
- dst_frame->buffer(kYPlane),
- dst_frame->stride(kYPlane),
- dst_frame->buffer(kUPlane),
- dst_frame->stride(kUPlane),
- dst_frame->buffer(kVPlane),
- dst_frame->stride(kVPlane),
- crop_x, crop_y,
- src_width, src_height,
- dst_width, dst_height,
- ConvertRotationMode(rotation),
- ConvertVideoType(src_video_type));
+ return libyuv::ConvertToI420(
+ src_frame, sample_size,
+ dst_frame->video_frame_buffer()->MutableDataY(),
+ dst_frame->video_frame_buffer()->StrideY(),
+ dst_frame->video_frame_buffer()->MutableDataU(),
+ dst_frame->video_frame_buffer()->StrideU(),
+ dst_frame->video_frame_buffer()->MutableDataV(),
+ dst_frame->video_frame_buffer()->StrideV(),
+ crop_x, crop_y,
+ src_width, src_height,
+ dst_width, dst_height,
+ ConvertRotationMode(rotation),
+ ConvertVideoType(src_video_type));
}
int ConvertFromI420(const VideoFrame& src_frame,
VideoType dst_video_type,
int dst_sample_size,
uint8_t* dst_frame) {
- return libyuv::ConvertFromI420(src_frame.buffer(kYPlane),
- src_frame.stride(kYPlane),
- src_frame.buffer(kUPlane),
- src_frame.stride(kUPlane),
- src_frame.buffer(kVPlane),
- src_frame.stride(kVPlane),
- dst_frame, dst_sample_size,
- src_frame.width(), src_frame.height(),
- ConvertVideoType(dst_video_type));
+ return libyuv::ConvertFromI420(
+ src_frame.video_frame_buffer()->DataY(),
+ src_frame.video_frame_buffer()->StrideY(),
+ src_frame.video_frame_buffer()->DataU(),
+ src_frame.video_frame_buffer()->StrideU(),
+ src_frame.video_frame_buffer()->DataV(),
+ src_frame.video_frame_buffer()->StrideV(),
+ dst_frame, dst_sample_size,
+ src_frame.width(), src_frame.height(),
+ ConvertVideoType(dst_video_type));
}
// TODO(mikhal): Create a designated VideoFrame for non I420.
@@ -280,15 +303,16 @@ int ConvertFromYV12(const VideoFrame& src_frame,
int dst_sample_size,
uint8_t* dst_frame) {
// YV12 = Y, V, U
- return libyuv::ConvertFromI420(src_frame.buffer(kYPlane),
- src_frame.stride(kYPlane),
- src_frame.buffer(kVPlane),
- src_frame.stride(kVPlane),
- src_frame.buffer(kUPlane),
- src_frame.stride(kUPlane),
- dst_frame, dst_sample_size,
- src_frame.width(), src_frame.height(),
- ConvertVideoType(dst_video_type));
+ return libyuv::ConvertFromI420(
+ src_frame.video_frame_buffer()->DataY(),
+ src_frame.video_frame_buffer()->StrideY(),
+ src_frame.video_frame_buffer()->DataV(),
+ src_frame.video_frame_buffer()->StrideV(),
+ src_frame.video_frame_buffer()->DataU(),
+ src_frame.video_frame_buffer()->StrideU(),
+ dst_frame, dst_sample_size,
+ src_frame.width(), src_frame.height(),
+ ConvertVideoType(dst_video_type));
}
// Compute PSNR for an I420 frame (all planes)
@@ -301,18 +325,18 @@ double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame) {
else if (ref_frame->width() < 0 || ref_frame->height() < 0)
return -1;
- double psnr = libyuv::I420Psnr(ref_frame->buffer(kYPlane),
- ref_frame->stride(kYPlane),
- ref_frame->buffer(kUPlane),
- ref_frame->stride(kUPlane),
- ref_frame->buffer(kVPlane),
- ref_frame->stride(kVPlane),
- test_frame->buffer(kYPlane),
- test_frame->stride(kYPlane),
- test_frame->buffer(kUPlane),
- test_frame->stride(kUPlane),
- test_frame->buffer(kVPlane),
- test_frame->stride(kVPlane),
+ double psnr = libyuv::I420Psnr(ref_frame->video_frame_buffer()->DataY(),
+ ref_frame->video_frame_buffer()->StrideY(),
+ ref_frame->video_frame_buffer()->DataU(),
+ ref_frame->video_frame_buffer()->StrideU(),
+ ref_frame->video_frame_buffer()->DataV(),
+ ref_frame->video_frame_buffer()->StrideV(),
+ test_frame->video_frame_buffer()->DataY(),
+ test_frame->video_frame_buffer()->StrideY(),
+ test_frame->video_frame_buffer()->DataU(),
+ test_frame->video_frame_buffer()->StrideU(),
+ test_frame->video_frame_buffer()->DataV(),
+ test_frame->video_frame_buffer()->StrideV(),
test_frame->width(), test_frame->height());
// LibYuv sets the max psnr value to 128, we restrict it here.
// In case of 0 mse in one frame, 128 can skew the results significantly.
@@ -329,18 +353,18 @@ double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame) {
else if (ref_frame->width() < 0 || ref_frame->height() < 0)
return -1;
- return libyuv::I420Ssim(ref_frame->buffer(kYPlane),
- ref_frame->stride(kYPlane),
- ref_frame->buffer(kUPlane),
- ref_frame->stride(kUPlane),
- ref_frame->buffer(kVPlane),
- ref_frame->stride(kVPlane),
- test_frame->buffer(kYPlane),
- test_frame->stride(kYPlane),
- test_frame->buffer(kUPlane),
- test_frame->stride(kUPlane),
- test_frame->buffer(kVPlane),
- test_frame->stride(kVPlane),
+ return libyuv::I420Ssim(ref_frame->video_frame_buffer()->DataY(),
+ ref_frame->video_frame_buffer()->StrideY(),
+ ref_frame->video_frame_buffer()->DataU(),
+ ref_frame->video_frame_buffer()->StrideU(),
+ ref_frame->video_frame_buffer()->DataV(),
+ ref_frame->video_frame_buffer()->StrideV(),
+ test_frame->video_frame_buffer()->DataY(),
+ test_frame->video_frame_buffer()->StrideY(),
+ test_frame->video_frame_buffer()->DataU(),
+ test_frame->video_frame_buffer()->StrideU(),
+ test_frame->video_frame_buffer()->DataV(),
+ test_frame->video_frame_buffer()->StrideV(),
test_frame->width(), test_frame->height());
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/common_video/video_frame.cc b/chromium/third_party/webrtc/common_video/video_frame.cc
index a30f658ea0e..cf6b9c82af3 100644
--- a/chromium/third_party/webrtc/common_video/video_frame.cc
+++ b/chromium/third_party/webrtc/common_video/video_frame.cc
@@ -29,11 +29,12 @@ int ExpectedSize(int plane_stride, int image_height, PlaneType type) {
return plane_stride * ((image_height + 1) / 2);
}
-VideoFrame::VideoFrame() {
- // Intentionally using Reset instead of initializer list so that any missed
- // fields in Reset will be caught by memory checkers.
- Reset();
-}
+VideoFrame::VideoFrame()
+ : video_frame_buffer_(nullptr),
+ timestamp_(0),
+ ntp_time_ms_(0),
+ render_time_ms_(0),
+ rotation_(kVideoRotation_0) {}
VideoFrame::VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
uint32_t timestamp,
@@ -65,11 +66,13 @@ void VideoFrame::CreateEmptyFrame(int width,
rotation_ = kVideoRotation_0;
// Check if it's safe to reuse allocation.
- if (video_frame_buffer_ && video_frame_buffer_->HasOneRef() &&
+ if (video_frame_buffer_ && video_frame_buffer_->IsMutable() &&
!video_frame_buffer_->native_handle() &&
width == video_frame_buffer_->width() &&
- height == video_frame_buffer_->height() && stride_y == stride(kYPlane) &&
- stride_u == stride(kUPlane) && stride_v == stride(kVPlane)) {
+ height == video_frame_buffer_->height() &&
+ stride_y == video_frame_buffer_->StrideY() &&
+ stride_u == video_frame_buffer_->StrideU() &&
+ stride_v == video_frame_buffer_->StrideV()) {
return;
}
@@ -92,9 +95,9 @@ void VideoFrame::CreateFrame(const uint8_t* buffer_y,
const int expected_size_u = half_height * stride_u;
const int expected_size_v = half_height * stride_v;
CreateEmptyFrame(width, height, stride_y, stride_u, stride_v);
- memcpy(buffer(kYPlane), buffer_y, expected_size_y);
- memcpy(buffer(kUPlane), buffer_u, expected_size_u);
- memcpy(buffer(kVPlane), buffer_v, expected_size_v);
+ memcpy(video_frame_buffer_->MutableDataY(), buffer_y, expected_size_y);
+ memcpy(video_frame_buffer_->MutableDataU(), buffer_u, expected_size_u);
+ memcpy(video_frame_buffer_->MutableDataV(), buffer_v, expected_size_v);
rotation_ = rotation;
}
@@ -113,22 +116,12 @@ void VideoFrame::CreateFrame(const uint8_t* buffer,
}
void VideoFrame::CopyFrame(const VideoFrame& videoFrame) {
- if (videoFrame.IsZeroSize()) {
- video_frame_buffer_ = nullptr;
- } else if (videoFrame.native_handle()) {
- video_frame_buffer_ = videoFrame.video_frame_buffer();
- } else {
- CreateFrame(videoFrame.buffer(kYPlane), videoFrame.buffer(kUPlane),
- videoFrame.buffer(kVPlane), videoFrame.width(),
- videoFrame.height(), videoFrame.stride(kYPlane),
- videoFrame.stride(kUPlane), videoFrame.stride(kVPlane),
- kVideoRotation_0);
- }
+ ShallowCopy(videoFrame);
- timestamp_ = videoFrame.timestamp_;
- ntp_time_ms_ = videoFrame.ntp_time_ms_;
- render_time_ms_ = videoFrame.render_time_ms_;
- rotation_ = videoFrame.rotation_;
+ // If backed by a plain memory buffer, create a new, non-shared, copy.
+ if (video_frame_buffer_ && !video_frame_buffer_->native_handle()) {
+ video_frame_buffer_ = I420Buffer::Copy(video_frame_buffer_);
+ }
}
void VideoFrame::ShallowCopy(const VideoFrame& videoFrame) {
@@ -139,30 +132,26 @@ void VideoFrame::ShallowCopy(const VideoFrame& videoFrame) {
rotation_ = videoFrame.rotation_;
}
-void VideoFrame::Reset() {
- video_frame_buffer_ = nullptr;
- timestamp_ = 0;
- ntp_time_ms_ = 0;
- render_time_ms_ = 0;
- rotation_ = kVideoRotation_0;
-}
-
-uint8_t* VideoFrame::buffer(PlaneType type) {
- return video_frame_buffer_ ? video_frame_buffer_->MutableData(type)
- : nullptr;
-}
-
-const uint8_t* VideoFrame::buffer(PlaneType type) const {
- return video_frame_buffer_ ? video_frame_buffer_->data(type) : nullptr;
-}
-
+// TODO(nisse): Delete. Besides test code, only one use, in
+// webrtcvideoengine2.cc:CreateBlackFrame.
int VideoFrame::allocated_size(PlaneType type) const {
const int plane_height = (type == kYPlane) ? height() : (height() + 1) / 2;
- return plane_height * stride(type);
-}
-
-int VideoFrame::stride(PlaneType type) const {
- return video_frame_buffer_ ? video_frame_buffer_->stride(type) : 0;
+ int stride;
+ switch (type) {
+ case kYPlane:
+ stride = video_frame_buffer_->StrideY();
+ break;
+ case kUPlane:
+ stride = video_frame_buffer_->StrideU();
+ break;
+ case kVPlane:
+ stride = video_frame_buffer_->StrideV();
+ break;
+ default:
+ RTC_NOTREACHED();
+ return 0;
+ }
+ return plane_height * stride;
}
int VideoFrame::width() const {
@@ -177,11 +166,8 @@ bool VideoFrame::IsZeroSize() const {
return !video_frame_buffer_;
}
-void* VideoFrame::native_handle() const {
- return video_frame_buffer_ ? video_frame_buffer_->native_handle() : nullptr;
-}
-
-rtc::scoped_refptr<VideoFrameBuffer> VideoFrame::video_frame_buffer() const {
+const rtc::scoped_refptr<VideoFrameBuffer>& VideoFrame::video_frame_buffer()
+ const {
return video_frame_buffer_;
}
@@ -191,7 +177,7 @@ void VideoFrame::set_video_frame_buffer(
}
VideoFrame VideoFrame::ConvertNativeToI420Frame() const {
- RTC_DCHECK(native_handle());
+ RTC_DCHECK(video_frame_buffer_->native_handle());
VideoFrame frame;
frame.ShallowCopy(*this);
frame.set_video_frame_buffer(video_frame_buffer_->NativeToI420Buffer());
diff --git a/chromium/third_party/webrtc/common_video/video_frame_buffer.cc b/chromium/third_party/webrtc/common_video/video_frame_buffer.cc
index 6f49e8aef9d..700dcaf02b7 100644
--- a/chromium/third_party/webrtc/common_video/video_frame_buffer.cc
+++ b/chromium/third_party/webrtc/common_video/video_frame_buffer.cc
@@ -27,10 +27,80 @@ int I420DataSize(int height, int stride_y, int stride_u, int stride_v) {
} // namespace
-uint8_t* VideoFrameBuffer::MutableData(PlaneType type) {
+const uint8_t* VideoFrameBuffer::data(PlaneType type) const {
+ switch (type) {
+ case kYPlane:
+ return DataY();
+ case kUPlane:
+ return DataU();
+ case kVPlane:
+ return DataV();
+ default:
+ RTC_NOTREACHED();
+ return nullptr;
+ }
+}
+
+const uint8_t* VideoFrameBuffer::DataY() const {
+ return data(kYPlane);
+}
+const uint8_t* VideoFrameBuffer::DataU() const {
+ return data(kUPlane);
+}
+const uint8_t* VideoFrameBuffer::DataV() const {
+ return data(kVPlane);
+}
+
+int VideoFrameBuffer::stride(PlaneType type) const {
+ switch (type) {
+ case kYPlane:
+ return StrideY();
+ case kUPlane:
+ return StrideU();
+ case kVPlane:
+ return StrideV();
+ default:
+ RTC_NOTREACHED();
+ return 0;
+ }
+}
+
+int VideoFrameBuffer::StrideY() const {
+ return stride(kYPlane);
+}
+int VideoFrameBuffer::StrideU() const {
+ return stride(kUPlane);
+}
+int VideoFrameBuffer::StrideV() const {
+ return stride(kVPlane);
+}
+
+uint8_t* VideoFrameBuffer::MutableDataY() {
RTC_NOTREACHED();
return nullptr;
}
+uint8_t* VideoFrameBuffer::MutableDataU() {
+ RTC_NOTREACHED();
+ return nullptr;
+}
+uint8_t* VideoFrameBuffer::MutableDataV() {
+ RTC_NOTREACHED();
+ return nullptr;
+}
+
+uint8_t* VideoFrameBuffer::MutableData(PlaneType type) {
+ switch (type) {
+ case kYPlane:
+ return MutableDataY();
+ case kUPlane:
+ return MutableDataU();
+ case kVPlane:
+ return MutableDataV();
+ default:
+ RTC_NOTREACHED();
+ return nullptr;
+ }
+}
VideoFrameBuffer::~VideoFrameBuffer() {}
@@ -74,39 +144,41 @@ int I420Buffer::height() const {
return height_;
}
-const uint8_t* I420Buffer::data(PlaneType type) const {
- switch (type) {
- case kYPlane:
- return data_.get();
- case kUPlane:
- return data_.get() + stride_y_ * height_;
- case kVPlane:
- return data_.get() + stride_y_ * height_ +
- stride_u_ * ((height_ + 1) / 2);
- default:
- RTC_NOTREACHED();
- return nullptr;
- }
+const uint8_t* I420Buffer::DataY() const {
+ return data_.get();
+}
+const uint8_t* I420Buffer::DataU() const {
+ return data_.get() + stride_y_ * height_;
+}
+const uint8_t* I420Buffer::DataV() const {
+ return data_.get() + stride_y_ * height_ + stride_u_ * ((height_ + 1) / 2);
}
-uint8_t* I420Buffer::MutableData(PlaneType type) {
- RTC_DCHECK(HasOneRef());
- return const_cast<uint8_t*>(
- static_cast<const VideoFrameBuffer*>(this)->data(type));
+bool I420Buffer::IsMutable() {
+ return HasOneRef();
}
-int I420Buffer::stride(PlaneType type) const {
- switch (type) {
- case kYPlane:
- return stride_y_;
- case kUPlane:
- return stride_u_;
- case kVPlane:
- return stride_v_;
- default:
- RTC_NOTREACHED();
- return 0;
- }
+uint8_t* I420Buffer::MutableDataY() {
+ RTC_DCHECK(IsMutable());
+ return const_cast<uint8_t*>(DataY());
+}
+uint8_t* I420Buffer::MutableDataU() {
+ RTC_DCHECK(IsMutable());
+ return const_cast<uint8_t*>(DataU());
+}
+uint8_t* I420Buffer::MutableDataV() {
+ RTC_DCHECK(IsMutable());
+ return const_cast<uint8_t*>(DataV());
+}
+
+int I420Buffer::StrideY() const {
+ return stride_y_;
+}
+int I420Buffer::StrideU() const {
+ return stride_u_;
+}
+int I420Buffer::StrideV() const {
+ return stride_v_;
}
void* I420Buffer::native_handle() const {
@@ -124,12 +196,12 @@ rtc::scoped_refptr<I420Buffer> I420Buffer::Copy(
int height = buffer->height();
rtc::scoped_refptr<I420Buffer> copy =
new rtc::RefCountedObject<I420Buffer>(width, height);
- RTC_CHECK(libyuv::I420Copy(buffer->data(kYPlane), buffer->stride(kYPlane),
- buffer->data(kUPlane), buffer->stride(kUPlane),
- buffer->data(kVPlane), buffer->stride(kVPlane),
- copy->MutableData(kYPlane), copy->stride(kYPlane),
- copy->MutableData(kUPlane), copy->stride(kUPlane),
- copy->MutableData(kVPlane), copy->stride(kVPlane),
+ RTC_CHECK(libyuv::I420Copy(buffer->DataY(), buffer->StrideY(),
+ buffer->DataU(), buffer->StrideU(),
+ buffer->DataV(), buffer->StrideV(),
+ copy->MutableDataY(), copy->StrideY(),
+ copy->MutableDataU(), copy->StrideU(),
+ copy->MutableDataV(), copy->StrideV(),
width, height) == 0);
return copy;
@@ -144,6 +216,10 @@ NativeHandleBuffer::NativeHandleBuffer(void* native_handle,
RTC_DCHECK_GT(height, 0);
}
+bool NativeHandleBuffer::IsMutable() {
+ return false;
+}
+
int NativeHandleBuffer::width() const {
return width_;
}
@@ -152,12 +228,28 @@ int NativeHandleBuffer::height() const {
return height_;
}
-const uint8_t* NativeHandleBuffer::data(PlaneType type) const {
+const uint8_t* NativeHandleBuffer::DataY() const {
+ RTC_NOTREACHED(); // Should not be called.
+ return nullptr;
+}
+const uint8_t* NativeHandleBuffer::DataU() const {
+ RTC_NOTREACHED(); // Should not be called.
+ return nullptr;
+}
+const uint8_t* NativeHandleBuffer::DataV() const {
RTC_NOTREACHED(); // Should not be called.
return nullptr;
}
-int NativeHandleBuffer::stride(PlaneType type) const {
+int NativeHandleBuffer::StrideY() const {
+ RTC_NOTREACHED(); // Should not be called.
+ return 0;
+}
+int NativeHandleBuffer::StrideU() const {
+ RTC_NOTREACHED(); // Should not be called.
+ return 0;
+}
+int NativeHandleBuffer::StrideV() const {
RTC_NOTREACHED(); // Should not be called.
return 0;
}
@@ -190,6 +282,11 @@ WrappedI420Buffer::~WrappedI420Buffer() {
no_longer_used_cb_();
}
+// Data owned by creator; never mutable.
+bool WrappedI420Buffer::IsMutable() {
+ return false;
+}
+
int WrappedI420Buffer::width() const {
return width_;
}
@@ -198,32 +295,24 @@ int WrappedI420Buffer::height() const {
return height_;
}
-const uint8_t* WrappedI420Buffer::data(PlaneType type) const {
- switch (type) {
- case kYPlane:
- return y_plane_;
- case kUPlane:
- return u_plane_;
- case kVPlane:
- return v_plane_;
- default:
- RTC_NOTREACHED();
- return nullptr;
- }
+const uint8_t* WrappedI420Buffer::DataY() const {
+ return y_plane_;
+}
+const uint8_t* WrappedI420Buffer::DataU() const {
+ return u_plane_;
+}
+const uint8_t* WrappedI420Buffer::DataV() const {
+ return v_plane_;
}
-int WrappedI420Buffer::stride(PlaneType type) const {
- switch (type) {
- case kYPlane:
- return y_stride_;
- case kUPlane:
- return u_stride_;
- case kVPlane:
- return v_stride_;
- default:
- RTC_NOTREACHED();
- return 0;
- }
+int WrappedI420Buffer::StrideY() const {
+ return y_stride_;
+}
+int WrappedI420Buffer::StrideU() const {
+ return u_stride_;
+}
+int WrappedI420Buffer::StrideV() const {
+ return v_stride_;
}
void* WrappedI420Buffer::native_handle() const {
@@ -252,17 +341,17 @@ rtc::scoped_refptr<VideoFrameBuffer> ShallowCenterCrop(
const int offset_x = uv_offset_x * 2;
const int offset_y = uv_offset_y * 2;
- const uint8_t* y_plane = buffer->data(kYPlane) +
- buffer->stride(kYPlane) * offset_y + offset_x;
- const uint8_t* u_plane = buffer->data(kUPlane) +
- buffer->stride(kUPlane) * uv_offset_y + uv_offset_x;
- const uint8_t* v_plane = buffer->data(kVPlane) +
- buffer->stride(kVPlane) * uv_offset_y + uv_offset_x;
+ const uint8_t* y_plane = buffer->DataY() +
+ buffer->StrideY() * offset_y + offset_x;
+ const uint8_t* u_plane = buffer->DataU() +
+ buffer->StrideU() * uv_offset_y + uv_offset_x;
+ const uint8_t* v_plane = buffer->DataV() +
+ buffer->StrideV() * uv_offset_y + uv_offset_x;
return new rtc::RefCountedObject<WrappedI420Buffer>(
cropped_width, cropped_height,
- y_plane, buffer->stride(kYPlane),
- u_plane, buffer->stride(kUPlane),
- v_plane, buffer->stride(kVPlane),
+ y_plane, buffer->StrideY(),
+ u_plane, buffer->StrideU(),
+ v_plane, buffer->StrideV(),
rtc::KeepRefUntilDone(buffer));
}
diff --git a/chromium/third_party/webrtc/common_video/video_render_frames.cc b/chromium/third_party/webrtc/common_video/video_render_frames.cc
index 8b447cb10fa..3e5dfe9d10f 100644
--- a/chromium/third_party/webrtc/common_video/video_render_frames.cc
+++ b/chromium/third_party/webrtc/common_video/video_render_frames.cc
@@ -12,8 +12,8 @@
#include <assert.h>
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
@@ -27,7 +27,7 @@ VideoRenderFrames::VideoRenderFrames()
}
int32_t VideoRenderFrames::AddFrame(const VideoFrame& new_frame) {
- const int64_t time_now = TickTime::MillisecondTimestamp();
+ const int64_t time_now = rtc::TimeMillis();
// Drop old frames only when there are other frames in the queue, otherwise, a
// really slow system never renders any frames.
@@ -74,7 +74,7 @@ uint32_t VideoRenderFrames::TimeToNextFrameRelease() {
}
const int64_t time_to_release = incoming_frames_.front().render_time_ms() -
render_delay_ms_ -
- TickTime::MillisecondTimestamp();
+ rtc::TimeMillis();
return time_to_release < 0 ? 0u : static_cast<uint32_t>(time_to_release);
}
diff --git a/chromium/third_party/webrtc/examples/DEPS b/chromium/third_party/webrtc/examples/DEPS
index df9e0406b79..f065c89458b 100644
--- a/chromium/third_party/webrtc/examples/DEPS
+++ b/chromium/third_party/webrtc/examples/DEPS
@@ -1,4 +1,5 @@
include_rules = [
+ "+WebRTC",
"+webrtc/api",
"+webrtc/base",
"+webrtc/media",
diff --git a/chromium/third_party/webrtc/examples/OWNERS b/chromium/third_party/webrtc/examples/OWNERS
index f489e6b0b0b..d0955711f5f 100644
--- a/chromium/third_party/webrtc/examples/OWNERS
+++ b/chromium/third_party/webrtc/examples/OWNERS
@@ -1,2 +1,4 @@
glaznev@webrtc.org
+magjed@webrtc.org
+perkj@webrtc.org
tkchin@webrtc.org
diff --git a/chromium/third_party/webrtc/examples/androidapp/AndroidManifest.xml b/chromium/third_party/webrtc/examples/androidapp/AndroidManifest.xml
index 1e9cbf17d9a..dfe8975b314 100644
--- a/chromium/third_party/webrtc/examples/androidapp/AndroidManifest.xml
+++ b/chromium/third_party/webrtc/examples/androidapp/AndroidManifest.xml
@@ -22,7 +22,9 @@
android:allowBackup="false">
<activity android:name="ConnectActivity"
- android:label="@string/app_name">
+ android:label="@string/app_name"
+ android:theme="@style/Theme.AppCompat"
+ android:windowSoftInputMode="adjustPan">
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
@@ -32,8 +34,8 @@
<action android:name="android.intent.action.VIEW"/>
<category android:name="android.intent.category.DEFAULT"/>
<category android:name="android.intent.category.BROWSABLE"/>
- <data android:scheme="https" android:host="apprtc.appspot.com"/>
- <data android:scheme="http" android:host="apprtc.appspot.com"/>
+ <data android:scheme="https" android:host="appr.tc"/>
+ <data android:scheme="http" android:host="appr.tc"/>
</intent-filter>
</activity>
diff --git a/chromium/third_party/webrtc/examples/androidapp/README b/chromium/third_party/webrtc/examples/androidapp/README
index 856879e4d77..c8705a8bbaa 100644
--- a/chromium/third_party/webrtc/examples/androidapp/README
+++ b/chromium/third_party/webrtc/examples/androidapp/README
@@ -1,4 +1,4 @@
-This directory contains an example Android client for https://apprtc.appspot.com
+This directory contains an example Android client for https://appr.tc
Prerequisites:
- "Getting the code" on http://www.webrtc.org/native-code/android
@@ -18,8 +18,8 @@ cd <path/to/webrtc>/src
ninja -C out/Debug AppRTCDemo
adb install -r out/Debug/apks/AppRTCDemo.apk
-In desktop chrome, navigate to https://apprtc.appspot.com and note the r=<NNN> room
-this redirects to or navigate directly to https://apprtc.appspot.com/r/<NNN> with
+In desktop chrome, navigate to https://appr.tc and note the r=<NNN> room
+this redirects to or navigate directly to https://appr.tc/r/<NNN> with
your own room number. Launch AppRTC on the device and add same <NNN> into the room name list.
You can also run application from a command line to connect to the first room in a list:
diff --git a/chromium/third_party/webrtc/examples/androidapp/res/drawable-hdpi/ic_add_white_24dp.png b/chromium/third_party/webrtc/examples/androidapp/res/drawable-hdpi/ic_add_white_24dp.png
new file mode 100644
index 00000000000..694179bd46b
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/androidapp/res/drawable-hdpi/ic_add_white_24dp.png
Binary files differ
diff --git a/chromium/third_party/webrtc/examples/androidapp/res/drawable-mdpi/ic_add_white_24dp.png b/chromium/third_party/webrtc/examples/androidapp/res/drawable-mdpi/ic_add_white_24dp.png
new file mode 100644
index 00000000000..3856041d702
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/androidapp/res/drawable-mdpi/ic_add_white_24dp.png
Binary files differ
diff --git a/chromium/third_party/webrtc/examples/androidapp/res/drawable-xhdpi/ic_add_white_24dp.png b/chromium/third_party/webrtc/examples/androidapp/res/drawable-xhdpi/ic_add_white_24dp.png
new file mode 100644
index 00000000000..67bb598e52a
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/androidapp/res/drawable-xhdpi/ic_add_white_24dp.png
Binary files differ
diff --git a/chromium/third_party/webrtc/examples/androidapp/res/drawable-xxhdpi/ic_add_white_24dp.png b/chromium/third_party/webrtc/examples/androidapp/res/drawable-xxhdpi/ic_add_white_24dp.png
new file mode 100644
index 00000000000..0fdced8fce7
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/androidapp/res/drawable-xxhdpi/ic_add_white_24dp.png
Binary files differ
diff --git a/chromium/third_party/webrtc/examples/androidapp/res/drawable-xxxhdpi/ic_add_white_24dp.png b/chromium/third_party/webrtc/examples/androidapp/res/drawable-xxxhdpi/ic_add_white_24dp.png
new file mode 100644
index 00000000000..d64c22e9edf
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/androidapp/res/drawable-xxxhdpi/ic_add_white_24dp.png
Binary files differ
diff --git a/chromium/third_party/webrtc/examples/androidapp/res/layout/activity_connect.xml b/chromium/third_party/webrtc/examples/androidapp/res/layout/activity_connect.xml
index 5b807715883..acc20dc6a18 100644
--- a/chromium/third_party/webrtc/examples/androidapp/res/layout/activity_connect.xml
+++ b/chromium/third_party/webrtc/examples/androidapp/res/layout/activity_connect.xml
@@ -1,72 +1,82 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
- xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:app="http://schemas.android.com/apk/res-auto"
+ android:layout_margin="16dp"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:orientation="vertical"
+ android:weightSum="1"
+ android:layout_centerHorizontal="true">
+
+ <TextView
+ android:id="@+id/room_edittext_description"
android:layout_width="match_parent"
- android:layout_height="match_parent"
- android:orientation="vertical"
- android:weightSum="1"
- android:layout_margin="8dp"
- android:layout_centerHorizontal="true">
+ android:layout_height="wrap_content"
+ android:text="@string/room_description"/>
<LinearLayout
+ android:orientation="horizontal"
android:layout_width="match_parent"
android:layout_height="wrap_content"
- android:orientation="horizontal" >
- <ImageButton
- android:id="@+id/add_room_button"
- android:background="@android:drawable/ic_menu_add"
- android:contentDescription="@string/add_room_description"
- android:layout_marginEnd="20dp"
- android:layout_width="48dp"
- android:layout_height="48dp"/>
- <ImageButton
- android:id="@+id/remove_room_button"
- android:background="@android:drawable/ic_delete"
- android:contentDescription="@string/remove_room_description"
- android:layout_marginEnd="20dp"
- android:layout_width="48dp"
- android:layout_height="48dp"/>
- <ImageButton
- android:id="@+id/connect_button"
- android:background="@android:drawable/sym_action_call"
- android:contentDescription="@string/connect_description"
- android:layout_marginEnd="20dp"
- android:layout_width="48dp"
- android:layout_height="48dp"/>
- <ImageButton
- android:id="@+id/connect_loopback_button"
- android:background="@drawable/ic_loopback_call"
- android:contentDescription="@string/connect_loopback_description"
- android:layout_width="48dp"
- android:layout_height="48dp"/>
- </LinearLayout>
- <TextView
- android:id="@+id/room_edittext_description"
- android:layout_width="fill_parent"
- android:layout_height="wrap_content"
- android:layout_margin="5dp"
- android:text="@string/room_description"/>
- <EditText
+ android:gravity="center"
+ android:layout_marginBottom="8dp">
+
+ <EditText
android:id="@+id/room_edittext"
- android:layout_width="match_parent"
+ android:layout_width="0dp"
android:layout_height="wrap_content"
+ android:layout_weight="1"
android:singleLine="true"
- android:imeOptions="actionDone"/>
+ android:imeOptions="actionDone"
+ android:inputType="text"/>
+
+ <ImageButton
+ android:id="@+id/connect_button"
+ android:layout_width="48dp"
+ android:layout_height="48dp"
+ android:contentDescription="@string/connect_description"
+ android:background="@android:drawable/sym_action_call" />
+ </LinearLayout>
+
<TextView
- android:id="@+id/room_listview_description"
- android:layout_width="fill_parent"
- android:layout_height="wrap_content"
- android:layout_marginTop="5dp"
- android:lines="1"
- android:maxLines="1"
- android:textAppearance="?android:attr/textAppearanceMedium"
- android:text="@string/room_names"/>
- <ListView
+ android:id="@+id/room_listview_description"
+ android:layout_width="match_parent"
+ android:layout_height="48dp"
+ android:layout_marginTop="8dp"
+ android:lines="1"
+ android:maxLines="1"
+ android:textAppearance="?android:attr/textAppearanceMedium"
+ android:text="@string/favorites"
+ android:gravity="center_vertical"/>
+
+ <FrameLayout
+ android:layout_width="match_parent"
+ android:layout_height="0dp"
+ android:layout_weight="1">
+
+ <ListView
android:id="@+id/room_listview"
- android:layout_width="fill_parent"
- android:layout_height="wrap_content"
- android:choiceMode="singleChoice"
- android:listSelector="@android:color/darker_gray"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
android:drawSelectorOnTop="false" />
+ <TextView
+ android:id="@android:id/empty"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:gravity="center"
+ android:text="@string/no_favorites" />
+
+ <android.support.design.widget.FloatingActionButton
+ android:id="@+id/add_favorite_button"
+ android:layout_width="56dp"
+ android:layout_height="56dp"
+ android:layout_margin="8dp"
+ android:layout_gravity="bottom|end"
+ android:src="@drawable/ic_add_white_24dp"
+ android:contentDescription="@string/add_favorite_description"
+ app:backgroundTint="@android:color/holo_green_dark"
+ app:elevation="8dp"/>
+ </FrameLayout>
</LinearLayout> \ No newline at end of file
diff --git a/chromium/third_party/webrtc/examples/androidapp/res/layout/dialog_add_favorite.xml b/chromium/third_party/webrtc/examples/androidapp/res/layout/dialog_add_favorite.xml
new file mode 100644
index 00000000000..28b533aa0c6
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/androidapp/res/layout/dialog_add_favorite.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:orientation="vertical"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ android:padding="24dp">
+
+ <EditText
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:id="@+id/favorite_edittext"
+ android:layout_gravity="center_horizontal"
+ android:inputType="text"/>
+</LinearLayout> \ No newline at end of file
diff --git a/chromium/third_party/webrtc/examples/androidapp/res/menu/connect_menu.xml b/chromium/third_party/webrtc/examples/androidapp/res/menu/connect_menu.xml
index d9f94867b97..8beddddd45f 100644
--- a/chromium/third_party/webrtc/examples/androidapp/res/menu/connect_menu.xml
+++ b/chromium/third_party/webrtc/examples/androidapp/res/menu/connect_menu.xml
@@ -1,8 +1,14 @@
-<menu xmlns:android="http://schemas.android.com/apk/res/android" >
+<menu xmlns:android="http://schemas.android.com/apk/res/android"
+ xmlns:app="http://schemas.android.com/apk/res-auto">
+ <item
+ android:id="@+id/action_loopback"
+ android:icon="@drawable/ic_loopback_call"
+ app:showAsAction="always"
+ android:title="@string/action_loopback"/>
<item
android:id="@+id/action_settings"
android:orderInCategory="100"
android:icon="@android:drawable/ic_menu_preferences"
- android:showAsAction="ifRoom"
+ app:showAsAction="ifRoom"
android:title="@string/action_settings"/>
</menu>
diff --git a/chromium/third_party/webrtc/examples/androidapp/res/values/arrays.xml b/chromium/third_party/webrtc/examples/androidapp/res/values/arrays.xml
index ba8c8918ce0..dd06c46ed16 100644
--- a/chromium/third_party/webrtc/examples/androidapp/res/values/arrays.xml
+++ b/chromium/third_party/webrtc/examples/androidapp/res/values/arrays.xml
@@ -36,4 +36,8 @@
<item>ISAC</item>
</string-array>
+ <string-array name="roomListContextMenu">
+ <item>Remove favorite</item>
+ </string-array>
+
</resources>
diff --git a/chromium/third_party/webrtc/examples/androidapp/res/values/strings.xml b/chromium/third_party/webrtc/examples/androidapp/res/values/strings.xml
index 492c34b6840..24244238e07 100644
--- a/chromium/third_party/webrtc/examples/androidapp/res/values/strings.xml
+++ b/chromium/third_party/webrtc/examples/androidapp/res/values/strings.xml
@@ -3,12 +3,13 @@
<string name="app_name" translatable="no">AppRTC</string>
<string name="settings_name" translatable="no">AppRTC Settings</string>
<string name="disconnect_call">Disconnect Call</string>
- <string name="room_names">Room names:</string>
<string name="room_description">
Please enter a room name. Room names are shared with everyone, so think
of something unique and send it to a friend.
</string>
- <string name="connect_text">Connect</string>
+ <string name="favorites">Favorites</string>
+ <string name="no_favorites">No favorites</string>
+ <string name="add_favorite_title">Add favorite</string>
<string name="invalid_url_title">Invalid URL</string>
<string name="invalid_url_text">The URL or room name you entered resulted in an invalid URL: %1$s
</string>
@@ -16,14 +17,15 @@
<string name="connecting_to">Connecting to: %1$s</string>
<string name="missing_url">FATAL ERROR: Missing URL to connect to.</string>
<string name="ok">OK</string>
+ <string name="add">Add</string>
+ <string name="cancel">Cancel</string>
<string name="switch_camera">Switch front/back camera</string>
<string name="toggle_debug">Toggle debug view</string>
<string name="toggle_mic">Toggle microphone on/off</string>
<string name="action_settings">Settings</string>
- <string name="add_room_description">Add new room to the list</string>
- <string name="remove_room_description">Remove room from the list</string>
+ <string name="action_loopback">Loopback connection</string>
<string name="connect_description">Connect to the room</string>
- <string name="connect_loopback_description">Loopback connection</string>
+ <string name="add_favorite_description">Add favorite</string>
<!-- Settings strings. -->
<string name="pref_room_key">room_preference</string>
@@ -119,7 +121,7 @@
<string name="pref_room_server_url_key">room_server_url_preference</string>
<string name="pref_room_server_url_title">Room server URL.</string>
<string name="pref_room_server_url_dlg">Enter a room server URL.</string>
- <string name="pref_room_server_url_default" translatable="false">https://apprtc.appspot.com</string>
+ <string name="pref_room_server_url_default" translatable="false">https://appr.tc</string>
<string name="pref_displayhud_key">displayhud_preference</string>
<string name="pref_displayhud_title">Display call statistics.</string>
diff --git a/chromium/third_party/webrtc/examples/androidjunit/README b/chromium/third_party/webrtc/examples/androidjunit/README
new file mode 100644
index 00000000000..502d71460f2
--- /dev/null
+++ b/chromium/third_party/webrtc/examples/androidjunit/README
@@ -0,0 +1,8 @@
+This directory contains example JUnit tests for Android AppRTCDemo.
+Many of these test utilize Robolectric to mock Android classes.
+
+To compile:
+ninja -C out/Debug AppRTCDemoJUnitTest
+
+To run:
+out/Debug/bin/run_AppRTCDemoJUnitTest
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient+Internal.h b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient+Internal.h
index 3fd218c2fd8..f2da4136e79 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient+Internal.h
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient+Internal.h
@@ -10,7 +10,7 @@
#import "ARDAppClient.h"
-#import "webrtc/api/objc/RTCPeerConnection.h"
+#import "WebRTC/RTCPeerConnection.h"
#import "ARDRoomServerClient.h"
#import "ARDSignalingChannel.h"
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient.h b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient.h
index b59838e3cc6..b7f75462671 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient.h
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient.h
@@ -9,8 +9,9 @@
*/
#import <Foundation/Foundation.h>
-#import "webrtc/api/objc/RTCPeerConnection.h"
-#import "webrtc/api/objc/RTCVideoTrack.h"
+
+#import "WebRTC/RTCPeerConnection.h"
+#import "WebRTC/RTCVideoTrack.h"
typedef NS_ENUM(NSInteger, ARDAppClientState) {
// Disconnected from servers.
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient.m
index d8dc7714248..86d18d1333c 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppClient.m
@@ -11,17 +11,18 @@
#import "ARDAppClient+Internal.h"
#if defined(WEBRTC_IOS)
-#import "webrtc/base/objc/RTCTracing.h"
-#import "webrtc/api/objc/RTCAVFoundationVideoSource.h"
+#import "WebRTC/RTCAVFoundationVideoSource.h"
+#import "WebRTC/RTCTracing.h"
#endif
-#import "webrtc/api/objc/RTCAudioTrack.h"
-#import "webrtc/api/objc/RTCConfiguration.h"
-#import "webrtc/api/objc/RTCIceServer.h"
-#import "webrtc/api/objc/RTCMediaConstraints.h"
-#import "webrtc/api/objc/RTCMediaStream.h"
-#import "webrtc/api/objc/RTCPeerConnectionFactory.h"
-#import "webrtc/base/objc/RTCFileLogger.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCAudioTrack.h"
+#import "WebRTC/RTCConfiguration.h"
+#import "WebRTC/RTCFileLogger.h"
+#import "WebRTC/RTCIceServer.h"
+#import "WebRTC/RTCLogging.h"
+#import "WebRTC/RTCMediaConstraints.h"
+#import "WebRTC/RTCMediaStream.h"
+#import "WebRTC/RTCPeerConnectionFactory.h"
+#import "WebRTC/RTCRtpSender.h"
#import "ARDAppEngineClient.h"
#import "ARDCEODTURNClient.h"
@@ -48,8 +49,11 @@ static NSInteger const kARDAppClientErrorCreateSDP = -3;
static NSInteger const kARDAppClientErrorSetSDP = -4;
static NSInteger const kARDAppClientErrorInvalidClient = -5;
static NSInteger const kARDAppClientErrorInvalidRoom = -6;
+static NSString * const kARDMediaStreamId = @"ARDAMS";
+static NSString * const kARDAudioTrackId = @"ARDAMSa0";
+static NSString * const kARDVideoTrackId = @"ARDAMSv0";
-// TODO(tkchin): Remove guard once rtc_base_objc compiles on Mac.
+// TODO(tkchin): Remove guard once rtc_sdk_common_objc compiles on Mac.
#if defined(WEBRTC_IOS)
// TODO(tkchin): Add this as a UI option.
static BOOL const kARDAppClientEnableTracing = NO;
@@ -505,9 +509,9 @@ static BOOL const kARDAppClientEnableTracing = NO;
_peerConnection = [_factory peerConnectionWithConfiguration:config
constraints:constraints
delegate:self];
- // Create AV media stream and add it to the peer connection.
- RTCMediaStream *localStream = [self createLocalMediaStream];
- [_peerConnection addStream:localStream];
+ // Create AV senders.
+ [self createAudioSender];
+ [self createVideoSender];
if (_isInitiator) {
// Send offer.
__weak ARDAppClient *weakSelf = self;
@@ -606,17 +610,25 @@ static BOOL const kARDAppClientEnableTracing = NO;
}
}
-- (RTCMediaStream *)createLocalMediaStream {
- RTCMediaStream *localStream = [_factory mediaStreamWithStreamId:@"ARDAMS"];
- RTCVideoTrack *localVideoTrack = [self createLocalVideoTrack];
- if (localVideoTrack) {
- [localStream addVideoTrack:localVideoTrack];
- [_delegate appClient:self didReceiveLocalVideoTrack:localVideoTrack];
+- (RTCRtpSender *)createVideoSender {
+ RTCRtpSender *sender =
+ [_peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo
+ streamId:kARDMediaStreamId];
+ RTCVideoTrack *track = [self createLocalVideoTrack];
+ if (track) {
+ sender.track = track;
+ [_delegate appClient:self didReceiveLocalVideoTrack:track];
}
- RTCAudioTrack *localAudioTrack =
- [_factory audioTrackWithTrackId:@"ARDAMSa0"];
- [localStream addAudioTrack:localAudioTrack];
- return localStream;
+ return sender;
+}
+
+- (RTCRtpSender *)createAudioSender {
+ RTCRtpSender *sender =
+ [_peerConnection senderWithKind:kRTCMediaStreamTrackKindAudio
+ streamId:kARDMediaStreamId];
+ RTCAudioTrack *track = [_factory audioTrackWithTrackId:kARDAudioTrackId];
+ sender.track = track;
+ return sender;
}
- (RTCVideoTrack *)createLocalVideoTrack {
@@ -634,7 +646,7 @@ static BOOL const kARDAppClientEnableTracing = NO;
[_factory avFoundationVideoSourceWithConstraints:mediaConstraints];
localVideoTrack =
[_factory videoTrackWithSource:source
- trackId:@"ARDAMSv0"];
+ trackId:kARDVideoTrackId];
}
#endif
return localVideoTrack;
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppEngineClient.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppEngineClient.m
index 7d8a185f6d7..0ba32c448a2 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppEngineClient.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDAppEngineClient.m
@@ -10,7 +10,7 @@
#import "ARDAppEngineClient.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
#import "ARDJoinResponse.h"
#import "ARDMessageResponse.h"
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSDPUtils.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSDPUtils.m
index be93ae26f81..3a8a578dc68 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSDPUtils.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSDPUtils.m
@@ -10,8 +10,8 @@
#import "ARDSDPUtils.h"
-#import "webrtc/api/objc/RTCSessionDescription.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
+#import "WebRTC/RTCSessionDescription.h"
@implementation ARDSDPUtils
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSignalingMessage.h b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSignalingMessage.h
index 5cba49559d8..18eafe26736 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSignalingMessage.h
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSignalingMessage.h
@@ -9,8 +9,9 @@
*/
#import <Foundation/Foundation.h>
-#import "webrtc/api/objc/RTCIceCandidate.h"
-#import "webrtc/api/objc/RTCSessionDescription.h"
+
+#import "WebRTC/RTCIceCandidate.h"
+#import "WebRTC/RTCSessionDescription.h"
typedef enum {
kARDSignalingMessageTypeCandidate,
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSignalingMessage.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSignalingMessage.m
index 6dd04cc6e02..a5ca29576a0 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSignalingMessage.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDSignalingMessage.m
@@ -10,7 +10,7 @@
#import "ARDSignalingMessage.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
#import "ARDUtilities.h"
#import "RTCIceCandidate+JSON.h"
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDStatsBuilder.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDStatsBuilder.m
index 60cf5516488..1f4ada6237d 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDStatsBuilder.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDStatsBuilder.m
@@ -10,7 +10,7 @@
#import "ARDStatsBuilder.h"
-#import "webrtc/api/objc/RTCStatsReport.h"
+#import "WebRTC/RTCStatsReport.h"
#import "ARDBitrateTracker.h"
#import "ARDUtilities.h"
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDWebSocketChannel.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDWebSocketChannel.m
index 0e753e845b9..87a690ebeb0 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDWebSocketChannel.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ARDWebSocketChannel.m
@@ -10,7 +10,7 @@
#import "ARDWebSocketChannel.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
#import "SRWebSocket.h"
#import "ARDSignalingMessage.h"
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICECandidate+JSON.h b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICECandidate+JSON.h
index 0ee6a7872f2..1051f8ee48f 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICECandidate+JSON.h
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICECandidate+JSON.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "webrtc/api/objc/RTCIceCandidate.h"
+#import "WebRTC/RTCIceCandidate.h"
@interface RTCIceCandidate (JSON)
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICECandidate+JSON.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICECandidate+JSON.m
index 773c346c6b6..f20c490682a 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICECandidate+JSON.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICECandidate+JSON.m
@@ -10,7 +10,7 @@
#import "RTCIceCandidate+JSON.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
static NSString const *kRTCICECandidateTypeKey = @"type";
static NSString const *kRTCICECandidateTypeValue = @"candidate";
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICEServer+JSON.h b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICEServer+JSON.h
index a6d63e41823..69fb432b446 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICEServer+JSON.h
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCICEServer+JSON.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "webrtc/api/objc/RTCIceServer.h"
+#import "WebRTC/RTCIceServer.h"
@interface RTCIceServer (JSON)
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCMediaConstraints+JSON.h b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCMediaConstraints+JSON.h
index 88a4c3576cf..74f89a953fb 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCMediaConstraints+JSON.h
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCMediaConstraints+JSON.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "webrtc/api/objc/RTCMediaConstraints.h"
+#import "WebRTC/RTCMediaConstraints.h"
@interface RTCMediaConstraints (JSON)
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCSessionDescription+JSON.h b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCSessionDescription+JSON.h
index 2f952eec261..cccff9ad74c 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCSessionDescription+JSON.h
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/RTCSessionDescription+JSON.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "webrtc/api/objc/RTCSessionDescription.h"
+#import "WebRTC/RTCSessionDescription.h"
@interface RTCSessionDescription (JSON)
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/common/ARDUtilities.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/common/ARDUtilities.m
index ad5768d1f0c..c9d029f629d 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/common/ARDUtilities.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/common/ARDUtilities.m
@@ -12,7 +12,7 @@
#import <mach/mach.h>
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
@implementation NSDictionary (ARDUtilites)
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m
index c18c81827cf..8f192623058 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDAppDelegate.m
@@ -10,10 +10,10 @@
#import "ARDAppDelegate.h"
-#import "webrtc/base/objc/RTCFieldTrials.h"
-#import "webrtc/base/objc/RTCLogging.h"
-#import "webrtc/base/objc/RTCSSLAdapter.h"
-#import "webrtc/base/objc/RTCTracing.h"
+#import "WebRTC/RTCFieldTrials.h"
+#import "WebRTC/RTCLogging.h"
+#import "WebRTC/RTCSSLAdapter.h"
+#import "WebRTC/RTCTracing.h"
#import "ARDMainViewController.h"
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.h b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.h
index d3a583dfa9a..a783ca1a1c4 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.h
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.h
@@ -15,10 +15,10 @@
@protocol ARDMainViewDelegate <NSObject>
- (void)mainView:(ARDMainView *)mainView
- didInputRoom:(NSString *)room
- isLoopback:(BOOL)isLoopback
- isAudioOnly:(BOOL)isAudioOnly
- shouldDelayAudioConfig:(BOOL)shouldDelayAudioConfig;
+ didInputRoom:(NSString *)room
+ isLoopback:(BOOL)isLoopback
+ isAudioOnly:(BOOL)isAudioOnly
+ useManualAudio:(BOOL)useManualAudio;
- (void)mainViewDidToggleAudioLoop:(ARDMainView *)mainView;
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
index b2647630e1f..e7e0f940d15 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainView.m
@@ -122,8 +122,8 @@ static CGFloat const kAppLabelHeight = 20;
UILabel *_audioOnlyLabel;
UISwitch *_loopbackSwitch;
UILabel *_loopbackLabel;
- UISwitch *_audioConfigDelaySwitch;
- UILabel *_audioConfigDelayLabel;
+ UISwitch *_useManualAudioSwitch;
+ UILabel *_useManualAudioLabel;
UIButton *_startCallButton;
UIButton *_audioLoopButton;
}
@@ -175,17 +175,17 @@ static CGFloat const kAppLabelHeight = 20;
[_loopbackLabel sizeToFit];
[self addSubview:_loopbackLabel];
- _audioConfigDelaySwitch = [[UISwitch alloc] initWithFrame:CGRectZero];
- [_audioConfigDelaySwitch sizeToFit];
- _audioConfigDelaySwitch.on = YES;
- [self addSubview:_audioConfigDelaySwitch];
+ _useManualAudioSwitch = [[UISwitch alloc] initWithFrame:CGRectZero];
+ [_useManualAudioSwitch sizeToFit];
+ _useManualAudioSwitch.on = YES;
+ [self addSubview:_useManualAudioSwitch];
- _audioConfigDelayLabel = [[UILabel alloc] initWithFrame:CGRectZero];
- _audioConfigDelayLabel.text = @"Delay audio config";
- _audioConfigDelayLabel.font = controlFont;
- _audioConfigDelayLabel.textColor = controlFontColor;
- [_audioConfigDelayLabel sizeToFit];
- [self addSubview:_audioConfigDelayLabel];
+ _useManualAudioLabel = [[UILabel alloc] initWithFrame:CGRectZero];
+ _useManualAudioLabel.text = @"Use manual audio config";
+ _useManualAudioLabel.font = controlFont;
+ _useManualAudioLabel.textColor = controlFontColor;
+ [_useManualAudioLabel sizeToFit];
+ [self addSubview:_useManualAudioLabel];
_startCallButton = [UIButton buttonWithType:UIButtonTypeSystem];
_startCallButton.backgroundColor = [UIColor blueColor];
@@ -275,22 +275,22 @@ static CGFloat const kAppLabelHeight = 20;
_loopbackLabel.center = CGPointMake(loopbackModeLabelCenterX,
CGRectGetMidY(loopbackModeRect));
- CGFloat audioConfigDelayTop =
+ CGFloat useManualAudioTop =
CGRectGetMaxY(_loopbackSwitch.frame) + kCallControlMargin;
- CGRect audioConfigDelayRect =
+ CGRect useManualAudioRect =
CGRectMake(kCallControlMargin * 3,
- audioConfigDelayTop,
- _audioConfigDelaySwitch.frame.size.width,
- _audioConfigDelaySwitch.frame.size.height);
- _audioConfigDelaySwitch.frame = audioConfigDelayRect;
- CGFloat audioConfigDelayLabelCenterX = CGRectGetMaxX(audioConfigDelayRect) +
- kCallControlMargin + _audioConfigDelayLabel.frame.size.width / 2;
- _audioConfigDelayLabel.center =
- CGPointMake(audioConfigDelayLabelCenterX,
- CGRectGetMidY(audioConfigDelayRect));
+ useManualAudioTop,
+ _useManualAudioSwitch.frame.size.width,
+ _useManualAudioSwitch.frame.size.height);
+ _useManualAudioSwitch.frame = useManualAudioRect;
+ CGFloat useManualAudioLabelCenterX = CGRectGetMaxX(useManualAudioRect) +
+ kCallControlMargin + _useManualAudioLabel.frame.size.width / 2;
+ _useManualAudioLabel.center =
+ CGPointMake(useManualAudioLabelCenterX,
+ CGRectGetMidY(useManualAudioRect));
CGFloat audioLoopTop =
- CGRectGetMaxY(audioConfigDelayRect) + kCallControlMargin * 3;
+ CGRectGetMaxY(useManualAudioRect) + kCallControlMargin * 3;
_audioLoopButton.frame = CGRectMake(kCallControlMargin,
audioLoopTop,
_audioLoopButton.frame.size.width,
@@ -335,7 +335,7 @@ static CGFloat const kAppLabelHeight = 20;
didInputRoom:room
isLoopback:_loopbackSwitch.isOn
isAudioOnly:_audioOnlySwitch.isOn
- shouldDelayAudioConfig:_audioConfigDelaySwitch.isOn];
+ useManualAudio:_useManualAudioSwitch.isOn];
}
@end
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainViewController.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainViewController.m
index a634952c297..a684ba32b06 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainViewController.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDMainViewController.m
@@ -12,8 +12,8 @@
#import <AVFoundation/AVFoundation.h>
-#import "webrtc/base/objc/RTCDispatcher.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCDispatcher.h"
+#import "WebRTC/RTCLogging.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h"
@@ -23,13 +23,14 @@
@interface ARDMainViewController () <
ARDMainViewDelegate,
+ ARDVideoCallViewControllerDelegate,
RTCAudioSessionDelegate>
@end
@implementation ARDMainViewController {
ARDMainView *_mainView;
AVAudioPlayer *_audioPlayer;
- BOOL _shouldDelayAudioConfig;
+ BOOL _useManualAudio;
}
- (void)loadView {
@@ -37,17 +38,26 @@
_mainView.delegate = self;
self.view = _mainView;
- [self setupAudioSession];
+ RTCAudioSessionConfiguration *webRTCConfig =
+ [RTCAudioSessionConfiguration webRTCConfiguration];
+ webRTCConfig.categoryOptions = webRTCConfig.categoryOptions |
+ AVAudioSessionCategoryOptionDefaultToSpeaker;
+ [RTCAudioSessionConfiguration setWebRTCConfiguration:webRTCConfig];
+
+ RTCAudioSession *session = [RTCAudioSession sharedInstance];
+ [session addDelegate:self];
+
+ [self configureAudioSession];
[self setupAudioPlayer];
}
#pragma mark - ARDMainViewDelegate
- (void)mainView:(ARDMainView *)mainView
- didInputRoom:(NSString *)room
- isLoopback:(BOOL)isLoopback
- isAudioOnly:(BOOL)isAudioOnly
- shouldDelayAudioConfig:(BOOL)shouldDelayAudioConfig {
+ didInputRoom:(NSString *)room
+ isLoopback:(BOOL)isLoopback
+ isAudioOnly:(BOOL)isAudioOnly
+ useManualAudio:(BOOL)useManualAudio {
if (!room.length) {
[self showAlertWithMessage:@"Missing room name."];
return;
@@ -77,15 +87,16 @@
return;
}
- _shouldDelayAudioConfig = shouldDelayAudioConfig;
RTCAudioSession *session = [RTCAudioSession sharedInstance];
- session.shouldDelayAudioConfiguration = _shouldDelayAudioConfig;
+ session.useManualAudio = useManualAudio;
+ session.isAudioEnabled = NO;
// Kick off the video call.
ARDVideoCallViewController *videoCallViewController =
[[ARDVideoCallViewController alloc] initForRoom:trimmedRoom
isLoopback:isLoopback
- isAudioOnly:isAudioOnly];
+ isAudioOnly:isAudioOnly
+ delegate:self];
videoCallViewController.modalTransitionStyle =
UIModalTransitionStyleCrossDissolve;
[self presentViewController:videoCallViewController
@@ -102,10 +113,22 @@
mainView.isAudioLoopPlaying = _audioPlayer.playing;
}
+#pragma mark - ARDVideoCallViewControllerDelegate
+
+- (void)viewControllerDidFinish:(ARDVideoCallViewController *)viewController {
+ if (![viewController isBeingDismissed]) {
+ RTCLog(@"Dismissing VC");
+ [self dismissViewControllerAnimated:YES completion:^{
+ [self restartAudioPlayerIfNeeded];
+ }];
+ }
+ RTCAudioSession *session = [RTCAudioSession sharedInstance];
+ session.isAudioEnabled = NO;
+}
+
#pragma mark - RTCAudioSessionDelegate
-- (void)audioSessionShouldConfigure:(RTCAudioSession *)session {
- // Won't get called unless audio config is delayed.
+- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session {
// Stop playback on main queue and then configure WebRTC.
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
block:^{
@@ -113,35 +136,23 @@
RTCLog(@"Stopping audio loop due to WebRTC start.");
[_audioPlayer stop];
}
- // TODO(tkchin): Shouldn't lock on main queue. Figure out better way to
- // check audio loop state.
- [session lockForConfiguration];
- [session configureWebRTCSession:nil];
- [session unlockForConfiguration];
+ RTCLog(@"Setting isAudioEnabled to YES.");
+ session.isAudioEnabled = YES;
}];
}
-- (void)audioSessionShouldUnconfigure:(RTCAudioSession *)session {
- // Won't get called unless audio config is delayed.
- [session lockForConfiguration];
- [session unconfigureWebRTCSession:nil];
- [session unlockForConfiguration];
-}
-
-- (void)audioSessionDidUnconfigure:(RTCAudioSession *)session {
+- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session {
// WebRTC is done with the audio session. Restart playback.
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
block:^{
- if (_mainView.isAudioLoopPlaying) {
- RTCLog(@"Starting audio loop due to WebRTC end.");
- [_audioPlayer play];
- }
+ RTCLog(@"audioSessionDidStopPlayOrRecord");
+ [self restartAudioPlayerIfNeeded];
}];
}
#pragma mark - Private
-- (void)setupAudioSession {
+- (void)configureAudioSession {
RTCAudioSessionConfiguration *configuration =
[[RTCAudioSessionConfiguration alloc] init];
configuration.category = AVAudioSessionCategoryAmbient;
@@ -149,10 +160,17 @@
configuration.mode = AVAudioSessionModeDefault;
RTCAudioSession *session = [RTCAudioSession sharedInstance];
- [session addDelegate:self];
[session lockForConfiguration];
+ BOOL hasSucceeded = NO;
NSError *error = nil;
- if (![session setConfiguration:configuration active:YES error:&error]) {
+ if (session.isActive) {
+ hasSucceeded = [session setConfiguration:configuration error:&error];
+ } else {
+ hasSucceeded = [session setConfiguration:configuration
+ active:YES
+ error:&error];
+ }
+ if (!hasSucceeded) {
RTCLogError(@"Error setting configuration: %@", error.localizedDescription);
}
[session unlockForConfiguration];
@@ -169,6 +187,14 @@
[_audioPlayer prepareToPlay];
}
+- (void)restartAudioPlayerIfNeeded {
+ if (_mainView.isAudioLoopPlaying && !self.presentedViewController) {
+ RTCLog(@"Starting audio loop due to WebRTC end.");
+ [self configureAudioSession];
+ [_audioPlayer play];
+ }
+}
+
- (void)showAlertWithMessage:(NSString*)message {
UIAlertView* alertView = [[UIAlertView alloc] initWithTitle:nil
message:message
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDStatsView.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDStatsView.m
index be0bdd5b3c4..f4522423b9f 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDStatsView.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDStatsView.m
@@ -10,7 +10,7 @@
#import "ARDStatsView.h"
-#import "webrtc/api/objc/RTCStatsReport.h"
+#import "WebRTC/RTCStatsReport.h"
#import "ARDStatsBuilder.h"
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h
index 980abd4520f..dec1bfcba06 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallView.h
@@ -10,8 +10,8 @@
#import <UIKit/UIKit.h>
-#import "webrtc/api/objc/RTCEAGLVideoView.h"
-#import "webrtc/base/objc/RTCCameraPreviewView.h"
+#import "WebRTC/RTCCameraPreviewView.h"
+#import "WebRTC/RTCEAGLVideoView.h"
#import "ARDStatsView.h"
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.h b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.h
index 7dee2d8537b..f3a95548209 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.h
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.h
@@ -10,10 +10,20 @@
#import <UIKit/UIKit.h>
+@class ARDVideoCallViewController;
+@protocol ARDVideoCallViewControllerDelegate <NSObject>
+
+- (void)viewControllerDidFinish:(ARDVideoCallViewController *)viewController;
+
+@end
+
@interface ARDVideoCallViewController : UIViewController
+@property(nonatomic, weak) id<ARDVideoCallViewControllerDelegate> delegate;
+
- (instancetype)initForRoom:(NSString *)room
isLoopback:(BOOL)isLoopback
- isAudioOnly:(BOOL)isAudioOnly;
+ isAudioOnly:(BOOL)isAudioOnly
+ delegate:(id<ARDVideoCallViewControllerDelegate>)delegate;
@end
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
index b8786773038..d35e1e6b2d0 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
@@ -10,11 +10,11 @@
#import "ARDVideoCallViewController.h"
-#import "webrtc/base/objc/RTCDispatcher.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h"
-#import "webrtc/api/objc/RTCAVFoundationVideoSource.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCAVFoundationVideoSource.h"
+#import "WebRTC/RTCDispatcher.h"
+#import "WebRTC/RTCLogging.h"
#import "ARDAppClient.h"
#import "ARDVideoCallView.h"
@@ -34,11 +34,14 @@
}
@synthesize videoCallView = _videoCallView;
+@synthesize delegate = _delegate;
- (instancetype)initForRoom:(NSString *)room
isLoopback:(BOOL)isLoopback
- isAudioOnly:(BOOL)isAudioOnly {
+ isAudioOnly:(BOOL)isAudioOnly
+ delegate:(id<ARDVideoCallViewControllerDelegate>)delegate {
if (self = [super init]) {
+ _delegate = delegate;
_client = [[ARDAppClient alloc] initWithDelegate:self];
[_client connectToRoomWithId:room
isLoopback:isLoopback
@@ -177,10 +180,7 @@
self.remoteVideoTrack = nil;
self.localVideoTrack = nil;
[_client disconnect];
- if (![self isBeingDismissed]) {
- [self.presentingViewController dismissViewControllerAnimated:YES
- completion:nil];
- }
+ [_delegate viewControllerDidFinish:self];
}
- (void)switchCamera {
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.m
index f0b2a04b5f3..e79519acc29 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.m
@@ -14,7 +14,7 @@
#import "APPRTCAppDelegate.h"
-#import "webrtc/base/objc/RTCSSLAdapter.h"
+#import "WebRTC/RTCSSLAdapter.h"
#import "APPRTCViewController.h"
diff --git a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/mac/APPRTCViewController.m b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/mac/APPRTCViewController.m
index db7b19c0e52..4d58d2ad347 100644
--- a/chromium/third_party/webrtc/examples/objc/AppRTCDemo/mac/APPRTCViewController.m
+++ b/chromium/third_party/webrtc/examples/objc/AppRTCDemo/mac/APPRTCViewController.m
@@ -11,8 +11,9 @@
#import "APPRTCViewController.h"
#import <AVFoundation/AVFoundation.h>
-#import "webrtc/api/objc/RTCNSGLVideoView.h"
-#import "webrtc/api/objc/RTCVideoTrack.h"
+
+#import "WebRTC/RTCNSGLVideoView.h"
+#import "WebRTC/RTCVideoTrack.h"
#import "ARDAppClient.h"
diff --git a/chromium/third_party/webrtc/examples/peerconnection/client/conductor.cc b/chromium/third_party/webrtc/examples/peerconnection/client/conductor.cc
index e26d403248d..8ec6ed9c102 100644
--- a/chromium/third_party/webrtc/examples/peerconnection/client/conductor.cc
+++ b/chromium/third_party/webrtc/examples/peerconnection/client/conductor.cc
@@ -10,6 +10,7 @@
#include "webrtc/examples/peerconnection/client/conductor.h"
+#include <memory>
#include <utility>
#include <vector>
@@ -308,7 +309,7 @@ void Conductor::OnMessageFromPeer(int peer_id, const std::string& message) {
return;
}
webrtc::SdpParseError error;
- rtc::scoped_ptr<webrtc::IceCandidateInterface> candidate(
+ std::unique_ptr<webrtc::IceCandidateInterface> candidate(
webrtc::CreateIceCandidate(sdp_mid, sdp_mlineindex, sdp, &error));
if (!candidate.get()) {
LOG(WARNING) << "Can't parse received candidate message. "
diff --git a/chromium/third_party/webrtc/examples/peerconnection/client/conductor.h b/chromium/third_party/webrtc/examples/peerconnection/client/conductor.h
index db2f77b6464..02351b78be1 100644
--- a/chromium/third_party/webrtc/examples/peerconnection/client/conductor.h
+++ b/chromium/third_party/webrtc/examples/peerconnection/client/conductor.h
@@ -21,7 +21,6 @@
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/examples/peerconnection/client/main_wnd.h"
#include "webrtc/examples/peerconnection/client/peer_connection_client.h"
-#include "webrtc/base/scoped_ptr.h"
namespace webrtc {
class VideoCaptureModule;
diff --git a/chromium/third_party/webrtc/examples/peerconnection/client/linux/main_wnd.h b/chromium/third_party/webrtc/examples/peerconnection/client/linux/main_wnd.h
index e756fc98c4a..3c718577d10 100644
--- a/chromium/third_party/webrtc/examples/peerconnection/client/linux/main_wnd.h
+++ b/chromium/third_party/webrtc/examples/peerconnection/client/linux/main_wnd.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_LINUX_MAIN_WND_H_
#define WEBRTC_EXAMPLES_PEERCONNECTION_CLIENT_LINUX_MAIN_WND_H_
+#include <memory>
#include <string>
#include "webrtc/examples/peerconnection/client/main_wnd.h"
@@ -92,7 +93,7 @@ class GtkMainWnd : public MainWindow {
protected:
void SetSize(int width, int height);
- rtc::scoped_ptr<uint8_t[]> image_;
+ std::unique_ptr<uint8_t[]> image_;
int width_;
int height_;
GtkMainWnd* main_wnd_;
@@ -111,9 +112,9 @@ class GtkMainWnd : public MainWindow {
std::string port_;
bool autoconnect_;
bool autocall_;
- rtc::scoped_ptr<VideoRenderer> local_renderer_;
- rtc::scoped_ptr<VideoRenderer> remote_renderer_;
- rtc::scoped_ptr<uint8_t[]> draw_buffer_;
+ std::unique_ptr<VideoRenderer> local_renderer_;
+ std::unique_ptr<VideoRenderer> remote_renderer_;
+ std::unique_ptr<uint8_t[]> draw_buffer_;
int draw_buffer_size_;
};
diff --git a/chromium/third_party/webrtc/examples/peerconnection/client/main_wnd.h b/chromium/third_party/webrtc/examples/peerconnection/client/main_wnd.h
index 80db2a5adcf..03db80d0b6d 100644
--- a/chromium/third_party/webrtc/examples/peerconnection/client/main_wnd.h
+++ b/chromium/third_party/webrtc/examples/peerconnection/client/main_wnd.h
@@ -13,6 +13,7 @@
#pragma once
#include <map>
+#include <memory>
#include <string>
#include "webrtc/api/mediastreaminterface.h"
@@ -131,7 +132,7 @@ class MainWnd : public MainWindow {
HWND wnd_;
BITMAPINFO bmi_;
- rtc::scoped_ptr<uint8_t[]> image_;
+ std::unique_ptr<uint8_t[]> image_;
CRITICAL_SECTION buffer_lock_;
rtc::scoped_refptr<webrtc::VideoTrackInterface> rendered_track_;
};
@@ -176,8 +177,8 @@ class MainWnd : public MainWindow {
void HandleTabbing();
private:
- rtc::scoped_ptr<VideoRenderer> local_renderer_;
- rtc::scoped_ptr<VideoRenderer> remote_renderer_;
+ std::unique_ptr<VideoRenderer> local_renderer_;
+ std::unique_ptr<VideoRenderer> remote_renderer_;
UI ui_;
HWND wnd_;
DWORD ui_thread_id_;
diff --git a/chromium/third_party/webrtc/examples/peerconnection/client/peer_connection_client.h b/chromium/third_party/webrtc/examples/peerconnection/client/peer_connection_client.h
index b7abfdfe18e..dbf2d8ff3d2 100644
--- a/chromium/third_party/webrtc/examples/peerconnection/client/peer_connection_client.h
+++ b/chromium/third_party/webrtc/examples/peerconnection/client/peer_connection_client.h
@@ -13,11 +13,11 @@
#pragma once
#include <map>
+#include <memory>
#include <string>
#include "webrtc/base/nethelpers.h"
#include "webrtc/base/physicalsocketserver.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/signalthread.h"
#include "webrtc/base/sigslot.h"
@@ -109,8 +109,8 @@ class PeerConnectionClient : public sigslot::has_slots<>,
PeerConnectionClientObserver* callback_;
rtc::SocketAddress server_address_;
rtc::AsyncResolver* resolver_;
- rtc::scoped_ptr<rtc::AsyncSocket> control_socket_;
- rtc::scoped_ptr<rtc::AsyncSocket> hanging_get_;
+ std::unique_ptr<rtc::AsyncSocket> control_socket_;
+ std::unique_ptr<rtc::AsyncSocket> hanging_get_;
std::string onconnect_data_;
std::string control_data_;
std::string notification_data_;
diff --git a/chromium/third_party/webrtc/examples/relayserver/relayserver_main.cc b/chromium/third_party/webrtc/examples/relayserver/relayserver_main.cc
index 31f43c4d15a..c14ea459c86 100644
--- a/chromium/third_party/webrtc/examples/relayserver/relayserver_main.cc
+++ b/chromium/third_party/webrtc/examples/relayserver/relayserver_main.cc
@@ -9,9 +9,9 @@
*/
#include <iostream> // NOLINT
+#include <memory>
#include "webrtc/p2p/base/relayserver.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread.h"
int main(int argc, char **argv) {
@@ -35,7 +35,7 @@ int main(int argc, char **argv) {
rtc::Thread *pthMain = rtc::Thread::Current();
- rtc::scoped_ptr<rtc::AsyncUDPSocket> int_socket(
+ std::unique_ptr<rtc::AsyncUDPSocket> int_socket(
rtc::AsyncUDPSocket::Create(pthMain->socketserver(), int_addr));
if (!int_socket) {
std::cerr << "Failed to create a UDP socket bound at"
@@ -43,7 +43,7 @@ int main(int argc, char **argv) {
return 1;
}
- rtc::scoped_ptr<rtc::AsyncUDPSocket> ext_socket(
+ std::unique_ptr<rtc::AsyncUDPSocket> ext_socket(
rtc::AsyncUDPSocket::Create(pthMain->socketserver(), ext_addr));
if (!ext_socket) {
std::cerr << "Failed to create a UDP socket bound at"
diff --git a/chromium/third_party/webrtc/libjingle/xmllite/xmlbuilder.h b/chromium/third_party/webrtc/libjingle/xmllite/xmlbuilder.h
index 354bf0b70a5..3b48f282479 100644
--- a/chromium/third_party/webrtc/libjingle/xmllite/xmlbuilder.h
+++ b/chromium/third_party/webrtc/libjingle/xmllite/xmlbuilder.h
@@ -11,10 +11,10 @@
#ifndef _xmlbuilder_h_
#define _xmlbuilder_h_
+#include <memory>
#include <string>
#include <vector>
#include "webrtc/libjingle/xmllite/xmlparser.h"
-#include "webrtc/base/scoped_ptr.h"
#ifdef EXPAT_RELATIVE_PATH
#include "expat.h"
@@ -52,8 +52,8 @@ public:
private:
XmlElement * pelCurrent_;
- rtc::scoped_ptr<XmlElement> pelRoot_;
- rtc::scoped_ptr<std::vector<XmlElement*> > pvParents_;
+ std::unique_ptr<XmlElement> pelRoot_;
+ std::unique_ptr<std::vector<XmlElement*> > pvParents_;
};
}
diff --git a/chromium/third_party/webrtc/libjingle/xmllite/xmlelement.h b/chromium/third_party/webrtc/libjingle/xmllite/xmlelement.h
index 70c6f799230..37adfc01707 100644
--- a/chromium/third_party/webrtc/libjingle/xmllite/xmlelement.h
+++ b/chromium/third_party/webrtc/libjingle/xmllite/xmlelement.h
@@ -15,7 +15,6 @@
#include <string>
#include "webrtc/libjingle/xmllite/qname.h"
-#include "webrtc/base/scoped_ptr.h"
namespace buzz {
diff --git a/chromium/third_party/webrtc/libjingle/xmllite/xmlnsstack.h b/chromium/third_party/webrtc/libjingle/xmllite/xmlnsstack.h
index 174f8aefc05..64e2f6ee66f 100644
--- a/chromium/third_party/webrtc/libjingle/xmllite/xmlnsstack.h
+++ b/chromium/third_party/webrtc/libjingle/xmllite/xmlnsstack.h
@@ -11,10 +11,10 @@
#ifndef WEBRTC_LIBJINGLE_XMLLITE_XMLNSSTACK_H_
#define WEBRTC_LIBJINGLE_XMLLITE_XMLNSSTACK_H_
+#include <memory>
#include <string>
#include <vector>
#include "webrtc/libjingle/xmllite/qname.h"
-#include "webrtc/base/scoped_ptr.h"
namespace buzz {
@@ -37,8 +37,8 @@ public:
private:
- rtc::scoped_ptr<std::vector<std::string> > pxmlnsStack_;
- rtc::scoped_ptr<std::vector<size_t> > pxmlnsDepthStack_;
+ std::unique_ptr<std::vector<std::string> > pxmlnsStack_;
+ std::unique_ptr<std::vector<size_t> > pxmlnsDepthStack_;
};
}
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/chatroommoduleimpl.cc b/chromium/third_party/webrtc/libjingle/xmpp/chatroommoduleimpl.cc
index 52fba4cbae5..4feb5bc9b8c 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/chatroommoduleimpl.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/chatroommoduleimpl.cc
@@ -11,6 +11,7 @@
#include <algorithm>
#include <iostream>
#include <map>
+#include <memory>
#include <sstream>
#include <string>
#include <vector>
@@ -105,7 +106,7 @@ public:
const XmppPresence* presence() const;
private:
- rtc::scoped_ptr<XmppPresence> presence_;
+ std::unique_ptr<XmppPresence> presence_;
};
class XmppChatroomMemberEnumeratorImpl :
@@ -430,7 +431,7 @@ void
XmppChatroomModuleImpl::FireEnteredStatus(const XmlElement* presence,
XmppChatroomEnteredStatus status) {
if (chatroom_handler_) {
- rtc::scoped_ptr<XmppPresence> xmpp_presence(XmppPresence::Create());
+ std::unique_ptr<XmppPresence> xmpp_presence(XmppPresence::Create());
xmpp_presence->set_raw_xml(presence);
chatroom_handler_->ChatroomEnteredStatus(this, xmpp_presence.get(), status);
}
@@ -472,7 +473,7 @@ XmppReturnStatus
XmppChatroomModuleImpl::ServerChangedOtherPresence(const XmlElement&
presence_element) {
XmppReturnStatus xmpp_status = XMPP_RETURN_OK;
- rtc::scoped_ptr<XmppPresence> presence(XmppPresence::Create());
+ std::unique_ptr<XmppPresence> presence(XmppPresence::Create());
IFR(presence->set_raw_xml(&presence_element));
JidMemberMap::iterator pos = chatroom_jid_members_.find(presence->jid());
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/discoitemsquerytask.cc b/chromium/third_party/webrtc/libjingle/xmpp/discoitemsquerytask.cc
index 765ee143978..10f6e1d7452 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/discoitemsquerytask.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/discoitemsquerytask.cc
@@ -11,7 +11,6 @@
#include "webrtc/libjingle/xmpp/constants.h"
#include "webrtc/libjingle/xmpp/discoitemsquerytask.h"
#include "webrtc/libjingle/xmpp/xmpptask.h"
-#include "webrtc/base/scoped_ptr.h"
namespace buzz {
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/fakexmppclient.h b/chromium/third_party/webrtc/libjingle/xmpp/fakexmppclient.h
index 453a7c86f16..63c216caf21 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/fakexmppclient.h
+++ b/chromium/third_party/webrtc/libjingle/xmpp/fakexmppclient.h
@@ -13,6 +13,7 @@
#ifndef WEBRTC_LIBJINGLE_XMPP_FAKEXMPPCLIENT_H_
#define WEBRTC_LIBJINGLE_XMPP_FAKEXMPPCLIENT_H_
+#include <algorithm>
#include <string>
#include <vector>
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/hangoutpubsubclient.h b/chromium/third_party/webrtc/libjingle/xmpp/hangoutpubsubclient.h
index 2586768e2bd..fecc727604f 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/hangoutpubsubclient.h
+++ b/chromium/third_party/webrtc/libjingle/xmpp/hangoutpubsubclient.h
@@ -12,13 +12,13 @@
#define WEBRTC_LIBJINGLE_XMPP_HANGOUTPUBSUBCLIENT_H_
#include <map>
+#include <memory>
#include <string>
#include <vector>
#include "webrtc/libjingle/xmpp/jid.h"
#include "webrtc/libjingle/xmpp/pubsubclient.h"
#include "webrtc/libjingle/xmpp/pubsubstateclient.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/sigslotrepeater.h"
@@ -163,14 +163,14 @@ class HangoutPubSubClient : public sigslot::has_slots<> {
const XmlElement* stanza);
Jid mucjid_;
std::string nick_;
- rtc::scoped_ptr<PubSubClient> media_client_;
- rtc::scoped_ptr<PubSubClient> presenter_client_;
- rtc::scoped_ptr<PubSubStateClient<bool> > presenter_state_client_;
- rtc::scoped_ptr<PubSubStateClient<bool> > audio_mute_state_client_;
- rtc::scoped_ptr<PubSubStateClient<bool> > video_mute_state_client_;
- rtc::scoped_ptr<PubSubStateClient<bool> > video_pause_state_client_;
- rtc::scoped_ptr<PubSubStateClient<bool> > recording_state_client_;
- rtc::scoped_ptr<PubSubStateClient<bool> > media_block_state_client_;
+ std::unique_ptr<PubSubClient> media_client_;
+ std::unique_ptr<PubSubClient> presenter_client_;
+ std::unique_ptr<PubSubStateClient<bool> > presenter_state_client_;
+ std::unique_ptr<PubSubStateClient<bool> > audio_mute_state_client_;
+ std::unique_ptr<PubSubStateClient<bool> > video_mute_state_client_;
+ std::unique_ptr<PubSubStateClient<bool> > video_pause_state_client_;
+ std::unique_ptr<PubSubStateClient<bool> > recording_state_client_;
+ std::unique_ptr<PubSubStateClient<bool> > media_block_state_client_;
};
} // namespace buzz
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/hangoutpubsubclient_unittest.cc b/chromium/third_party/webrtc/libjingle/xmpp/hangoutpubsubclient_unittest.cc
index 7c6ea58f2d2..dd706b6a9a8 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/hangoutpubsubclient_unittest.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/hangoutpubsubclient_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "webrtc/libjingle/xmllite/qname.h"
@@ -228,11 +229,11 @@ class HangoutPubSubClientTest : public testing::Test {
listener.get(), &TestHangoutPubSubListener::OnMediaBlockError);
}
- rtc::scoped_ptr<rtc::FakeTaskRunner> runner;
+ std::unique_ptr<rtc::FakeTaskRunner> runner;
// xmpp_client deleted by deleting runner.
buzz::FakeXmppClient* xmpp_client;
- rtc::scoped_ptr<buzz::HangoutPubSubClient> client;
- rtc::scoped_ptr<TestHangoutPubSubListener> listener;
+ std::unique_ptr<buzz::HangoutPubSubClient> client;
+ std::unique_ptr<TestHangoutPubSubListener> listener;
buzz::Jid pubsubjid;
std::string nick;
};
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/iqtask.h b/chromium/third_party/webrtc/libjingle/xmpp/iqtask.h
index 1d50c383947..0db00711e86 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/iqtask.h
+++ b/chromium/third_party/webrtc/libjingle/xmpp/iqtask.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_LIBJINGLE_XMPP_IQTASK_H_
#define WEBRTC_LIBJINGLE_XMPP_IQTASK_H_
+#include <memory>
#include <string>
#include "webrtc/libjingle/xmpp/xmppengine.h"
@@ -40,7 +41,7 @@ class IqTask : public XmppTask {
virtual int OnTimeout();
Jid to_;
- rtc::scoped_ptr<XmlElement> stanza_;
+ std::unique_ptr<XmlElement> stanza_;
};
} // namespace buzz
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/jingleinfotask.cc b/chromium/third_party/webrtc/libjingle/xmpp/jingleinfotask.cc
index a5a07121bd1..5599edc0eb3 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/jingleinfotask.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/jingleinfotask.cc
@@ -10,6 +10,8 @@
#include "webrtc/libjingle/xmpp/jingleinfotask.h"
+#include <memory>
+
#include "webrtc/libjingle/xmpp/constants.h"
#include "webrtc/libjingle/xmpp/xmppclient.h"
#include "webrtc/libjingle/xmpp/xmpptask.h"
@@ -24,7 +26,7 @@ class JingleInfoTask::JingleInfoGetTask : public XmppTask {
done_(false) {}
virtual int ProcessStart() {
- rtc::scoped_ptr<XmlElement> get(
+ std::unique_ptr<XmlElement> get(
MakeIq(STR_GET, Jid(), task_id()));
get->AddElement(new XmlElement(QN_JINGLE_INFO_QUERY, true));
if (SendStanza(get.get()) != XMPP_RETURN_OK) {
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/mucroomconfigtask.cc b/chromium/third_party/webrtc/libjingle/xmpp/mucroomconfigtask.cc
index 08b10650a96..5938a2ca722 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/mucroomconfigtask.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/mucroomconfigtask.cc
@@ -14,7 +14,6 @@
#include "webrtc/libjingle/xmpp/mucroomconfigtask.h"
#include "webrtc/libjingle/xmpp/constants.h"
-#include "webrtc/base/scoped_ptr.h"
namespace buzz {
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/mucroomlookuptask.cc b/chromium/third_party/webrtc/libjingle/xmpp/mucroomlookuptask.cc
index 8c0a4d78561..a52491c9184 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/mucroomlookuptask.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/mucroomlookuptask.cc
@@ -12,7 +12,6 @@
#include "webrtc/libjingle/xmpp/constants.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
namespace buzz {
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/pingtask.cc b/chromium/third_party/webrtc/libjingle/xmpp/pingtask.cc
index 479dc23ff5c..ad4566a51c3 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/pingtask.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/pingtask.cc
@@ -10,9 +10,10 @@
#include "webrtc/libjingle/xmpp/pingtask.h"
+#include <memory>
+
#include "webrtc/libjingle/xmpp/constants.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
namespace buzz {
@@ -56,7 +57,7 @@ int PingTask::ProcessStart() {
ping_response_deadline_ = 0;
}
- uint32_t now = rtc::Time();
+ int64_t now = rtc::TimeMillis();
// If the ping timed out, signal.
if (ping_response_deadline_ != 0 && now >= ping_response_deadline_) {
@@ -66,7 +67,7 @@ int PingTask::ProcessStart() {
// Send a ping if it's time.
if (now >= next_ping_time_) {
- rtc::scoped_ptr<buzz::XmlElement> stanza(
+ std::unique_ptr<buzz::XmlElement> stanza(
MakeIq(buzz::STR_GET, Jid(STR_EMPTY), task_id()));
stanza->AddElement(new buzz::XmlElement(QN_PING));
SendStanza(stanza.get());
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/pingtask.h b/chromium/third_party/webrtc/libjingle/xmpp/pingtask.h
index 22fd94d7217..b070a1ccf1e 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/pingtask.h
+++ b/chromium/third_party/webrtc/libjingle/xmpp/pingtask.h
@@ -46,8 +46,8 @@ class PingTask : public buzz::XmppTask, private rtc::MessageHandler {
rtc::MessageQueue* message_queue_;
uint32_t ping_period_millis_;
uint32_t ping_timeout_millis_;
- uint32_t next_ping_time_;
- uint32_t ping_response_deadline_; // 0 if the response has been received
+ int64_t next_ping_time_;
+ int64_t ping_response_deadline_; // 0 if the response has been received
};
} // namespace buzz
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/pubsub_task.cc b/chromium/third_party/webrtc/libjingle/xmpp/pubsub_task.cc
index f30c0518144..812cb95832c 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/pubsub_task.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/pubsub_task.cc
@@ -11,6 +11,7 @@
#include "webrtc/libjingle/xmpp/pubsub_task.h"
#include <map>
+#include <memory>
#include <string>
#include "webrtc/libjingle/xmpp/constants.h"
@@ -82,7 +83,7 @@ int PubsubTask::ProcessResponse() {
bool PubsubTask::SubscribeToNode(const std::string& pubsub_node,
NodeHandler handler) {
subscribed_nodes_[pubsub_node] = handler;
- rtc::scoped_ptr<buzz::XmlElement> get_iq_request(
+ std::unique_ptr<buzz::XmlElement> get_iq_request(
MakeIq(buzz::STR_GET, pubsub_node_jid_, task_id()));
if (!get_iq_request) {
return false;
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/pubsubclient_unittest.cc b/chromium/third_party/webrtc/libjingle/xmpp/pubsubclient_unittest.cc
index 3815ef8a509..ab4c853e565 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/pubsubclient_unittest.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/pubsubclient_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "webrtc/libjingle/xmllite/qname.h"
@@ -103,11 +104,11 @@ class PubSubClientTest : public testing::Test {
listener.get(), &TestPubSubItemsListener::OnRetractError);
}
- rtc::scoped_ptr<rtc::FakeTaskRunner> runner;
+ std::unique_ptr<rtc::FakeTaskRunner> runner;
// xmpp_client deleted by deleting runner.
buzz::FakeXmppClient* xmpp_client;
- rtc::scoped_ptr<buzz::PubSubClient> client;
- rtc::scoped_ptr<TestPubSubItemsListener> listener;
+ std::unique_ptr<buzz::PubSubClient> client;
+ std::unique_ptr<TestPubSubItemsListener> listener;
buzz::Jid pubsubjid;
std::string node;
std::string itemid;
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/pubsubstateclient.h b/chromium/third_party/webrtc/libjingle/xmpp/pubsubstateclient.h
index ffb794af4c0..07aa26dbad5 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/pubsubstateclient.h
+++ b/chromium/third_party/webrtc/libjingle/xmpp/pubsubstateclient.h
@@ -12,6 +12,7 @@
#define WEBRTC_LIBJINGLE_XMPP_PUBSUBSTATECLIENT_H_
#include <map>
+#include <memory>
#include <string>
#include <vector>
@@ -20,7 +21,7 @@
#include "webrtc/libjingle/xmpp/constants.h"
#include "webrtc/libjingle/xmpp/jid.h"
#include "webrtc/libjingle/xmpp/pubsubclient.h"
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/sigslotrepeater.h"
@@ -256,8 +257,8 @@ class PubSubStateClient : public sigslot::has_slots<> {
PubSubClient* client_;
const QName state_name_;
C default_state_;
- rtc::scoped_ptr<PubSubStateKeySerializer> key_serializer_;
- rtc::scoped_ptr<PubSubStateSerializer<C> > state_serializer_;
+ std::unique_ptr<PubSubStateKeySerializer> key_serializer_;
+ std::unique_ptr<PubSubStateSerializer<C> > state_serializer_;
// key => state
std::map<std::string, C> state_by_key_;
// itemid => StateItemInfo
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/pubsubtasks_unittest.cc b/chromium/third_party/webrtc/libjingle/xmpp/pubsubtasks_unittest.cc
index 8062e58e18f..79e656bb0bb 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/pubsubtasks_unittest.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/pubsubtasks_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "webrtc/libjingle/xmllite/qname.h"
@@ -80,10 +81,10 @@ class PubSubTasksTest : public testing::Test {
listener.reset(new TestPubSubTasksListener());
}
- rtc::scoped_ptr<rtc::FakeTaskRunner> runner;
+ std::unique_ptr<rtc::FakeTaskRunner> runner;
// Client deleted by deleting runner.
buzz::FakeXmppClient* client;
- rtc::scoped_ptr<TestPubSubTasksListener> listener;
+ std::unique_ptr<TestPubSubTasksListener> listener;
buzz::Jid pubsubjid;
std::string node;
std::string itemid;
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/rostermodule_unittest.cc b/chromium/third_party/webrtc/libjingle/xmpp/rostermodule_unittest.cc
index 1ae6c226a57..18f20253572 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/rostermodule_unittest.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/rostermodule_unittest.cc
@@ -9,6 +9,7 @@
*/
#include <iostream>
+#include <memory>
#include <sstream>
#include <string>
@@ -18,7 +19,6 @@
#include "webrtc/libjingle/xmpp/util_unittest.h"
#include "webrtc/libjingle/xmpp/xmppengine.h"
#include "webrtc/base/gunit.h"
-#include "webrtc/base/scoped_ptr.h"
#define TEST_OK(x) EXPECT_EQ((x),XMPP_RETURN_OK)
#define TEST_BADARGUMENT(x) EXPECT_EQ((x),XMPP_RETURN_BADARGUMENT)
@@ -250,7 +250,7 @@ TEST_F(RosterModuleTest, TestPresence) {
status->AddAttr(QN_STATUS, STR_PSTN_CONFERENCE_STATUS_CONNECTING);
XmlElement presence_xml(QN_PRESENCE);
presence_xml.AddElement(status);
- rtc::scoped_ptr<XmppPresence> presence(XmppPresence::Create());
+ std::unique_ptr<XmppPresence> presence(XmppPresence::Create());
presence->set_raw_xml(&presence_xml);
EXPECT_EQ(presence->connection_status(), XMPP_CONNECTION_STATUS_CONNECTING);
}
@@ -258,11 +258,11 @@ TEST_F(RosterModuleTest, TestPresence) {
TEST_F(RosterModuleTest, TestOutgoingPresence) {
std::stringstream dump;
- rtc::scoped_ptr<XmppEngine> engine(XmppEngine::Create());
+ std::unique_ptr<XmppEngine> engine(XmppEngine::Create());
XmppTestHandler handler(engine.get());
XmppTestRosterHandler roster_handler;
- rtc::scoped_ptr<XmppRosterModule> roster(XmppRosterModule::Create());
+ std::unique_ptr<XmppRosterModule> roster(XmppRosterModule::Create());
roster->set_roster_handler(&roster_handler);
// Configure the roster module
@@ -364,7 +364,7 @@ TEST_F(RosterModuleTest, TestOutgoingPresence) {
EXPECT_EQ(handler.SessionActivity(), "");
// Construct a directed presence
- rtc::scoped_ptr<XmppPresence> directed_presence(XmppPresence::Create());
+ std::unique_ptr<XmppPresence> directed_presence(XmppPresence::Create());
TEST_OK(directed_presence->set_available(XMPP_PRESENCE_AVAILABLE));
TEST_OK(directed_presence->set_priority(120));
TEST_OK(directed_presence->set_status("*very* available"));
@@ -381,11 +381,11 @@ TEST_F(RosterModuleTest, TestOutgoingPresence) {
}
TEST_F(RosterModuleTest, TestIncomingPresence) {
- rtc::scoped_ptr<XmppEngine> engine(XmppEngine::Create());
+ std::unique_ptr<XmppEngine> engine(XmppEngine::Create());
XmppTestHandler handler(engine.get());
XmppTestRosterHandler roster_handler;
- rtc::scoped_ptr<XmppRosterModule> roster(XmppRosterModule::Create());
+ std::unique_ptr<XmppRosterModule> roster(XmppRosterModule::Create());
roster->set_roster_handler(&roster_handler);
// Configure the roster module
@@ -513,11 +513,11 @@ TEST_F(RosterModuleTest, TestIncomingPresence) {
}
TEST_F(RosterModuleTest, TestPresenceSubscription) {
- rtc::scoped_ptr<XmppEngine> engine(XmppEngine::Create());
+ std::unique_ptr<XmppEngine> engine(XmppEngine::Create());
XmppTestHandler handler(engine.get());
XmppTestRosterHandler roster_handler;
- rtc::scoped_ptr<XmppRosterModule> roster(XmppRosterModule::Create());
+ std::unique_ptr<XmppRosterModule> roster(XmppRosterModule::Create());
roster->set_roster_handler(&roster_handler);
// Configure the roster module
@@ -576,11 +576,11 @@ TEST_F(RosterModuleTest, TestPresenceSubscription) {
}
TEST_F(RosterModuleTest, TestRosterReceive) {
- rtc::scoped_ptr<XmppEngine> engine(XmppEngine::Create());
+ std::unique_ptr<XmppEngine> engine(XmppEngine::Create());
XmppTestHandler handler(engine.get());
XmppTestRosterHandler roster_handler;
- rtc::scoped_ptr<XmppRosterModule> roster(XmppRosterModule::Create());
+ std::unique_ptr<XmppRosterModule> roster(XmppRosterModule::Create());
roster->set_roster_handler(&roster_handler);
// Configure the roster module
@@ -696,7 +696,7 @@ TEST_F(RosterModuleTest, TestRosterReceive) {
EXPECT_EQ(handler.SessionActivity(), "");
// Request that someone be added
- rtc::scoped_ptr<XmppRosterContact> contact(XmppRosterContact::Create());
+ std::unique_ptr<XmppRosterContact> contact(XmppRosterContact::Create());
TEST_OK(contact->set_jid(Jid("brandt@example.net")));
TEST_OK(contact->set_name("Brandt"));
TEST_OK(contact->AddGroup("Business Partners"));
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/rostermoduleimpl.h b/chromium/third_party/webrtc/libjingle/xmpp/rostermoduleimpl.h
index 6e3bd91c8a2..772692bd65b 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/rostermoduleimpl.h
+++ b/chromium/third_party/webrtc/libjingle/xmpp/rostermoduleimpl.h
@@ -11,6 +11,8 @@
#ifndef WEBRTC_LIBJINGLE_XMPP_XMPPTHREAD_H_
#define WEBRTC_LIBJINGLE_XMPP_XMPPTHREAD_H_
+#include <memory>
+
#include "webrtc/libjingle/xmpp/moduleimpl.h"
#include "webrtc/libjingle/xmpp/rostermodule.h"
@@ -86,7 +88,7 @@ private:
// Store everything in the XML element. If this becomes a perf issue we can
// cache the data.
- rtc::scoped_ptr<XmlElement> raw_xml_;
+ std::unique_ptr<XmlElement> raw_xml_;
};
//! A contact as given by the server
@@ -151,7 +153,7 @@ private:
int group_count_;
int group_index_returned_;
XmlElement * group_returned_;
- rtc::scoped_ptr<XmlElement> raw_xml_;
+ std::unique_ptr<XmlElement> raw_xml_;
};
//! An XmppModule for handle roster and presence functionality
@@ -273,11 +275,11 @@ private:
typedef std::vector<XmppPresenceImpl*> PresenceVector;
typedef std::map<Jid, PresenceVector*> JidPresenceVectorMap;
- rtc::scoped_ptr<JidPresenceVectorMap> incoming_presence_map_;
- rtc::scoped_ptr<PresenceVector> incoming_presence_vector_;
+ std::unique_ptr<JidPresenceVectorMap> incoming_presence_map_;
+ std::unique_ptr<PresenceVector> incoming_presence_vector_;
typedef std::vector<XmppRosterContactImpl*> ContactVector;
- rtc::scoped_ptr<ContactVector> contacts_;
+ std::unique_ptr<ContactVector> contacts_;
};
}
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/xmpp.gyp b/chromium/third_party/webrtc/libjingle/xmpp/xmpp.gyp
index 4dc75f7b52e..37b75122b4d 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/xmpp.gyp
+++ b/chromium/third_party/webrtc/libjingle/xmpp/xmpp.gyp
@@ -19,9 +19,6 @@
'defines': [
'FEATURE_ENABLE_SSL',
],
- 'cflags_cc!': [
- '-Wnon-virtual-dtor',
- ],
'sources': [
'asyncsocket.h',
'chatroommodule.h',
@@ -98,9 +95,6 @@
'xmppthread.h',
],
'direct_dependent_settings': {
- 'cflags_cc!': [
- '-Wnon-virtual-dtor',
- ],
'defines': [
'FEATURE_ENABLE_SSL',
'FEATURE_ENABLE_VOICEMAIL',
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/xmppclient.cc b/chromium/third_party/webrtc/libjingle/xmpp/xmppclient.cc
index f7e88c39278..a87659824bb 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/xmppclient.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/xmppclient.cc
@@ -15,7 +15,6 @@
#include "webrtc/libjingle/xmpp/prexmppauth.h"
#include "webrtc/libjingle/xmpp/saslplainmechanism.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/stringutils.h"
#include "xmpptask.h"
@@ -48,9 +47,9 @@ public:
XmppClient* const client_;
// the two main objects
- rtc::scoped_ptr<AsyncSocket> socket_;
- rtc::scoped_ptr<XmppEngine> engine_;
- rtc::scoped_ptr<PreXmppAuth> pre_auth_;
+ std::unique_ptr<AsyncSocket> socket_;
+ std::unique_ptr<XmppEngine> engine_;
+ std::unique_ptr<PreXmppAuth> pre_auth_;
rtc::CryptString pass_;
std::string auth_mechanism_;
std::string auth_token_;
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/xmppclient.h b/chromium/third_party/webrtc/libjingle/xmpp/xmppclient.h
index 84ca6c1418d..dba591cb44a 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/xmppclient.h
+++ b/chromium/third_party/webrtc/libjingle/xmpp/xmppclient.h
@@ -11,7 +11,9 @@
#ifndef WEBRTC_LIBJINGLE_XMPP_XMPPCLIENT_H_
#define WEBRTC_LIBJINGLE_XMPP_XMPPCLIENT_H_
+#include <memory>
#include <string>
+
#include "webrtc/libjingle/xmpp/asyncsocket.h"
#include "webrtc/libjingle/xmpp/xmppclientsettings.h"
#include "webrtc/libjingle/xmpp/xmppengine.h"
@@ -136,7 +138,7 @@ public:
class Private;
friend class Private;
- rtc::scoped_ptr<Private> d_;
+ std::unique_ptr<Private> d_;
bool delivering_signal_;
bool valid_;
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/xmppengine_unittest.cc b/chromium/third_party/webrtc/libjingle/xmpp/xmppengine_unittest.cc
index 7af151ba48d..d5afead660e 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/xmppengine_unittest.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/xmppengine_unittest.cc
@@ -9,8 +9,10 @@
*/
#include <iostream>
+#include <memory>
#include <sstream>
#include <string>
+
#include "webrtc/libjingle/xmllite/xmlelement.h"
#include "webrtc/libjingle/xmpp/constants.h"
#include "webrtc/libjingle/xmpp/plainsaslhandler.h"
@@ -77,8 +79,8 @@ class XmppEngineTest : public testing::Test {
void RunLogin();
private:
- rtc::scoped_ptr<XmppEngine> engine_;
- rtc::scoped_ptr<XmppTestHandler> handler_;
+ std::unique_ptr<XmppEngine> engine_;
+ std::unique_ptr<XmppTestHandler> handler_;
};
void XmppEngineTest::RunLogin() {
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/xmppengineimpl.h b/chromium/third_party/webrtc/libjingle/xmpp/xmppengineimpl.h
index c322596c46f..e1f3061f90d 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/xmppengineimpl.h
+++ b/chromium/third_party/webrtc/libjingle/xmpp/xmppengineimpl.h
@@ -11,8 +11,10 @@
#ifndef WEBRTC_LIBJINGLE_XMPP_XMPPENGINEIMPL_H_
#define WEBRTC_LIBJINGLE_XMPP_XMPPENGINEIMPL_H_
+#include <memory>
#include <sstream>
#include <vector>
+
#include "webrtc/libjingle/xmpp/xmppengine.h"
#include "webrtc/libjingle/xmpp/xmppstanzaparser.h"
@@ -233,7 +235,7 @@ class XmppEngineImpl : public XmppEngine {
TlsOptions tls_option_;
std::string tls_server_hostname_;
std::string tls_server_domain_;
- rtc::scoped_ptr<XmppLoginTask> login_task_;
+ std::unique_ptr<XmppLoginTask> login_task_;
std::string lang_;
int next_id_;
@@ -242,7 +244,7 @@ class XmppEngineImpl : public XmppEngine {
bool encrypted_;
Error error_code_;
int subcode_;
- rtc::scoped_ptr<XmlElement> stream_error_;
+ std::unique_ptr<XmlElement> stream_error_;
bool raised_reset_;
XmppOutputHandler* output_handler_;
XmppSessionHandler* session_handler_;
@@ -250,14 +252,14 @@ class XmppEngineImpl : public XmppEngine {
XmlnsStack xmlns_stack_;
typedef std::vector<XmppStanzaHandler*> StanzaHandlerVector;
- rtc::scoped_ptr<StanzaHandlerVector> stanza_handlers_[HL_COUNT];
+ std::unique_ptr<StanzaHandlerVector> stanza_handlers_[HL_COUNT];
typedef std::vector<XmppIqEntry*> IqEntryVector;
- rtc::scoped_ptr<IqEntryVector> iq_entries_;
+ std::unique_ptr<IqEntryVector> iq_entries_;
- rtc::scoped_ptr<SaslHandler> sasl_handler_;
+ std::unique_ptr<SaslHandler> sasl_handler_;
- rtc::scoped_ptr<std::stringstream> output_;
+ std::unique_ptr<std::stringstream> output_;
};
} // namespace buzz
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/xmpplogintask.h b/chromium/third_party/webrtc/libjingle/xmpp/xmpplogintask.h
index f69a648394e..c015c2e7a1c 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/xmpplogintask.h
+++ b/chromium/third_party/webrtc/libjingle/xmpp/xmpplogintask.h
@@ -11,13 +11,13 @@
#ifndef WEBRTC_LIBJINGLE_XMPP_LOGINTASK_H_
#define WEBRTC_LIBJINGLE_XMPP_LOGINTASK_H_
+#include <memory>
#include <string>
#include <vector>
#include "webrtc/libjingle/xmpp/jid.h"
#include "webrtc/libjingle/xmpp/xmppengine.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
namespace buzz {
@@ -70,12 +70,12 @@ private:
const XmlElement * pelStanza_;
bool isStart_;
std::string iqId_;
- rtc::scoped_ptr<XmlElement> pelFeatures_;
+ std::unique_ptr<XmlElement> pelFeatures_;
Jid fullJid_;
std::string streamId_;
- rtc::scoped_ptr<std::vector<XmlElement *> > pvecQueuedStanzas_;
+ std::unique_ptr<std::vector<XmlElement *> > pvecQueuedStanzas_;
- rtc::scoped_ptr<SaslMechanism> sasl_mech_;
+ std::unique_ptr<SaslMechanism> sasl_mech_;
#if !defined(NDEBUG)
static const rtc::ConstantLabel LOGINTASK_STATES[];
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/xmpplogintask_unittest.cc b/chromium/third_party/webrtc/libjingle/xmpp/xmpplogintask_unittest.cc
index 221cbdeaf5e..18fce97645b 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/xmpplogintask_unittest.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/xmpplogintask_unittest.cc
@@ -9,8 +9,10 @@
*/
#include <iostream>
+#include <memory>
#include <sstream>
#include <string>
+
#include "webrtc/libjingle/xmllite/xmlelement.h"
#include "webrtc/libjingle/xmpp/constants.h"
#include "webrtc/libjingle/xmpp/plainsaslhandler.h"
@@ -20,6 +22,7 @@
#include "webrtc/base/common.h"
#include "webrtc/base/cryptstring.h"
#include "webrtc/base/gunit.h"
+#include "webrtc/typedefs.h"
using buzz::Jid;
using buzz::QName;
@@ -67,8 +70,8 @@ class XmppLoginTaskTest : public testing::Test {
void SetTlsOptions(buzz::TlsOptions option);
private:
- rtc::scoped_ptr<XmppEngine> engine_;
- rtc::scoped_ptr<XmppTestHandler> handler_;
+ std::unique_ptr<XmppEngine> engine_;
+ std::unique_ptr<XmppTestHandler> handler_;
};
void XmppLoginTaskTest::SetTlsOptions(buzz::TlsOptions option) {
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/xmpppump.cc b/chromium/third_party/webrtc/libjingle/xmpp/xmpppump.cc
index a428ffa4dc4..4412483cb83 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/xmpppump.cc
+++ b/chromium/third_party/webrtc/libjingle/xmpp/xmpppump.cc
@@ -50,7 +50,7 @@ void XmppPump::WakeTasks() {
}
int64_t XmppPump::CurrentTime() {
- return (int64_t)rtc::Time();
+ return (int64_t)rtc::TimeMillis();
}
void XmppPump::OnMessage(rtc::Message *pmsg) {
diff --git a/chromium/third_party/webrtc/libjingle/xmpp/xmpptask.h b/chromium/third_party/webrtc/libjingle/xmpp/xmpptask.h
index 36351b7fa8f..3f3a39c791c 100644
--- a/chromium/third_party/webrtc/libjingle/xmpp/xmpptask.h
+++ b/chromium/third_party/webrtc/libjingle/xmpp/xmpptask.h
@@ -12,8 +12,11 @@
#define WEBRTC_LIBJINGLE_XMPP_XMPPTASK_H_
#include <deque>
+#include <memory>
#include <string>
+
#include "webrtc/libjingle/xmpp/xmppengine.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/task.h"
#include "webrtc/base/taskparent.h"
@@ -159,7 +162,7 @@ private:
bool stopped_;
std::deque<XmlElement*> stanza_queue_;
- rtc::scoped_ptr<XmlElement> next_stanza_;
+ std::unique_ptr<XmlElement> next_stanza_;
std::string id_;
#if !defined(NDEBUG)
diff --git a/chromium/third_party/webrtc/media/base/audioframe.h b/chromium/third_party/webrtc/media/base/audioframe.h
deleted file mode 100644
index 15553c47520..00000000000
--- a/chromium/third_party/webrtc/media/base/audioframe.h
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MEDIA_BASE_AUDIOFRAME_H_
-#define WEBRTC_MEDIA_BASE_AUDIOFRAME_H_
-
-namespace cricket {
-
-class AudioFrame {
- public:
- AudioFrame()
- : audio10ms_(NULL),
- length_(0),
- sampling_frequency_(8000),
- stereo_(false) {
- }
- AudioFrame(int16_t* audio, size_t audio_length, int sample_freq, bool stereo)
- : audio10ms_(audio),
- length_(audio_length),
- sampling_frequency_(sample_freq),
- stereo_(stereo) {}
-
- int16_t* GetData() { return audio10ms_; }
- size_t GetSize() const { return length_; }
- int GetSamplingFrequency() const { return sampling_frequency_; }
- bool GetStereo() const { return stereo_; }
-
- private:
- // TODO(janahan): currently the data is not owned by this class.
- // add ownership when we come up with the first use case that requires it.
- int16_t* audio10ms_;
- size_t length_;
- int sampling_frequency_;
- bool stereo_;
-};
-
-} // namespace cricket
-#endif // WEBRTC_MEDIA_BASE_AUDIOFRAME_H_
diff --git a/chromium/third_party/webrtc/media/base/codec.cc b/chromium/third_party/webrtc/media/base/codec.cc
index 287de0cdbf4..01350f71236 100644
--- a/chromium/third_party/webrtc/media/base/codec.cc
+++ b/chromium/third_party/webrtc/media/base/codec.cc
@@ -71,12 +71,10 @@ bool FeedbackParams::HasDuplicateEntries() const {
return false;
}
-Codec::Codec(int id, const std::string& name, int clockrate, int preference)
- : id(id), name(name), clockrate(clockrate), preference(preference) {
-}
+Codec::Codec(int id, const std::string& name, int clockrate)
+ : id(id), name(name), clockrate(clockrate) {}
-Codec::Codec() : id(0), clockrate(0), preference(0) {
-}
+Codec::Codec() : id(0), clockrate(0) {}
Codec::Codec(const Codec& c) = default;
@@ -86,7 +84,6 @@ Codec& Codec::operator=(const Codec& c) {
this->id = c.id; // id is reserved in objective-c
name = c.name;
clockrate = c.clockrate;
- preference = c.preference;
params = c.params;
feedback_params = c.feedback_params;
return *this;
@@ -94,8 +91,7 @@ Codec& Codec::operator=(const Codec& c) {
bool Codec::operator==(const Codec& c) const {
return this->id == c.id && // id is reserved in objective-c
- name == c.name && clockrate == c.clockrate &&
- preference == c.preference && params == c.params &&
+ name == c.name && clockrate == c.clockrate && params == c.params &&
feedback_params == c.feedback_params;
}
@@ -146,16 +142,20 @@ void Codec::IntersectFeedbackParams(const Codec& other) {
feedback_params.Intersect(other.feedback_params);
}
+webrtc::RtpCodecParameters Codec::ToCodecParameters() const {
+ webrtc::RtpCodecParameters codec_params;
+ codec_params.payload_type = id;
+ codec_params.mime_type = name;
+ codec_params.clock_rate = clockrate;
+ return codec_params;
+}
+
AudioCodec::AudioCodec(int id,
const std::string& name,
int clockrate,
int bitrate,
- size_t channels,
- int preference)
- : Codec(id, name, clockrate, preference),
- bitrate(bitrate),
- channels(channels) {
-}
+ size_t channels)
+ : Codec(id, name, clockrate), bitrate(bitrate), channels(channels) {}
AudioCodec::AudioCodec() : Codec(), bitrate(0), channels(0) {
}
@@ -190,17 +190,23 @@ bool AudioCodec::Matches(const AudioCodec& codec) const {
((codec.channels < 2 && channels < 2) || channels == codec.channels);
}
+webrtc::RtpCodecParameters AudioCodec::ToCodecParameters() const {
+ webrtc::RtpCodecParameters codec_params = Codec::ToCodecParameters();
+ codec_params.channels = channels;
+ return codec_params;
+}
+
std::string AudioCodec::ToString() const {
std::ostringstream os;
os << "AudioCodec[" << id << ":" << name << ":" << clockrate << ":" << bitrate
- << ":" << channels << ":" << preference << "]";
+ << ":" << channels << "]";
return os.str();
}
std::string VideoCodec::ToString() const {
std::ostringstream os;
os << "VideoCodec[" << id << ":" << name << ":" << width << ":" << height
- << ":" << framerate << ":" << preference << "]";
+ << ":" << framerate << "]";
return os.str();
}
@@ -208,20 +214,17 @@ VideoCodec::VideoCodec(int id,
const std::string& name,
int width,
int height,
- int framerate,
- int preference)
- : Codec(id, name, kVideoCodecClockrate, preference),
+ int framerate)
+ : Codec(id, name, kVideoCodecClockrate),
width(width),
height(height),
- framerate(framerate) {
-}
+ framerate(framerate) {}
VideoCodec::VideoCodec(int id, const std::string& name)
- : Codec(id, name, kVideoCodecClockrate, 0),
+ : Codec(id, name, kVideoCodecClockrate),
width(0),
height(0),
- framerate(0) {
-}
+ framerate(0) {}
VideoCodec::VideoCodec() : Codec(), width(0), height(0), framerate(0) {
clockrate = kVideoCodecClockrate;
@@ -244,7 +247,7 @@ bool VideoCodec::operator==(const VideoCodec& c) const {
VideoCodec VideoCodec::CreateRtxCodec(int rtx_payload_type,
int associated_payload_type) {
- VideoCodec rtx_codec(rtx_payload_type, kRtxCodecName, 0, 0, 0, 0);
+ VideoCodec rtx_codec(rtx_payload_type, kRtxCodecName, 0, 0, 0);
rtx_codec.SetParam(kCodecParamAssociatedPayloadType, associated_payload_type);
return rtx_codec;
}
@@ -291,9 +294,8 @@ bool VideoCodec::ValidateCodecFormat() const {
return true;
}
-DataCodec::DataCodec(int id, const std::string& name, int preference)
- : Codec(id, name, kDataCodecClockrate, preference) {
-}
+DataCodec::DataCodec(int id, const std::string& name)
+ : Codec(id, name, kDataCodecClockrate) {}
DataCodec::DataCodec() : Codec() {
clockrate = kDataCodecClockrate;
diff --git a/chromium/third_party/webrtc/media/base/codec.h b/chromium/third_party/webrtc/media/base/codec.h
index 7476c026942..76a623a0de8 100644
--- a/chromium/third_party/webrtc/media/base/codec.h
+++ b/chromium/third_party/webrtc/media/base/codec.h
@@ -16,6 +16,7 @@
#include <string>
#include <vector>
+#include "webrtc/api/rtpparameters.h"
#include "webrtc/media/base/mediaconstants.h"
namespace cricket {
@@ -64,16 +65,15 @@ struct Codec {
int id;
std::string name;
int clockrate;
- int preference;
CodecParameterMap params;
FeedbackParams feedback_params;
// Creates a codec with the given parameters.
- Codec(int id, const std::string& name, int clockrate, int preference);
+ Codec(int id, const std::string& name, int clockrate);
// Creates an empty codec.
Codec();
Codec(const Codec& c);
- ~Codec();
+ virtual ~Codec();
// Indicates if this codec is compatible with the specified codec.
bool Matches(const Codec& codec) const;
@@ -92,14 +92,12 @@ struct Codec {
bool HasFeedbackParam(const FeedbackParam& param) const;
void AddFeedbackParam(const FeedbackParam& param);
- static bool Preferable(const Codec& first, const Codec& other) {
- return first.preference > other.preference;
- }
-
// Filter |this| feedbacks params such that only those shared by both |this|
// and |other| are kept.
void IntersectFeedbackParams(const Codec& other);
+ virtual webrtc::RtpCodecParameters ToCodecParameters() const;
+
Codec& operator=(const Codec& c);
bool operator==(const Codec& c) const;
@@ -118,22 +116,19 @@ struct AudioCodec : public Codec {
const std::string& name,
int clockrate,
int bitrate,
- size_t channels,
- int preference);
+ size_t channels);
// Creates an empty codec.
AudioCodec();
AudioCodec(const AudioCodec& c);
- ~AudioCodec() = default;
+ virtual ~AudioCodec() = default;
// Indicates if this codec is compatible with the specified codec.
bool Matches(const AudioCodec& codec) const;
- static bool Preferable(const AudioCodec& first, const AudioCodec& other) {
- return first.preference > other.preference;
- }
-
std::string ToString() const;
+ webrtc::RtpCodecParameters ToCodecParameters() const override;
+
AudioCodec& operator=(const AudioCodec& c);
bool operator==(const AudioCodec& c) const;
@@ -153,17 +148,12 @@ struct VideoCodec : public Codec {
const std::string& name,
int width,
int height,
- int framerate,
- int preference);
+ int framerate);
VideoCodec(int id, const std::string& name);
// Creates an empty codec.
VideoCodec();
VideoCodec(const VideoCodec& c);
- ~VideoCodec() = default;
-
- static bool Preferable(const VideoCodec& first, const VideoCodec& other) {
- return first.preference > other.preference;
- }
+ virtual ~VideoCodec() = default;
std::string ToString() const;
@@ -193,9 +183,10 @@ struct VideoCodec : public Codec {
};
struct DataCodec : public Codec {
- DataCodec(int id, const std::string& name, int preference);
+ DataCodec(int id, const std::string& name);
DataCodec();
DataCodec(const DataCodec& c);
+ virtual ~DataCodec() = default;
DataCodec& operator=(const DataCodec& c);
diff --git a/chromium/third_party/webrtc/media/base/codec_unittest.cc b/chromium/third_party/webrtc/media/base/codec_unittest.cc
index 88d49632d00..a3468803973 100644
--- a/chromium/third_party/webrtc/media/base/codec_unittest.cc
+++ b/chromium/third_party/webrtc/media/base/codec_unittest.cc
@@ -26,7 +26,7 @@ class CodecTest : public testing::Test {
};
TEST_F(CodecTest, TestCodecOperators) {
- Codec c0(96, "D", 1000, 0);
+ Codec c0(96, "D", 1000);
c0.SetParam("a", 1);
Codec c1 = c0;
@@ -50,35 +50,29 @@ TEST_F(CodecTest, TestCodecOperators) {
EXPECT_TRUE(c0 != c1);
c1 = c0;
- c1.preference = 1;
- EXPECT_TRUE(c0 != c1);
-
- c1 = c0;
c1.SetParam("a", 2);
EXPECT_TRUE(c0 != c1);
Codec c5;
- Codec c6(0, "", 0, 0);
+ Codec c6(0, "", 0);
EXPECT_TRUE(c5 == c6);
}
TEST_F(CodecTest, TestAudioCodecOperators) {
- AudioCodec c0(96, "A", 44100, 20000, 2, 3);
- AudioCodec c1(95, "A", 44100, 20000, 2, 3);
- AudioCodec c2(96, "x", 44100, 20000, 2, 3);
- AudioCodec c3(96, "A", 48000, 20000, 2, 3);
- AudioCodec c4(96, "A", 44100, 10000, 2, 3);
- AudioCodec c5(96, "A", 44100, 20000, 1, 3);
- AudioCodec c6(96, "A", 44100, 20000, 2, 1);
+ AudioCodec c0(96, "A", 44100, 20000, 2);
+ AudioCodec c1(95, "A", 44100, 20000, 2);
+ AudioCodec c2(96, "x", 44100, 20000, 2);
+ AudioCodec c3(96, "A", 48000, 20000, 2);
+ AudioCodec c4(96, "A", 44100, 10000, 2);
+ AudioCodec c5(96, "A", 44100, 20000, 1);
EXPECT_TRUE(c0 != c1);
EXPECT_TRUE(c0 != c2);
EXPECT_TRUE(c0 != c3);
EXPECT_TRUE(c0 != c4);
EXPECT_TRUE(c0 != c5);
- EXPECT_TRUE(c0 != c6);
AudioCodec c7;
- AudioCodec c8(0, "", 0, 0, 0, 0);
+ AudioCodec c8(0, "", 0, 0, 0);
AudioCodec c9 = c0;
EXPECT_TRUE(c8 == c7);
EXPECT_TRUE(c9 != c7);
@@ -103,61 +97,59 @@ TEST_F(CodecTest, TestAudioCodecOperators) {
TEST_F(CodecTest, TestAudioCodecMatches) {
// Test a codec with a static payload type.
- AudioCodec c0(95, "A", 44100, 20000, 1, 3);
- EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 44100, 20000, 1, 0)));
- EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 44100, 20000, 0, 0)));
- EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 44100, 0, 0, 0)));
- EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 0, 0, 0, 0)));
- EXPECT_FALSE(c0.Matches(AudioCodec(96, "", 44100, 20000, 1, 0)));
- EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 55100, 20000, 1, 0)));
- EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 44100, 30000, 1, 0)));
- EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 44100, 20000, 2, 0)));
- EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 55100, 30000, 2, 0)));
+ AudioCodec c0(95, "A", 44100, 20000, 1);
+ EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 44100, 20000, 1)));
+ EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 44100, 20000, 0)));
+ EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 44100, 0, 0)));
+ EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 0, 0, 0)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(96, "", 44100, 20000, 1)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 55100, 20000, 1)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 44100, 30000, 1)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 44100, 20000, 2)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 55100, 30000, 2)));
// Test a codec with a dynamic payload type.
- AudioCodec c1(96, "A", 44100, 20000, 1, 3);
- EXPECT_TRUE(c1.Matches(AudioCodec(96, "A", 0, 0, 0, 0)));
- EXPECT_TRUE(c1.Matches(AudioCodec(97, "A", 0, 0, 0, 0)));
- EXPECT_TRUE(c1.Matches(AudioCodec(96, "a", 0, 0, 0, 0)));
- EXPECT_TRUE(c1.Matches(AudioCodec(97, "a", 0, 0, 0, 0)));
- EXPECT_FALSE(c1.Matches(AudioCodec(95, "A", 0, 0, 0, 0)));
- EXPECT_FALSE(c1.Matches(AudioCodec(96, "", 44100, 20000, 2, 0)));
- EXPECT_FALSE(c1.Matches(AudioCodec(96, "A", 55100, 30000, 1, 0)));
+ AudioCodec c1(96, "A", 44100, 20000, 1);
+ EXPECT_TRUE(c1.Matches(AudioCodec(96, "A", 0, 0, 0)));
+ EXPECT_TRUE(c1.Matches(AudioCodec(97, "A", 0, 0, 0)));
+ EXPECT_TRUE(c1.Matches(AudioCodec(96, "a", 0, 0, 0)));
+ EXPECT_TRUE(c1.Matches(AudioCodec(97, "a", 0, 0, 0)));
+ EXPECT_FALSE(c1.Matches(AudioCodec(95, "A", 0, 0, 0)));
+ EXPECT_FALSE(c1.Matches(AudioCodec(96, "", 44100, 20000, 2)));
+ EXPECT_FALSE(c1.Matches(AudioCodec(96, "A", 55100, 30000, 1)));
// Test a codec with a dynamic payload type, and auto bitrate.
- AudioCodec c2(97, "A", 16000, 0, 1, 3);
+ AudioCodec c2(97, "A", 16000, 0, 1);
// Use default bitrate.
- EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, 0, 1, 0)));
- EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, 0, 0, 0)));
+ EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, 0, 1)));
+ EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, 0, 0)));
// Use explicit bitrate.
- EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, 32000, 1, 0)));
+ EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, 32000, 1)));
// Backward compatibility with clients that might send "-1" (for default).
- EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, -1, 1, 0)));
+ EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, -1, 1)));
// Stereo doesn't match channels = 0.
- AudioCodec c3(96, "A", 44100, 20000, 2, 3);
- EXPECT_TRUE(c3.Matches(AudioCodec(96, "A", 44100, 20000, 2, 3)));
- EXPECT_FALSE(c3.Matches(AudioCodec(96, "A", 44100, 20000, 1, 3)));
- EXPECT_FALSE(c3.Matches(AudioCodec(96, "A", 44100, 20000, 0, 3)));
+ AudioCodec c3(96, "A", 44100, 20000, 2);
+ EXPECT_TRUE(c3.Matches(AudioCodec(96, "A", 44100, 20000, 2)));
+ EXPECT_FALSE(c3.Matches(AudioCodec(96, "A", 44100, 20000, 1)));
+ EXPECT_FALSE(c3.Matches(AudioCodec(96, "A", 44100, 20000, 0)));
}
TEST_F(CodecTest, TestVideoCodecOperators) {
- VideoCodec c0(96, "V", 320, 200, 30, 3);
- VideoCodec c1(95, "V", 320, 200, 30, 3);
- VideoCodec c2(96, "x", 320, 200, 30, 3);
- VideoCodec c3(96, "V", 120, 200, 30, 3);
- VideoCodec c4(96, "V", 320, 100, 30, 3);
- VideoCodec c5(96, "V", 320, 200, 10, 3);
- VideoCodec c6(96, "V", 320, 200, 30, 1);
+ VideoCodec c0(96, "V", 320, 200, 30);
+ VideoCodec c1(95, "V", 320, 200, 30);
+ VideoCodec c2(96, "x", 320, 200, 30);
+ VideoCodec c3(96, "V", 120, 200, 30);
+ VideoCodec c4(96, "V", 320, 100, 30);
+ VideoCodec c5(96, "V", 320, 200, 10);
EXPECT_TRUE(c0 != c1);
EXPECT_TRUE(c0 != c2);
EXPECT_TRUE(c0 != c3);
EXPECT_TRUE(c0 != c4);
EXPECT_TRUE(c0 != c5);
- EXPECT_TRUE(c0 != c6);
VideoCodec c7;
- VideoCodec c8(0, "", 0, 0, 0, 0);
+ VideoCodec c8(0, "", 0, 0, 0);
VideoCodec c9 = c0;
EXPECT_TRUE(c8 == c7);
EXPECT_TRUE(c9 != c7);
@@ -182,34 +174,34 @@ TEST_F(CodecTest, TestVideoCodecOperators) {
TEST_F(CodecTest, TestVideoCodecMatches) {
// Test a codec with a static payload type.
- VideoCodec c0(95, "V", 320, 200, 30, 3);
- EXPECT_TRUE(c0.Matches(VideoCodec(95, "", 640, 400, 15, 0)));
- EXPECT_FALSE(c0.Matches(VideoCodec(96, "", 320, 200, 30, 0)));
+ VideoCodec c0(95, "V", 320, 200, 30);
+ EXPECT_TRUE(c0.Matches(VideoCodec(95, "", 640, 400, 15)));
+ EXPECT_FALSE(c0.Matches(VideoCodec(96, "", 320, 200, 30)));
// Test a codec with a dynamic payload type.
- VideoCodec c1(96, "V", 320, 200, 30, 3);
- EXPECT_TRUE(c1.Matches(VideoCodec(96, "V", 640, 400, 15, 0)));
- EXPECT_TRUE(c1.Matches(VideoCodec(97, "V", 640, 400, 15, 0)));
- EXPECT_TRUE(c1.Matches(VideoCodec(96, "v", 640, 400, 15, 0)));
- EXPECT_TRUE(c1.Matches(VideoCodec(97, "v", 640, 400, 15, 0)));
- EXPECT_FALSE(c1.Matches(VideoCodec(96, "", 320, 200, 30, 0)));
- EXPECT_FALSE(c1.Matches(VideoCodec(95, "V", 640, 400, 15, 0)));
+ VideoCodec c1(96, "V", 320, 200, 30);
+ EXPECT_TRUE(c1.Matches(VideoCodec(96, "V", 640, 400, 15)));
+ EXPECT_TRUE(c1.Matches(VideoCodec(97, "V", 640, 400, 15)));
+ EXPECT_TRUE(c1.Matches(VideoCodec(96, "v", 640, 400, 15)));
+ EXPECT_TRUE(c1.Matches(VideoCodec(97, "v", 640, 400, 15)));
+ EXPECT_FALSE(c1.Matches(VideoCodec(96, "", 320, 200, 30)));
+ EXPECT_FALSE(c1.Matches(VideoCodec(95, "V", 640, 400, 15)));
}
TEST_F(CodecTest, TestDataCodecMatches) {
// Test a codec with a static payload type.
- DataCodec c0(95, "D", 0);
- EXPECT_TRUE(c0.Matches(DataCodec(95, "", 0)));
- EXPECT_FALSE(c0.Matches(DataCodec(96, "", 0)));
+ DataCodec c0(95, "D");
+ EXPECT_TRUE(c0.Matches(DataCodec(95, "")));
+ EXPECT_FALSE(c0.Matches(DataCodec(96, "")));
// Test a codec with a dynamic payload type.
- DataCodec c1(96, "D", 3);
- EXPECT_TRUE(c1.Matches(DataCodec(96, "D", 0)));
- EXPECT_TRUE(c1.Matches(DataCodec(97, "D", 0)));
- EXPECT_TRUE(c1.Matches(DataCodec(96, "d", 0)));
- EXPECT_TRUE(c1.Matches(DataCodec(97, "d", 0)));
- EXPECT_FALSE(c1.Matches(DataCodec(96, "", 0)));
- EXPECT_FALSE(c1.Matches(DataCodec(95, "D", 0)));
+ DataCodec c1(96, "D");
+ EXPECT_TRUE(c1.Matches(DataCodec(96, "D")));
+ EXPECT_TRUE(c1.Matches(DataCodec(97, "D")));
+ EXPECT_TRUE(c1.Matches(DataCodec(96, "d")));
+ EXPECT_TRUE(c1.Matches(DataCodec(97, "d")));
+ EXPECT_FALSE(c1.Matches(DataCodec(96, "")));
+ EXPECT_FALSE(c1.Matches(DataCodec(95, "D")));
}
TEST_F(CodecTest, TestSetParamGetParamAndRemoveParam) {
@@ -254,10 +246,10 @@ TEST_F(CodecTest, TestIntersectFeedbackParams) {
TEST_F(CodecTest, TestGetCodecType) {
// Codec type comparison should be case insenstive on names.
- const VideoCodec codec(96, "V", 320, 200, 30, 3);
- const VideoCodec rtx_codec(96, "rTx", 320, 200, 30, 3);
- const VideoCodec ulpfec_codec(96, "ulpFeC", 320, 200, 30, 3);
- const VideoCodec red_codec(96, "ReD", 320, 200, 30, 3);
+ const VideoCodec codec(96, "V", 320, 200, 30);
+ const VideoCodec rtx_codec(96, "rTx", 320, 200, 30);
+ const VideoCodec ulpfec_codec(96, "ulpFeC", 320, 200, 30);
+ const VideoCodec red_codec(96, "ReD", 320, 200, 30);
EXPECT_EQ(VideoCodec::CODEC_VIDEO, codec.GetCodecType());
EXPECT_EQ(VideoCodec::CODEC_RTX, rtx_codec.GetCodecType());
EXPECT_EQ(VideoCodec::CODEC_ULPFEC, ulpfec_codec.GetCodecType());
@@ -275,7 +267,7 @@ TEST_F(CodecTest, TestCreateRtxCodec) {
}
TEST_F(CodecTest, TestValidateCodecFormat) {
- const VideoCodec codec(96, "V", 320, 200, 30, 3);
+ const VideoCodec codec(96, "V", 320, 200, 30);
ASSERT_TRUE(codec.ValidateCodecFormat());
// Accept 0-127 as payload types.
@@ -329,3 +321,19 @@ TEST_F(CodecTest, TestValidateCodecFormat) {
different_bitrates.params[kCodecParamMaxBitrate] = "100";
EXPECT_TRUE(different_bitrates.ValidateCodecFormat());
}
+
+TEST_F(CodecTest, TestToCodecParameters) {
+ const VideoCodec v(96, "V", 320, 200, 30);
+ webrtc::RtpCodecParameters codec_params_1 = v.ToCodecParameters();
+ EXPECT_EQ(96, codec_params_1.payload_type);
+ EXPECT_EQ("V", codec_params_1.mime_type);
+ EXPECT_EQ(cricket::kVideoCodecClockrate, codec_params_1.clock_rate);
+ EXPECT_EQ(1, codec_params_1.channels);
+
+ const AudioCodec a(97, "A", 44100, 20000, 2);
+ webrtc::RtpCodecParameters codec_params_2 = a.ToCodecParameters();
+ EXPECT_EQ(97, codec_params_2.payload_type);
+ EXPECT_EQ("A", codec_params_2.mime_type);
+ EXPECT_EQ(44100, codec_params_2.clock_rate);
+ EXPECT_EQ(2, codec_params_2.channels);
+}
diff --git a/chromium/third_party/webrtc/media/base/fakemediaengine.h b/chromium/third_party/webrtc/media/base/fakemediaengine.h
index d6d653991c6..bde584386f9 100644
--- a/chromium/third_party/webrtc/media/base/fakemediaengine.h
+++ b/chromium/third_party/webrtc/media/base/fakemediaengine.h
@@ -55,7 +55,9 @@ template <class Base> class RtpHelper : public Base {
const std::list<std::string>& rtp_packets() const { return rtp_packets_; }
const std::list<std::string>& rtcp_packets() const { return rtcp_packets_; }
- bool SendRtp(const void* data, int len, const rtc::PacketOptions& options) {
+ bool SendRtp(const void* data,
+ size_t len,
+ const rtc::PacketOptions& options) {
if (!sending_) {
return false;
}
@@ -63,13 +65,13 @@ template <class Base> class RtpHelper : public Base {
kMaxRtpPacketLen);
return Base::SendPacket(&packet, options);
}
- bool SendRtcp(const void* data, int len) {
+ bool SendRtcp(const void* data, size_t len) {
rtc::CopyOnWriteBuffer packet(reinterpret_cast<const uint8_t*>(data), len,
kMaxRtpPacketLen);
return Base::SendRtcp(&packet, rtc::PacketOptions());
}
- bool CheckRtp(const void* data, int len) {
+ bool CheckRtp(const void* data, size_t len) {
bool success = !rtp_packets_.empty();
if (success) {
std::string packet = rtp_packets_.front();
@@ -78,7 +80,7 @@ template <class Base> class RtpHelper : public Base {
}
return success;
}
- bool CheckRtcp(const void* data, int len) {
+ bool CheckRtcp(const void* data, size_t len) {
bool success = !rtcp_packets_.empty();
if (success) {
std::string packet = rtcp_packets_.front();
@@ -97,13 +99,14 @@ template <class Base> class RtpHelper : public Base {
return false;
}
send_streams_.push_back(sp);
- rtp_parameters_[sp.first_ssrc()] = CreateRtpParametersWithOneEncoding();
+ rtp_send_parameters_[sp.first_ssrc()] =
+ CreateRtpParametersWithOneEncoding();
return true;
}
virtual bool RemoveSendStream(uint32_t ssrc) {
- auto parameters_iterator = rtp_parameters_.find(ssrc);
- if (parameters_iterator != rtp_parameters_.end()) {
- rtp_parameters_.erase(parameters_iterator);
+ auto parameters_iterator = rtp_send_parameters_.find(ssrc);
+ if (parameters_iterator != rtp_send_parameters_.end()) {
+ rtp_send_parameters_.erase(parameters_iterator);
}
return RemoveStreamBySsrc(&send_streams_, ssrc);
}
@@ -113,23 +116,49 @@ template <class Base> class RtpHelper : public Base {
return false;
}
receive_streams_.push_back(sp);
+ rtp_receive_parameters_[sp.first_ssrc()] =
+ CreateRtpParametersWithOneEncoding();
return true;
}
virtual bool RemoveRecvStream(uint32_t ssrc) {
+ auto parameters_iterator = rtp_receive_parameters_.find(ssrc);
+ if (parameters_iterator != rtp_receive_parameters_.end()) {
+ rtp_receive_parameters_.erase(parameters_iterator);
+ }
return RemoveStreamBySsrc(&receive_streams_, ssrc);
}
- virtual webrtc::RtpParameters GetRtpParameters(uint32_t ssrc) const {
- auto parameters_iterator = rtp_parameters_.find(ssrc);
- if (parameters_iterator != rtp_parameters_.end()) {
+ virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const {
+ auto parameters_iterator = rtp_send_parameters_.find(ssrc);
+ if (parameters_iterator != rtp_send_parameters_.end()) {
+ return parameters_iterator->second;
+ }
+ return webrtc::RtpParameters();
+ }
+ virtual bool SetRtpSendParameters(uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
+ auto parameters_iterator = rtp_send_parameters_.find(ssrc);
+ if (parameters_iterator != rtp_send_parameters_.end()) {
+ parameters_iterator->second = parameters;
+ return true;
+ }
+ // Replicate the behavior of the real media channel: return false
+ // when setting parameters for unknown SSRCs.
+ return false;
+ }
+
+ virtual webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const {
+ auto parameters_iterator = rtp_receive_parameters_.find(ssrc);
+ if (parameters_iterator != rtp_receive_parameters_.end()) {
return parameters_iterator->second;
}
return webrtc::RtpParameters();
}
- virtual bool SetRtpParameters(uint32_t ssrc,
- const webrtc::RtpParameters& parameters) {
- auto parameters_iterator = rtp_parameters_.find(ssrc);
- if (parameters_iterator != rtp_parameters_.end()) {
+ virtual bool SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
+ auto parameters_iterator = rtp_receive_parameters_.find(ssrc);
+ if (parameters_iterator != rtp_receive_parameters_.end()) {
parameters_iterator->second = parameters;
return true;
}
@@ -179,7 +208,7 @@ template <class Base> class RtpHelper : public Base {
return ready_to_send_;
}
- NetworkRoute last_network_route() const { return last_network_route_; }
+ rtc::NetworkRoute last_network_route() const { return last_network_route_; }
int num_network_route_changes() const { return num_network_route_changes_; }
void set_num_network_route_changes(int changes) {
num_network_route_changes_ = changes;
@@ -224,7 +253,7 @@ template <class Base> class RtpHelper : public Base {
ready_to_send_ = ready;
}
virtual void OnNetworkRouteChanged(const std::string& transport_name,
- const NetworkRoute& network_route) {
+ const rtc::NetworkRoute& network_route) {
last_network_route_ = network_route;
++num_network_route_changes_;
}
@@ -241,13 +270,14 @@ template <class Base> class RtpHelper : public Base {
std::vector<StreamParams> send_streams_;
std::vector<StreamParams> receive_streams_;
std::set<uint32_t> muted_streams_;
- std::map<uint32_t, webrtc::RtpParameters> rtp_parameters_;
+ std::map<uint32_t, webrtc::RtpParameters> rtp_send_parameters_;
+ std::map<uint32_t, webrtc::RtpParameters> rtp_receive_parameters_;
bool fail_set_send_codecs_;
bool fail_set_recv_codecs_;
uint32_t send_ssrc_;
std::string rtcp_cname_;
bool ready_to_send_;
- NetworkRoute last_network_route_;
+ rtc::NetworkRoute last_network_route_;
int num_network_route_changes_ = 0;
};
@@ -482,23 +512,23 @@ class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
return sinks_;
}
int max_bps() const { return max_bps_; }
- virtual bool SetSendParameters(const VideoSendParameters& params) {
+ bool SetSendParameters(const VideoSendParameters& params) override {
return (SetSendCodecs(params.codecs) &&
SetSendRtpHeaderExtensions(params.extensions) &&
SetMaxSendBandwidth(params.max_bandwidth_bps));
}
- virtual bool SetRecvParameters(const VideoRecvParameters& params) {
+ bool SetRecvParameters(const VideoRecvParameters& params) override {
return (SetRecvCodecs(params.codecs) &&
SetRecvRtpHeaderExtensions(params.extensions));
}
- virtual bool AddSendStream(const StreamParams& sp) {
+ bool AddSendStream(const StreamParams& sp) override {
return RtpHelper<VideoMediaChannel>::AddSendStream(sp);
}
- virtual bool RemoveSendStream(uint32_t ssrc) {
+ bool RemoveSendStream(uint32_t ssrc) override {
return RtpHelper<VideoMediaChannel>::RemoveSendStream(ssrc);
}
- virtual bool GetSendCodec(VideoCodec* send_codec) {
+ bool GetSendCodec(VideoCodec* send_codec) override {
if (send_codecs_.empty()) {
return false;
}
@@ -516,9 +546,9 @@ class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
return true;
}
- virtual bool SetSend(bool send) { return set_sending(send); }
- virtual bool SetVideoSend(uint32_t ssrc, bool enable,
- const VideoOptions* options) {
+ bool SetSend(bool send) override { return set_sending(send); }
+ bool SetVideoSend(uint32_t ssrc, bool enable,
+ const VideoOptions* options) override {
if (!RtpHelper<VideoMediaChannel>::MuteStream(ssrc, !enable)) {
return false;
}
@@ -527,27 +557,29 @@ class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
}
return true;
}
- virtual bool SetCapturer(uint32_t ssrc, VideoCapturer* capturer) {
- capturers_[ssrc] = capturer;
- return true;
+ void SetSource(
+ uint32_t ssrc,
+ rtc::VideoSourceInterface<cricket::VideoFrame>* source) override {
+ sources_[ssrc] = source;
}
- bool HasCapturer(uint32_t ssrc) const {
- return capturers_.find(ssrc) != capturers_.end();
+
+ bool HasSource(uint32_t ssrc) const {
+ return sources_.find(ssrc) != sources_.end();
}
- virtual bool AddRecvStream(const StreamParams& sp) {
+ bool AddRecvStream(const StreamParams& sp) override {
if (!RtpHelper<VideoMediaChannel>::AddRecvStream(sp))
return false;
sinks_[sp.first_ssrc()] = NULL;
return true;
}
- virtual bool RemoveRecvStream(uint32_t ssrc) {
+ bool RemoveRecvStream(uint32_t ssrc) override {
if (!RtpHelper<VideoMediaChannel>::RemoveRecvStream(ssrc))
return false;
sinks_.erase(ssrc);
return true;
}
- virtual bool GetStats(VideoMediaInfo* info) { return false; }
+ bool GetStats(VideoMediaInfo* info) override { return false; }
private:
bool SetRecvCodecs(const std::vector<VideoCodec>& codecs) {
@@ -580,7 +612,7 @@ class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
std::vector<VideoCodec> recv_codecs_;
std::vector<VideoCodec> send_codecs_;
std::map<uint32_t, rtc::VideoSinkInterface<VideoFrame>*> sinks_;
- std::map<uint32_t, VideoCapturer*> capturers_;
+ std::map<uint32_t, rtc::VideoSourceInterface<VideoFrame>*> sources_;
VideoOptions options_;
int max_bps_;
};
@@ -700,7 +732,7 @@ class FakeVoiceEngine : public FakeBaseEngine {
: output_volume_(-1) {
// Add a fake audio codec. Note that the name must not be "" as there are
// sanity checks against that.
- codecs_.push_back(AudioCodec(101, "fake_audio_codec", 0, 0, 1, 0));
+ codecs_.push_back(AudioCodec(101, "fake_audio_codec", 0, 0, 1));
}
rtc::scoped_refptr<webrtc::AudioState> GetAudioState() const {
return rtc::scoped_refptr<webrtc::AudioState>();
@@ -744,7 +776,9 @@ class FakeVoiceEngine : public FakeBaseEngine {
void StopAecDump() {}
- bool StartRtcEventLog(rtc::PlatformFile file) { return false; }
+ bool StartRtcEventLog(rtc::PlatformFile file, int64_t max_size_bytes) {
+ return false;
+ }
void StopRtcEventLog() {}
@@ -761,7 +795,7 @@ class FakeVideoEngine : public FakeBaseEngine {
FakeVideoEngine() : capture_(false) {
// Add a fake video codec. Note that the name must not be "" as there are
// sanity checks against that.
- codecs_.push_back(VideoCodec(0, "fake_video_codec", 0, 0, 0, 0));
+ codecs_.push_back(VideoCodec(0, "fake_video_codec", 0, 0, 0));
}
void Init() {}
bool SetOptions(const VideoOptions& options) {
diff --git a/chromium/third_party/webrtc/media/base/fakevideocapturer.h b/chromium/third_party/webrtc/media/base/fakevideocapturer.h
index 89dcf652f14..026bf80a7b1 100644
--- a/chromium/third_party/webrtc/media/base/fakevideocapturer.h
+++ b/chromium/third_party/webrtc/media/base/fakevideocapturer.h
@@ -31,7 +31,7 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
public:
FakeVideoCapturer(bool is_screencast)
: running_(false),
- initial_unix_timestamp_(time(NULL) * rtc::kNumNanosecsPerSec),
+ initial_timestamp_(rtc::TimeNanos()),
next_timestamp_(rtc::kNumNanosecsPerMillisec),
is_screencast_(is_screencast),
rotation_(webrtc::kVideoRotation_0) {
@@ -99,7 +99,7 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
frame.height = height;
frame.fourcc = fourcc;
frame.data_size = size;
- frame.time_stamp = initial_unix_timestamp_ + next_timestamp_;
+ frame.time_stamp = initial_timestamp_ + next_timestamp_;
next_timestamp_ += timestamp_interval;
std::unique_ptr<char[]> data(new char[size]);
@@ -153,7 +153,7 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
private:
bool running_;
- int64_t initial_unix_timestamp_;
+ int64_t initial_timestamp_;
int64_t next_timestamp_;
const bool is_screencast_;
webrtc::VideoRotation rotation_;
diff --git a/chromium/third_party/webrtc/media/base/fakevideorenderer.h b/chromium/third_party/webrtc/media/base/fakevideorenderer.h
index f75888e1ee7..7398bbaa8bb 100644
--- a/chromium/third_party/webrtc/media/base/fakevideorenderer.h
+++ b/chromium/third_party/webrtc/media/base/fakevideorenderer.h
@@ -39,7 +39,7 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<cricket::VideoFrame> {
++num_rendered_frames_;
width_ = frame.width();
height_ = frame.height();
- rotation_ = frame.GetVideoRotation();
+ rotation_ = frame.rotation();
timestamp_ = frame.GetTimeStamp();
SignalRenderFrame(&frame);
}
@@ -82,15 +82,15 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<cricket::VideoFrame> {
uint8_t v_min,
uint8_t v_max,
const cricket::VideoFrame* frame) {
- if (!frame) {
+ if (!frame || !frame->video_frame_buffer()) {
return false;
}
// Y
int y_width = frame->width();
int y_height = frame->height();
- const uint8_t* y_plane = frame->GetYPlane();
+ const uint8_t* y_plane = frame->video_frame_buffer()->DataY();
const uint8_t* y_pos = y_plane;
- int32_t y_pitch = frame->GetYPitch();
+ int32_t y_pitch = frame->video_frame_buffer()->StrideY();
for (int i = 0; i < y_height; ++i) {
for (int j = 0; j < y_width; ++j) {
uint8_t y_value = *(y_pos + j);
@@ -103,12 +103,12 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<cricket::VideoFrame> {
// U and V
int chroma_width = (frame->width() + 1)/2;
int chroma_height = (frame->height() + 1)/2;
- const uint8_t* u_plane = frame->GetUPlane();
- const uint8_t* v_plane = frame->GetVPlane();
+ const uint8_t* u_plane = frame->video_frame_buffer()->DataU();
+ const uint8_t* v_plane = frame->video_frame_buffer()->DataV();
const uint8_t* u_pos = u_plane;
const uint8_t* v_pos = v_plane;
- int32_t u_pitch = frame->GetUPitch();
- int32_t v_pitch = frame->GetVPitch();
+ int32_t u_pitch = frame->video_frame_buffer()->StrideU();
+ int32_t v_pitch = frame->video_frame_buffer()->StrideV();
for (int i = 0; i < chroma_height; ++i) {
for (int j = 0; j < chroma_width; ++j) {
uint8_t u_value = *(u_pos + j);
diff --git a/chromium/third_party/webrtc/media/base/mediachannel.h b/chromium/third_party/webrtc/media/base/mediachannel.h
index 424572de2f0..54347098542 100644
--- a/chromium/third_party/webrtc/media/base/mediachannel.h
+++ b/chromium/third_party/webrtc/media/base/mediachannel.h
@@ -17,6 +17,7 @@
#include "webrtc/api/rtpparameters.h"
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/buffer.h"
#include "webrtc/base/copyonwritebuffer.h"
#include "webrtc/base/dscp.h"
#include "webrtc/base/logging.h"
@@ -29,11 +30,11 @@
#include "webrtc/media/base/mediaconstants.h"
#include "webrtc/media/base/streamparams.h"
#include "webrtc/media/base/videosinkinterface.h"
+#include "webrtc/media/base/videosourceinterface.h"
// TODO(juberti): re-evaluate this include
#include "webrtc/pc/audiomonitor.h"
namespace rtc {
-class Buffer;
class RateLimiter;
class Timing;
}
@@ -156,6 +157,7 @@ struct AudioOptions {
SetFrom(&extended_filter_aec, change.extended_filter_aec);
SetFrom(&delay_agnostic_aec, change.delay_agnostic_aec);
SetFrom(&experimental_ns, change.experimental_ns);
+ SetFrom(&intelligibility_enhancer, change.intelligibility_enhancer);
SetFrom(&tx_agc_target_dbov, change.tx_agc_target_dbov);
SetFrom(&tx_agc_digital_compression_gain,
change.tx_agc_digital_compression_gain);
@@ -180,6 +182,7 @@ struct AudioOptions {
extended_filter_aec == o.extended_filter_aec &&
delay_agnostic_aec == o.delay_agnostic_aec &&
experimental_ns == o.experimental_ns &&
+ intelligibility_enhancer == o.intelligibility_enhancer &&
adjust_agc_delta == o.adjust_agc_delta &&
tx_agc_target_dbov == o.tx_agc_target_dbov &&
tx_agc_digital_compression_gain == o.tx_agc_digital_compression_gain &&
@@ -209,6 +212,7 @@ struct AudioOptions {
ost << ToStringIfSet("extended_filter_aec", extended_filter_aec);
ost << ToStringIfSet("delay_agnostic_aec", delay_agnostic_aec);
ost << ToStringIfSet("experimental_ns", experimental_ns);
+ ost << ToStringIfSet("intelligibility_enhancer", intelligibility_enhancer);
ost << ToStringIfSet("tx_agc_target_dbov", tx_agc_target_dbov);
ost << ToStringIfSet("tx_agc_digital_compression_gain",
tx_agc_digital_compression_gain);
@@ -243,6 +247,7 @@ struct AudioOptions {
rtc::Optional<bool> extended_filter_aec;
rtc::Optional<bool> delay_agnostic_aec;
rtc::Optional<bool> experimental_ns;
+ rtc::Optional<bool> intelligibility_enhancer;
// Note that tx_agc_* only applies to non-experimental AGC.
rtc::Optional<uint16_t> tx_agc_target_dbov;
rtc::Optional<uint16_t> tx_agc_digital_compression_gain;
@@ -391,8 +396,9 @@ class MediaChannel : public sigslot::has_slots<> {
// Called when the socket's ability to send has changed.
virtual void OnReadyToSend(bool ready) = 0;
// Called when the network route used for sending packets changed.
- virtual void OnNetworkRouteChanged(const std::string& transport_name,
- const NetworkRoute& network_route) = 0;
+ virtual void OnNetworkRouteChanged(
+ const std::string& transport_name,
+ const rtc::NetworkRoute& network_route) = 0;
// Creates a new outgoing media stream with SSRCs and CNAME as described
// by sp.
virtual bool AddSendStream(const StreamParams& sp) = 0;
@@ -839,6 +845,7 @@ struct RtpParameters {
std::vector<RtpHeaderExtension> extensions;
// TODO(pthatcher): Add streams.
RtcpParameters rtcp;
+ virtual ~RtpParameters() = default;
};
// TODO(deadbeef): Rename to RtpSenderParameters, since they're intended to
@@ -904,6 +911,15 @@ class VoiceMediaChannel : public MediaChannel {
virtual ~VoiceMediaChannel() {}
virtual bool SetSendParameters(const AudioSendParameters& params) = 0;
virtual bool SetRecvParameters(const AudioRecvParameters& params) = 0;
+ virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0;
+ virtual bool SetRtpSendParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) = 0;
+ virtual webrtc::RtpParameters GetRtpReceiveParameters(
+ uint32_t ssrc) const = 0;
+ virtual bool SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) = 0;
// Starts or stops playout of received audio.
virtual bool SetPlayout(bool playout) = 0;
// Starts or stops sending (and potentially capture) of local audio.
@@ -980,9 +996,15 @@ class VideoMediaChannel : public MediaChannel {
virtual bool SetSendParameters(const VideoSendParameters& params) = 0;
virtual bool SetRecvParameters(const VideoRecvParameters& params) = 0;
- virtual webrtc::RtpParameters GetRtpParameters(uint32_t ssrc) const = 0;
- virtual bool SetRtpParameters(uint32_t ssrc,
- const webrtc::RtpParameters& parameters) = 0;
+ virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0;
+ virtual bool SetRtpSendParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) = 0;
+ virtual webrtc::RtpParameters GetRtpReceiveParameters(
+ uint32_t ssrc) const = 0;
+ virtual bool SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) = 0;
// Gets the currently set codecs/payload types to be used for outgoing media.
virtual bool GetSendCodec(VideoCodec* send_codec) = 0;
// Starts or stops transmission (and potentially capture) of local video.
@@ -995,9 +1017,10 @@ class VideoMediaChannel : public MediaChannel {
// If SSRC is 0, the renderer is used for the 'default' stream.
virtual bool SetSink(uint32_t ssrc,
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) = 0;
- // If |ssrc| is 0, replace the default capturer (engine capturer) with
- // |capturer|. If |ssrc| is non zero create a new stream with |ssrc| as SSRC.
- virtual bool SetCapturer(uint32_t ssrc, VideoCapturer* capturer) = 0;
+ // Register a source. The |ssrc| must correspond to a registered send stream.
+ virtual void SetSource(
+ uint32_t ssrc,
+ rtc::VideoSourceInterface<cricket::VideoFrame>* source) = 0;
// Gets quality stats for the channel.
virtual bool GetStats(VideoMediaInfo* info) = 0;
};
@@ -1107,7 +1130,7 @@ class DataMediaChannel : public MediaChannel {
virtual bool SetReceive(bool receive) = 0;
virtual void OnNetworkRouteChanged(const std::string& transport_name,
- const NetworkRoute& network_route) {}
+ const rtc::NetworkRoute& network_route) {}
virtual bool SendData(
const SendDataParams& params,
diff --git a/chromium/third_party/webrtc/media/base/mediaconstants.cc b/chromium/third_party/webrtc/media/base/mediaconstants.cc
index d6bb7e3d48a..a8e2b12599b 100644
--- a/chromium/third_party/webrtc/media/base/mediaconstants.cc
+++ b/chromium/third_party/webrtc/media/base/mediaconstants.cc
@@ -64,7 +64,7 @@ const int kOpusDefaultUseInbandFec = 0;
const int kOpusDefaultUseDtx = 0;
const int kOpusDefaultMaxPlaybackRate = 48000;
-const int kPreferredMaxPTime = 60;
+const int kPreferredMaxPTime = 120;
const int kPreferredMinPTime = 10;
const int kPreferredSPropStereo = 0;
const int kPreferredStereo = 0;
diff --git a/chromium/third_party/webrtc/media/base/mediaengine.h b/chromium/third_party/webrtc/media/base/mediaengine.h
index 4c7d62a1e4b..4446bd6b0e4 100644
--- a/chromium/third_party/webrtc/media/base/mediaengine.h
+++ b/chromium/third_party/webrtc/media/base/mediaengine.h
@@ -93,8 +93,11 @@ class MediaEngineInterface {
// Stops recording AEC dump.
virtual void StopAecDump() = 0;
- // Starts RtcEventLog using existing file.
- virtual bool StartRtcEventLog(rtc::PlatformFile file) = 0;
+ // Starts RtcEventLog using existing file. A maximum file size in bytes can be
+ // specified. Logging is stopped just before the size limit is exceeded.
+ // If max_size_bytes is set to a value <= 0, no limit will be used.
+ virtual bool StartRtcEventLog(rtc::PlatformFile file,
+ int64_t max_size_bytes) = 0;
// Stops recording an RtcEventLog.
virtual void StopRtcEventLog() = 0;
@@ -176,8 +179,9 @@ class CompositeMediaEngine : public MediaEngineInterface {
voice_.StopAecDump();
}
- virtual bool StartRtcEventLog(rtc::PlatformFile file) {
- return voice_.StartRtcEventLog(file);
+ virtual bool StartRtcEventLog(rtc::PlatformFile file,
+ int64_t max_size_bytes) {
+ return voice_.StartRtcEventLog(file, max_size_bytes);
}
virtual void StopRtcEventLog() { voice_.StopRtcEventLog(); }
diff --git a/chromium/third_party/webrtc/media/base/rtpdataengine.cc b/chromium/third_party/webrtc/media/base/rtpdataengine.cc
index ae361745557..4b6647c649a 100644
--- a/chromium/third_party/webrtc/media/base/rtpdataengine.cc
+++ b/chromium/third_party/webrtc/media/base/rtpdataengine.cc
@@ -36,8 +36,7 @@ static const size_t kMaxSrtpHmacOverhead = 16;
RtpDataEngine::RtpDataEngine() {
data_codecs_.push_back(
- DataCodec(kGoogleRtpDataCodecId,
- kGoogleRtpDataCodecName, 0));
+ DataCodec(kGoogleRtpDataCodecId, kGoogleRtpDataCodecName));
SetTiming(new rtc::Timing());
}
@@ -92,7 +91,7 @@ void RtpClock::Tick(double now, int* seq_num, uint32_t* timestamp) {
}
const DataCodec* FindUnknownCodec(const std::vector<DataCodec>& codecs) {
- DataCodec data_codec(kGoogleRtpDataCodecId, kGoogleRtpDataCodecName, 0);
+ DataCodec data_codec(kGoogleRtpDataCodecId, kGoogleRtpDataCodecName);
std::vector<DataCodec>::const_iterator iter;
for (iter = codecs.begin(); iter != codecs.end(); ++iter) {
if (!iter->Matches(data_codec)) {
@@ -103,7 +102,7 @@ const DataCodec* FindUnknownCodec(const std::vector<DataCodec>& codecs) {
}
const DataCodec* FindKnownCodec(const std::vector<DataCodec>& codecs) {
- DataCodec data_codec(kGoogleRtpDataCodecId, kGoogleRtpDataCodecName, 0);
+ DataCodec data_codec(kGoogleRtpDataCodecId, kGoogleRtpDataCodecName);
std::vector<DataCodec>::const_iterator iter;
for (iter = codecs.begin(); iter != codecs.end(); ++iter) {
if (iter->Matches(data_codec)) {
diff --git a/chromium/third_party/webrtc/media/base/rtpdump.cc b/chromium/third_party/webrtc/media/base/rtpdump.cc
index a109f2d8e26..246085913e1 100644
--- a/chromium/third_party/webrtc/media/base/rtpdump.cc
+++ b/chromium/third_party/webrtc/media/base/rtpdump.cc
@@ -28,13 +28,12 @@ namespace cricket {
const char RtpDumpFileHeader::kFirstLine[] = "#!rtpplay1.0 0.0.0.0/0\n";
-RtpDumpFileHeader::RtpDumpFileHeader(uint32_t start_ms, uint32_t s, uint16_t p)
- : start_sec(start_ms / 1000),
- start_usec(start_ms % 1000 * 1000),
+RtpDumpFileHeader::RtpDumpFileHeader(int64_t start_ms, uint32_t s, uint16_t p)
+ : start_sec(static_cast<uint32_t>(start_ms / 1000)),
+ start_usec(static_cast<uint32_t>(start_ms % 1000 * 1000)),
source(s),
port(p),
- padding(0) {
-}
+ padding(0) {}
void RtpDumpFileHeader::WriteToByteBuffer(rtc::ByteBufferWriter* buf) {
buf->WriteUInt32(start_sec);
@@ -44,7 +43,7 @@ void RtpDumpFileHeader::WriteToByteBuffer(rtc::ByteBufferWriter* buf) {
buf->WriteUInt16(padding);
}
-static const uint32_t kDefaultTimeIncrease = 30;
+static const int kDefaultTimeIncrease = 30;
bool RtpDumpPacket::IsValidRtpPacket() const {
return original_data_len >= data.size() &&
@@ -162,7 +161,7 @@ rtc::StreamResult RtpDumpReader::ReadFileHeader() {
uint32_t start_usec;
buf.ReadUInt32(&start_sec);
buf.ReadUInt32(&start_usec);
- start_time_ms_ = start_sec * 1000 + start_usec / 1000;
+ start_time_ms_ = static_cast<int64_t>(start_sec * 1000 + start_usec / 1000);
// Increase the length by 1 since first_line does not contain the ending \n.
first_line_and_file_header_len_ = first_line.size() + 1 + sizeof(header);
}
@@ -305,9 +304,8 @@ RtpDumpWriter::RtpDumpWriter(rtc::StreamInterface* stream)
: stream_(stream),
packet_filter_(PF_ALL),
file_header_written_(false),
- start_time_ms_(rtc::Time()),
- warn_slow_writes_delay_(kWarnSlowWritesDelayMs) {
-}
+ start_time_ms_(rtc::TimeMillis()),
+ warn_slow_writes_delay_(kWarnSlowWritesDelayMs) {}
void RtpDumpWriter::set_packet_filter(int filter) {
packet_filter_ = filter;
@@ -315,7 +313,7 @@ void RtpDumpWriter::set_packet_filter(int filter) {
}
uint32_t RtpDumpWriter::GetElapsedTime() const {
- return rtc::TimeSince(start_time_ms_);
+ return static_cast<uint32_t>(rtc::TimeSince(start_time_ms_));
}
rtc::StreamResult RtpDumpWriter::WriteFileHeader() {
@@ -327,7 +325,7 @@ rtc::StreamResult RtpDumpWriter::WriteFileHeader() {
}
rtc::ByteBufferWriter buf;
- RtpDumpFileHeader file_header(rtc::Time(), 0, 0);
+ RtpDumpFileHeader file_header(rtc::TimeMillis(), 0, 0);
file_header.WriteToByteBuffer(&buf);
return WriteToStream(buf.Data(), buf.Length());
}
@@ -395,10 +393,10 @@ size_t RtpDumpWriter::FilterPacket(const void* data, size_t data_len,
rtc::StreamResult RtpDumpWriter::WriteToStream(
const void* data, size_t data_len) {
- uint32_t before = rtc::Time();
+ int64_t before = rtc::TimeMillis();
rtc::StreamResult result =
stream_->WriteAll(data, data_len, NULL, NULL);
- uint32_t delay = rtc::TimeSince(before);
+ int64_t delay = rtc::TimeSince(before);
if (delay >= warn_slow_writes_delay_) {
LOG(LS_WARNING) << "Slow RtpDump: took " << delay << "ms to write "
<< data_len << " bytes.";
diff --git a/chromium/third_party/webrtc/media/base/rtpdump.h b/chromium/third_party/webrtc/media/base/rtpdump.h
index 4ce479277af..8ea7800e9c7 100644
--- a/chromium/third_party/webrtc/media/base/rtpdump.h
+++ b/chromium/third_party/webrtc/media/base/rtpdump.h
@@ -18,6 +18,7 @@
#include "webrtc/base/basictypes.h"
#include "webrtc/base/bytebuffer.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/stream.h"
namespace cricket {
@@ -39,7 +40,7 @@ enum RtpDumpPacketFilter {
};
struct RtpDumpFileHeader {
- RtpDumpFileHeader(uint32_t start_ms, uint32_t s, uint16_t p);
+ RtpDumpFileHeader(int64_t start_ms, uint32_t s, uint16_t p);
void WriteToByteBuffer(rtc::ByteBufferWriter* buf);
static const char kFirstLine[];
@@ -112,7 +113,7 @@ class RtpDumpReader {
rtc::StreamInterface* stream_;
bool file_header_read_;
size_t first_line_and_file_header_len_;
- uint32_t start_time_ms_;
+ int64_t start_time_ms_;
uint32_t ssrc_override_;
RTC_DISALLOW_COPY_AND_ASSIGN(RtpDumpReader);
@@ -158,10 +159,10 @@ class RtpDumpLoopReader : public RtpDumpReader {
// the previous dump packets in the input stream.
uint32_t first_elapsed_time_;
int first_rtp_seq_num_;
- uint32_t first_rtp_timestamp_;
+ int64_t first_rtp_timestamp_;
uint32_t prev_elapsed_time_;
int prev_rtp_seq_num_;
- uint32_t prev_rtp_timestamp_;
+ int64_t prev_rtp_timestamp_;
RTC_DISALLOW_COPY_AND_ASSIGN(RtpDumpLoopReader);
};
@@ -206,9 +207,9 @@ class RtpDumpWriter {
rtc::StreamInterface* stream_;
int packet_filter_;
bool file_header_written_;
- uint32_t start_time_ms_; // Time when the record starts.
+ int64_t start_time_ms_; // Time when the record starts.
// If writing to the stream takes longer than this many ms, log a warning.
- uint32_t warn_slow_writes_delay_;
+ int64_t warn_slow_writes_delay_;
RTC_DISALLOW_COPY_AND_ASSIGN(RtpDumpWriter);
};
diff --git a/chromium/third_party/webrtc/media/base/videoadapter.cc b/chromium/third_party/webrtc/media/base/videoadapter.cc
index 797a876f2b4..9c3837c5729 100644
--- a/chromium/third_party/webrtc/media/base/videoadapter.cc
+++ b/chromium/third_party/webrtc/media/base/videoadapter.cc
@@ -13,69 +13,77 @@
#include <algorithm>
#include <limits>
+#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/media/base/mediaconstants.h"
#include "webrtc/media/base/videocommon.h"
namespace {
+struct Fraction {
+ int numerator;
+ int denominator;
+};
+
// Scale factors optimized for in libYUV that we accept.
// Must be sorted in decreasing scale factors for FindScaleLargerThan to work.
-const float kScaleFactors[] = {
- 1.f / 1.f, // Full size.
- 3.f / 4.f, // 3/4 scale.
- 1.f / 2.f, // 1/2 scale.
- 3.f / 8.f, // 3/8 scale.
- 1.f / 4.f, // 1/4 scale.
- 3.f / 16.f, // 3/16 scale.
+const Fraction kScaleFractions[] = {
+ {1, 1},
+ {3, 4},
+ {1, 2},
+ {3, 8},
+ {1, 4},
+ {3, 16},
};
-float FindScaleLessThanOrEqual(int width,
- int height,
- int target_num_pixels,
- int* resulting_number_of_pixels) {
+// Round |valueToRound| to a multiple of |multiple|. Prefer rounding upwards,
+// but never more than |maxValue|.
+int roundUp(int valueToRound, int multiple, int maxValue) {
+ const int roundedValue = (valueToRound + multiple - 1) / multiple * multiple;
+ return roundedValue <= maxValue ? roundedValue
+ : (maxValue / multiple * multiple);
+}
+
+Fraction FindScaleLessThanOrEqual(int input_num_pixels, int target_num_pixels) {
float best_distance = std::numeric_limits<float>::max();
- float best_scale = 0.0f; // Default to 0 if nothing matches.
- float pixels = width * height;
- float best_number_of_pixels = 0.0f;
- for (const auto& scale : kScaleFactors) {
- float test_num_pixels = pixels * scale * scale;
+ Fraction best_scale = {0, 1}; // Default to 0 if nothing matches.
+ for (const auto& fraction : kScaleFractions) {
+ const float scale =
+ fraction.numerator / static_cast<float>(fraction.denominator);
+ float test_num_pixels = input_num_pixels * scale * scale;
float diff = target_num_pixels - test_num_pixels;
if (diff < 0) {
continue;
}
if (diff < best_distance) {
best_distance = diff;
- best_scale = scale;
- best_number_of_pixels = test_num_pixels;
+ best_scale = fraction;
if (best_distance == 0) { // Found exact match.
break;
}
}
}
- if (resulting_number_of_pixels) {
- *resulting_number_of_pixels = static_cast<int>(best_number_of_pixels + .5f);
- }
return best_scale;
}
-float FindScaleLargerThan(int width,
- int height,
- int target_num_pixels,
- int* resulting_number_of_pixels) {
+Fraction FindScaleLargerThan(int input_num_pixels,
+ int target_num_pixels,
+ int* resulting_number_of_pixels) {
float best_distance = std::numeric_limits<float>::max();
- float best_scale = 1.f; // Default to unscaled if nothing matches.
- float pixels = width * height;
- float best_number_of_pixels = pixels; // Default to input number of pixels.
- for (const auto& scale : kScaleFactors) {
- float test_num_pixels = pixels * scale * scale;
+ Fraction best_scale = {1, 1}; // Default to unscaled if nothing matches.
+ // Default to input number of pixels.
+ float best_number_of_pixels = input_num_pixels;
+ for (const auto& fraction : kScaleFractions) {
+ const float scale =
+ fraction.numerator / static_cast<float>(fraction.denominator);
+ float test_num_pixels = input_num_pixels * scale * scale;
float diff = test_num_pixels - target_num_pixels;
if (diff <= 0) {
break;
}
if (diff < best_distance) {
best_distance = diff;
- best_scale = scale;
+ best_scale = fraction;
best_number_of_pixels = test_num_pixels;
}
}
@@ -84,21 +92,36 @@ float FindScaleLargerThan(int width,
return best_scale;
}
+Fraction FindScale(int input_num_pixels,
+ int max_pixel_count_step_up,
+ int max_pixel_count) {
+ // Try scale just above |max_pixel_count_step_up_|.
+ if (max_pixel_count_step_up > 0) {
+ int resulting_pixel_count;
+ const Fraction scale = FindScaleLargerThan(
+ input_num_pixels, max_pixel_count_step_up, &resulting_pixel_count);
+ if (resulting_pixel_count <= max_pixel_count)
+ return scale;
+ }
+ // Return largest scale below |max_pixel_count|.
+ return FindScaleLessThanOrEqual(input_num_pixels, max_pixel_count);
+}
+
} // namespace
namespace cricket {
VideoAdapter::VideoAdapter()
- : output_num_pixels_(std::numeric_limits<int>::max()),
- frames_in_(0),
+ : frames_in_(0),
frames_out_(0),
frames_scaled_(0),
adaption_changes_(0),
previous_width_(0),
previous_height_(0),
+ input_interval_(0),
interval_next_frame_(0),
- format_request_max_pixel_count_(std::numeric_limits<int>::max()),
- resolution_request_max_pixel_count_(std::numeric_limits<int>::max()) {}
+ resolution_request_max_pixel_count_(std::numeric_limits<int>::max()),
+ resolution_request_max_pixel_count_step_up_(0) {}
VideoAdapter::~VideoAdapter() {}
@@ -106,57 +129,43 @@ void VideoAdapter::SetExpectedInputFrameInterval(int64_t interval) {
// TODO(perkj): Consider measuring input frame rate instead.
// Frame rate typically varies depending on lighting.
rtc::CritScope cs(&critical_section_);
- input_format_.interval = interval;
-}
-
-void VideoAdapter::SetInputFormat(const VideoFormat& format) {
- bool is_resolution_change = (input_format().width != format.width ||
- input_format().height != format.height);
- int64_t old_input_interval = input_format_.interval;
- input_format_ = format;
- output_format_.interval =
- std::max(output_format_.interval, input_format_.interval);
- if (old_input_interval != input_format_.interval) {
- LOG(LS_INFO) << "VAdapt input interval changed from "
- << old_input_interval << " to " << input_format_.interval;
- }
- if (is_resolution_change) {
- // Trigger the adaptation logic again, to potentially reset the adaptation
- // state for things like view requests that may not longer be capping
- // output (or may now cap output).
- Adapt(std::min(format_request_max_pixel_count_,
- resolution_request_max_pixel_count_),
- 0);
- }
-}
-
-const VideoFormat& VideoAdapter::input_format() const {
- rtc::CritScope cs(&critical_section_);
- return input_format_;
+ input_interval_ = interval;
}
-VideoFormat VideoAdapter::AdaptFrameResolution(int in_width, int in_height) {
+void VideoAdapter::AdaptFrameResolution(int in_width,
+ int in_height,
+ int* cropped_width,
+ int* cropped_height,
+ int* out_width,
+ int* out_height) {
rtc::CritScope cs(&critical_section_);
++frames_in_;
- SetInputFormat(VideoFormat(
- in_width, in_height, input_format_.interval, input_format_.fourcc));
+ // The max output pixel count is the minimum of the requests from
+ // OnOutputFormatRequest and OnResolutionRequest.
+ int max_pixel_count = resolution_request_max_pixel_count_;
+ if (requested_format_) {
+ max_pixel_count = std::min(
+ max_pixel_count, requested_format_->width * requested_format_->height);
+ }
// Drop the input frame if necessary.
bool should_drop = false;
- if (!output_num_pixels_) {
+ if (max_pixel_count == 0) {
// Drop all frames as the output format is 0x0.
should_drop = true;
- } else {
+ } else if (requested_format_ && requested_format_->interval > 0) {
// Drop some frames based on input fps and output fps.
// Normally output fps is less than input fps.
- interval_next_frame_ += input_format_.interval;
- if (output_format_.interval > 0) {
- if (interval_next_frame_ >= output_format_.interval) {
- interval_next_frame_ %= output_format_.interval;
- } else {
- should_drop = true;
- }
+ interval_next_frame_ += input_interval_;
+ if (interval_next_frame_ >= requested_format_->interval) {
+ interval_next_frame_ -= requested_format_->interval;
+ // Reset |interval_next_frame_| if it accumulates too much to avoid
+ // "catching up" behaviour.
+ if (interval_next_frame_ >= requested_format_->interval)
+ interval_next_frame_ = 0;
+ } else {
+ should_drop = true;
}
}
if (should_drop) {
@@ -170,48 +179,79 @@ VideoFormat VideoAdapter::AdaptFrameResolution(int in_width, int in_height) {
<< " Changes: " << adaption_changes_
<< " Input: " << in_width
<< "x" << in_height
- << " i" << input_format_.interval
- << " Output: i" << output_format_.interval;
+ << " i" << input_interval_
+ << " Output: i"
+ << (requested_format_ ? requested_format_->interval : 0);
}
- return VideoFormat(); // Drop frame.
+ // Drop frame.
+ *cropped_width = 0;
+ *cropped_height = 0;
+ *out_width = 0;
+ *out_height = 0;
+ return;
+ }
+
+ // Calculate how the input should be cropped.
+ if (!requested_format_ ||
+ requested_format_->width == 0 || requested_format_->height == 0) {
+ *cropped_width = in_width;
+ *cropped_height = in_height;
+ } else {
+ // Adjust |requested_format_| orientation to match input.
+ if ((in_width > in_height) !=
+ (requested_format_->width > requested_format_->height)) {
+ std::swap(requested_format_->width, requested_format_->height);
+ }
+ const float requested_aspect =
+ requested_format_->width /
+ static_cast<float>(requested_format_->height);
+ *cropped_width =
+ std::min(in_width, static_cast<int>(in_height * requested_aspect));
+ *cropped_height =
+ std::min(in_height, static_cast<int>(in_width / requested_aspect));
}
- const float scale = FindScaleLessThanOrEqual(in_width, in_height,
- output_num_pixels_, nullptr);
- const int output_width = static_cast<int>(in_width * scale + .5f);
- const int output_height = static_cast<int>(in_height * scale + .5f);
+ // Find best scale factor.
+ const Fraction scale =
+ FindScale(*cropped_width * *cropped_height,
+ resolution_request_max_pixel_count_step_up_, max_pixel_count);
+
+ // Adjust cropping slightly to get even integer output size and a perfect
+ // scale factor.
+ *cropped_width = roundUp(*cropped_width, scale.denominator, in_width);
+ *cropped_height = roundUp(*cropped_height, scale.denominator, in_height);
+ RTC_DCHECK_EQ(0, *cropped_width % scale.denominator);
+ RTC_DCHECK_EQ(0, *cropped_height % scale.denominator);
+
+ // Calculate final output size.
+ *out_width = *cropped_width / scale.denominator * scale.numerator;
+ *out_height = *cropped_height / scale.denominator * scale.numerator;
++frames_out_;
- if (scale != 1)
+ if (scale.numerator != scale.denominator)
++frames_scaled_;
- if (previous_width_ && (previous_width_ != output_width ||
- previous_height_ != output_height)) {
+ if (previous_width_ && (previous_width_ != *out_width ||
+ previous_height_ != *out_height)) {
++adaption_changes_;
LOG(LS_INFO) << "Frame size changed: scaled " << frames_scaled_ << " / out "
<< frames_out_ << " / in " << frames_in_
<< " Changes: " << adaption_changes_ << " Input: " << in_width
- << "x" << in_height << " i" << input_format_.interval
- << " Scale: " << scale << " Output: " << output_width << "x"
- << output_height << " i" << output_format_.interval;
+ << "x" << in_height << " i" << input_interval_
+ << " Scale: " << scale.numerator << "/" << scale.denominator
+ << " Output: " << *out_width << "x" << *out_height << " i"
+ << (requested_format_ ? requested_format_->interval : 0);
}
- output_format_.width = output_width;
- output_format_.height = output_height;
- previous_width_ = output_width;
- previous_height_ = output_height;
-
- return output_format_;
+ previous_width_ = *out_width;
+ previous_height_ = *out_height;
}
void VideoAdapter::OnOutputFormatRequest(const VideoFormat& format) {
rtc::CritScope cs(&critical_section_);
- format_request_max_pixel_count_ = format.width * format.height;
- output_format_.interval = format.interval;
- Adapt(std::min(format_request_max_pixel_count_,
- resolution_request_max_pixel_count_),
- 0);
+ requested_format_ = rtc::Optional<VideoFormat>(format);
+ interval_next_frame_ = 0;
}
void VideoAdapter::OnResolutionRequest(
@@ -220,44 +260,8 @@ void VideoAdapter::OnResolutionRequest(
rtc::CritScope cs(&critical_section_);
resolution_request_max_pixel_count_ =
max_pixel_count.value_or(std::numeric_limits<int>::max());
- Adapt(std::min(format_request_max_pixel_count_,
- resolution_request_max_pixel_count_),
- max_pixel_count_step_up.value_or(0));
-}
-
-bool VideoAdapter::Adapt(int max_num_pixels, int max_pixel_count_step_up) {
- float scale_lower =
- FindScaleLessThanOrEqual(input_format_.width, input_format_.height,
- max_num_pixels, &max_num_pixels);
- float scale_upper =
- max_pixel_count_step_up > 0
- ? FindScaleLargerThan(input_format_.width, input_format_.height,
- max_pixel_count_step_up,
- &max_pixel_count_step_up)
- : 1.f;
-
- bool use_max_pixel_count_step_up =
- max_pixel_count_step_up > 0 && max_num_pixels > max_pixel_count_step_up;
-
- int old_num_pixels = output_num_pixels_;
- output_num_pixels_ =
- use_max_pixel_count_step_up ? max_pixel_count_step_up : max_num_pixels;
- // Log the new size.
- float scale = use_max_pixel_count_step_up ? scale_upper : scale_lower;
- int new_width = static_cast<int>(input_format_.width * scale + .5f);
- int new_height = static_cast<int>(input_format_.height * scale + .5f);
-
- bool changed = output_num_pixels_ != old_num_pixels;
- LOG(LS_INFO) << "OnResolutionRequest: "
- << " Max pixels: " << max_num_pixels
- << " Max pixels step up: " << max_pixel_count_step_up
- << " Output Pixels: " << output_num_pixels_
- << " Input: " << input_format_.width << "x"
- << input_format_.height << " Scale: " << scale
- << " Resolution: " << new_width << "x" << new_height
- << " Changed: " << (changed ? "true" : "false");
-
- return changed;
+ resolution_request_max_pixel_count_step_up_ =
+ max_pixel_count_step_up.value_or(0);
}
} // namespace cricket
diff --git a/chromium/third_party/webrtc/media/base/videoadapter.h b/chromium/third_party/webrtc/media/base/videoadapter.h
index b7aba1406a4..2db0ada2713 100644
--- a/chromium/third_party/webrtc/media/base/videoadapter.h
+++ b/chromium/third_party/webrtc/media/base/videoadapter.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MEDIA_BASE_VIDEOADAPTER_H_
#define WEBRTC_MEDIA_BASE_VIDEOADAPTER_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/optional.h"
#include "webrtc/media/base/videocommon.h"
@@ -31,12 +32,23 @@ class VideoAdapter {
// interval.
void SetExpectedInputFrameInterval(int64_t interval);
- // Return the adapted resolution given the input resolution. The returned
- // resolution will be 0x0 if the frame should be dropped.
- VideoFormat AdaptFrameResolution(int in_width, int in_height);
+ // Return the adapted resolution given the input resolution. The input
+ // resolution should first be cropped to the specified resolution, and then
+ // scaled to the final output resolution. The output resolution will be 0x0 if
+ // the frame should be dropped.
+ void AdaptFrameResolution(int in_width,
+ int in_height,
+ int* cropped_width,
+ int* cropped_height,
+ int* out_width,
+ int* out_height);
// Requests the output frame size and frame interval from
- // |AdaptFrameResolution| to not be larger than |format|.
+ // |AdaptFrameResolution| to not be larger than |format|. Also, the input
+ // frame size will be cropped to match the requested aspect ratio. The
+ // requested aspect ratio is orientation agnostic and will be adjusted to
+ // maintain the input orientation, so it doesn't matter if e.g. 1280x720 or
+ // 720x1280 is requested.
void OnOutputFormatRequest(const VideoFormat& format);
// Requests the output frame size from |AdaptFrameResolution| to not have
@@ -45,28 +57,22 @@ class VideoAdapter {
void OnResolutionRequest(rtc::Optional<int> max_pixel_count,
rtc::Optional<int> max_pixel_count_step_up);
- const VideoFormat& input_format() const;
-
private:
- void SetInputFormat(const VideoFormat& format);
- bool Adapt(int max_num_pixels, int max_pixel_count_step_up);
-
- VideoFormat input_format_;
- VideoFormat output_format_;
- int output_num_pixels_;
int frames_in_; // Number of input frames.
int frames_out_; // Number of output frames.
int frames_scaled_; // Number of frames scaled.
int adaption_changes_; // Number of changes in scale factor.
int previous_width_; // Previous adapter output width.
int previous_height_; // Previous adapter output height.
- int64_t interval_next_frame_;
+ int input_interval_ GUARDED_BY(critical_section_);
+ int64_t interval_next_frame_ GUARDED_BY(critical_section_);
// Max number of pixels requested via calls to OnOutputFormatRequest,
// OnResolutionRequest respectively.
// The adapted output format is the minimum of these.
- int format_request_max_pixel_count_;
- int resolution_request_max_pixel_count_;
+ rtc::Optional<VideoFormat> requested_format_ GUARDED_BY(critical_section_);
+ int resolution_request_max_pixel_count_ GUARDED_BY(critical_section_);
+ int resolution_request_max_pixel_count_step_up_ GUARDED_BY(critical_section_);
// The critical section to protect the above variables.
rtc::CriticalSection critical_section_;
diff --git a/chromium/third_party/webrtc/media/base/videoadapter_unittest.cc b/chromium/third_party/webrtc/media/base/videoadapter_unittest.cc
index 1ad58d165c0..92b4a558b9b 100644
--- a/chromium/third_party/webrtc/media/base/videoadapter_unittest.cc
+++ b/chromium/third_party/webrtc/media/base/videoadapter_unittest.cc
@@ -10,6 +10,7 @@
#include <limits.h> // For INT_MAX
+#include <memory>
#include <string>
#include <vector>
@@ -49,8 +50,10 @@ class VideoAdapterTest : public testing::Test {
int dropped_frames;
bool last_adapt_was_no_op;
- int adapted_width;
- int adapted_height;
+ int cropped_width;
+ int cropped_height;
+ int out_width;
+ int out_height;
};
explicit VideoCapturerListener(VideoAdapter* adapter)
@@ -65,12 +68,21 @@ class VideoAdapterTest : public testing::Test {
rtc::CritScope lock(&crit_);
const int in_width = captured_frame->width;
const int in_height = abs(captured_frame->height);
- const VideoFormat adapted_format =
- video_adapter_->AdaptFrameResolution(in_width, in_height);
- if (!adapted_format.IsSize0x0()) {
- adapted_format_ = adapted_format;
- last_adapt_was_no_op_ = (in_width == adapted_format.width &&
- in_height == adapted_format.height);
+ int cropped_width;
+ int cropped_height;
+ int out_width;
+ int out_height;
+ video_adapter_->AdaptFrameResolution(in_width, in_height,
+ &cropped_width, &cropped_height,
+ &out_width, &out_height);
+ if (out_width != 0 && out_height != 0) {
+ cropped_width_ = cropped_width;
+ cropped_height_ = cropped_height;
+ out_width_ = out_width;
+ out_height_ = out_height;
+ last_adapt_was_no_op_ =
+ (in_width == cropped_width && in_height == cropped_height &&
+ in_width == out_width && in_height == out_height);
} else {
++dropped_frames_;
}
@@ -83,20 +95,20 @@ class VideoAdapterTest : public testing::Test {
stats.captured_frames = captured_frames_;
stats.dropped_frames = dropped_frames_;
stats.last_adapt_was_no_op = last_adapt_was_no_op_;
- if (!adapted_format_.IsSize0x0()) {
- stats.adapted_width = adapted_format_.width;
- stats.adapted_height = adapted_format_.height;
- } else {
- stats.adapted_width = stats.adapted_height = -1;
- }
-
+ stats.cropped_width = cropped_width_;
+ stats.cropped_height = cropped_height_;
+ stats.out_width = out_width_;
+ stats.out_height = out_height_;
return stats;
}
private:
rtc::CriticalSection crit_;
VideoAdapter* video_adapter_;
- VideoFormat adapted_format_;
+ int cropped_width_;
+ int cropped_height_;
+ int out_width_;
+ int out_height_;
int captured_frames_;
int dropped_frames_;
bool last_adapt_was_no_op_;
@@ -104,20 +116,28 @@ class VideoAdapterTest : public testing::Test {
void VerifyAdaptedResolution(const VideoCapturerListener::Stats& stats,
- int width,
- int height) {
- EXPECT_EQ(width, stats.adapted_width);
- EXPECT_EQ(height, stats.adapted_height);
+ int cropped_width,
+ int cropped_height,
+ int out_width,
+ int out_height) {
+ EXPECT_EQ(cropped_width, stats.cropped_width);
+ EXPECT_EQ(cropped_height, stats.cropped_height);
+ EXPECT_EQ(out_width, stats.out_width);
+ EXPECT_EQ(out_height, stats.out_height);
}
std::unique_ptr<FakeVideoCapturer> capturer_;
VideoAdapter adapter_;
+ int cropped_width_;
+ int cropped_height_;
+ int out_width_;
+ int out_height_;
std::unique_ptr<VideoCapturerListener> listener_;
VideoFormat capture_format_;
};
-// Do not adapt the frame rate or the resolution. Expect no frame drop and no
-// resolution change.
+// Do not adapt the frame rate or the resolution. Expect no frame drop, no
+// cropping, and no resolution change.
TEST_F(VideoAdapterTest, AdaptNothing) {
EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
for (int i = 0; i < 10; ++i)
@@ -127,7 +147,8 @@ TEST_F(VideoAdapterTest, AdaptNothing) {
VideoCapturerListener::Stats stats = listener_->GetStats();
EXPECT_GE(stats.captured_frames, 10);
EXPECT_EQ(0, stats.dropped_frames);
- VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ capture_format_.width, capture_format_.height);
EXPECT_TRUE(stats.last_adapt_was_no_op);
}
@@ -143,7 +164,8 @@ TEST_F(VideoAdapterTest, AdaptZeroInterval) {
VideoCapturerListener::Stats stats = listener_->GetStats();
EXPECT_GE(stats.captured_frames, 10);
EXPECT_EQ(0, stats.dropped_frames);
- VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ capture_format_.width, capture_format_.height);
}
// Adapt the frame rate to be half of the capture rate at the beginning. Expect
@@ -160,7 +182,8 @@ TEST_F(VideoAdapterTest, AdaptFramerate) {
VideoCapturerListener::Stats stats = listener_->GetStats();
EXPECT_GE(stats.captured_frames, 10);
EXPECT_EQ(stats.captured_frames / 2, stats.dropped_frames);
- VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ capture_format_.width, capture_format_.height);
}
// Adapt the frame rate to be half of the capture rate at the beginning. Expect
@@ -178,7 +201,8 @@ TEST_F(VideoAdapterTest, AdaptFramerateVariable) {
EXPECT_GE(stats.captured_frames, 30);
// Verify 2 / 3 kept (20) and 1 / 3 dropped (10).
EXPECT_EQ(stats.captured_frames * 1 / 3, stats.dropped_frames);
- VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ capture_format_.width, capture_format_.height);
}
// Adapt the frame rate to be half of the capture rate after capturing no less
@@ -205,39 +229,49 @@ TEST_F(VideoAdapterTest, AdaptFramerateOntheFly) {
EXPECT_GT(listener_->GetStats().dropped_frames, 0);
}
-// Set a very high output pixel resolution. Expect no resolution change.
+// Set a very high output pixel resolution. Expect no cropping or resolution
+// change.
TEST_F(VideoAdapterTest, AdaptFrameResolutionHighLimit) {
VideoFormat output_format = capture_format_;
- output_format.width = 2560;
- output_format.height = 2560;
+ output_format.width *= 10;
+ output_format.height *= 10;
adapter_.OnOutputFormatRequest(output_format);
- VideoFormat adapted_format = adapter_.AdaptFrameResolution(
- capture_format_.width, capture_format_.height);
- EXPECT_EQ(capture_format_.width, adapted_format.width);
- EXPECT_EQ(capture_format_.height, adapted_format.height);
+ adapter_.AdaptFrameResolution(capture_format_.width, capture_format_.height,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(capture_format_.width, cropped_width_);
+ EXPECT_EQ(capture_format_.height, cropped_height_);
+ EXPECT_EQ(capture_format_.width, out_width_);
+ EXPECT_EQ(capture_format_.height, out_height_);
}
// Adapt the frame resolution to be the same as capture resolution. Expect no
-// resolution change.
+// cropping or resolution change.
TEST_F(VideoAdapterTest, AdaptFrameResolutionIdentical) {
adapter_.OnOutputFormatRequest(capture_format_);
- const VideoFormat adapted_format = adapter_.AdaptFrameResolution(
- capture_format_.width, capture_format_.height);
- EXPECT_EQ(capture_format_.width, adapted_format.width);
- EXPECT_EQ(capture_format_.height, adapted_format.height);
+ adapter_.AdaptFrameResolution(capture_format_.width, capture_format_.height,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(capture_format_.width, cropped_width_);
+ EXPECT_EQ(capture_format_.height, cropped_height_);
+ EXPECT_EQ(capture_format_.width, out_width_);
+ EXPECT_EQ(capture_format_.height, out_height_);
}
// Adapt the frame resolution to be a quarter of the capture resolution. Expect
-// resolution change.
+// no cropping, but a resolution change.
TEST_F(VideoAdapterTest, AdaptFrameResolutionQuarter) {
VideoFormat request_format = capture_format_;
request_format.width /= 2;
request_format.height /= 2;
adapter_.OnOutputFormatRequest(request_format);
- const VideoFormat adapted_format = adapter_.AdaptFrameResolution(
- request_format.width, request_format.height);
- EXPECT_EQ(request_format.width, adapted_format.width);
- EXPECT_EQ(request_format.height, adapted_format.height);
+ adapter_.AdaptFrameResolution(capture_format_.width, capture_format_.height,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(capture_format_.width, cropped_width_);
+ EXPECT_EQ(capture_format_.height, cropped_height_);
+ EXPECT_EQ(request_format.width, out_width_);
+ EXPECT_EQ(request_format.height, out_height_);
}
// Adapt the pixel resolution to 0. Expect frame drop.
@@ -246,14 +280,15 @@ TEST_F(VideoAdapterTest, AdaptFrameResolutionDrop) {
output_format.width = 0;
output_format.height = 0;
adapter_.OnOutputFormatRequest(output_format);
- EXPECT_TRUE(
- adapter_
- .AdaptFrameResolution(capture_format_.width, capture_format_.height)
- .IsSize0x0());
+ adapter_.AdaptFrameResolution(capture_format_.width, capture_format_.height,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(0, out_width_);
+ EXPECT_EQ(0, out_height_);
}
// Adapt the frame resolution to be a quarter of the capture resolution at the
-// beginning. Expect resolution change.
+// beginning. Expect no cropping but a resolution change.
TEST_F(VideoAdapterTest, AdaptResolution) {
VideoFormat request_format = capture_format_;
request_format.width /= 2;
@@ -263,10 +298,11 @@ TEST_F(VideoAdapterTest, AdaptResolution) {
for (int i = 0; i < 10; ++i)
capturer_->CaptureFrame();
- // Verify no frame drop and resolution change.
+ // Verify no frame drop, no cropping, and resolution change.
VideoCapturerListener::Stats stats = listener_->GetStats();
EXPECT_EQ(0, stats.dropped_frames);
- VerifyAdaptedResolution(stats, request_format.width, request_format.height);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ request_format.width, request_format.height);
}
// Adapt the frame resolution to be a quarter of the capture resolution after
@@ -280,8 +316,9 @@ TEST_F(VideoAdapterTest, AdaptResolutionOnTheFly) {
capturer_->CaptureFrame();
// Verify no resolution change before adaptation.
- VerifyAdaptedResolution(
- listener_->GetStats(), request_format.width, request_format.height);
+ VerifyAdaptedResolution(listener_->GetStats(),
+ capture_format_.width, capture_format_.height,
+ request_format.width, request_format.height);
// Adapt the frame resolution.
request_format.width /= 2;
@@ -291,8 +328,9 @@ TEST_F(VideoAdapterTest, AdaptResolutionOnTheFly) {
capturer_->CaptureFrame();
// Verify resolution change after adaptation.
- VerifyAdaptedResolution(
- listener_->GetStats(), request_format.width, request_format.height);
+ VerifyAdaptedResolution(listener_->GetStats(),
+ capture_format_.width, capture_format_.height,
+ request_format.width, request_format.height);
}
// Drop all frames.
@@ -312,104 +350,155 @@ TEST_F(VideoAdapterTest, DropAllFrames) {
TEST_F(VideoAdapterTest, TestOnOutputFormatRequest) {
VideoFormat format(640, 400, VideoFormat::FpsToInterval(30), 0);
adapter_.SetExpectedInputFrameInterval(VideoFormat::FpsToInterval(30));
- VideoFormat out_format =
- adapter_.AdaptFrameResolution(format.width, format.height);
- EXPECT_EQ(format, adapter_.input_format());
- EXPECT_EQ(format, out_format);
+ adapter_.AdaptFrameResolution(640, 400,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(400, out_height_);
// Format request 640x400.
format.height = 400;
adapter_.OnOutputFormatRequest(format);
- out_format = adapter_.AdaptFrameResolution(640, 400);
- EXPECT_EQ(640, out_format.width);
- EXPECT_EQ(400, out_format.height);
-
- // Request 1280x720, higher than input. Adapt nothing.
+ adapter_.AdaptFrameResolution(640, 400,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(400, out_height_);
+
+ // Request 1280x720, higher than input, but aspect 16:9. Expect cropping but
+ // no scaling.
format.width = 1280;
format.height = 720;
adapter_.OnOutputFormatRequest(format);
- out_format = adapter_.AdaptFrameResolution(640, 400);
- EXPECT_EQ(640, out_format.width);
- EXPECT_EQ(400, out_format.height);
+ adapter_.AdaptFrameResolution(640, 400,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
// Request 0x0.
format.width = 0;
format.height = 0;
adapter_.OnOutputFormatRequest(format);
- out_format = adapter_.AdaptFrameResolution(640, 400);
- EXPECT_TRUE(out_format.IsSize0x0());
+ adapter_.AdaptFrameResolution(640, 400,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(0, out_width_);
+ EXPECT_EQ(0, out_height_);
- // Request 320x200.
+ // Request 320x200. Expect scaling, but no cropping.
format.width = 320;
format.height = 200;
adapter_.OnOutputFormatRequest(format);
- out_format = adapter_.AdaptFrameResolution(640, 400);
- EXPECT_EQ(320, out_format.width);
- EXPECT_EQ(200, out_format.height);
-
- // Request resolution of 2 / 3. Expect adapt down. Scaling to 1/3 is not
- // optimized and not allowed.
- format.width = (640 * 2 + 1) / 3;
- format.height = (400 * 2 + 1) / 3;
+ adapter_.AdaptFrameResolution(640, 400,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(320, out_width_);
+ EXPECT_EQ(200, out_height_);
+
+ // Request resolution close to 2/3 scale. Expect adapt down. Scaling to 2/3
+ // is not optimized and not allowed, therefore 1/2 scaling will be used
+ // instead.
+ format.width = 424;
+ format.height = 265;
adapter_.OnOutputFormatRequest(format);
- out_format = adapter_.AdaptFrameResolution(640, 400);
- EXPECT_EQ(320, out_format.width);
- EXPECT_EQ(200, out_format.height);
+ adapter_.AdaptFrameResolution(640, 400,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(320, out_width_);
+ EXPECT_EQ(200, out_height_);
// Request resolution of 3 / 8. Expect adapt down.
format.width = 640 * 3 / 8;
format.height = 400 * 3 / 8;
adapter_.OnOutputFormatRequest(format);
- out_format = adapter_.AdaptFrameResolution(640, 400);
- EXPECT_EQ(640 * 3 / 8, out_format.width);
- EXPECT_EQ(400 * 3 / 8, out_format.height);
+ adapter_.AdaptFrameResolution(640, 400,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(640 * 3 / 8, out_width_);
+ EXPECT_EQ(400 * 3 / 8, out_height_);
// Switch back up. Expect adapt.
format.width = 320;
format.height = 200;
adapter_.OnOutputFormatRequest(format);
- out_format = adapter_.AdaptFrameResolution(640, 400);
- EXPECT_EQ(320, out_format.width);
- EXPECT_EQ(200, out_format.height);
+ adapter_.AdaptFrameResolution(640, 400,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(320, out_width_);
+ EXPECT_EQ(200, out_height_);
// Format request 480x300.
format.width = 480;
format.height = 300;
adapter_.OnOutputFormatRequest(format);
- out_format = adapter_.AdaptFrameResolution(640, 400);
- EXPECT_EQ(480, out_format.width);
- EXPECT_EQ(300, out_format.height);
+ adapter_.AdaptFrameResolution(640, 400,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(300, out_height_);
}
TEST_F(VideoAdapterTest, TestViewRequestPlusCameraSwitch) {
// Start at HD.
VideoFormat format(1280, 720, VideoFormat::FpsToInterval(30), 0);
adapter_.SetExpectedInputFrameInterval(VideoFormat::FpsToInterval(30));
- VideoFormat out_format =
- adapter_.AdaptFrameResolution(format.width, format.height);
- EXPECT_EQ(format, adapter_.input_format());
- EXPECT_EQ(out_format, adapter_.input_format());
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
// Format request for VGA.
format.width = 640;
format.height = 360;
adapter_.OnOutputFormatRequest(format);
- out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(640, out_format.width);
- EXPECT_EQ(360, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
// Now, the camera reopens at VGA.
// Both the frame and the output format should be 640x360.
- out_format = adapter_.AdaptFrameResolution(640, 360);
- EXPECT_EQ(640, out_format.width);
- EXPECT_EQ(360, out_format.height);
+ adapter_.AdaptFrameResolution(640, 360,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
// And another view request comes in for 640x360, which should have no
// real impact.
adapter_.OnOutputFormatRequest(format);
- out_format = adapter_.AdaptFrameResolution(640, 360);
- EXPECT_EQ(640, out_format.width);
- EXPECT_EQ(360, out_format.height);
+ adapter_.AdaptFrameResolution(640, 360,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
}
TEST_F(VideoAdapterTest, TestVGAWidth) {
@@ -418,130 +507,305 @@ TEST_F(VideoAdapterTest, TestVGAWidth) {
adapter_.SetExpectedInputFrameInterval(VideoFormat::FpsToInterval(30));
adapter_.OnOutputFormatRequest(format);
- VideoFormat out_format = adapter_.AdaptFrameResolution(640, 480);
- // At this point, we have to adapt down to something lower.
- EXPECT_EQ(480, out_format.width);
- EXPECT_EQ(360, out_format.height);
+ adapter_.AdaptFrameResolution(640, 480,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ // Expect cropping.
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
// But if frames come in at 640x360, we shouldn't adapt them down.
- out_format = adapter_.AdaptFrameResolution(640, 360);
- EXPECT_EQ(640, out_format.width);
- EXPECT_EQ(360, out_format.height);
-
- out_format = adapter_.AdaptFrameResolution(640, 480);
- EXPECT_EQ(480, out_format.width);
- EXPECT_EQ(360, out_format.height);
+ adapter_.AdaptFrameResolution(640, 360,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ adapter_.AdaptFrameResolution(640, 480,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
}
TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
- VideoFormat out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(1280, out_format.width);
- EXPECT_EQ(720, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
// Adapt down one step.
adapter_.OnResolutionRequest(rtc::Optional<int>(1280 * 720 - 1),
rtc::Optional<int>());
- out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(960, out_format.width);
- EXPECT_EQ(540, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(960, out_width_);
+ EXPECT_EQ(540, out_height_);
// Adapt down one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(960 * 540 - 1),
rtc::Optional<int>());
- out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(640, out_format.width);
- EXPECT_EQ(360, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
// Adapt down one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
rtc::Optional<int>());
- out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(480, out_format.width);
- EXPECT_EQ(270, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
// Adapt up one step.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(480 * 270));
- out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(640, out_format.width);
- EXPECT_EQ(360, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
// Adapt up one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360));
- out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(960, out_format.width);
- EXPECT_EQ(540, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(960, out_width_);
+ EXPECT_EQ(540, out_height_);
// Adapt up one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(960 * 720));
- out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(1280, out_format.width);
- EXPECT_EQ(720, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
}
TEST_F(VideoAdapterTest, TestOnResolutionRequestMaxZero) {
- VideoFormat out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(1280, out_format.width);
- EXPECT_EQ(720, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
adapter_.OnResolutionRequest(rtc::Optional<int>(0), rtc::Optional<int>());
- out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(0, out_format.width);
- EXPECT_EQ(0, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(0, out_width_);
+ EXPECT_EQ(0, out_height_);
}
TEST_F(VideoAdapterTest, TestOnResolutionRequestInLargeSteps) {
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
rtc::Optional<int>());
- VideoFormat out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(480, out_format.width);
- EXPECT_EQ(270, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(960 * 720));
- out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(1280, out_format.width);
- EXPECT_EQ(720, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
}
TEST_F(VideoAdapterTest, TestOnOutputFormatRequestCapsMaxResolution) {
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
rtc::Optional<int>());
- VideoFormat out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(480, out_format.width);
- EXPECT_EQ(270, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
VideoFormat new_format(640, 360, VideoFormat::FpsToInterval(30), FOURCC_I420);
adapter_.SetExpectedInputFrameInterval(VideoFormat::FpsToInterval(30));
adapter_.OnOutputFormatRequest(new_format);
- out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(480, out_format.width);
- EXPECT_EQ(270, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(960 * 720));
- out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(640, out_format.width);
- EXPECT_EQ(360, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
}
TEST_F(VideoAdapterTest, TestOnResolutionRequestReset) {
- VideoFormat out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(1280, out_format.width);
- EXPECT_EQ(720, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
rtc::Optional<int>());
- out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(480, out_format.width);
- EXPECT_EQ(270, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
adapter_.OnResolutionRequest(rtc::Optional<int>(), rtc::Optional<int>());
- out_format = adapter_.AdaptFrameResolution(1280, 720);
- EXPECT_EQ(1280, out_format.width);
- EXPECT_EQ(720, out_format.height);
+ adapter_.AdaptFrameResolution(1280, 720,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestCroppingWithResolutionRequest) {
+ // Ask for 640x360 (16:9 aspect).
+ adapter_.SetExpectedInputFrameInterval(VideoFormat::FpsToInterval(30));
+ adapter_.OnOutputFormatRequest(
+ VideoFormat(640, 360, VideoFormat::FpsToInterval(30), FOURCC_I420));
+ // Send 640x480 (4:3 aspect).
+ adapter_.AdaptFrameResolution(640, 480,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ // Expect cropping to 16:9 format and no scaling.
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // Adapt down one step.
+ adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
+ rtc::Optional<int>());
+ // Expect cropping to 16:9 format and 3/4 scaling.
+ adapter_.AdaptFrameResolution(640, 480,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ // Adapt down one step more.
+ adapter_.OnResolutionRequest(rtc::Optional<int>(480 * 270 - 1),
+ rtc::Optional<int>());
+ // Expect cropping to 16:9 format and 1/2 scaling.
+ adapter_.AdaptFrameResolution(640, 480,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(320, out_width_);
+ EXPECT_EQ(180, out_height_);
+
+ // Adapt up one step.
+ adapter_.OnResolutionRequest(rtc::Optional<int>(),
+ rtc::Optional<int>(320 * 180));
+ // Expect cropping to 16:9 format and 3/4 scaling.
+ adapter_.AdaptFrameResolution(640, 480,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ // Adapt up one step more.
+ adapter_.OnResolutionRequest(rtc::Optional<int>(),
+ rtc::Optional<int>(480 * 270));
+ // Expect cropping to 16:9 format and no scaling.
+ adapter_.AdaptFrameResolution(640, 480,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // Try to adapt up one step more.
+ adapter_.OnResolutionRequest(rtc::Optional<int>(),
+ rtc::Optional<int>(640 * 360));
+ // Expect cropping to 16:9 format and no scaling.
+ adapter_.AdaptFrameResolution(640, 480,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestCroppingOddResolution) {
+ // Ask for 640x360 (16:9 aspect), with 3/16 scaling.
+ adapter_.SetExpectedInputFrameInterval(VideoFormat::FpsToInterval(30));
+ adapter_.OnOutputFormatRequest(
+ VideoFormat(640, 360, VideoFormat::FpsToInterval(30), FOURCC_I420));
+ adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 * 3 / 16 * 3 / 16),
+ rtc::Optional<int>());
+
+ // Send 640x480 (4:3 aspect).
+ adapter_.AdaptFrameResolution(640, 480,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+
+ // Instead of getting the exact aspect ratio with cropped resolution 640x360,
+ // the resolution should be adjusted to get a perfect scale factor instead.
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(368, cropped_height_);
+ EXPECT_EQ(120, out_width_);
+ EXPECT_EQ(69, out_height_);
}
} // namespace cricket
diff --git a/chromium/third_party/webrtc/media/base/videobroadcaster.cc b/chromium/third_party/webrtc/media/base/videobroadcaster.cc
index 704f4081187..18c38b731d1 100644
--- a/chromium/third_party/webrtc/media/base/videobroadcaster.cc
+++ b/chromium/third_party/webrtc/media/base/videobroadcaster.cc
@@ -98,14 +98,14 @@ const cricket::VideoFrame& VideoBroadcaster::GetBlackFrame(
const cricket::VideoFrame& frame) {
if (black_frame_ && black_frame_->width() == frame.width() &&
black_frame_->height() == frame.height() &&
- black_frame_->GetVideoRotation() == frame.GetVideoRotation()) {
- black_frame_->SetTimeStamp(frame.GetTimeStamp());
+ black_frame_->rotation() == frame.rotation()) {
+ black_frame_->set_timestamp_us(frame.timestamp_us());
return *black_frame_;
}
black_frame_.reset(new cricket::WebRtcVideoFrame(
- new rtc::RefCountedObject<webrtc::I420Buffer>(
- frame.width(), frame.height()),
- frame.GetTimeStamp(), frame.GetVideoRotation()));
+ new rtc::RefCountedObject<webrtc::I420Buffer>(frame.width(),
+ frame.height()),
+ frame.rotation(), frame.timestamp_us()));
black_frame_->SetToBlack();
return *black_frame_;
}
diff --git a/chromium/third_party/webrtc/media/base/videobroadcaster.h b/chromium/third_party/webrtc/media/base/videobroadcaster.h
index c89c7eea975..764c749b0c3 100644
--- a/chromium/third_party/webrtc/media/base/videobroadcaster.h
+++ b/chromium/third_party/webrtc/media/base/videobroadcaster.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MEDIA_BASE_VIDEOBROADCASTER_H_
#define WEBRTC_MEDIA_BASE_VIDEOBROADCASTER_H_
+#include <memory>
#include <utility>
#include <vector>
@@ -55,7 +56,7 @@ class VideoBroadcaster : public VideoSourceBase,
rtc::CriticalSection sinks_and_wants_lock_;
VideoSinkWants current_wants_ GUARDED_BY(sinks_and_wants_lock_);
- rtc::scoped_ptr<cricket::WebRtcVideoFrame> black_frame_;
+ std::unique_ptr<cricket::WebRtcVideoFrame> black_frame_;
};
} // namespace rtc
diff --git a/chromium/third_party/webrtc/media/base/videobroadcaster_unittest.cc b/chromium/third_party/webrtc/media/base/videobroadcaster_unittest.cc
index c6a4df0e664..e4e6d325646 100644
--- a/chromium/third_party/webrtc/media/base/videobroadcaster_unittest.cc
+++ b/chromium/third_party/webrtc/media/base/videobroadcaster_unittest.cc
@@ -135,14 +135,14 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
broadcaster.AddOrUpdateSink(&sink2, wants2);
cricket::WebRtcVideoFrame frame1;
- frame1.InitToBlack(100, 200, 10 /*ts*/);
+ frame1.InitToBlack(100, 200, 10000 /*ts*/);
// Make it not all-black
- frame1.GetUPlane()[0] = 0;
+ frame1.video_frame_buffer()->MutableDataU()[0] = 0;
broadcaster.OnFrame(frame1);
EXPECT_TRUE(sink1.black_frame());
- EXPECT_EQ(10, sink1.timestamp());
+ EXPECT_EQ(10000, sink1.timestamp());
EXPECT_FALSE(sink2.black_frame());
- EXPECT_EQ(10, sink2.timestamp());
+ EXPECT_EQ(10000, sink2.timestamp());
// Switch the sink wants.
wants1.black_frames = false;
@@ -151,12 +151,12 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
broadcaster.AddOrUpdateSink(&sink2, wants2);
cricket::WebRtcVideoFrame frame2;
- frame2.InitToBlack(100, 200, 30 /*ts*/);
+ frame2.InitToBlack(100, 200, 30000 /*ts*/);
// Make it not all-black
- frame2.GetUPlane()[0] = 0;
+ frame2.video_frame_buffer()->MutableDataU()[0] = 0;
broadcaster.OnFrame(frame2);
EXPECT_FALSE(sink1.black_frame());
- EXPECT_EQ(30, sink1.timestamp());
+ EXPECT_EQ(30000, sink1.timestamp());
EXPECT_TRUE(sink2.black_frame());
- EXPECT_EQ(30, sink2.timestamp());
+ EXPECT_EQ(30000, sink2.timestamp());
}
diff --git a/chromium/third_party/webrtc/media/base/videocapturer.cc b/chromium/third_party/webrtc/media/base/videocapturer.cc
index 94f65143531..084a9b913c7 100644
--- a/chromium/third_party/webrtc/media/base/videocapturer.cc
+++ b/chromium/third_party/webrtc/media/base/videocapturer.cc
@@ -30,7 +30,6 @@ static const int64_t kMaxDistance = ~(static_cast<int64_t>(1) << 63);
#ifdef WEBRTC_LINUX
static const int kYU12Penalty = 16; // Needs to be higher than MJPG index.
#endif
-static const int kDefaultScreencastFps = 5;
} // namespace
@@ -66,10 +65,7 @@ VideoCapturer::VideoCapturer() : apply_rotation_(false) {
}
void VideoCapturer::Construct() {
- ratio_w_ = 0;
- ratio_h_ = 0;
enable_camera_list_ = false;
- square_pixel_aspect_ratio_ = false;
capture_state_ = CS_STOPPED;
SignalFrameCaptured.connect(this, &VideoCapturer::OnFrameCaptured);
scaled_width_ = 0;
@@ -224,158 +220,19 @@ void VideoCapturer::OnFrameCaptured(VideoCapturer*,
return;
}
- // Use a temporary buffer to scale
- std::unique_ptr<uint8_t[]> scale_buffer;
- if (IsScreencast()) {
- int scaled_width, scaled_height;
- int desired_screencast_fps =
- capture_format_.get()
- ? VideoFormat::IntervalToFps(capture_format_->interval)
- : kDefaultScreencastFps;
- ComputeScale(captured_frame->width, captured_frame->height,
- desired_screencast_fps, &scaled_width, &scaled_height);
-
- if (FOURCC_ARGB == captured_frame->fourcc &&
- (scaled_width != captured_frame->width ||
- scaled_height != captured_frame->height)) {
- if (scaled_width != scaled_width_ || scaled_height != scaled_height_) {
- LOG(LS_INFO) << "Scaling Screencast from " << captured_frame->width
- << "x" << captured_frame->height << " to " << scaled_width
- << "x" << scaled_height;
- scaled_width_ = scaled_width;
- scaled_height_ = scaled_height;
- }
- CapturedFrame* modified_frame =
- const_cast<CapturedFrame*>(captured_frame);
- const int modified_frame_size = scaled_width * scaled_height * 4;
- scale_buffer.reset(new uint8_t[modified_frame_size]);
- // Compute new width such that width * height is less than maximum but
- // maintains original captured frame aspect ratio.
- // Round down width to multiple of 4 so odd width won't round up beyond
- // maximum, and so chroma channel is even width to simplify spatial
- // resampling.
- libyuv::ARGBScale(reinterpret_cast<const uint8_t*>(captured_frame->data),
- captured_frame->width * 4, captured_frame->width,
- captured_frame->height, scale_buffer.get(),
- scaled_width * 4, scaled_width, scaled_height,
- libyuv::kFilterBilinear);
- modified_frame->width = scaled_width;
- modified_frame->height = scaled_height;
- modified_frame->data_size = scaled_width * 4 * scaled_height;
- modified_frame->data = scale_buffer.get();
- }
- }
-
- const int kYuy2Bpp = 2;
- const int kArgbBpp = 4;
- // TODO(fbarchard): Make a helper function to adjust pixels to square.
- // TODO(fbarchard): Hook up experiment to scaling.
- // Temporary buffer is scoped here so it will persist until i420_frame.Init()
- // makes a copy of the frame, converting to I420.
- std::unique_ptr<uint8_t[]> temp_buffer;
- // YUY2 can be scaled vertically using an ARGB scaler. Aspect ratio is only
- // a problem on OSX. OSX always converts webcams to YUY2 or UYVY.
- bool can_scale =
- FOURCC_YUY2 == CanonicalFourCC(captured_frame->fourcc) ||
- FOURCC_UYVY == CanonicalFourCC(captured_frame->fourcc);
-
- // If pixels are not square, optionally use vertical scaling to make them
- // square. Square pixels simplify the rest of the pipeline, including
- // effects and rendering.
- if (can_scale && square_pixel_aspect_ratio_ &&
- captured_frame->pixel_width != captured_frame->pixel_height) {
- int scaled_width, scaled_height;
- // modified_frame points to the captured_frame but with const casted away
- // so it can be modified.
- CapturedFrame* modified_frame = const_cast<CapturedFrame*>(captured_frame);
- // Compute the frame size that makes pixels square pixel aspect ratio.
- ComputeScaleToSquarePixels(captured_frame->width, captured_frame->height,
- captured_frame->pixel_width,
- captured_frame->pixel_height,
- &scaled_width, &scaled_height);
-
- if (scaled_width != scaled_width_ || scaled_height != scaled_height_) {
- LOG(LS_INFO) << "Scaling WebCam from "
- << captured_frame->width << "x"
- << captured_frame->height << " to "
- << scaled_width << "x" << scaled_height
- << " for PAR "
- << captured_frame->pixel_width << "x"
- << captured_frame->pixel_height;
- scaled_width_ = scaled_width;
- scaled_height_ = scaled_height;
- }
- const int modified_frame_size = scaled_width * scaled_height * kYuy2Bpp;
- uint8_t* temp_buffer_data;
- // Pixels are wide and short; Increasing height. Requires temporary buffer.
- if (scaled_height > captured_frame->height) {
- temp_buffer.reset(new uint8_t[modified_frame_size]);
- temp_buffer_data = temp_buffer.get();
- } else {
- // Pixels are narrow and tall; Decreasing height. Scale will be done
- // in place.
- temp_buffer_data = reinterpret_cast<uint8_t*>(captured_frame->data);
- }
-
- // Use ARGBScaler to vertically scale the YUY2 image, adjusting for 16 bpp.
- libyuv::ARGBScale(reinterpret_cast<const uint8_t*>(captured_frame->data),
- captured_frame->width * kYuy2Bpp, // Stride for YUY2.
- captured_frame->width * kYuy2Bpp / kArgbBpp, // Width.
- abs(captured_frame->height), // Height.
- temp_buffer_data,
- scaled_width * kYuy2Bpp, // Stride for YUY2.
- scaled_width * kYuy2Bpp / kArgbBpp, // Width.
- abs(scaled_height), // New height.
- libyuv::kFilterBilinear);
- modified_frame->width = scaled_width;
- modified_frame->height = scaled_height;
- modified_frame->pixel_width = 1;
- modified_frame->pixel_height = 1;
- modified_frame->data_size = modified_frame_size;
- modified_frame->data = temp_buffer_data;
- }
-
- // Size to crop captured frame to. This adjusts the captured frames
- // aspect ratio to match the final view aspect ratio, considering pixel
- // aspect ratio and rotation. The final size may be scaled down by video
- // adapter to better match ratio_w_ x ratio_h_.
- // Note that abs() of frame height is passed in, because source may be
- // inverted, but output will be positive.
int cropped_width = captured_frame->width;
int cropped_height = captured_frame->height;
-
- // TODO(fbarchard): Improve logic to pad or crop.
- // MJPG can crop vertically, but not horizontally. This logic disables crop.
- // Alternatively we could pad the image with black, or implement a 2 step
- // crop.
- bool can_crop = true;
- if (captured_frame->fourcc == FOURCC_MJPG) {
- float cam_aspect = static_cast<float>(captured_frame->width) /
- static_cast<float>(captured_frame->height);
- float view_aspect = static_cast<float>(ratio_w_) /
- static_cast<float>(ratio_h_);
- can_crop = cam_aspect <= view_aspect;
- }
- if (can_crop && !IsScreencast()) {
- // TODO(ronghuawu): The capturer should always produce the native
- // resolution and the cropping should be done in downstream code.
- ComputeCrop(ratio_w_, ratio_h_, captured_frame->width,
- abs(captured_frame->height), captured_frame->pixel_width,
- captured_frame->pixel_height, captured_frame->rotation,
- &cropped_width, &cropped_height);
- }
-
- int adapted_width = cropped_width;
- int adapted_height = cropped_height;
+ int out_width = captured_frame->width;
+ int out_height = captured_frame->height;
if (enable_video_adapter_ && !IsScreencast()) {
- const VideoFormat adapted_format =
- video_adapter_.AdaptFrameResolution(cropped_width, cropped_height);
- if (adapted_format.IsSize0x0()) {
+ video_adapter_.AdaptFrameResolution(
+ captured_frame->width, captured_frame->height,
+ &cropped_width, &cropped_height,
+ &out_width, &out_height);
+ if (out_width == 0 || out_height == 0) {
// VideoAdapter dropped the frame.
return;
}
- adapted_width = adapted_format.width;
- adapted_height = adapted_format.height;
}
if (!frame_factory_) {
@@ -383,16 +240,15 @@ void VideoCapturer::OnFrameCaptured(VideoCapturer*,
return;
}
- std::unique_ptr<VideoFrame> adapted_frame(
- frame_factory_->CreateAliasedFrame(captured_frame,
- cropped_width, cropped_height,
- adapted_width, adapted_height));
+ // TODO(nisse): Reorganize frame factory methods.
+ std::unique_ptr<VideoFrame> adapted_frame(frame_factory_->CreateAliasedFrame(
+ captured_frame, cropped_width, cropped_height, out_width, out_height));
if (!adapted_frame) {
// TODO(fbarchard): LOG more information about captured frame attributes.
LOG(LS_ERROR) << "Couldn't convert to I420! "
<< "From " << ToString(captured_frame) << " To "
- << cropped_width << " x " << cropped_height;
+ << out_width << " x " << out_height;
return;
}
diff --git a/chromium/third_party/webrtc/media/base/videocapturer.h b/chromium/third_party/webrtc/media/base/videocapturer.h
index 270315582a1..edc6cd31a1b 100644
--- a/chromium/third_party/webrtc/media/base/videocapturer.h
+++ b/chromium/third_party/webrtc/media/base/videocapturer.h
@@ -19,6 +19,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/media/base/videosourceinterface.h"
#include "webrtc/base/sigslot.h"
@@ -283,10 +284,7 @@ class VideoCapturer : public sigslot::has_slots<>,
std::unique_ptr<VideoFormat> max_format_;
std::vector<VideoFormat> filtered_supported_formats_;
- int ratio_w_; // View resolution. e.g. 1280 x 720.
- int ratio_h_;
bool enable_camera_list_;
- bool square_pixel_aspect_ratio_; // Enable scaling to square pixels.
int scaled_width_; // Current output size from ComputeScale.
int scaled_height_;
diff --git a/chromium/third_party/webrtc/media/base/videocapturer_unittest.cc b/chromium/third_party/webrtc/media/base/videocapturer_unittest.cc
index bd145e33ea1..25230b5118a 100644
--- a/chromium/third_party/webrtc/media/base/videocapturer_unittest.cc
+++ b/chromium/third_party/webrtc/media/base/videocapturer_unittest.cc
@@ -9,6 +9,8 @@
*/
#include <stdio.h>
+
+#include <memory>
#include <vector>
#include "webrtc/base/gunit.h"
@@ -40,7 +42,7 @@ class VideoCapturerTest
protected:
void InitCapturer(bool is_screencast) {
- capturer_ = rtc::scoped_ptr<FakeVideoCapturer>(
+ capturer_ = std::unique_ptr<FakeVideoCapturer>(
new FakeVideoCapturer(is_screencast));
capturer_->SignalStateChange.connect(this,
&VideoCapturerTest::OnStateChange);
@@ -56,7 +58,7 @@ class VideoCapturerTest
cricket::CaptureState capture_state() { return capture_state_; }
int num_state_changes() { return num_state_changes_; }
- rtc::scoped_ptr<cricket::FakeVideoCapturer> capturer_;
+ std::unique_ptr<cricket::FakeVideoCapturer> capturer_;
cricket::CaptureState capture_state_;
int num_state_changes_;
cricket::FakeVideoRenderer renderer_;
@@ -321,32 +323,6 @@ TEST_F(VideoCapturerTest, SinkWantsMaxPixelAndMaxPixelCountStepUp) {
EXPECT_EQ(720, renderer2.height());
}
-TEST_F(VideoCapturerTest, ScreencastScaledSuperLarge) {
- InitScreencast();
-
- const int kMaxWidth = 4096;
- const int kMaxHeight = 3072;
- int kWidth = kMaxWidth + 4;
- int kHeight = kMaxHeight + 4;
-
- std::vector<cricket::VideoFormat> formats;
- formats.push_back(cricket::VideoFormat(kWidth, kHeight,
- cricket::VideoFormat::FpsToInterval(5), cricket::FOURCC_ARGB));
- capturer_->ResetSupportedFormats(formats);
-
- EXPECT_EQ(cricket::CS_RUNNING, capturer_->Start(cricket::VideoFormat(
- kWidth,
- kHeight,
- cricket::VideoFormat::FpsToInterval(30),
- cricket::FOURCC_ARGB)));
- EXPECT_TRUE(capturer_->IsRunning());
- EXPECT_EQ(0, renderer_.num_rendered_frames());
- EXPECT_TRUE(capturer_->CaptureFrame());
- EXPECT_EQ(1, renderer_.num_rendered_frames());
- EXPECT_EQ(kWidth / 2, renderer_.width());
- EXPECT_EQ(kHeight / 2, renderer_.height());
-}
-
TEST_F(VideoCapturerTest, TestFourccMatch) {
cricket::VideoFormat desired(640, 480,
cricket::VideoFormat::FpsToInterval(30),
diff --git a/chromium/third_party/webrtc/media/base/videocommon.cc b/chromium/third_party/webrtc/media/base/videocommon.cc
index f6f06c24e32..12a17973d64 100644
--- a/chromium/third_party/webrtc/media/base/videocommon.cc
+++ b/chromium/third_party/webrtc/media/base/videocommon.cc
@@ -51,153 +51,6 @@ uint32_t CanonicalFourCC(uint32_t fourcc) {
return fourcc;
}
-static float kScaleFactors[] = {
- 1.f / 1.f, // Full size.
- 1.f / 2.f, // 1/2 scale.
- 1.f / 4.f, // 1/4 scale.
- 1.f / 8.f, // 1/8 scale.
- 1.f / 16.f // 1/16 scale.
-};
-
-static const int kNumScaleFactors = arraysize(kScaleFactors);
-
-// Finds the scale factor that, when applied to width and height, produces
-// fewer than num_pixels.
-static float FindLowerScale(int width, int height, int target_num_pixels) {
- if (!target_num_pixels) {
- return 0.f;
- }
- int best_distance = INT_MAX;
- int best_index = kNumScaleFactors - 1; // Default to max scale.
- for (int i = 0; i < kNumScaleFactors; ++i) {
- int test_num_pixels = static_cast<int>(width * kScaleFactors[i] *
- height * kScaleFactors[i]);
- int diff = target_num_pixels - test_num_pixels;
- if (diff >= 0 && diff < best_distance) {
- best_distance = diff;
- best_index = i;
- if (best_distance == 0) { // Found exact match.
- break;
- }
- }
- }
- return kScaleFactors[best_index];
-}
-
-// Computes a scale less to fit in max_pixels while maintaining aspect ratio.
-void ComputeScaleMaxPixels(int frame_width, int frame_height, int max_pixels,
- int* scaled_width, int* scaled_height) {
- ASSERT(scaled_width != NULL);
- ASSERT(scaled_height != NULL);
- ASSERT(max_pixels > 0);
- const int kMaxWidth = 4096;
- const int kMaxHeight = 3072;
- int new_frame_width = frame_width;
- int new_frame_height = frame_height;
-
- // Limit width.
- if (new_frame_width > kMaxWidth) {
- new_frame_height = new_frame_height * kMaxWidth / new_frame_width;
- new_frame_width = kMaxWidth;
- }
- // Limit height.
- if (new_frame_height > kMaxHeight) {
- new_frame_width = new_frame_width * kMaxHeight / new_frame_height;
- new_frame_height = kMaxHeight;
- }
- // Limit number of pixels.
- if (new_frame_width * new_frame_height > max_pixels) {
- // Compute new width such that width * height is less than maximum but
- // maintains original captured frame aspect ratio.
- new_frame_width = static_cast<int>(sqrtf(static_cast<float>(
- max_pixels) * new_frame_width / new_frame_height));
- new_frame_height = max_pixels / new_frame_width;
- }
- // Snap to a scale factor that is less than or equal to target pixels.
- float scale = FindLowerScale(frame_width, frame_height,
- new_frame_width * new_frame_height);
- *scaled_width = static_cast<int>(frame_width * scale + .5f);
- *scaled_height = static_cast<int>(frame_height * scale + .5f);
-}
-
-// Compute a size to scale frames to that is below maximum compression
-// and rendering size with the same aspect ratio.
-void ComputeScale(int frame_width, int frame_height, int fps,
- int* scaled_width, int* scaled_height) {
- // Maximum pixels limit is set to Retina MacBookPro 15" resolution of
- // 2880 x 1800 as of 4/18/2013.
- // For high fps, maximum pixels limit is set based on common 24" monitor
- // resolution of 2048 x 1280 as of 6/13/2013. The Retina resolution is
- // therefore reduced to 1440 x 900.
- int max_pixels = (fps > 5) ? 2048 * 1280 : 2880 * 1800;
- ComputeScaleMaxPixels(
- frame_width, frame_height, max_pixels, scaled_width, scaled_height);
-}
-
-// Compute size to crop video frame to.
-// If cropped_format_* is 0, return the frame_* size as is.
-void ComputeCrop(int cropped_format_width, int cropped_format_height,
- int frame_width, int frame_height,
- int pixel_width, int pixel_height,
- int rotation,
- int* cropped_width, int* cropped_height) {
- // Transform screen crop to camera space if rotated.
- if (rotation == 90 || rotation == 270) {
- std::swap(cropped_format_width, cropped_format_height);
- }
- ASSERT(cropped_format_width >= 0);
- ASSERT(cropped_format_height >= 0);
- ASSERT(frame_width > 0);
- ASSERT(frame_height > 0);
- ASSERT(pixel_width >= 0);
- ASSERT(pixel_height >= 0);
- ASSERT(rotation == 0 || rotation == 90 || rotation == 180 || rotation == 270);
- ASSERT(cropped_width != NULL);
- ASSERT(cropped_height != NULL);
- if (!pixel_width) {
- pixel_width = 1;
- }
- if (!pixel_height) {
- pixel_height = 1;
- }
- // if cropped_format is 0x0 disable cropping.
- if (!cropped_format_height) {
- cropped_format_height = 1;
- }
- float frame_aspect = static_cast<float>(frame_width * pixel_width) /
- static_cast<float>(frame_height * pixel_height);
- float crop_aspect = static_cast<float>(cropped_format_width) /
- static_cast<float>(cropped_format_height);
- // kAspectThresh is the maximum aspect ratio difference that we'll accept
- // for cropping. The value 1.34 allows cropping from 4:3 to 16:9.
- // Set to zero to disable cropping entirely.
- // TODO(fbarchard): crop to multiple of 16 width for better performance.
- const float kAspectThresh = 1.34f;
- // Wide aspect - crop horizontally
- if (frame_aspect > crop_aspect &&
- frame_aspect < crop_aspect * kAspectThresh) {
- // Round width down to multiple of 4 to avoid odd chroma width.
- // Width a multiple of 4 allows a half size image to have chroma channel
- // that avoids rounding errors.
- frame_width = static_cast<int>((crop_aspect * frame_height *
- pixel_height) / pixel_width + 0.5f) & ~3;
- } else if (frame_aspect < crop_aspect &&
- frame_aspect > crop_aspect / kAspectThresh) {
- frame_height = static_cast<int>((frame_width * pixel_width) /
- (crop_aspect * pixel_height) + 0.5f) & ~1;
- }
- *cropped_width = frame_width;
- *cropped_height = frame_height;
-}
-
-// Compute the frame size that makes pixels square pixel aspect ratio.
-void ComputeScaleToSquarePixels(int in_width, int in_height,
- int pixel_width, int pixel_height,
- int* scaled_width, int* scaled_height) {
- *scaled_width = in_width; // Keep width the same.
- *scaled_height = in_height * pixel_height / pixel_width;
-}
-
// The C++ standard requires a namespace-scope definition of static const
// integral types even when they are initialized in the declaration (see
// [class.static.data]/4), but MSVC with /Ze is non-conforming and treats that
diff --git a/chromium/third_party/webrtc/media/base/videocommon.h b/chromium/third_party/webrtc/media/base/videocommon.h
index d8d7f9cdd38..757100b07d4 100644
--- a/chromium/third_party/webrtc/media/base/videocommon.h
+++ b/chromium/third_party/webrtc/media/base/videocommon.h
@@ -135,31 +135,6 @@ inline std::string GetFourccName(uint32_t fourcc) {
return name;
}
-// Computes a scale less to fit in max_pixels while maintaining aspect ratio.
-void ComputeScaleMaxPixels(int frame_width, int frame_height, int max_pixels,
- int* scaled_width, int* scaled_height);
-
-// For low fps, max pixels limit is set to Retina MacBookPro 15" resolution of
-// 2880 x 1800 as of 4/18/2013.
-// For high fps, maximum pixels limit is set based on common 24" monitor
-// resolution of 2048 x 1280 as of 6/13/2013. The Retina resolution is
-// therefore reduced to 1440 x 900.
-void ComputeScale(int frame_width, int frame_height, int fps,
- int* scaled_width, int* scaled_height);
-
-// Compute the frame size that conversion should crop to based on aspect ratio.
-// Ensures size is multiple of 2 due to I420 and conversion limitations.
-void ComputeCrop(int cropped_format_width, int cropped_format_height,
- int frame_width, int frame_height,
- int pixel_width, int pixel_height,
- int rotation,
- int* cropped_width, int* cropped_height);
-
-// Compute the frame size that makes pixels square pixel aspect ratio.
-void ComputeScaleToSquarePixels(int in_width, int in_height,
- int pixel_width, int pixel_height,
- int* scaled_width, int* scaled_height);
-
//////////////////////////////////////////////////////////////////////////////
// Definition of VideoFormat.
//////////////////////////////////////////////////////////////////////////////
diff --git a/chromium/third_party/webrtc/media/base/videocommon_unittest.cc b/chromium/third_party/webrtc/media/base/videocommon_unittest.cc
index 84a728b19b1..c28a5d25bae 100644
--- a/chromium/third_party/webrtc/media/base/videocommon_unittest.cc
+++ b/chromium/third_party/webrtc/media/base/videocommon_unittest.cc
@@ -91,221 +91,4 @@ TEST(VideoCommonTest, TestVideoFormatCompare) {
EXPECT_TRUE(format.IsPixelRateLess(format2));
}
-TEST(VideoCommonTest, TestComputeScaleWithLowFps) {
- int scaled_width, scaled_height;
-
- // Request small enough. Expect no change.
- ComputeScale(2560, 1600, 5, &scaled_width, &scaled_height);
- EXPECT_EQ(2560, scaled_width);
- EXPECT_EQ(1600, scaled_height);
-
- // Request too many pixels. Expect 1/2 size.
- ComputeScale(4096, 2560, 5, &scaled_width, &scaled_height);
- EXPECT_EQ(2048, scaled_width);
- EXPECT_EQ(1280, scaled_height);
-
- // Request too many pixels and too wide and tall. Expect 1/4 size.
- ComputeScale(16000, 10000, 5, &scaled_width, &scaled_height);
- EXPECT_EQ(2000, scaled_width);
- EXPECT_EQ(1250, scaled_height);
-
- // Request too wide. (two 30 inch monitors). Expect 1/2 size.
- ComputeScale(5120, 1600, 5, &scaled_width, &scaled_height);
- EXPECT_EQ(2560, scaled_width);
- EXPECT_EQ(800, scaled_height);
-
- // Request too wide but not too many pixels. Expect 1/2 size.
- ComputeScale(8192, 1024, 5, &scaled_width, &scaled_height);
- EXPECT_EQ(4096, scaled_width);
- EXPECT_EQ(512, scaled_height);
-
- // Request too tall. Expect 1/4 size.
- ComputeScale(1024, 8192, 5, &scaled_width, &scaled_height);
- EXPECT_EQ(256, scaled_width);
- EXPECT_EQ(2048, scaled_height);
-}
-
-// Same as TestComputeScale but with 15 fps instead of 5 fps.
-// Disabled for UBSan: https://bugs.chromium.org/p/webrtc/issues/detail?id=5487
-#ifdef UNDEFINED_SANITIZER
-#define MAYBE_TestComputeScaleWithHighFps DISABLED_TestComputeScaleWithHighFps
-#else
-#define MAYBE_TestComputeScaleWithHighFps TestComputeScaleWithHighFps
-#endif
-TEST(VideoCommonTest, MAYBE_TestComputeScaleWithHighFps) {
- int scaled_width, scaled_height;
-
- // Request small enough but high fps. Expect 1/2 size.
- ComputeScale(2560, 1600, 15, &scaled_width, &scaled_height);
- EXPECT_EQ(1280, scaled_width);
- EXPECT_EQ(800, scaled_height);
-
- // Request too many pixels. Expect 1/2 size.
- ComputeScale(4096, 2560, 15, &scaled_width, &scaled_height);
- EXPECT_EQ(2048, scaled_width);
- EXPECT_EQ(1280, scaled_height);
-
- // Request too many pixels and too wide and tall. Expect 1/16 size.
- ComputeScale(64000, 40000, 15, &scaled_width, &scaled_height);
- EXPECT_EQ(4000, scaled_width);
- EXPECT_EQ(2500, scaled_height);
-
- // Request too wide. (two 30 inch monitors). Expect 1/2 size.
- ComputeScale(5120, 1600, 15, &scaled_width, &scaled_height);
- EXPECT_EQ(2560, scaled_width);
- EXPECT_EQ(800, scaled_height);
-
- // Request too wide but not too many pixels. Expect 1/2 size.
- ComputeScale(8192, 1024, 15, &scaled_width, &scaled_height);
- EXPECT_EQ(4096, scaled_width);
- EXPECT_EQ(512, scaled_height);
-
- // Request too tall. Expect 1/4 size.
- ComputeScale(1024, 8192, 15, &scaled_width, &scaled_height);
- EXPECT_EQ(256, scaled_width);
- EXPECT_EQ(2048, scaled_height);
-}
-
-TEST(VideoCommonTest, TestComputeCrop) {
- int cropped_width, cropped_height;
-
- // Request 16:9 to 16:9. Expect no cropping.
- ComputeCrop(1280, 720, // Crop size 16:9
- 640, 360, // Frame is 4:3
- 1, 1, // Normal 1:1 pixels
- 0,
- &cropped_width, &cropped_height);
- EXPECT_EQ(640, cropped_width);
- EXPECT_EQ(360, cropped_height);
-
- // Request 4:3 to 16:9. Expect vertical.
- ComputeCrop(640, 360, // Crop size 16:9
- 640, 480, // Frame is 4:3
- 1, 1, // Normal 1:1 pixels
- 0,
- &cropped_width, &cropped_height);
- EXPECT_EQ(640, cropped_width);
- EXPECT_EQ(360, cropped_height);
-
- // Request 16:9 to 4:3. Expect horizontal crop.
- ComputeCrop(640, 480, // Crop size 4:3
- 640, 360, // Frame is 16:9
- 1, 1, // Normal 1:1 pixels
- 0,
- &cropped_width, &cropped_height);
- EXPECT_EQ(480, cropped_width);
- EXPECT_EQ(360, cropped_height);
-
- // Request 16:9 but VGA has 3:8 pixel aspect ratio. Expect no crop.
- // This occurs on HP4110 on OSX 10.5/10.6/10.7
- ComputeCrop(640, 360, // Crop size 16:9
- 640, 480, // Frame is 4:3
- 3, 8, // Pixel aspect ratio is tall
- 0,
- &cropped_width, &cropped_height);
- EXPECT_EQ(640, cropped_width);
- EXPECT_EQ(480, cropped_height);
-
- // Request 16:9 but QVGA has 15:11 pixel aspect ratio. Expect horizontal crop.
- // This occurs on Logitech B910 on OSX 10.5/10.6/10.7 in Hangouts.
- ComputeCrop(640, 360, // Crop size 16:9
- 320, 240, // Frame is 4:3
- 15, 11, // Pixel aspect ratio is wide
- 0,
- &cropped_width, &cropped_height);
- EXPECT_EQ(312, cropped_width);
- EXPECT_EQ(240, cropped_height);
-
- // Request 16:10 but QVGA has 15:11 pixel aspect ratio.
- // Expect horizontal crop.
- // This occurs on Logitech B910 on OSX 10.5/10.6/10.7 in gmail.
- ComputeCrop(640, 400, // Crop size 16:10
- 320, 240, // Frame is 4:3
- 15, 11, // Pixel aspect ratio is wide
- 0,
- &cropped_width, &cropped_height);
- EXPECT_EQ(280, cropped_width);
- EXPECT_EQ(240, cropped_height);
-
- // Request 16:9 but VGA has 6:5 pixel aspect ratio. Expect vertical crop.
- // This occurs on Logitech QuickCam Pro C9000 on OSX
- ComputeCrop(640, 360, // Crop size 16:9
- 640, 480, // Frame is 4:3
- 6, 5, // Pixel aspect ratio is wide
- 0,
- &cropped_width, &cropped_height);
- EXPECT_EQ(640, cropped_width);
- EXPECT_EQ(432, cropped_height);
-
- // Request 16:10 but HD is 16:9. Expect horizontal crop.
- // This occurs in settings and local preview with HD experiment.
- ComputeCrop(1280, 800, // Crop size 16:10
- 1280, 720, // Frame is 4:3
- 1, 1, // Pixel aspect ratio is wide
- 0,
- &cropped_width, &cropped_height);
- EXPECT_EQ(1152, cropped_width);
- EXPECT_EQ(720, cropped_height);
-
- // Request 16:9 but HD has 3:4 pixel aspect ratio. Expect vertical crop.
- // This occurs on Logitech B910 on OSX 10.5/10.6.7 but not OSX 10.6.8 or 10.7
- ComputeCrop(1280, 720, // Crop size 16:9
- 1280, 720, // Frame is 4:3
- 3, 4, // Pixel aspect ratio is wide
- 0,
- &cropped_width, &cropped_height);
- EXPECT_EQ(1280, cropped_width);
- EXPECT_EQ(540, cropped_height);
-
- // Request 16:9 to 3:4 (portrait). Expect no cropping.
- ComputeCrop(640, 360, // Crop size 16:9
- 640, 480, // Frame is 3:4 portrait
- 1, 1, // Normal 1:1 pixels
- 90,
- &cropped_width, &cropped_height);
- EXPECT_EQ(640, cropped_width);
- EXPECT_EQ(480, cropped_height);
-
- // Request 9:16 from VGA rotated (portrait). Expect crop.
- ComputeCrop(360, 640, // Crop size 9:16
- 640, 480, // Frame is 3:4 portrait
- 1, 1, // Normal 1:1 pixels
- 90,
- &cropped_width, &cropped_height);
- EXPECT_EQ(640, cropped_width);
- EXPECT_EQ(360, cropped_height);
-
- // Cropped size 0x0. Expect no cropping.
- // This is used when adding multiple capturers
- ComputeCrop(0, 0, // Crop size 0x0
- 1024, 768, // Frame is 3:4 portrait
- 1, 1, // Normal 1:1 pixels
- 0,
- &cropped_width, &cropped_height);
- EXPECT_EQ(1024, cropped_width);
- EXPECT_EQ(768, cropped_height);
-}
-
-TEST(VideoCommonTest, TestComputeScaleToSquarePixels) {
- int scaled_width, scaled_height;
-
- // Pixel aspect ratio is 4:3. Logical aspect ratio is 16:9. Expect scale
- // to square pixels with physical aspect ratio of 16:9.
- ComputeScaleToSquarePixels(640, 480,
- 4, 3, // 4 x 3 pixel aspect ratio
- &scaled_width, &scaled_height);
- EXPECT_EQ(640, scaled_width);
- EXPECT_EQ(360, scaled_height);
-
- // Pixel aspect ratio is 3:8. Physical aspect ratio is 4:3. Expect scale
- // to square pixels with logical aspect ratio of 1:2.
- // Note that 640x1280 will be scaled down by video adapter to view request
- // of 640*360 and will end up using 320x640.
- ComputeScaleToSquarePixels(640, 480,
- 3, 8, // 4 x 3 pixel aspect ratio
- &scaled_width, &scaled_height);
- EXPECT_EQ(640, scaled_width);
- EXPECT_EQ(1280, scaled_height);
-}
-
} // namespace cricket
diff --git a/chromium/third_party/webrtc/media/base/videoengine_unittest.h b/chromium/third_party/webrtc/media/base/videoengine_unittest.h
index 01a4f76ca76..394b9c9a8e9 100644
--- a/chromium/third_party/webrtc/media/base/videoengine_unittest.h
+++ b/chromium/third_party/webrtc/media/base/videoengine_unittest.h
@@ -74,27 +74,6 @@ inline int TimeBetweenSend(const cricket::VideoCodec& codec) {
rtc::kNumNanosecsPerMillisec);
}
-// Fake video engine that makes it possible to test enabling and disabling
-// capturer (checking that the engine state is updated and that the capturer
-// is indeed capturing) without having to create a channel. It also makes it
-// possible to test that the media processors are indeed being called when
-// registered.
-template<class T>
-class VideoEngineOverride : public T {
- public:
- VideoEngineOverride() : T() {
- }
- virtual ~VideoEngineOverride() {
- }
- bool is_camera_on() const { return T::GetVideoCapturer()->IsRunning(); }
-
- void TriggerMediaFrame(uint32_t ssrc,
- cricket::VideoFrame* frame,
- bool* drop_frame) {
- T::SignalMediaFrame(ssrc, frame, drop_frame);
- }
-};
-
template<class E, class C>
class VideoMediaChannelTest : public testing::Test,
public sigslot::has_slots<> {
@@ -125,7 +104,7 @@ class VideoMediaChannelTest : public testing::Test,
cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_I420);
EXPECT_EQ(cricket::CS_RUNNING, video_capturer_->Start(format));
- EXPECT_TRUE(channel_->SetCapturer(kSsrc, video_capturer_.get()));
+ channel_->SetSource(kSsrc, video_capturer_.get());
}
virtual cricket::FakeVideoCapturer* CreateFakeVideoCapturer() {
@@ -162,7 +141,7 @@ class VideoMediaChannelTest : public testing::Test,
cricket::FOURCC_I420);
EXPECT_EQ(cricket::CS_RUNNING, video_capturer_2_->Start(format));
- EXPECT_TRUE(channel_->SetCapturer(kSsrc + 2, video_capturer_2_.get()));
+ channel_->SetSource(kSsrc + 2, video_capturer_2_.get());
}
virtual void TearDown() {
channel_.reset();
@@ -172,7 +151,7 @@ class VideoMediaChannelTest : public testing::Test,
}
bool SetOneCodec(int pt, const char* name, int w, int h, int fr) {
- return SetOneCodec(cricket::VideoCodec(pt, name, w, h, fr, 0));
+ return SetOneCodec(cricket::VideoCodec(pt, name, w, h, fr));
}
bool SetOneCodec(const cricket::VideoCodec& codec) {
cricket::VideoFormat capture_format(codec.width, codec.height,
@@ -373,7 +352,7 @@ class VideoMediaChannelTest : public testing::Test,
// Test that SetSend works.
void SetSend() {
EXPECT_FALSE(channel_->sending());
- EXPECT_TRUE(channel_->SetCapturer(kSsrc, video_capturer_.get()));
+ channel_->SetSource(kSsrc, video_capturer_.get());
EXPECT_TRUE(SetOneCodec(DefaultCodec()));
EXPECT_FALSE(channel_->sending());
EXPECT_TRUE(SetSend(true));
@@ -567,7 +546,7 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_EQ(cricket::CS_RUNNING, capturer->Start(format));
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(5678)));
- EXPECT_TRUE(channel_->SetCapturer(5678, capturer.get()));
+ channel_->SetSource(5678, capturer.get());
EXPECT_TRUE(channel_->AddRecvStream(
cricket::StreamParams::CreateLegacy(5678)));
EXPECT_TRUE(channel_->SetSink(5678, &renderer2));
@@ -603,7 +582,7 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_EQ(kTestWidth, info.senders[1].send_frame_width);
EXPECT_EQ(kTestHeight, info.senders[1].send_frame_height);
// The capturer must be unregistered here as it runs out of it's scope next.
- EXPECT_TRUE(channel_->SetCapturer(5678, NULL));
+ channel_->SetSource(5678, NULL);
}
// Test that we can set the bandwidth.
@@ -640,7 +619,7 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_TRUE(SetDefaultCodec());
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(999)));
- EXPECT_TRUE(channel_->SetCapturer(999u, video_capturer_.get()));
+ channel_->SetSource(999u, video_capturer_.get());
EXPECT_TRUE(SetSend(true));
EXPECT_TRUE(WaitAndSendFrame(0));
EXPECT_TRUE_WAIT(NumRtpPackets() > 0, kTimeout);
@@ -706,7 +685,7 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(789u)));
- EXPECT_TRUE(channel_->SetCapturer(789u, video_capturer_.get()));
+ channel_->SetSource(789u, video_capturer_.get());
EXPECT_EQ(rtp_packets, NumRtpPackets());
// Wait 30ms to guarantee the engine does not drop the frame.
EXPECT_TRUE(WaitAndSendFrame(30));
@@ -789,7 +768,7 @@ class VideoMediaChannelTest : public testing::Test,
int captured_frames = 1;
for (int iterations = 0; iterations < 2; ++iterations) {
- EXPECT_TRUE(channel_->SetCapturer(kSsrc, capturer.get()));
+ channel_->SetSource(kSsrc, capturer.get());
rtc::Thread::Current()->ProcessMessages(time_between_send);
EXPECT_TRUE(capturer->CaptureCustomFrame(format.width, format.height,
cricket::FOURCC_I420));
@@ -804,7 +783,7 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_EQ(format.height, renderer_.height());
captured_frames = renderer_.num_rendered_frames() + 1;
EXPECT_FALSE(renderer_.black_frame());
- EXPECT_TRUE(channel_->SetCapturer(kSsrc, NULL));
+ channel_->SetSource(kSsrc, NULL);
// Make sure a black frame is generated within the specified timeout.
// The black frame should be the resolution of the previous frame to
// prevent expensive encoder reconfigurations.
@@ -839,13 +818,12 @@ class VideoMediaChannelTest : public testing::Test,
// tightly.
rtc::Thread::Current()->ProcessMessages(30);
// Remove the capturer.
- EXPECT_TRUE(channel_->SetCapturer(kSsrc, NULL));
+ channel_->SetSource(kSsrc, NULL);
// Wait for one black frame for removing the capturer.
EXPECT_FRAME_WAIT(2, 640, 400, kTimeout);
- // No capturer was added, so this RemoveCapturer should
- // fail.
- EXPECT_FALSE(channel_->SetCapturer(kSsrc, NULL));
+ // No capturer was added, so this SetSource should be a NOP.
+ channel_->SetSource(kSsrc, NULL);
rtc::Thread::Current()->ProcessMessages(300);
// Verify no more frames were sent.
EXPECT_EQ(2, renderer_.num_rendered_frames());
@@ -887,11 +865,11 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_EQ(cricket::CS_RUNNING, capturer2->Start(capture_format));
// State for all the streams.
EXPECT_TRUE(SetOneCodec(DefaultCodec()));
- // A limitation in the lmi implementation requires that SetCapturer() is
+ // A limitation in the lmi implementation requires that SetSource() is
// called after SetOneCodec().
// TODO(hellner): this seems like an unnecessary constraint, fix it.
- EXPECT_TRUE(channel_->SetCapturer(1, capturer1.get()));
- EXPECT_TRUE(channel_->SetCapturer(2, capturer2.get()));
+ channel_->SetSource(1, capturer1.get());
+ channel_->SetSource(2, capturer2.get());
EXPECT_TRUE(SetSend(true));
// Test capturer associated with engine.
const int kTestWidth = 160;
@@ -906,13 +884,13 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_FRAME_ON_RENDERER_WAIT(
renderer2, 1, kTestWidth, kTestHeight, kTimeout);
// Successfully remove the capturer.
- EXPECT_TRUE(channel_->SetCapturer(kSsrc, NULL));
+ channel_->SetSource(kSsrc, NULL);
// Fail to re-remove the capturer.
- EXPECT_FALSE(channel_->SetCapturer(kSsrc, NULL));
+ channel_->SetSource(kSsrc, NULL);
// The capturers must be unregistered here as it runs out of it's scope
// next.
- EXPECT_TRUE(channel_->SetCapturer(1, NULL));
- EXPECT_TRUE(channel_->SetCapturer(2, NULL));
+ channel_->SetSource(1, NULL);
+ channel_->SetSource(2, NULL);
}
void HighAspectHighHeightCapturer() {
@@ -945,13 +923,13 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_EQ(cricket::CS_RUNNING, capturer->Start(capture_format));
// Capture frame to not get same frame timestamps as previous capturer.
capturer->CaptureFrame();
- EXPECT_TRUE(channel_->SetCapturer(kSsrc, capturer.get()));
+ channel_->SetSource(kSsrc, capturer.get());
EXPECT_TRUE(rtc::Thread::Current()->ProcessMessages(30));
EXPECT_TRUE(capturer->CaptureCustomFrame(kWidth, kHeight,
cricket::FOURCC_ARGB));
EXPECT_GT_FRAME_ON_RENDERER_WAIT(
renderer, 2, kScaledWidth, kScaledHeight, kTimeout);
- EXPECT_TRUE(channel_->SetCapturer(kSsrc, NULL));
+ channel_->SetSource(kSsrc, NULL);
}
// Tests that we can adapt video resolution with 16:10 aspect ratio properly.
@@ -1062,57 +1040,6 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_FRAME_WAIT(2, codec.width / 2, codec.height / 2, kTimeout);
}
- // Tests that we can mute and unmute the channel properly.
- void MuteStream() {
- EXPECT_TRUE(SetDefaultCodec());
- cricket::FakeVideoCapturer video_capturer;
- video_capturer.Start(
- cricket::VideoFormat(
- 640, 480,
- cricket::VideoFormat::FpsToInterval(30),
- cricket::FOURCC_I420));
- EXPECT_TRUE(channel_->SetCapturer(kSsrc, &video_capturer));
- EXPECT_TRUE(SetSend(true));
- EXPECT_TRUE(channel_->SetSink(kDefaultReceiveSsrc, &renderer_));
- EXPECT_EQ(0, renderer_.num_rendered_frames());
- // Mute the channel and expect black output frame.
- int frame_count = 0;
- EXPECT_TRUE(channel_->SetVideoSend(kSsrc, false, nullptr));
- EXPECT_TRUE(video_capturer.CaptureFrame());
- ++frame_count;
- EXPECT_EQ_WAIT(frame_count, renderer_.num_rendered_frames(), kTimeout);
- EXPECT_TRUE(renderer_.black_frame());
- // Unmute the channel and expect non-black output frame.
- EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr));
- EXPECT_TRUE(rtc::Thread::Current()->ProcessMessages(30));
- EXPECT_TRUE(video_capturer.CaptureFrame());
- ++frame_count;
- EXPECT_EQ_WAIT(frame_count, renderer_.num_rendered_frames(), kTimeout);
- EXPECT_FALSE(renderer_.black_frame());
- // Test that we can also Mute using the correct send stream SSRC.
- EXPECT_TRUE(channel_->SetVideoSend(kSsrc, false, nullptr));
- EXPECT_TRUE(rtc::Thread::Current()->ProcessMessages(30));
- EXPECT_TRUE(video_capturer.CaptureFrame());
- ++frame_count;
- EXPECT_EQ_WAIT(frame_count, renderer_.num_rendered_frames(), kTimeout);
- EXPECT_TRUE(renderer_.black_frame());
- EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr));
- EXPECT_TRUE(rtc::Thread::Current()->ProcessMessages(30));
- EXPECT_TRUE(video_capturer.CaptureFrame());
- ++frame_count;
- EXPECT_EQ_WAIT(frame_count, renderer_.num_rendered_frames(), kTimeout);
- EXPECT_FALSE(renderer_.black_frame());
- // Test that muting an existing stream succeeds even if it's muted.
- EXPECT_TRUE(channel_->SetVideoSend(kSsrc, false, nullptr));
- EXPECT_TRUE(channel_->SetVideoSend(kSsrc, false, nullptr));
- // Test that unmuting an existing stream succeeds even if it's not muted.
- EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr));
- EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr));
- // Test that muting an invalid stream fails.
- EXPECT_FALSE(channel_->SetVideoSend(kSsrc+1, false, nullptr));
- EXPECT_TRUE(channel_->SetCapturer(kSsrc, NULL));
- }
-
// Test that multiple send streams can be created and deleted properly.
void MultipleSendStreams() {
// Remove stream added in Setup. I.e. remove stream corresponding to default
@@ -1144,7 +1071,7 @@ class VideoMediaChannelTest : public testing::Test,
}
const std::unique_ptr<webrtc::Call> call_;
- VideoEngineOverride<E> engine_;
+ E engine_;
std::unique_ptr<cricket::FakeVideoCapturer> video_capturer_;
std::unique_ptr<cricket::FakeVideoCapturer> video_capturer_2_;
std::unique_ptr<C> channel_;
diff --git a/chromium/third_party/webrtc/media/base/videoframe.cc b/chromium/third_party/webrtc/media/base/videoframe.cc
index 6fd8d8168b0..d5c24adf3cb 100644
--- a/chromium/third_party/webrtc/media/base/videoframe.cc
+++ b/chromium/third_party/webrtc/media/base/videoframe.cc
@@ -31,15 +31,17 @@ bool VideoFrame::CopyToPlanes(uint8_t* dst_y,
int32_t dst_pitch_y,
int32_t dst_pitch_u,
int32_t dst_pitch_v) const {
- if (!GetYPlane() || !GetUPlane() || !GetVPlane()) {
- LOG(LS_ERROR) << "NULL plane pointer.";
+ const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer =
+ video_frame_buffer();
+ if (!buffer) {
+ LOG(LS_ERROR) << "NULL video buffer.";
return false;
}
int32_t src_width = width();
int32_t src_height = height();
- return libyuv::I420Copy(GetYPlane(), GetYPitch(),
- GetUPlane(), GetUPitch(),
- GetVPlane(), GetVPitch(),
+ return libyuv::I420Copy(buffer->DataY(), buffer->StrideY(),
+ buffer->DataU(), buffer->StrideU(),
+ buffer->DataV(), buffer->StrideV(),
dst_y, dst_pitch_y,
dst_u, dst_pitch_u,
dst_v, dst_pitch_v,
@@ -56,9 +58,11 @@ size_t VideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc,
return needed;
}
- if (libyuv::ConvertFromI420(GetYPlane(), GetYPitch(), GetUPlane(),
- GetUPitch(), GetVPlane(), GetVPitch(), buffer,
- stride_rgb, width(), height(), to_fourcc)) {
+ if (libyuv::ConvertFromI420(
+ video_frame_buffer()->DataY(), video_frame_buffer()->StrideY(),
+ video_frame_buffer()->DataU(), video_frame_buffer()->StrideU(),
+ video_frame_buffer()->DataV(), video_frame_buffer()->StrideV(),
+ buffer, stride_rgb, width(), height(), to_fourcc)) {
LOG(LS_ERROR) << "RGB type not supported: " << to_fourcc;
return 0; // 0 indicates error
}
@@ -78,8 +82,8 @@ void VideoFrame::StretchToPlanes(uint8_t* dst_y,
size_t dst_height,
bool interpolate,
bool vert_crop) const {
- if (!GetYPlane() || !GetUPlane() || !GetVPlane()) {
- LOG(LS_ERROR) << "NULL plane pointer.";
+ if (!video_frame_buffer()) {
+ LOG(LS_ERROR) << "NULL frame buffer.";
return;
}
@@ -89,9 +93,9 @@ void VideoFrame::StretchToPlanes(uint8_t* dst_y,
CopyToPlanes(dst_y, dst_u, dst_v, dst_pitch_y, dst_pitch_u, dst_pitch_v);
return;
}
- const uint8_t* src_y = GetYPlane();
- const uint8_t* src_u = GetUPlane();
- const uint8_t* src_v = GetVPlane();
+ const uint8_t* src_y = video_frame_buffer()->DataY();
+ const uint8_t* src_u = video_frame_buffer()->DataU();
+ const uint8_t* src_v = video_frame_buffer()->DataV();
if (vert_crop) {
// Adjust the input width:height ratio to be the same as the output ratio.
@@ -108,15 +112,16 @@ void VideoFrame::StretchToPlanes(uint8_t* dst_y,
int32_t iheight_offset =
static_cast<int32_t>((height() - src_height) >> 2);
iheight_offset <<= 1; // Ensure that iheight_offset is even.
- src_y += iheight_offset * GetYPitch();
- src_u += iheight_offset / 2 * GetUPitch();
- src_v += iheight_offset / 2 * GetVPitch();
+ src_y += iheight_offset * video_frame_buffer()->StrideY();
+ src_u += iheight_offset / 2 * video_frame_buffer()->StrideU();
+ src_v += iheight_offset / 2 * video_frame_buffer()->StrideV();
}
}
// Scale to the output I420 frame.
- libyuv::Scale(src_y, src_u, src_v,
- GetYPitch(), GetUPitch(), GetVPitch(),
+ libyuv::Scale(src_y, src_u, src_v, video_frame_buffer()->StrideY(),
+ video_frame_buffer()->StrideU(),
+ video_frame_buffer()->StrideV(),
static_cast<int>(src_width), static_cast<int>(src_height),
dst_y, dst_u, dst_v, dst_pitch_y, dst_pitch_u, dst_pitch_v,
static_cast<int>(dst_width), static_cast<int>(dst_height),
@@ -130,13 +135,17 @@ void VideoFrame::StretchToFrame(VideoFrame* dst,
return;
}
- StretchToPlanes(dst->GetYPlane(), dst->GetUPlane(), dst->GetVPlane(),
- dst->GetYPitch(), dst->GetUPitch(), dst->GetVPitch(),
+ StretchToPlanes(dst->video_frame_buffer()->MutableDataY(),
+ dst->video_frame_buffer()->MutableDataU(),
+ dst->video_frame_buffer()->MutableDataV(),
+ dst->video_frame_buffer()->StrideY(),
+ dst->video_frame_buffer()->StrideU(),
+ dst->video_frame_buffer()->StrideV(),
dst->width(), dst->height(),
interpolate, vert_crop);
dst->SetTimeStamp(GetTimeStamp());
// Stretched frame should have the same rotation as the source.
- dst->SetRotation(GetVideoRotation());
+ dst->set_rotation(rotation());
}
VideoFrame* VideoFrame::Stretch(size_t dst_width, size_t dst_height,
@@ -151,9 +160,12 @@ VideoFrame* VideoFrame::Stretch(size_t dst_width, size_t dst_height,
}
bool VideoFrame::SetToBlack() {
- return libyuv::I420Rect(GetYPlane(), GetYPitch(),
- GetUPlane(), GetUPitch(),
- GetVPlane(), GetVPitch(),
+ return libyuv::I420Rect(video_frame_buffer()->MutableDataY(),
+ video_frame_buffer()->StrideY(),
+ video_frame_buffer()->MutableDataU(),
+ video_frame_buffer()->StrideU(),
+ video_frame_buffer()->MutableDataV(),
+ video_frame_buffer()->StrideV(),
0, 0,
width(), height(),
16, 128, 128) == 0;
diff --git a/chromium/third_party/webrtc/media/base/videoframe.h b/chromium/third_party/webrtc/media/base/videoframe.h
index 6045bc08b47..4026c26b3b1 100644
--- a/chromium/third_party/webrtc/media/base/videoframe.h
+++ b/chromium/third_party/webrtc/media/base/videoframe.h
@@ -35,18 +35,6 @@ class VideoFrame {
virtual size_t GetWidth() const final { return width(); }
virtual size_t GetHeight() const final { return height(); }
- // These can return NULL if the object is not backed by a buffer.
- virtual const uint8_t* GetYPlane() const = 0;
- virtual const uint8_t* GetUPlane() const = 0;
- virtual const uint8_t* GetVPlane() const = 0;
- virtual uint8_t* GetYPlane() = 0;
- virtual uint8_t* GetUPlane() = 0;
- virtual uint8_t* GetVPlane() = 0;
-
- virtual int32_t GetYPitch() const = 0;
- virtual int32_t GetUPitch() const = 0;
- virtual int32_t GetVPitch() const = 0;
-
// Returns the handle of the underlying video frame. This is used when the
// frame is backed by a texture. The object should be destroyed when it is no
// longer in use, so the underlying resource can be freed.
@@ -54,14 +42,25 @@ class VideoFrame {
// Returns the underlying video frame buffer. This function is ok to call
// multiple times, but the returned object will refer to the same memory.
- virtual rtc::scoped_refptr<webrtc::VideoFrameBuffer> GetVideoFrameBuffer()
- const = 0;
+ virtual const rtc::scoped_refptr<webrtc::VideoFrameBuffer>&
+ video_frame_buffer() const = 0;
+
+ // System monotonic clock, same timebase as rtc::TimeMicros().
+ virtual int64_t timestamp_us() const = 0;
+ virtual void set_timestamp_us(int64_t time_us) = 0;
- virtual int64_t GetTimeStamp() const = 0;
- virtual void SetTimeStamp(int64_t time_stamp) = 0;
+ // Deprecated methods, for backwards compatibility.
+ // TODO(nisse): Delete when usage in Chrome and other applications
+ // have been replaced.
+ virtual int64_t GetTimeStamp() const {
+ return rtc::kNumNanosecsPerMicrosec * timestamp_us();
+ }
+ virtual void SetTimeStamp(int64_t time_ns) {
+ set_timestamp_us(time_ns / rtc::kNumNanosecsPerMicrosec);
+ }
// Indicates the rotation angle in degrees.
- virtual webrtc::VideoRotation GetVideoRotation() const = 0;
+ virtual webrtc::VideoRotation rotation() const = 0;
// Make a shallow copy of the frame. The frame buffer itself is not copied.
// Both the current and new VideoFrame will share a single reference-counted
@@ -137,9 +136,10 @@ class VideoFrame {
int32_t dst_pitch_v) const;
// Creates an empty frame.
- virtual VideoFrame *CreateEmptyFrame(int w, int h,
- int64_t time_stamp) const = 0;
- virtual void SetRotation(webrtc::VideoRotation rotation) = 0;
+ virtual VideoFrame* CreateEmptyFrame(int w,
+ int h,
+ int64_t timestamp_us) const = 0;
+ virtual void set_rotation(webrtc::VideoRotation rotation) = 0;
};
} // namespace cricket
diff --git a/chromium/third_party/webrtc/media/base/videoframe_unittest.h b/chromium/third_party/webrtc/media/base/videoframe_unittest.h
index 9e9b7dddb1f..6ceba833b5b 100644
--- a/chromium/third_party/webrtc/media/base/videoframe_unittest.h
+++ b/chromium/third_party/webrtc/media/base/videoframe_unittest.h
@@ -267,24 +267,27 @@ class VideoFrameTest : public testing::Test {
const uint8_t* start = reinterpret_cast<const uint8_t*>(ms->GetBuffer());
int awidth = (width + 1) & ~1;
frame->InitToBlack(width, height, 0);
- int stride_y = frame->GetYPitch();
- int stride_u = frame->GetUPitch();
- int stride_v = frame->GetVPitch();
+ int stride_y = frame->video_frame_buffer()->StrideY();
+ int stride_u = frame->video_frame_buffer()->StrideU();
+ int stride_v = frame->video_frame_buffer()->StrideV();
+ uint8_t* plane_y = frame->video_frame_buffer()->MutableDataY();
+ uint8_t* plane_u = frame->video_frame_buffer()->MutableDataU();
+ uint8_t* plane_v = frame->video_frame_buffer()->MutableDataV();
for (uint32_t y = 0; y < height; ++y) {
for (uint32_t x = 0; x < width; x += 2) {
const uint8_t* quad1 = start + (y * awidth + x) * 2;
- frame->GetYPlane()[stride_y * y + x] = quad1[y1_pos];
+ plane_y[stride_y * y + x] = quad1[y1_pos];
if ((x + 1) < width) {
- frame->GetYPlane()[stride_y * y + x + 1] = quad1[y2_pos];
+ plane_y[stride_y * y + x + 1] = quad1[y2_pos];
}
if ((y & 1) == 0) {
const uint8_t* quad2 = quad1 + awidth * 2;
if ((y + 1) >= height) {
quad2 = quad1;
}
- frame->GetUPlane()[stride_u * (y / 2) + x / 2] =
+ plane_u[stride_u * (y / 2) + x / 2] =
(quad1[u_pos] + quad2[u_pos] + 1) / 2;
- frame->GetVPlane()[stride_v * (y / 2) + x / 2] =
+ plane_v[stride_v * (y / 2) + x / 2] =
(quad1[v_pos] + quad2[v_pos] + 1) / 2;
}
}
@@ -311,9 +314,12 @@ class VideoFrameTest : public testing::Test {
pitch = -pitch;
}
frame->InitToBlack(width, height, 0);
- int stride_y = frame->GetYPitch();
- int stride_u = frame->GetUPitch();
- int stride_v = frame->GetVPitch();
+ int stride_y = frame->video_frame_buffer()->StrideY();
+ int stride_u = frame->video_frame_buffer()->StrideU();
+ int stride_v = frame->video_frame_buffer()->StrideV();
+ uint8_t* plane_y = frame->video_frame_buffer()->MutableDataY();
+ uint8_t* plane_u = frame->video_frame_buffer()->MutableDataU();
+ uint8_t* plane_v = frame->video_frame_buffer()->MutableDataV();
for (int32_t y = 0; y < height; y += 2) {
for (int32_t x = 0; x < width; x += 2) {
const uint8_t* rgb[4];
@@ -326,19 +332,19 @@ class VideoFrameTest : public testing::Test {
ConvertRgbPixel(rgb[i][r_pos], rgb[i][g_pos], rgb[i][b_pos],
&yuv[i][0], &yuv[i][1], &yuv[i][2]);
}
- frame->GetYPlane()[stride_y * y + x] = yuv[0][0];
+ plane_y[stride_y * y + x] = yuv[0][0];
if ((x + 1) < width) {
- frame->GetYPlane()[stride_y * y + x + 1] = yuv[1][0];
+ plane_y[stride_y * y + x + 1] = yuv[1][0];
}
if ((y + 1) < height) {
- frame->GetYPlane()[stride_y * (y + 1) + x] = yuv[2][0];
+ plane_y[stride_y * (y + 1) + x] = yuv[2][0];
if ((x + 1) < width) {
- frame->GetYPlane()[stride_y * (y + 1) + x + 1] = yuv[3][0];
+ plane_y[stride_y * (y + 1) + x + 1] = yuv[3][0];
}
}
- frame->GetUPlane()[stride_u * (y / 2) + x / 2] =
+ plane_u[stride_u * (y / 2) + x / 2] =
(yuv[0][1] + yuv[1][1] + yuv[2][1] + yuv[3][1] + 2) / 4;
- frame->GetVPlane()[stride_v * (y / 2) + x / 2] =
+ plane_v[stride_v * (y / 2) + x / 2] =
(yuv[0][2] + yuv[1][2] + yuv[2][2] + yuv[3][2] + 2) / 4;
}
}
@@ -395,15 +401,15 @@ class VideoFrameTest : public testing::Test {
// Comparison functions for testing.
static bool IsNull(const cricket::VideoFrame& frame) {
- return !frame.GetYPlane();
+ return !frame.video_frame_buffer();
}
static bool IsSize(const cricket::VideoFrame& frame,
int width,
int height) {
- return !IsNull(frame) && frame.GetYPitch() >= width &&
- frame.GetUPitch() >= width / 2 &&
- frame.GetVPitch() >= width / 2 &&
+ return !IsNull(frame) && frame.video_frame_buffer()->StrideY() >= width &&
+ frame.video_frame_buffer()->StrideU() >= width / 2 &&
+ frame.video_frame_buffer()->StrideV() >= width / 2 &&
frame.width() == width && frame.height() == height;
}
@@ -444,15 +450,17 @@ class VideoFrameTest : public testing::Test {
const uint8_t* v,
uint32_t vpitch,
int max_error) {
- return IsSize(frame, width, height) &&
- frame.GetTimeStamp() == time_stamp &&
- IsPlaneEqual("y", frame.GetYPlane(), frame.GetYPitch(), y, ypitch,
+ return IsSize(frame, width, height) && frame.GetTimeStamp() == time_stamp &&
+ IsPlaneEqual("y", frame.video_frame_buffer()->DataY(),
+ frame.video_frame_buffer()->StrideY(), y, ypitch,
static_cast<uint32_t>(width),
static_cast<uint32_t>(height), max_error) &&
- IsPlaneEqual("u", frame.GetUPlane(), frame.GetUPitch(), u, upitch,
+ IsPlaneEqual("u", frame.video_frame_buffer()->DataU(),
+ frame.video_frame_buffer()->StrideU(), u, upitch,
static_cast<uint32_t>((width + 1) / 2),
static_cast<uint32_t>((height + 1) / 2), max_error) &&
- IsPlaneEqual("v", frame.GetVPlane(), frame.GetVPitch(), v, vpitch,
+ IsPlaneEqual("v", frame.video_frame_buffer()->DataV(),
+ frame.video_frame_buffer()->StrideV(), v, vpitch,
static_cast<uint32_t>((width + 1) / 2),
static_cast<uint32_t>((height + 1) / 2), max_error);
}
@@ -463,9 +471,12 @@ class VideoFrameTest : public testing::Test {
return IsEqual(frame1,
frame2.width(), frame2.height(),
frame2.GetTimeStamp(),
- frame2.GetYPlane(), frame2.GetYPitch(),
- frame2.GetUPlane(), frame2.GetUPitch(),
- frame2.GetVPlane(), frame2.GetVPitch(),
+ frame2.video_frame_buffer()->DataY(),
+ frame2.video_frame_buffer()->StrideY(),
+ frame2.video_frame_buffer()->DataU(),
+ frame2.video_frame_buffer()->StrideU(),
+ frame2.video_frame_buffer()->DataV(),
+ frame2.video_frame_buffer()->StrideV(),
max_error);
}
@@ -478,23 +489,26 @@ class VideoFrameTest : public testing::Test {
frame2.width() - hcrop * 2,
frame2.height() - vcrop * 2,
frame2.GetTimeStamp(),
- frame2.GetYPlane() + vcrop * frame2.GetYPitch()
+ frame2.video_frame_buffer()->DataY()
+ + vcrop * frame2.video_frame_buffer()->StrideY()
+ hcrop,
- frame2.GetYPitch(),
- frame2.GetUPlane() + vcrop * frame2.GetUPitch() / 2
+ frame2.video_frame_buffer()->StrideY(),
+ frame2.video_frame_buffer()->DataU()
+ + vcrop * frame2.video_frame_buffer()->StrideU() / 2
+ hcrop / 2,
- frame2.GetUPitch(),
- frame2.GetVPlane() + vcrop * frame2.GetVPitch() / 2
+ frame2.video_frame_buffer()->StrideU(),
+ frame2.video_frame_buffer()->DataV()
+ + vcrop * frame2.video_frame_buffer()->StrideV() / 2
+ hcrop / 2,
- frame2.GetVPitch(),
+ frame2.video_frame_buffer()->StrideV(),
max_error);
}
static bool IsBlack(const cricket::VideoFrame& frame) {
return !IsNull(frame) &&
- *frame.GetYPlane() == 16 &&
- *frame.GetUPlane() == 128 &&
- *frame.GetVPlane() == 128;
+ *frame.video_frame_buffer()->DataY() == 16 &&
+ *frame.video_frame_buffer()->DataU() == 128 &&
+ *frame.video_frame_buffer()->DataV() == 128;
}
////////////////////////
@@ -541,9 +555,12 @@ class VideoFrameTest : public testing::Test {
uint8_t* y = ALIGNP(buf.get(), kAlignment);
uint8_t* u = y + kWidth * kHeight;
uint8_t* v = u + (kWidth / 2) * kHeight;
- EXPECT_EQ(0, libyuv::I420ToI422(frame1.GetYPlane(), frame1.GetYPitch(),
- frame1.GetUPlane(), frame1.GetUPitch(),
- frame1.GetVPlane(), frame1.GetVPitch(),
+ EXPECT_EQ(0, libyuv::I420ToI422(frame1.video_frame_buffer()->DataY(),
+ frame1.video_frame_buffer()->StrideY(),
+ frame1.video_frame_buffer()->DataU(),
+ frame1.video_frame_buffer()->StrideU(),
+ frame1.video_frame_buffer()->DataV(),
+ frame1.video_frame_buffer()->StrideV(),
y, kWidth,
u, kWidth / 2,
v, kWidth / 2,
@@ -560,9 +577,12 @@ class VideoFrameTest : public testing::Test {
size_t buf_size = kWidth * kHeight * 2;
std::unique_ptr<uint8_t[]> buf(new uint8_t[buf_size + kAlignment]);
uint8_t* yuy2 = ALIGNP(buf.get(), kAlignment);
- EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.GetYPlane(), frame1.GetYPitch(),
- frame1.GetUPlane(), frame1.GetUPitch(),
- frame1.GetVPlane(), frame1.GetVPitch(),
+ EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.video_frame_buffer()->DataY(),
+ frame1.video_frame_buffer()->StrideY(),
+ frame1.video_frame_buffer()->DataU(),
+ frame1.video_frame_buffer()->StrideU(),
+ frame1.video_frame_buffer()->DataV(),
+ frame1.video_frame_buffer()->StrideV(),
yuy2, kWidth * 2,
kWidth, kHeight));
EXPECT_TRUE(LoadFrame(yuy2, buf_size, cricket::FOURCC_YUY2,
@@ -577,9 +597,12 @@ class VideoFrameTest : public testing::Test {
size_t buf_size = kWidth * kHeight * 2;
std::unique_ptr<uint8_t[]> buf(new uint8_t[buf_size + kAlignment + 1]);
uint8_t* yuy2 = ALIGNP(buf.get(), kAlignment) + 1;
- EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.GetYPlane(), frame1.GetYPitch(),
- frame1.GetUPlane(), frame1.GetUPitch(),
- frame1.GetVPlane(), frame1.GetVPitch(),
+ EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.video_frame_buffer()->DataY(),
+ frame1.video_frame_buffer()->StrideY(),
+ frame1.video_frame_buffer()->DataU(),
+ frame1.video_frame_buffer()->StrideU(),
+ frame1.video_frame_buffer()->DataV(),
+ frame1.video_frame_buffer()->StrideV(),
yuy2, kWidth * 2,
kWidth, kHeight));
EXPECT_TRUE(LoadFrame(yuy2, buf_size, cricket::FOURCC_YUY2,
@@ -792,16 +815,23 @@ class VideoFrameTest : public testing::Test {
EXPECT_TRUE(frame2.Init(cricket::FOURCC_##FOURCC, kWidth, kHeight, kWidth, \
kHeight, \
reinterpret_cast<uint8_t*>(ms->GetBuffer()), \
- data_size, 0, webrtc::kVideoRotation_0)); \
- int width_rotate = frame1.width(); \
- int height_rotate = frame1.height(); \
- EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
- libyuv::I420Mirror( \
- frame2.GetYPlane(), frame2.GetYPitch(), frame2.GetUPlane(), \
- frame2.GetUPitch(), frame2.GetVPlane(), frame2.GetVPitch(), \
- frame3.GetYPlane(), frame3.GetYPitch(), frame3.GetUPlane(), \
- frame3.GetUPitch(), frame3.GetVPlane(), frame3.GetVPitch(), kWidth, \
- kHeight); \
+ data_size, 0, webrtc::kVideoRotation_0)); \
+ int width_rotate = frame1.width(); \
+ int height_rotate = frame1.height(); \
+ EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
+ libyuv::I420Mirror(frame2.video_frame_buffer()->DataY(), \
+ frame2.video_frame_buffer()->StrideY(), \
+ frame2.video_frame_buffer()->DataU(), \
+ frame2.video_frame_buffer()->StrideU(), \
+ frame2.video_frame_buffer()->DataV(), \
+ frame2.video_frame_buffer()->StrideV(), \
+ frame3.video_frame_buffer()->MutableDataY(), \
+ frame3.video_frame_buffer()->StrideY(), \
+ frame3.video_frame_buffer()->MutableDataU(), \
+ frame3.video_frame_buffer()->StrideU(), \
+ frame3.video_frame_buffer()->MutableDataV(), \
+ frame3.video_frame_buffer()->StrideV(), \
+ kWidth, kHeight); \
EXPECT_TRUE(IsEqual(frame1, frame3, 0)); \
}
@@ -823,16 +853,23 @@ class VideoFrameTest : public testing::Test {
EXPECT_TRUE(frame2.Init(cricket::FOURCC_##FOURCC, kWidth, kHeight, kWidth, \
kHeight, \
reinterpret_cast<uint8_t*>(ms->GetBuffer()), \
- data_size, 0, webrtc::kVideoRotation_0)); \
- int width_rotate = frame1.width(); \
- int height_rotate = frame1.height(); \
- EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
- libyuv::I420Rotate( \
- frame2.GetYPlane(), frame2.GetYPitch(), frame2.GetUPlane(), \
- frame2.GetUPitch(), frame2.GetVPlane(), frame2.GetVPitch(), \
- frame3.GetYPlane(), frame3.GetYPitch(), frame3.GetUPlane(), \
- frame3.GetUPitch(), frame3.GetVPlane(), frame3.GetVPitch(), kWidth, \
- kHeight, libyuv::kRotate##ROTATE); \
+ data_size, 0, webrtc::kVideoRotation_0)); \
+ int width_rotate = frame1.width(); \
+ int height_rotate = frame1.height(); \
+ EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \
+ libyuv::I420Rotate(frame2.video_frame_buffer()->DataY(), \
+ frame2.video_frame_buffer()->StrideY(), \
+ frame2.video_frame_buffer()->DataU(), \
+ frame2.video_frame_buffer()->StrideU(), \
+ frame2.video_frame_buffer()->DataV(), \
+ frame2.video_frame_buffer()->StrideV(), \
+ frame3.video_frame_buffer()->MutableDataY(), \
+ frame3.video_frame_buffer()->StrideY(), \
+ frame3.video_frame_buffer()->MutableDataU(), \
+ frame3.video_frame_buffer()->StrideU(), \
+ frame3.video_frame_buffer()->MutableDataV(), \
+ frame3.video_frame_buffer()->StrideV(), \
+ kWidth, kHeight, libyuv::kRotate##ROTATE); \
EXPECT_TRUE(IsEqual(frame1, frame3, 0)); \
}
@@ -952,9 +989,9 @@ class VideoFrameTest : public testing::Test {
}
EXPECT_EQ(5, frame.width());
EXPECT_EQ(5, frame.height());
- EXPECT_EQ(5, frame.GetYPitch());
- EXPECT_EQ(3, frame.GetUPitch());
- EXPECT_EQ(3, frame.GetVPitch());
+ EXPECT_EQ(5, frame.video_frame_buffer()->StrideY());
+ EXPECT_EQ(3, frame.video_frame_buffer()->StrideU());
+ EXPECT_EQ(3, frame.video_frame_buffer()->StrideV());
}
// Test 1 pixel edge case image ARGB buffer.
@@ -1121,8 +1158,10 @@ class VideoFrameTest : public testing::Test {
ASSERT_TRUE(LoadFrameNoRepeat(&frame1));
ASSERT_TRUE(LoadFrame(kJpeg400Filename,
cricket::FOURCC_MJPG, kWidth, kHeight, &frame2));
- EXPECT_TRUE(IsPlaneEqual("y", frame1.GetYPlane(), frame1.GetYPitch(),
- frame2.GetYPlane(), frame2.GetYPitch(),
+ EXPECT_TRUE(IsPlaneEqual("y", frame1.video_frame_buffer()->DataY(),
+ frame1.video_frame_buffer()->StrideY(),
+ frame2.video_frame_buffer()->DataY(),
+ frame2.video_frame_buffer()->StrideY(),
kWidth, kHeight, 32));
EXPECT_TRUE(IsEqual(frame1, frame2, 128));
}
@@ -1304,9 +1343,7 @@ class VideoFrameTest : public testing::Test {
EXPECT_TRUE(frame2.Init(frame1));
}
EXPECT_TRUE(IsEqual(frame1, frame2, 0));
- EXPECT_EQ(frame1.GetYPlane(), frame2.GetYPlane());
- EXPECT_EQ(frame1.GetUPlane(), frame2.GetUPlane());
- EXPECT_EQ(frame1.GetVPlane(), frame2.GetVPlane());
+ EXPECT_EQ(frame1.video_frame_buffer(), frame2.video_frame_buffer());
}
// Test creating an empty image and initing it to black.
@@ -1419,9 +1456,12 @@ class VideoFrameTest : public testing::Test {
EXPECT_TRUE(frame2.InitToBlack(kWidth, kHeight, 0));
for (int i = 0; i < repeat_from; ++i) {
EXPECT_EQ(0, RGBToI420(out, stride,
- frame2.GetYPlane(), frame2.GetYPitch(),
- frame2.GetUPlane(), frame2.GetUPitch(),
- frame2.GetVPlane(), frame2.GetVPitch(),
+ frame2.video_frame_buffer()->MutableDataY(),
+ frame2.video_frame_buffer()->StrideY(),
+ frame2.video_frame_buffer()->MutableDataU(),
+ frame2.video_frame_buffer()->StrideU(),
+ frame2.video_frame_buffer()->MutableDataV(),
+ frame2.video_frame_buffer()->StrideV(),
kWidth, kHeight));
}
if (rowpad) {
@@ -1724,9 +1764,12 @@ class VideoFrameTest : public testing::Test {
uint8_t* v = u + (kWidth / 2) * kHeight;
ASSERT_TRUE(LoadFrameNoRepeat(&frame1));
for (int i = 0; i < repeat_; ++i) {
- EXPECT_EQ(0, libyuv::I420ToI422(frame1.GetYPlane(), frame1.GetYPitch(),
- frame1.GetUPlane(), frame1.GetUPitch(),
- frame1.GetVPlane(), frame1.GetVPitch(),
+ EXPECT_EQ(0, libyuv::I420ToI422(frame1.video_frame_buffer()->DataY(),
+ frame1.video_frame_buffer()->StrideY(),
+ frame1.video_frame_buffer()->DataU(),
+ frame1.video_frame_buffer()->StrideU(),
+ frame1.video_frame_buffer()->DataV(),
+ frame1.video_frame_buffer()->StrideV(),
y, kWidth,
u, kWidth / 2,
v, kWidth / 2,
@@ -1749,7 +1792,8 @@ class VideoFrameTest : public testing::Test {
target.reset(source->Copy());
EXPECT_TRUE(IsEqual(*source, *target, 0));
source.reset();
- EXPECT_TRUE(target->GetYPlane() != NULL);
+ ASSERT_TRUE(target->video_frame_buffer() != NULL);
+ EXPECT_TRUE(target->video_frame_buffer()->DataY() != NULL);
}
void CopyIsRef() {
@@ -1759,9 +1803,7 @@ class VideoFrameTest : public testing::Test {
target.reset(source->Copy());
EXPECT_TRUE(IsEqual(*source, *target, 0));
const T* const_source = source.get();
- EXPECT_EQ(const_source->GetYPlane(), target->GetYPlane());
- EXPECT_EQ(const_source->GetUPlane(), target->GetUPlane());
- EXPECT_EQ(const_source->GetVPlane(), target->GetVPlane());
+ EXPECT_EQ(const_source->video_frame_buffer(), target->video_frame_buffer());
}
void StretchToFrame() {
diff --git a/chromium/third_party/webrtc/media/engine/fakewebrtccall.cc b/chromium/third_party/webrtc/media/engine/fakewebrtccall.cc
index 8eff0ebcf8e..e9edf27bcb6 100644
--- a/chromium/third_party/webrtc/media/engine/fakewebrtccall.cc
+++ b/chromium/third_party/webrtc/media/engine/fakewebrtccall.cc
@@ -67,8 +67,17 @@ void FakeAudioReceiveStream::SetStats(
stats_ = stats;
}
-void FakeAudioReceiveStream::IncrementReceivedPackets() {
- received_packets_++;
+bool FakeAudioReceiveStream::VerifyLastPacket(const uint8_t* data,
+ size_t length) const {
+ return last_packet_ == rtc::Buffer(data, length);
+}
+
+bool FakeAudioReceiveStream::DeliverRtp(const uint8_t* packet,
+ size_t length,
+ const webrtc::PacketTime& packet_time) {
+ ++received_packets_;
+ last_packet_.SetData(packet, length);
+ return true;
}
webrtc::AudioReceiveStream::Stats FakeAudioReceiveStream::GetStats() const {
@@ -409,7 +418,7 @@ FakeCall::DeliveryStatus FakeCall::DeliverPacket(
media_type == webrtc::MediaType::AUDIO) {
for (auto receiver : audio_receive_streams_) {
if (receiver->GetConfig().rtp.remote_ssrc == ssrc) {
- receiver->IncrementReceivedPackets();
+ receiver->DeliverRtp(packet, length, packet_time);
return DELIVERY_OK;
}
}
diff --git a/chromium/third_party/webrtc/media/engine/fakewebrtccall.h b/chromium/third_party/webrtc/media/engine/fakewebrtccall.h
index 9caa02a9b71..c9caf8e158e 100644
--- a/chromium/third_party/webrtc/media/engine/fakewebrtccall.h
+++ b/chromium/third_party/webrtc/media/engine/fakewebrtccall.h
@@ -25,6 +25,7 @@
#include "webrtc/audio_receive_stream.h"
#include "webrtc/audio_send_stream.h"
+#include "webrtc/base/buffer.h"
#include "webrtc/call.h"
#include "webrtc/video_frame.h"
#include "webrtc/video_receive_stream.h"
@@ -47,15 +48,10 @@ class FakeAudioSendStream final : public webrtc::AudioSendStream {
bool IsSending() const { return sending_; }
private:
- // webrtc::SendStream implementation.
+ // webrtc::AudioSendStream implementation.
void Start() override { sending_ = true; }
void Stop() override { sending_ = false; }
- void SignalNetworkState(webrtc::NetworkState state) override {}
- bool DeliverRtcp(const uint8_t* packet, size_t length) override {
- return true;
- }
- // webrtc::AudioSendStream implementation.
bool SendTelephoneEvent(int payload_type, int event,
int duration_ms) override;
webrtc::AudioSendStream::Stats GetStats() const override;
@@ -74,24 +70,17 @@ class FakeAudioReceiveStream final : public webrtc::AudioReceiveStream {
const webrtc::AudioReceiveStream::Config& GetConfig() const;
void SetStats(const webrtc::AudioReceiveStream::Stats& stats);
int received_packets() const { return received_packets_; }
- void IncrementReceivedPackets();
+ bool VerifyLastPacket(const uint8_t* data, size_t length) const;
const webrtc::AudioSinkInterface* sink() const { return sink_.get(); }
+ bool DeliverRtp(const uint8_t* packet,
+ size_t length,
+ const webrtc::PacketTime& packet_time);
private:
- // webrtc::ReceiveStream implementation.
+ // webrtc::AudioReceiveStream implementation.
void Start() override {}
void Stop() override {}
- void SignalNetworkState(webrtc::NetworkState state) override {}
- bool DeliverRtcp(const uint8_t* packet, size_t length) override {
- return true;
- }
- bool DeliverRtp(const uint8_t* packet,
- size_t length,
- const webrtc::PacketTime& packet_time) override {
- return true;
- }
- // webrtc::AudioReceiveStream implementation.
webrtc::AudioReceiveStream::Stats GetStats() const override;
void SetSink(std::unique_ptr<webrtc::AudioSinkInterface> sink) override;
@@ -99,6 +88,7 @@ class FakeAudioReceiveStream final : public webrtc::AudioReceiveStream {
webrtc::AudioReceiveStream::Stats stats_;
int received_packets_;
std::unique_ptr<webrtc::AudioSinkInterface> sink_;
+ rtc::Buffer last_packet_;
};
class FakeVideoSendStream final : public webrtc::VideoSendStream,
@@ -126,15 +116,9 @@ class FakeVideoSendStream final : public webrtc::VideoSendStream,
private:
void IncomingCapturedFrame(const webrtc::VideoFrame& frame) override;
- // webrtc::SendStream implementation.
+ // webrtc::VideoSendStream implementation.
void Start() override;
void Stop() override;
- void SignalNetworkState(webrtc::NetworkState state) override {}
- bool DeliverRtcp(const uint8_t* packet, size_t length) override {
- return true;
- }
-
- // webrtc::VideoSendStream implementation.
webrtc::VideoSendStream::Stats GetStats() override;
void ReconfigureVideoEncoder(
const webrtc::VideoEncoderConfig& config) override;
@@ -168,20 +152,10 @@ class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream {
void SetStats(const webrtc::VideoReceiveStream::Stats& stats);
private:
- // webrtc::ReceiveStream implementation.
+ // webrtc::VideoReceiveStream implementation.
void Start() override;
void Stop() override;
- void SignalNetworkState(webrtc::NetworkState state) override {}
- bool DeliverRtcp(const uint8_t* packet, size_t length) override {
- return true;
- }
- bool DeliverRtp(const uint8_t* packet,
- size_t length,
- const webrtc::PacketTime& packet_time) override {
- return true;
- }
- // webrtc::VideoReceiveStream implementation.
webrtc::VideoReceiveStream::Stats GetStats() const override;
webrtc::VideoReceiveStream::Config config_;
@@ -239,6 +213,8 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver {
void SetBitrateConfig(
const webrtc::Call::Config::BitrateConfig& bitrate_config) override;
+ void OnNetworkRouteChanged(const std::string& transport_name,
+ const rtc::NetworkRoute& network_route) override {}
void SignalChannelNetworkState(webrtc::MediaType media,
webrtc::NetworkState state) override;
void OnSentPacket(const rtc::SentPacket& sent_packet) override;
diff --git a/chromium/third_party/webrtc/media/engine/fakewebrtcvideocapturemodule.h b/chromium/third_party/webrtc/media/engine/fakewebrtcvideocapturemodule.h
index 3202502e6c5..2bc715b1e1f 100644
--- a/chromium/third_party/webrtc/media/engine/fakewebrtcvideocapturemodule.h
+++ b/chromium/third_party/webrtc/media/engine/fakewebrtcvideocapturemodule.h
@@ -87,11 +87,16 @@ class FakeWebRtcVideoCaptureModule : public webrtc::VideoCaptureModule {
void SendFrame(int w, int h) {
if (!running_) return;
- webrtc::VideoFrame sample;
- // Setting stride based on width.
- sample.CreateEmptyFrame(w, h, w, (w + 1) / 2, (w + 1) / 2);
+
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+ new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
+ // Initialize memory to satisfy DrMemory tests. See
+ // https://bugs.chromium.org/p/libyuv/issues/detail?id=377
+ buffer->InitializeData();
if (callback_) {
- callback_->OnIncomingCapturedFrame(id_, sample);
+ callback_->OnIncomingCapturedFrame(
+ id_,
+ webrtc::VideoFrame(buffer, 0, 0, webrtc::kVideoRotation_0));
}
}
diff --git a/chromium/third_party/webrtc/media/engine/fakewebrtcvideoengine.h b/chromium/third_party/webrtc/media/engine/fakewebrtcvideoengine.h
index 475797b16d5..f8b8cbb4925 100644
--- a/chromium/third_party/webrtc/media/engine/fakewebrtcvideoengine.h
+++ b/chromium/third_party/webrtc/media/engine/fakewebrtcvideoengine.h
@@ -38,13 +38,12 @@ static const int kMaxVideoBitrate = 1000;
// renderer for a channel or it is adding a renderer for a capturer.
static const int kViEChannelIdBase = 0;
static const int kViEChannelIdMax = 1000;
+static const int kEventTimeoutMs = 10000;
// Fake class for mocking out webrtc::VideoDecoder
class FakeWebRtcVideoDecoder : public webrtc::VideoDecoder {
public:
- FakeWebRtcVideoDecoder()
- : num_frames_received_(0) {
- }
+ FakeWebRtcVideoDecoder() : num_frames_received_(0) {}
virtual int32_t InitDecode(const webrtc::VideoCodec*, int32_t) {
return WEBRTC_VIDEO_CODEC_OK;
@@ -120,16 +119,20 @@ class FakeWebRtcVideoDecoderFactory : public WebRtcVideoDecoderFactory {
// Fake class for mocking out webrtc::VideoEnoder
class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder {
public:
- FakeWebRtcVideoEncoder() : num_frames_encoded_(0) {}
+ FakeWebRtcVideoEncoder()
+ : init_encode_event_(false, false), num_frames_encoded_(0) {}
virtual int32_t InitEncode(const webrtc::VideoCodec* codecSettings,
int32_t numberOfCores,
size_t maxPayloadSize) {
rtc::CritScope lock(&crit_);
codec_settings_ = *codecSettings;
+ init_encode_event_.Set();
return WEBRTC_VIDEO_CODEC_OK;
}
+ bool WaitForInitEncode() { return init_encode_event_.Wait(kEventTimeoutMs); }
+
webrtc::VideoCodec GetCodecSettings() {
rtc::CritScope lock(&crit_);
return codec_settings_;
@@ -140,6 +143,7 @@ class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder {
const std::vector<webrtc::FrameType>* frame_types) {
rtc::CritScope lock(&crit_);
++num_frames_encoded_;
+ init_encode_event_.Set();
return WEBRTC_VIDEO_CODEC_OK;
}
@@ -165,6 +169,7 @@ class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder {
private:
rtc::CriticalSection crit_;
+ rtc::Event init_encode_event_;
int num_frames_encoded_ GUARDED_BY(crit_);
webrtc::VideoCodec codec_settings_ GUARDED_BY(crit_);
};
@@ -173,32 +178,45 @@ class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder {
class FakeWebRtcVideoEncoderFactory : public WebRtcVideoEncoderFactory {
public:
FakeWebRtcVideoEncoderFactory()
- : num_created_encoders_(0), encoders_have_internal_sources_(false) {}
+ : created_video_encoder_event_(false, false),
+ num_created_encoders_(0),
+ encoders_have_internal_sources_(false) {}
- virtual webrtc::VideoEncoder* CreateVideoEncoder(
- webrtc::VideoCodecType type) {
+ webrtc::VideoEncoder* CreateVideoEncoder(
+ webrtc::VideoCodecType type) override {
+ rtc::CritScope lock(&crit_);
if (supported_codec_types_.count(type) == 0) {
return NULL;
}
FakeWebRtcVideoEncoder* encoder = new FakeWebRtcVideoEncoder();
encoders_.push_back(encoder);
num_created_encoders_++;
+ created_video_encoder_event_.Set();
return encoder;
}
- virtual void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) {
+ bool WaitForCreatedVideoEncoders(int num_encoders) {
+ while (created_video_encoder_event_.Wait(kEventTimeoutMs)) {
+ if (GetNumCreatedEncoders() >= num_encoders)
+ return true;
+ }
+ return false;
+ }
+
+ void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) override {
+ rtc::CritScope lock(&crit_);
encoders_.erase(
std::remove(encoders_.begin(), encoders_.end(), encoder),
encoders_.end());
delete encoder;
}
- virtual const std::vector<WebRtcVideoEncoderFactory::VideoCodec>& codecs()
- const {
+ const std::vector<WebRtcVideoEncoderFactory::VideoCodec>& codecs()
+ const override {
return codecs_;
}
- virtual bool EncoderTypeHasInternalSource(
+ bool EncoderTypeHasInternalSource(
webrtc::VideoCodecType type) const override {
return encoders_have_internal_sources_;
}
@@ -215,18 +233,22 @@ class FakeWebRtcVideoEncoderFactory : public WebRtcVideoEncoderFactory {
}
int GetNumCreatedEncoders() {
+ rtc::CritScope lock(&crit_);
return num_created_encoders_;
}
- const std::vector<FakeWebRtcVideoEncoder*>& encoders() {
+ const std::vector<FakeWebRtcVideoEncoder*> encoders() {
+ rtc::CritScope lock(&crit_);
return encoders_;
}
private:
+ rtc::CriticalSection crit_;
+ rtc::Event created_video_encoder_event_;
std::set<webrtc::VideoCodecType> supported_codec_types_;
std::vector<WebRtcVideoEncoderFactory::VideoCodec> codecs_;
- std::vector<FakeWebRtcVideoEncoder*> encoders_;
- int num_created_encoders_;
+ std::vector<FakeWebRtcVideoEncoder*> encoders_ GUARDED_BY(crit_);
+ int num_created_encoders_ GUARDED_BY(crit_);
bool encoders_have_internal_sources_;
};
diff --git a/chromium/third_party/webrtc/media/engine/fakewebrtcvoiceengine.h b/chromium/third_party/webrtc/media/engine/fakewebrtcvoiceengine.h
index b5ad81c6a98..13a36968531 100644
--- a/chromium/third_party/webrtc/media/engine/fakewebrtcvoiceengine.h
+++ b/chromium/third_party/webrtc/media/engine/fakewebrtcvoiceengine.h
@@ -121,15 +121,13 @@ class FakeAudioProcessing : public webrtc::AudioProcessing {
class FakeWebRtcVoiceEngine
: public webrtc::VoEAudioProcessing,
public webrtc::VoEBase, public webrtc::VoECodec,
- public webrtc::VoEHardware,
- public webrtc::VoENetwork, public webrtc::VoERTP_RTCP,
+ public webrtc::VoEHardware, public webrtc::VoERTP_RTCP,
public webrtc::VoEVolumeControl {
public:
struct Channel {
Channel() {
memset(&send_codec, 0, sizeof(send_codec));
}
- bool external_transport = false;
bool playout = false;
float volume_scale = 1.0f;
bool vad = false;
@@ -146,8 +144,6 @@ class FakeWebRtcVoiceEngine
int associate_send_channel = -1;
std::vector<webrtc::CodecInst> recv_codecs;
webrtc::CodecInst send_codec;
- webrtc::PacketTime last_rtp_packet_time;
- std::list<std::string> packets;
int neteq_capacity = -1;
bool neteq_fast_accelerate = false;
};
@@ -191,10 +187,6 @@ class FakeWebRtcVoiceEngine
int GetNACKMaxPackets(int channel) {
return channels_[channel]->nack_max_packets;
}
- const webrtc::PacketTime& GetLastRtpPacketTime(int channel) {
- RTC_DCHECK(channels_.find(channel) != channels_.end());
- return channels_[channel]->last_rtp_packet_time;
- }
int GetSendCNPayloadType(int channel, bool wideband) {
return (wideband) ?
channels_[channel]->cn16_type :
@@ -203,18 +195,6 @@ class FakeWebRtcVoiceEngine
int GetSendREDPayloadType(int channel) {
return channels_[channel]->red_type;
}
- bool CheckPacket(int channel, const void* data, size_t len) {
- bool result = !CheckNoPacket(channel);
- if (result) {
- std::string packet = channels_[channel]->packets.front();
- result = (packet == std::string(static_cast<const char*>(data), len));
- channels_[channel]->packets.pop_front();
- }
- return result;
- }
- bool CheckNoPacket(int channel) {
- return channels_[channel]->packets.empty();
- }
void set_playout_fail_channel(int channel) {
playout_fail_channel_ = channel;
}
@@ -310,7 +290,7 @@ class FakeWebRtcVoiceEngine
channels_[channel]->associate_send_channel = accociate_send_channel;
return 0;
}
- webrtc::RtcEventLog* GetEventLog() { return nullptr; }
+ webrtc::RtcEventLog* GetEventLog() override { return nullptr; }
// webrtc::VoECodec
WEBRTC_STUB(NumOfCodecs, ());
@@ -456,62 +436,16 @@ class FakeWebRtcVoiceEngine
WEBRTC_STUB(SetPlayoutDevice, (int));
WEBRTC_STUB(SetAudioDeviceLayer, (webrtc::AudioLayers));
WEBRTC_STUB(GetAudioDeviceLayer, (webrtc::AudioLayers&));
- WEBRTC_FUNC(SetRecordingSampleRate, (unsigned int samples_per_sec)) {
- recording_sample_rate_ = samples_per_sec;
- return 0;
- }
- WEBRTC_FUNC_CONST(RecordingSampleRate, (unsigned int* samples_per_sec)) {
- *samples_per_sec = recording_sample_rate_;
- return 0;
- }
- WEBRTC_FUNC(SetPlayoutSampleRate, (unsigned int samples_per_sec)) {
- playout_sample_rate_ = samples_per_sec;
- return 0;
- }
- WEBRTC_FUNC_CONST(PlayoutSampleRate, (unsigned int* samples_per_sec)) {
- *samples_per_sec = playout_sample_rate_;
- return 0;
- }
+ WEBRTC_STUB(SetRecordingSampleRate, (unsigned int samples_per_sec));
+ WEBRTC_STUB_CONST(RecordingSampleRate, (unsigned int* samples_per_sec));
+ WEBRTC_STUB(SetPlayoutSampleRate, (unsigned int samples_per_sec));
+ WEBRTC_STUB_CONST(PlayoutSampleRate, (unsigned int* samples_per_sec));
WEBRTC_STUB(EnableBuiltInAEC, (bool enable));
- virtual bool BuiltInAECIsAvailable() const { return false; }
+ bool BuiltInAECIsAvailable() const override { return false; }
WEBRTC_STUB(EnableBuiltInAGC, (bool enable));
- virtual bool BuiltInAGCIsAvailable() const { return false; }
+ bool BuiltInAGCIsAvailable() const override { return false; }
WEBRTC_STUB(EnableBuiltInNS, (bool enable));
- virtual bool BuiltInNSIsAvailable() const { return false; }
-
- // webrtc::VoENetwork
- WEBRTC_FUNC(RegisterExternalTransport, (int channel,
- webrtc::Transport& transport)) {
- WEBRTC_CHECK_CHANNEL(channel);
- channels_[channel]->external_transport = true;
- return 0;
- }
- WEBRTC_FUNC(DeRegisterExternalTransport, (int channel)) {
- WEBRTC_CHECK_CHANNEL(channel);
- channels_[channel]->external_transport = false;
- return 0;
- }
- WEBRTC_FUNC(ReceivedRTPPacket, (int channel, const void* data,
- size_t length)) {
- WEBRTC_CHECK_CHANNEL(channel);
- if (!channels_[channel]->external_transport) return -1;
- channels_[channel]->packets.push_back(
- std::string(static_cast<const char*>(data), length));
- return 0;
- }
- WEBRTC_FUNC(ReceivedRTPPacket, (int channel, const void* data,
- size_t length,
- const webrtc::PacketTime& packet_time)) {
- WEBRTC_CHECK_CHANNEL(channel);
- if (ReceivedRTPPacket(channel, data, length) == -1) {
- return -1;
- }
- channels_[channel]->last_rtp_packet_time = packet_time;
- return 0;
- }
-
- WEBRTC_STUB(ReceivedRTCPPacket, (int channel, const void* data,
- size_t length));
+ bool BuiltInNSIsAvailable() const override { return false; }
// webrtc::VoERTP_RTCP
WEBRTC_FUNC(SetLocalSSRC, (int channel, unsigned int ssrc)) {
@@ -685,17 +619,17 @@ class FakeWebRtcVoiceEngine
int reportingThreshold,
int penaltyDecay,
int typeEventDelay));
- int EnableHighPassFilter(bool enable) {
+ int EnableHighPassFilter(bool enable) override {
highpass_filter_enabled_ = enable;
return 0;
}
- bool IsHighPassFilterEnabled() {
+ bool IsHighPassFilterEnabled() override {
return highpass_filter_enabled_;
}
- bool IsStereoChannelSwappingEnabled() {
+ bool IsStereoChannelSwappingEnabled() override {
return stereo_swapping_enabled_;
}
- void EnableStereoChannelSwapping(bool enable) {
+ void EnableStereoChannelSwapping(bool enable) override {
stereo_swapping_enabled_ = enable;
}
int GetNetEqCapacity() const {
@@ -729,8 +663,6 @@ class FakeWebRtcVoiceEngine
webrtc::AgcModes agc_mode_ = webrtc::kAgcDefault;
webrtc::AgcConfig agc_config_;
int playout_fail_channel_ = -1;
- int recording_sample_rate_ = -1;
- int playout_sample_rate_ = -1;
FakeAudioProcessing audio_processing_;
};
diff --git a/chromium/third_party/webrtc/media/engine/webrtcvideocapturer.cc b/chromium/third_party/webrtc/media/engine/webrtcvideocapturer.cc
index a5405510597..71c88413f89 100644
--- a/chromium/third_party/webrtc/media/engine/webrtcvideocapturer.cc
+++ b/chromium/third_party/webrtc/media/engine/webrtcvideocapturer.cc
@@ -278,7 +278,7 @@ CaptureState WebRtcVideoCapturer::Start(const VideoFormat& capture_format) {
return CS_FAILED;
}
- uint32_t start = rtc::Time();
+ int64_t start = rtc::TimeMillis();
module_->RegisterCaptureDataCallback(*this);
if (module_->StartCapture(cap) != 0) {
LOG(LS_ERROR) << "Camera '" << GetId() << "' failed to start";
diff --git a/chromium/third_party/webrtc/media/engine/webrtcvideocapturer.h b/chromium/third_party/webrtc/media/engine/webrtcvideocapturer.h
index b6b39386c1b..1efa4ad66fe 100644
--- a/chromium/third_party/webrtc/media/engine/webrtcvideocapturer.h
+++ b/chromium/third_party/webrtc/media/engine/webrtcvideocapturer.h
@@ -61,14 +61,14 @@ class WebRtcVideoCapturer : public VideoCapturer,
protected:
void OnSinkWantsChanged(const rtc::VideoSinkWants& wants) override;
// Override virtual methods of the parent class VideoCapturer.
- virtual bool GetPreferredFourccs(std::vector<uint32_t>* fourccs);
+ bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) override;
private:
// Callback when a frame is captured by camera.
- virtual void OnIncomingCapturedFrame(const int32_t id,
- const webrtc::VideoFrame& frame);
- virtual void OnCaptureDelayChanged(const int32_t id,
- const int32_t delay);
+ void OnIncomingCapturedFrame(const int32_t id,
+ const webrtc::VideoFrame& frame) override;
+ void OnCaptureDelayChanged(const int32_t id,
+ const int32_t delay) override;
// Used to signal captured frames on the same thread as invoked Start().
// With WebRTC's current VideoCapturer implementations, this will mean a
diff --git a/chromium/third_party/webrtc/media/engine/webrtcvideoengine2.cc b/chromium/third_party/webrtc/media/engine/webrtcvideoengine2.cc
index b90950f0fbe..d81c849234e 100644
--- a/chromium/third_party/webrtc/media/engine/webrtcvideoengine2.cc
+++ b/chromium/third_party/webrtc/media/engine/webrtcvideoengine2.cc
@@ -21,7 +21,6 @@
#include "webrtc/base/timeutils.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/call.h"
-#include "webrtc/media/base/videocapturer.h"
#include "webrtc/media/engine/constants.h"
#include "webrtc/media/engine/simulcast.h"
#include "webrtc/media/engine/webrtcmediaengine.h"
@@ -30,6 +29,7 @@
#include "webrtc/media/engine/webrtcvoiceengine.h"
#include "webrtc/modules/video_coding/codecs/h264/include/h264.h"
#include "webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h"
+#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
#include "webrtc/system_wrappers/include/field_trial.h"
#include "webrtc/video_decoder.h"
#include "webrtc/video_encoder.h"
@@ -160,7 +160,8 @@ bool CodecIsInternallySupported(const std::string& codec_name) {
return true;
}
if (CodecNamesEq(codec_name, kVp9CodecName)) {
- return true;
+ return webrtc::VP9Encoder::IsSupported() &&
+ webrtc::VP9Decoder::IsSupported();
}
if (CodecNamesEq(codec_name, kH264CodecName)) {
return webrtc::H264Encoder::IsSupported() &&
@@ -181,7 +182,7 @@ void AddDefaultFeedbackParams(VideoCodec* codec) {
static VideoCodec MakeVideoCodecWithDefaultFeedbackParams(int payload_type,
const char* name) {
VideoCodec codec(payload_type, name, kDefaultVideoMaxWidth,
- kDefaultVideoMaxHeight, kDefaultVideoMaxFramerate, 0);
+ kDefaultVideoMaxHeight, kDefaultVideoMaxFramerate);
AddDefaultFeedbackParams(&codec);
return codec;
}
@@ -262,39 +263,6 @@ inline bool ContainsHeaderExtension(
return false;
}
-// Merges two fec configs and logs an error if a conflict arises
-// such that merging in different order would trigger a different output.
-static void MergeFecConfig(const webrtc::FecConfig& other,
- webrtc::FecConfig* output) {
- if (other.ulpfec_payload_type != -1) {
- if (output->ulpfec_payload_type != -1 &&
- output->ulpfec_payload_type != other.ulpfec_payload_type) {
- LOG(LS_WARNING) << "Conflict merging ulpfec_payload_type configs: "
- << output->ulpfec_payload_type << " and "
- << other.ulpfec_payload_type;
- }
- output->ulpfec_payload_type = other.ulpfec_payload_type;
- }
- if (other.red_payload_type != -1) {
- if (output->red_payload_type != -1 &&
- output->red_payload_type != other.red_payload_type) {
- LOG(LS_WARNING) << "Conflict merging red_payload_type configs: "
- << output->red_payload_type << " and "
- << other.red_payload_type;
- }
- output->red_payload_type = other.red_payload_type;
- }
- if (other.red_rtx_payload_type != -1) {
- if (output->red_rtx_payload_type != -1 &&
- output->red_rtx_payload_type != other.red_rtx_payload_type) {
- LOG(LS_WARNING) << "Conflict merging red_rtx_payload_type configs: "
- << output->red_rtx_payload_type << " and "
- << other.red_rtx_payload_type;
- }
- output->red_rtx_payload_type = other.red_rtx_payload_type;
- }
-}
-
// Returns true if the given codec is disallowed from doing simulcast.
bool IsCodecBlacklistedForSimulcast(const std::string& codec_name) {
return CodecNamesEq(codec_name, kH264CodecName) ||
@@ -631,12 +599,19 @@ std::vector<VideoCodec> WebRtcVideoEngine2::GetSupportedCodecs() const {
std::vector<VideoCodec> supported_codecs = DefaultVideoCodecList();
if (external_encoder_factory_ == NULL) {
+ LOG(LS_INFO) << "Supported codecs: "
+ << CodecVectorToString(supported_codecs);
return supported_codecs;
}
+ std::stringstream out;
const std::vector<WebRtcVideoEncoderFactory::VideoCodec>& codecs =
external_encoder_factory_->codecs();
for (size_t i = 0; i < codecs.size(); ++i) {
+ out << codecs[i].name;
+ if (i != codecs.size() - 1) {
+ out << ", ";
+ }
// Don't add internally-supported codecs twice.
if (CodecIsInternallySupported(codecs[i].name)) {
continue;
@@ -647,16 +622,17 @@ std::vector<VideoCodec> WebRtcVideoEngine2::GetSupportedCodecs() const {
const int kExternalVideoPayloadTypeBase = 120;
size_t payload_type = kExternalVideoPayloadTypeBase + i;
RTC_DCHECK(payload_type < 128);
- VideoCodec codec(static_cast<int>(payload_type),
- codecs[i].name,
- codecs[i].max_width,
- codecs[i].max_height,
- codecs[i].max_fps,
- 0);
+ VideoCodec codec(static_cast<int>(payload_type), codecs[i].name,
+ codecs[i].max_width, codecs[i].max_height,
+ codecs[i].max_fps);
AddDefaultFeedbackParams(&codec);
supported_codecs.push_back(codec);
}
+ LOG(LS_INFO) << "Supported codecs (incl. external codecs): "
+ << CodecVectorToString(supported_codecs);
+ LOG(LS_INFO) << "Codecs supported by the external encoder factory: "
+ << out.str();
return supported_codecs;
}
@@ -673,7 +649,8 @@ WebRtcVideoChannel2::WebRtcVideoChannel2(
video_config_(config.video),
external_encoder_factory_(external_encoder_factory),
external_decoder_factory_(external_decoder_factory),
- default_send_options_(options) {
+ default_send_options_(options),
+ red_disabled_by_remote_side_(false) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
rtcp_receiver_report_ssrc_ = kDefaultRtcpReceiverReportSsrc;
@@ -740,17 +717,7 @@ bool WebRtcVideoChannel2::ReceiveCodecsHaveChanged(
};
std::sort(before.begin(), before.end(), comparison);
std::sort(after.begin(), after.end(), comparison);
- for (size_t i = 0; i < before.size(); ++i) {
- // For the same reason that we sort the codecs, we also ignore the
- // preference. We don't want a preference change on the receive
- // side to cause recreation of the stream.
- before[i].codec.preference = 0;
- after[i].codec.preference = 0;
- if (before[i] != after[i]) {
- return true;
- }
- }
- return false;
+ return before != after;
}
bool WebRtcVideoChannel2::GetChangedSendParameters(
@@ -784,7 +751,7 @@ bool WebRtcVideoChannel2::GetChangedSendParameters(
}
// Handle max bitrate.
- if (params.max_bandwidth_bps != bitrate_config_.max_bitrate_bps &&
+ if (params.max_bandwidth_bps != send_params_.max_bandwidth_bps &&
params.max_bandwidth_bps >= 0) {
// 0 uncaps max bitrate (-1).
changed_params->max_bandwidth_bps = rtc::Optional<int>(
@@ -819,39 +786,38 @@ bool WebRtcVideoChannel2::SetSendParameters(const VideoSendParameters& params) {
return false;
}
- bool bitrate_config_changed = false;
-
if (changed_params.codec) {
const VideoCodecSettings& codec_settings = *changed_params.codec;
send_codec_ = rtc::Optional<VideoCodecSettings>(codec_settings);
-
LOG(LS_INFO) << "Using codec: " << codec_settings.codec.ToString();
- // TODO(holmer): Changing the codec parameters shouldn't necessarily mean
- // that we change the min/max of bandwidth estimation. Reevaluate this.
- bitrate_config_ = GetBitrateConfigForCodec(codec_settings.codec);
- bitrate_config_changed = true;
}
if (changed_params.rtp_header_extensions) {
send_rtp_extensions_ = *changed_params.rtp_header_extensions;
}
- if (changed_params.max_bandwidth_bps) {
- // TODO(pbos): Figure out whether b=AS means max bitrate for this
- // WebRtcVideoChannel2 (in which case we're good), or per sender (SSRC), in
- // which case this should not set a Call::BitrateConfig but rather
- // reconfigure all senders.
- int max_bitrate_bps = *changed_params.max_bandwidth_bps;
- bitrate_config_.start_bitrate_bps = -1;
- bitrate_config_.max_bitrate_bps = max_bitrate_bps;
- if (max_bitrate_bps > 0 &&
- bitrate_config_.min_bitrate_bps > max_bitrate_bps) {
- bitrate_config_.min_bitrate_bps = max_bitrate_bps;
+ if (changed_params.codec || changed_params.max_bandwidth_bps) {
+ if (send_codec_) {
+ // TODO(holmer): Changing the codec parameters shouldn't necessarily mean
+ // that we change the min/max of bandwidth estimation. Reevaluate this.
+ bitrate_config_ = GetBitrateConfigForCodec(send_codec_->codec);
+ if (!changed_params.codec) {
+ // If the codec isn't changing, set the start bitrate to -1 which means
+ // "unchanged" so that BWE isn't affected.
+ bitrate_config_.start_bitrate_bps = -1;
+ }
+ }
+ if (params.max_bandwidth_bps >= 0) {
+ // Note that max_bandwidth_bps intentionally takes priority over the
+ // bitrate config for the codec. This allows FEC to be applied above the
+ // codec target bitrate.
+ // TODO(pbos): Figure out whether b=AS means max bitrate for this
+ // WebRtcVideoChannel2 (in which case we're good), or per sender (SSRC),
+ // in which case this should not set a Call::BitrateConfig but rather
+ // reconfigure all senders.
+ bitrate_config_.max_bitrate_bps =
+ params.max_bandwidth_bps == 0 ? -1 : params.max_bandwidth_bps;
}
- bitrate_config_changed = true;
- }
-
- if (bitrate_config_changed) {
call_->SetBitrateConfig(bitrate_config_);
}
@@ -874,37 +840,106 @@ bool WebRtcVideoChannel2::SetSendParameters(const VideoSendParameters& params) {
: webrtc::RtcpMode::kCompound);
}
}
+ if (changed_params.codec) {
+ bool red_was_disabled = red_disabled_by_remote_side_;
+ red_disabled_by_remote_side_ =
+ changed_params.codec->fec.red_payload_type == -1;
+ if (red_was_disabled != red_disabled_by_remote_side_) {
+ for (auto& kv : receive_streams_) {
+ // In practice VideoChannel::SetRemoteContent appears to most of the
+ // time also call UpdateRemoteStreams, which recreates the receive
+ // streams. If that's always true this call isn't needed.
+ kv.second->SetFecDisabledRemotely(red_disabled_by_remote_side_);
+ }
+ }
+ }
}
send_params_ = params;
return true;
}
-webrtc::RtpParameters WebRtcVideoChannel2::GetRtpParameters(
+
+webrtc::RtpParameters WebRtcVideoChannel2::GetRtpSendParameters(
uint32_t ssrc) const {
rtc::CritScope stream_lock(&stream_crit_);
auto it = send_streams_.find(ssrc);
if (it == send_streams_.end()) {
- LOG(LS_WARNING) << "Attempting to get RTP parameters for stream with ssrc "
- << ssrc << " which doesn't exist.";
+ LOG(LS_WARNING) << "Attempting to get RTP send parameters for stream "
+ << "with ssrc " << ssrc << " which doesn't exist.";
return webrtc::RtpParameters();
}
- return it->second->GetRtpParameters();
+ webrtc::RtpParameters rtp_params = it->second->GetRtpParameters();
+ // Need to add the common list of codecs to the send stream-specific
+ // RTP parameters.
+ for (const VideoCodec& codec : send_params_.codecs) {
+ rtp_params.codecs.push_back(codec.ToCodecParameters());
+ }
+ return rtp_params;
}
-bool WebRtcVideoChannel2::SetRtpParameters(
+bool WebRtcVideoChannel2::SetRtpSendParameters(
uint32_t ssrc,
const webrtc::RtpParameters& parameters) {
+ TRACE_EVENT0("webrtc", "WebRtcVideoChannel2::SetRtpSendParameters");
rtc::CritScope stream_lock(&stream_crit_);
auto it = send_streams_.find(ssrc);
if (it == send_streams_.end()) {
- LOG(LS_ERROR) << "Attempting to set RTP parameters for stream with ssrc "
- << ssrc << " which doesn't exist.";
+ LOG(LS_ERROR) << "Attempting to set RTP send parameters for stream "
+ << "with ssrc " << ssrc << " which doesn't exist.";
+ return false;
+ }
+
+ // TODO(deadbeef): Handle setting parameters with a list of codecs in a
+ // different order (which should change the send codec).
+ webrtc::RtpParameters current_parameters = GetRtpSendParameters(ssrc);
+ if (current_parameters.codecs != parameters.codecs) {
+ LOG(LS_ERROR) << "Using SetParameters to change the set of codecs "
+ << "is not currently supported.";
return false;
}
return it->second->SetRtpParameters(parameters);
}
+webrtc::RtpParameters WebRtcVideoChannel2::GetRtpReceiveParameters(
+ uint32_t ssrc) const {
+ rtc::CritScope stream_lock(&stream_crit_);
+ auto it = receive_streams_.find(ssrc);
+ if (it == receive_streams_.end()) {
+ LOG(LS_WARNING) << "Attempting to get RTP receive parameters for stream "
+ << "with ssrc " << ssrc << " which doesn't exist.";
+ return webrtc::RtpParameters();
+ }
+
+ // TODO(deadbeef): Return stream-specific parameters.
+ webrtc::RtpParameters rtp_params = CreateRtpParametersWithOneEncoding();
+ for (const VideoCodec& codec : recv_params_.codecs) {
+ rtp_params.codecs.push_back(codec.ToCodecParameters());
+ }
+ return rtp_params;
+}
+
+bool WebRtcVideoChannel2::SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
+ TRACE_EVENT0("webrtc", "WebRtcVideoChannel2::SetRtpReceiveParameters");
+ rtc::CritScope stream_lock(&stream_crit_);
+ auto it = receive_streams_.find(ssrc);
+ if (it == receive_streams_.end()) {
+ LOG(LS_ERROR) << "Attempting to set RTP receive parameters for stream "
+ << "with ssrc " << ssrc << " which doesn't exist.";
+ return false;
+ }
+
+ webrtc::RtpParameters current_parameters = GetRtpReceiveParameters(ssrc);
+ if (current_parameters != parameters) {
+ LOG(LS_ERROR) << "Changing the RTP receive parameters is currently "
+ << "unsupported.";
+ return false;
+ }
+ return true;
+}
+
bool WebRtcVideoChannel2::GetChangedRecvParameters(
const VideoRecvParameters& params,
ChangedRecvParameters* changed_params) const {
@@ -996,6 +1031,7 @@ bool WebRtcVideoChannel2::GetSendCodec(VideoCodec* codec) {
}
bool WebRtcVideoChannel2::SetSend(bool send) {
+ TRACE_EVENT0("webrtc", "WebRtcVideoChannel2::SetSend");
LOG(LS_VERBOSE) << "SetSend: " << (send ? "true" : "false");
if (send && !send_codec_) {
LOG(LS_ERROR) << "SetSend(true) called before setting codec.";
@@ -1011,6 +1047,9 @@ bool WebRtcVideoChannel2::SetSend(bool send) {
return true;
}
+// TODO(nisse): The enable argument was used for mute logic which has
+// been moved to VideoBroadcaster. So delete this method, and use
+// SetOptions instead.
bool WebRtcVideoChannel2::SetVideoSend(uint32_t ssrc, bool enable,
const VideoOptions* options) {
TRACE_EVENT0("webrtc", "SetVideoSend");
@@ -1018,11 +1057,6 @@ bool WebRtcVideoChannel2::SetVideoSend(uint32_t ssrc, bool enable,
<< "options: " << (options ? options->ToString() : "nullptr")
<< ").";
- // TODO(solenberg): The state change should be fully rolled back if any one of
- // these calls fail.
- if (!MuteStream(ssrc, !enable)) {
- return false;
- }
if (enable && options) {
SetOptions(ssrc, *options);
}
@@ -1184,7 +1218,7 @@ bool WebRtcVideoChannel2::AddRecvStream(const StreamParams& sp,
receive_streams_[ssrc] = new WebRtcVideoReceiveStream(
call_, sp, config, external_decoder_factory_, default_stream,
- recv_codecs_);
+ recv_codecs_, red_disabled_by_remote_side_);
return true;
}
@@ -1220,10 +1254,6 @@ void WebRtcVideoChannel2::ConfigureReceiverRtp(
}
for (size_t i = 0; i < recv_codecs_.size(); ++i) {
- MergeFecConfig(recv_codecs_[i].fec, &config->rtp.fec);
- }
-
- for (size_t i = 0; i < recv_codecs_.size(); ++i) {
uint32_t rtx_ssrc;
if (recv_codecs_[i].rtx_payload_type != -1 &&
sp.GetFidSsrc(ssrc, &rtx_ssrc)) {
@@ -1325,22 +1355,21 @@ void WebRtcVideoChannel2::FillBandwidthEstimationStats(
video_media_info->bw_estimations.push_back(bwe_info);
}
-bool WebRtcVideoChannel2::SetCapturer(uint32_t ssrc, VideoCapturer* capturer) {
- LOG(LS_INFO) << "SetCapturer: " << ssrc << " -> "
- << (capturer != NULL ? "(capturer)" : "NULL");
+void WebRtcVideoChannel2::SetSource(
+ uint32_t ssrc,
+ rtc::VideoSourceInterface<cricket::VideoFrame>* source) {
+ LOG(LS_INFO) << "SetSource: " << ssrc << " -> "
+ << (source ? "(source)" : "NULL");
RTC_DCHECK(ssrc != 0);
- {
- rtc::CritScope stream_lock(&stream_crit_);
- const auto& kv = send_streams_.find(ssrc);
- if (kv == send_streams_.end()) {
- LOG(LS_ERROR) << "No sending stream on ssrc " << ssrc;
- return false;
- }
- if (!kv->second->SetCapturer(capturer)) {
- return false;
- }
+
+ rtc::CritScope stream_lock(&stream_crit_);
+ const auto& kv = send_streams_.find(ssrc);
+ if (kv == send_streams_.end()) {
+ // Allow unknown ssrc only if source is null.
+ RTC_CHECK(source == nullptr);
+ } else {
+ kv->second->SetSource(source);
}
- return true;
}
void WebRtcVideoChannel2::OnPacketReceived(
@@ -1425,24 +1454,8 @@ void WebRtcVideoChannel2::OnReadyToSend(bool ready) {
void WebRtcVideoChannel2::OnNetworkRouteChanged(
const std::string& transport_name,
- const NetworkRoute& network_route) {
- // TODO(honghaiz): uncomment this once the function in call is implemented.
- // call_->OnNetworkRouteChanged(transport_name, network_route);
-}
-
-bool WebRtcVideoChannel2::MuteStream(uint32_t ssrc, bool mute) {
- LOG(LS_VERBOSE) << "MuteStream: " << ssrc << " -> "
- << (mute ? "mute" : "unmute");
- RTC_DCHECK(ssrc != 0);
- rtc::CritScope stream_lock(&stream_crit_);
- const auto& kv = send_streams_.find(ssrc);
- if (kv == send_streams_.end()) {
- LOG(LS_ERROR) << "No sending stream on ssrc " << ssrc;
- return false;
- }
-
- kv->second->MuteStream(mute);
- return true;
+ const rtc::NetworkRoute& network_route) {
+ call_->OnNetworkRouteChanged(transport_name, network_route);
}
// TODO(pbos): Remove SetOptions in favor of SetSendParameters.
@@ -1533,7 +1546,7 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
call_(call),
cpu_restricted_counter_(0),
number_of_cpu_adapt_changes_(0),
- capturer_(nullptr),
+ source_(nullptr),
external_encoder_factory_(external_encoder_factory),
stream_(nullptr),
parameters_(config, options, max_bitrate_bps, codec_settings),
@@ -1541,8 +1554,6 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
pending_encoder_reconfiguration_(false),
allocated_encoder_(nullptr, webrtc::kVideoCodecUnknown, false),
sending_(false),
- muted_(false),
- first_frame_timestamp_ms_(0),
last_frame_timestamp_ms_(0) {
parameters_.config.rtp.max_packet_size = kVideoMtu;
parameters_.conference_mode = send_params.conference_mode;
@@ -1567,54 +1578,52 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
}
WebRtcVideoChannel2::WebRtcVideoSendStream::~WebRtcVideoSendStream() {
- DisconnectCapturer();
+ DisconnectSource();
if (stream_ != NULL) {
call_->DestroyVideoSendStream(stream_);
}
DestroyVideoEncoder(&allocated_encoder_);
}
-static void CreateBlackFrame(webrtc::VideoFrame* video_frame,
- int width,
- int height,
- webrtc::VideoRotation rotation) {
- video_frame->CreateEmptyFrame(width, height, width, (width + 1) / 2,
- (width + 1) / 2);
- memset(video_frame->buffer(webrtc::kYPlane), 16,
- video_frame->allocated_size(webrtc::kYPlane));
- memset(video_frame->buffer(webrtc::kUPlane), 128,
- video_frame->allocated_size(webrtc::kUPlane));
- memset(video_frame->buffer(webrtc::kVPlane), 128,
- video_frame->allocated_size(webrtc::kVPlane));
- video_frame->set_rotation(rotation);
+static webrtc::VideoFrame CreateBlackFrame(int width,
+ int height,
+ int64_t render_time_ms_,
+ webrtc::VideoRotation rotation) {
+ webrtc::VideoFrame frame;
+ frame.CreateEmptyFrame(width, height, width, (width + 1) / 2,
+ (width + 1) / 2);
+ memset(frame.video_frame_buffer()->MutableDataY(), 16,
+ frame.allocated_size(webrtc::kYPlane));
+ memset(frame.video_frame_buffer()->MutableDataU(), 128,
+ frame.allocated_size(webrtc::kUPlane));
+ memset(frame.video_frame_buffer()->MutableDataV(), 128,
+ frame.allocated_size(webrtc::kVPlane));
+ frame.set_rotation(rotation);
+ frame.set_render_time_ms(render_time_ms_);
+ return frame;
}
void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
const VideoFrame& frame) {
TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::OnFrame");
- webrtc::VideoFrame video_frame(frame.GetVideoFrameBuffer(), 0, 0,
- frame.GetVideoRotation());
+ webrtc::VideoFrame video_frame(frame.video_frame_buffer(), 0, 0,
+ frame.rotation());
rtc::CritScope cs(&lock_);
if (stream_ == NULL) {
// Frame input before send codecs are configured, dropping frame.
return;
}
- if (muted_) {
- // Create a black frame to transmit instead.
- CreateBlackFrame(&video_frame,
- frame.width(),
- frame.height(),
- video_frame.rotation());
- }
-
int64_t frame_delta_ms = frame.GetTimeStamp() / rtc::kNumNanosecsPerMillisec;
+
// frame->GetTimeStamp() is essentially a delta, align to webrtc time
- if (first_frame_timestamp_ms_ == 0) {
- first_frame_timestamp_ms_ = rtc::Time() - frame_delta_ms;
+ if (!first_frame_timestamp_ms_) {
+ first_frame_timestamp_ms_ =
+ rtc::Optional<int64_t>(rtc::TimeMillis() - frame_delta_ms);
}
- last_frame_timestamp_ms_ = first_frame_timestamp_ms_ + frame_delta_ms;
+ last_frame_timestamp_ms_ = *first_frame_timestamp_ms_ + frame_delta_ms;
+
video_frame.set_render_time_ms(last_frame_timestamp_ms_);
// Reconfigure codec if necessary.
SetDimensions(video_frame.width(), video_frame.height());
@@ -1630,71 +1639,62 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
stream_->Input()->IncomingCapturedFrame(video_frame);
}
-bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer(
- VideoCapturer* capturer) {
- TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::SetCapturer");
+void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSource(
+ rtc::VideoSourceInterface<cricket::VideoFrame>* source) {
+ TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::SetSource");
RTC_DCHECK(thread_checker_.CalledOnValidThread());
- if (!DisconnectCapturer() && capturer == NULL) {
- return false;
- }
+
+ if (!source && !source_)
+ return;
+ DisconnectSource();
{
rtc::CritScope cs(&lock_);
// Reset timestamps to realign new incoming frames to a webrtc timestamp. A
// new capturer may have a different timestamp delta than the previous one.
- first_frame_timestamp_ms_ = 0;
+ first_frame_timestamp_ms_ = rtc::Optional<int64_t>();
- if (capturer == NULL) {
+ if (source == NULL) {
if (stream_ != NULL) {
LOG(LS_VERBOSE) << "Disabling capturer, sending black frame.";
- webrtc::VideoFrame black_frame;
-
- CreateBlackFrame(&black_frame, last_dimensions_.width,
- last_dimensions_.height, last_rotation_);
-
// Force this black frame not to be dropped due to timestamp order
// check. As IncomingCapturedFrame will drop the frame if this frame's
// timestamp is less than or equal to last frame's timestamp, it is
// necessary to give this black frame a larger timestamp than the
// previous one.
last_frame_timestamp_ms_ += 1;
- black_frame.set_render_time_ms(last_frame_timestamp_ms_);
- stream_->Input()->IncomingCapturedFrame(black_frame);
- }
+ stream_->Input()->IncomingCapturedFrame(
+ CreateBlackFrame(last_dimensions_.width, last_dimensions_.height,
+ last_frame_timestamp_ms_, last_rotation_));
- capturer_ = NULL;
- return true;
+
+ }
}
}
- capturer_ = capturer;
- // |capturer_->AddOrUpdateSink| may not be called while holding |lock_| since
+ source_ = source;
+ // |source_->AddOrUpdateSink| may not be called while holding |lock_| since
// that might cause a lock order inversion.
- capturer_->AddOrUpdateSink(this, sink_wants_);
- return true;
-}
-
-void WebRtcVideoChannel2::WebRtcVideoSendStream::MuteStream(bool mute) {
- rtc::CritScope cs(&lock_);
- muted_ = mute;
+ if (source_) {
+ source_->AddOrUpdateSink(this, sink_wants_);
+ }
}
-bool WebRtcVideoChannel2::WebRtcVideoSendStream::DisconnectCapturer() {
+void WebRtcVideoChannel2::WebRtcVideoSendStream::DisconnectSource() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
- if (capturer_ == NULL) {
- return false;
+ if (source_ == NULL) {
+ return;
}
- // |capturer_->RemoveSink| may not be called while holding |lock_| since
+ // |source_->RemoveSink| may not be called while holding |lock_| since
// that might cause a lock order inversion.
- capturer_->RemoveSink(this);
- capturer_ = NULL;
+ source_->RemoveSink(this);
+ source_ = nullptr;
// Reset |cpu_restricted_counter_| if the capturer is changed. It is not
// possible to know if the video resolution is restricted by CPU usage after
// the capturer is changed since the next capturer might be screen capture
// with another resolution and frame rate.
cpu_restricted_counter_ = 0;
- return true;
}
const std::vector<uint32_t>&
@@ -1855,8 +1855,8 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSendParameters(
if (params.rtp_header_extensions) {
sink_wants_.rotation_applied = !ContainsHeaderExtension(
*params.rtp_header_extensions, kRtpVideoRotationHeaderExtension);
- if (capturer_) {
- capturer_->AddOrUpdateSink(this, sink_wants_);
+ if (source_) {
+ source_->AddOrUpdateSink(this, sink_wants_);
}
}
}
@@ -1873,6 +1873,8 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetRtpParameters(
pending_encoder_reconfiguration_ = true;
}
rtp_parameters_ = new_parameters;
+ // Codecs are currently handled at the WebRtcVideoChannel2 level.
+ rtp_parameters_.codecs.clear();
// Encoding may have been activated/deactivated.
UpdateSendState();
return true;
@@ -2021,7 +2023,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnLoadUpdate(Load load) {
return;
}
RTC_DCHECK(thread_checker_.CalledOnValidThread());
- if (!capturer_) {
+ if (!source_) {
return;
}
{
@@ -2077,9 +2079,9 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnLoadUpdate(Load load) {
sink_wants_.max_pixel_count = max_pixel_count;
sink_wants_.max_pixel_count_step_up = max_pixel_count_step_up;
}
- // |capturer_->AddOrUpdateSink| may not be called while holding |lock_| since
+ // |source_->AddOrUpdateSink| may not be called while holding |lock_| since
// that might cause a lock order inversion.
- capturer_->AddOrUpdateSink(this, sink_wants_);
+ source_->AddOrUpdateSink(this, sink_wants_);
}
VideoSenderInfo
@@ -2214,13 +2216,15 @@ WebRtcVideoChannel2::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
const webrtc::VideoReceiveStream::Config& config,
WebRtcVideoDecoderFactory* external_decoder_factory,
bool default_stream,
- const std::vector<VideoCodecSettings>& recv_codecs)
+ const std::vector<VideoCodecSettings>& recv_codecs,
+ bool red_disabled_by_remote_side)
: call_(call),
ssrcs_(sp.ssrcs),
ssrc_groups_(sp.ssrc_groups),
stream_(NULL),
default_stream_(default_stream),
config_(config),
+ red_disabled_by_remote_side_(red_disabled_by_remote_side),
external_decoder_factory_(external_decoder_factory),
sink_(NULL),
last_width_(-1),
@@ -2396,7 +2400,13 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RecreateWebRtcStream() {
if (stream_ != NULL) {
call_->DestroyVideoReceiveStream(stream_);
}
- stream_ = call_->CreateVideoReceiveStream(config_);
+ webrtc::VideoReceiveStream::Config config = config_;
+ if (red_disabled_by_remote_side_) {
+ config.rtp.fec.red_payload_type = -1;
+ config.rtp.fec.ulpfec_payload_type = -1;
+ config.rtp.fec.red_rtx_payload_type = -1;
+ }
+ stream_ = call_->CreateVideoReceiveStream(config);
stream_->Start();
}
@@ -2435,8 +2445,8 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::OnFrame(
last_height_ = frame.height();
const WebRtcVideoFrame render_frame(
- frame.video_frame_buffer(),
- frame.render_time_ms() * rtc::kNumNanosecsPerMillisec, frame.rotation());
+ frame.video_frame_buffer(), frame.rotation(),
+ frame.render_time_ms() * rtc::kNumNanosecsPerMicrosec);
sink_->OnFrame(render_frame);
}
@@ -2504,6 +2514,12 @@ WebRtcVideoChannel2::WebRtcVideoReceiveStream::GetVideoReceiverInfo() {
return info;
}
+void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetFecDisabledRemotely(
+ bool disable) {
+ red_disabled_by_remote_side_ = disable;
+ RecreateWebRtcStream();
+}
+
WebRtcVideoChannel2::VideoCodecSettings::VideoCodecSettings()
: rtx_payload_type(-1) {}
diff --git a/chromium/third_party/webrtc/media/engine/webrtcvideoengine2.h b/chromium/third_party/webrtc/media/engine/webrtcvideoengine2.h
index ba2dbd8a02b..2d1b1923ed7 100644
--- a/chromium/third_party/webrtc/media/engine/webrtcvideoengine2.h
+++ b/chromium/third_party/webrtc/media/engine/webrtcvideoengine2.h
@@ -74,6 +74,7 @@ class UnsignalledSsrcHandler {
};
virtual Action OnUnsignalledSsrc(WebRtcVideoChannel2* channel,
uint32_t ssrc) = 0;
+ virtual ~UnsignalledSsrcHandler() = default;
};
// TODO(pbos): Remove, use external handlers only.
@@ -86,6 +87,7 @@ class DefaultUnsignalledSsrcHandler : public UnsignalledSsrcHandler {
rtc::VideoSinkInterface<VideoFrame>* GetDefaultSink() const;
void SetDefaultSink(VideoMediaChannel* channel,
rtc::VideoSinkInterface<VideoFrame>* sink);
+ virtual ~DefaultUnsignalledSsrcHandler() = default;
private:
uint32_t default_recv_ssrc_;
@@ -96,7 +98,7 @@ class DefaultUnsignalledSsrcHandler : public UnsignalledSsrcHandler {
class WebRtcVideoEngine2 {
public:
WebRtcVideoEngine2();
- ~WebRtcVideoEngine2();
+ virtual ~WebRtcVideoEngine2();
// Basic video engine implementation.
void Init();
@@ -145,9 +147,13 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
bool SetSendParameters(const VideoSendParameters& params) override;
bool SetRecvParameters(const VideoRecvParameters& params) override;
- webrtc::RtpParameters GetRtpParameters(uint32_t ssrc) const override;
- bool SetRtpParameters(uint32_t ssrc,
- const webrtc::RtpParameters& parameters) override;
+ webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override;
+ bool SetRtpSendParameters(uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) override;
+ webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const override;
+ bool SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) override;
bool GetSendCodec(VideoCodec* send_codec) override;
bool SetSend(bool send) override;
bool SetVideoSend(uint32_t ssrc,
@@ -161,7 +167,9 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
bool SetSink(uint32_t ssrc,
rtc::VideoSinkInterface<VideoFrame>* sink) override;
bool GetStats(VideoMediaInfo* info) override;
- bool SetCapturer(uint32_t ssrc, VideoCapturer* capturer) override;
+ void SetSource(
+ uint32_t ssrc,
+ rtc::VideoSourceInterface<cricket::VideoFrame>* source) override;
void OnPacketReceived(rtc::CopyOnWriteBuffer* packet,
const rtc::PacketTime& packet_time) override;
@@ -169,7 +177,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
const rtc::PacketTime& packet_time) override;
void OnReadyToSend(bool ready) override;
void OnNetworkRouteChanged(const std::string& transport_name,
- const NetworkRoute& network_route) override;
+ const rtc::NetworkRoute& network_route) override;
void SetInterface(NetworkInterface* iface) override;
// Implemented for VideoMediaChannelTest.
@@ -217,8 +225,6 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
bool GetChangedRecvParameters(const VideoRecvParameters& params,
ChangedRecvParameters* changed_params) const;
- bool MuteStream(uint32_t ssrc, bool mute);
-
void SetMaxSendBandwidth(int bps);
void SetOptions(uint32_t ssrc, const VideoOptions& options);
@@ -235,7 +241,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
static std::string CodecSettingsVectorToString(
const std::vector<VideoCodecSettings>& codecs);
- // Wrapper for the sender part, this is where the capturer is connected and
+ // Wrapper for the sender part, this is where the source is connected and
// frames are then converted from cricket frames to webrtc frames.
class WebRtcVideoSendStream
: public rtc::VideoSinkInterface<cricket::VideoFrame>,
@@ -261,9 +267,8 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
webrtc::RtpParameters GetRtpParameters() const;
void OnFrame(const cricket::VideoFrame& frame) override;
- bool SetCapturer(VideoCapturer* capturer);
- void MuteStream(bool mute);
- bool DisconnectCapturer();
+ void SetSource(rtc::VideoSourceInterface<cricket::VideoFrame>* source);
+ void DisconnectSource();
void SetSend(bool send);
@@ -365,12 +370,12 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
webrtc::Call* const call_;
rtc::VideoSinkWants sink_wants_;
// Counter used for deciding if the video resolution is currently
- // restricted by CPU usage. It is reset if |capturer_| is changed.
+ // restricted by CPU usage. It is reset if |source_| is changed.
int cpu_restricted_counter_;
// Total number of times resolution as been requested to be changed due to
// CPU adaptation.
int number_of_cpu_adapt_changes_;
- VideoCapturer* capturer_;
+ rtc::VideoSourceInterface<cricket::VideoFrame>* source_;
WebRtcVideoEncoderFactory* const external_encoder_factory_
GUARDED_BY(lock_);
@@ -381,6 +386,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
// entire channel.
VideoSendStreamParameters parameters_ GUARDED_BY(lock_);
// Contains settings that are unique for each stream, such as max_bitrate.
+ // Does *not* contain codecs, however.
// TODO(skvlad): Move ssrcs_ and ssrc_groups_ into rtp_parameters_.
// TODO(skvlad): Combine parameters_ and rtp_parameters_ once we have only
// one stream per MediaChannel.
@@ -393,14 +399,13 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
webrtc::kVideoRotation_0;
bool sending_ GUARDED_BY(lock_);
- bool muted_ GUARDED_BY(lock_);
// The timestamp of the first frame received
// Used to generate the timestamps of subsequent frames
- int64_t first_frame_timestamp_ms_ GUARDED_BY(lock_);
+ rtc::Optional<int64_t> first_frame_timestamp_ms_ GUARDED_BY(lock_);
// The timestamp of the last frame received
- // Used to generate timestamp for the black frame when capturer is removed
+ // Used to generate timestamp for the black frame when source is removed
int64_t last_frame_timestamp_ms_ GUARDED_BY(lock_);
};
@@ -417,7 +422,8 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
const webrtc::VideoReceiveStream::Config& config,
WebRtcVideoDecoderFactory* external_decoder_factory,
bool default_stream,
- const std::vector<VideoCodecSettings>& recv_codecs);
+ const std::vector<VideoCodecSettings>& recv_codecs,
+ bool red_disabled_by_remote_side);
~WebRtcVideoReceiveStream();
const std::vector<uint32_t>& GetSsrcs() const;
@@ -437,6 +443,14 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
VideoReceiverInfo GetVideoReceiverInfo();
+ // Used to disable RED/FEC when the remote description doesn't contain those
+ // codecs. This is needed to be able to work around an RTX bug which is only
+ // happening if the remote side doesn't send RED, but the local side is
+ // configured to receive RED.
+ // TODO(holmer): Remove this after a couple of Chrome versions, M53-54
+ // time frame.
+ void SetFecDisabledRemotely(bool disable);
+
private:
struct AllocatedDecoder {
AllocatedDecoder(webrtc::VideoDecoder* decoder,
@@ -467,6 +481,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
webrtc::VideoReceiveStream* stream_;
const bool default_stream_;
webrtc::VideoReceiveStream::Config config_;
+ bool red_disabled_by_remote_side_;
WebRtcVideoDecoderFactory* const external_decoder_factory_;
std::vector<AllocatedDecoder> allocated_decoders_;
@@ -537,6 +552,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
VideoSendParameters send_params_;
VideoOptions default_send_options_;
VideoRecvParameters recv_params_;
+ bool red_disabled_by_remote_side_;
};
} // namespace cricket
diff --git a/chromium/third_party/webrtc/media/engine/webrtcvideoengine2_unittest.cc b/chromium/third_party/webrtc/media/engine/webrtcvideoengine2_unittest.cc
index e1f540660f2..fd7a6463b8b 100644
--- a/chromium/third_party/webrtc/media/engine/webrtcvideoengine2_unittest.cc
+++ b/chromium/third_party/webrtc/media/engine/webrtcvideoengine2_unittest.cc
@@ -31,16 +31,16 @@ namespace {
static const int kDefaultQpMax = 56;
static const int kDefaultFramerate = 30;
-static const cricket::VideoCodec kVp8Codec720p(100, "VP8", 1280, 720, 30, 0);
-static const cricket::VideoCodec kVp8Codec360p(100, "VP8", 640, 360, 30, 0);
-static const cricket::VideoCodec kVp8Codec270p(100, "VP8", 480, 270, 30, 0);
+static const cricket::VideoCodec kVp8Codec720p(100, "VP8", 1280, 720, 30);
+static const cricket::VideoCodec kVp8Codec360p(100, "VP8", 640, 360, 30);
+static const cricket::VideoCodec kVp8Codec270p(100, "VP8", 480, 270, 30);
-static const cricket::VideoCodec kVp8Codec(100, "VP8", 640, 400, 30, 0);
-static const cricket::VideoCodec kVp9Codec(101, "VP9", 640, 400, 30, 0);
-static const cricket::VideoCodec kH264Codec(102, "H264", 640, 400, 30, 0);
+static const cricket::VideoCodec kVp8Codec(100, "VP8", 640, 400, 30);
+static const cricket::VideoCodec kVp9Codec(101, "VP9", 640, 400, 30);
+static const cricket::VideoCodec kH264Codec(102, "H264", 640, 400, 30);
-static const cricket::VideoCodec kRedCodec(116, "red", 0, 0, 0, 0);
-static const cricket::VideoCodec kUlpfecCodec(117, "ulpfec", 0, 0, 0, 0);
+static const cricket::VideoCodec kRedCodec(116, "red", 0, 0, 0);
+static const cricket::VideoCodec kUlpfecCodec(117, "ulpfec", 0, 0, 0);
static const uint8_t kRedRtxPayloadType = 125;
@@ -69,11 +69,11 @@ static void CreateBlackFrame(webrtc::VideoFrame* video_frame,
int height) {
video_frame->CreateEmptyFrame(
width, height, width, (width + 1) / 2, (width + 1) / 2);
- memset(video_frame->buffer(webrtc::kYPlane), 16,
+ memset(video_frame->video_frame_buffer()->MutableDataY(), 16,
video_frame->allocated_size(webrtc::kYPlane));
- memset(video_frame->buffer(webrtc::kUPlane), 128,
+ memset(video_frame->video_frame_buffer()->MutableDataU(), 128,
video_frame->allocated_size(webrtc::kUPlane));
- memset(video_frame->buffer(webrtc::kVPlane), 128,
+ memset(video_frame->video_frame_buffer()->MutableDataV(), 128,
video_frame->allocated_size(webrtc::kVPlane));
}
@@ -150,6 +150,21 @@ class WebRtcVideoEngine2Test : public ::testing::Test {
std::map<int, int> default_apt_rtx_types_;
};
+TEST_F(WebRtcVideoEngine2Test, AnnouncesVp9AccordingToBuildFlags) {
+ bool claims_vp9_support = false;
+ for (const cricket::VideoCodec& codec : engine_.codecs()) {
+ if (codec.name == "VP9") {
+ claims_vp9_support = true;
+ break;
+ }
+ }
+#if defined(RTC_DISABLE_VP9)
+ EXPECT_FALSE(claims_vp9_support);
+#else
+ EXPECT_TRUE(claims_vp9_support);
+#endif // defined(RTC_DISABLE_VP9)
+}
+
TEST_F(WebRtcVideoEngine2Test, DefaultRtxCodecHasAssociatedPayloadTypeSet) {
std::vector<VideoCodec> engine_codecs = engine_.codecs();
for (size_t i = 0; i < engine_codecs.size(); ++i) {
@@ -241,7 +256,7 @@ TEST_F(WebRtcVideoEngine2Test, CVOSetHeaderExtensionBeforeCapturer) {
EXPECT_TRUE(channel->SetSendParameters(parameters));
// Set capturer.
- EXPECT_TRUE(channel->SetCapturer(kSsrc, &capturer));
+ channel->SetSource(kSsrc, &capturer);
// Verify capturer has turned off applying rotation.
EXPECT_FALSE(capturer.GetApplyRotation());
@@ -272,7 +287,7 @@ TEST_F(WebRtcVideoEngine2Test, CVOSetHeaderExtensionBeforeAddSendStream) {
EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc)));
// Set capturer.
- EXPECT_TRUE(channel->SetCapturer(kSsrc, &capturer));
+ channel->SetSource(kSsrc, &capturer);
// Verify capturer has turned off applying rotation.
EXPECT_FALSE(capturer.GetApplyRotation());
@@ -293,7 +308,7 @@ TEST_F(WebRtcVideoEngine2Test, CVOSetHeaderExtensionAfterCapturer) {
EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(kSsrc)));
// Set capturer.
- EXPECT_TRUE(channel->SetCapturer(kSsrc, &capturer));
+ channel->SetSource(kSsrc, &capturer);
// Verify capturer has turned on applying rotation.
EXPECT_TRUE(capturer.GetApplyRotation());
@@ -348,19 +363,22 @@ TEST_F(WebRtcVideoEngine2Test, UseExternalFactoryForVp8WhenSupported) {
EXPECT_TRUE(
channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
+ ASSERT_TRUE(encoder_factory.WaitForCreatedVideoEncoders(1));
ASSERT_EQ(1u, encoder_factory.encoders().size());
EXPECT_TRUE(channel->SetSend(true));
cricket::FakeVideoCapturer capturer;
- EXPECT_TRUE(channel->SetCapturer(kSsrc, &capturer));
+ channel->SetSource(kSsrc, &capturer);
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
EXPECT_TRUE(capturer.CaptureFrame());
+ // Sending one frame will have reallocated the encoder since input size
+ // changes from a small default to the actual frame width/height. Wait for
+ // that to happen then for the frame to be sent.
+ ASSERT_TRUE(encoder_factory.WaitForCreatedVideoEncoders(2));
EXPECT_TRUE_WAIT(encoder_factory.encoders()[0]->GetNumEncodedFrames() > 0,
kTimeout);
- // Sending one frame will have reallocated the encoder since input size
- // changes from a small default to the actual frame width/height.
int num_created_encoders = encoder_factory.GetNumCreatedEncoders();
EXPECT_EQ(num_created_encoders, 2);
@@ -411,6 +429,7 @@ TEST_F(WebRtcVideoEngine2Test, DisablesFullEncoderTimeForNonExternalEncoders) {
TestExtendedEncoderOveruse(false);
}
+#if !defined(RTC_DISABLE_VP9)
TEST_F(WebRtcVideoEngine2Test, CanConstructDecoderForVp9EncoderFactory) {
cricket::FakeWebRtcVideoEncoderFactory encoder_factory;
encoder_factory.AddSupportedVideoCodecType(webrtc::kVideoCodecVP9, "VP9");
@@ -423,6 +442,7 @@ TEST_F(WebRtcVideoEngine2Test, CanConstructDecoderForVp9EncoderFactory) {
EXPECT_TRUE(
channel->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc)));
}
+#endif // !defined(RTC_DISABLE_VP9)
TEST_F(WebRtcVideoEngine2Test, PropagatesInputFrameTimestamp) {
cricket::FakeWebRtcVideoEncoderFactory encoder_factory;
@@ -439,7 +459,7 @@ TEST_F(WebRtcVideoEngine2Test, PropagatesInputFrameTimestamp) {
channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
FakeVideoCapturer capturer;
- EXPECT_TRUE(channel->SetCapturer(kSsrc, &capturer));
+ channel->SetSource(kSsrc, &capturer);
capturer.Start(cricket::VideoFormat(1280, 720,
cricket::VideoFormat::FpsToInterval(60),
cricket::FOURCC_I420));
@@ -501,7 +521,7 @@ TEST_F(WebRtcVideoEngine2Test,
FakeVideoSendStream* stream = fake_call->GetVideoSendStreams()[0];
FakeVideoCapturer capturer1;
- EXPECT_TRUE(channel->SetCapturer(kSsrc, &capturer1));
+ channel->SetSource(kSsrc, &capturer1);
cricket::CapturedFrame frame;
frame.width = 1280;
@@ -512,8 +532,8 @@ TEST_F(WebRtcVideoEngine2Test,
std::unique_ptr<char[]> data(new char[frame.data_size]);
frame.data = data.get();
memset(frame.data, 1, frame.data_size);
- const int kInitialTimestamp = 123456;
- frame.time_stamp = kInitialTimestamp;
+ int64_t initial_timestamp = rtc::TimeNanos();
+ frame.time_stamp = initial_timestamp;
// Deliver initial frame.
capturer1.SignalCapturedFrame(&frame);
@@ -526,12 +546,12 @@ TEST_F(WebRtcVideoEngine2Test,
// Reset input source, should still be continuous even though input-frame
// timestamp is less than before.
FakeVideoCapturer capturer2;
- EXPECT_TRUE(channel->SetCapturer(kSsrc, &capturer2));
+ channel->SetSource(kSsrc, &capturer2);
rtc::Thread::Current()->SleepMs(1);
// Deliver with a timestamp (10 seconds) before the previous initial one,
// these should not be related at all anymore and it should still work fine.
- frame.time_stamp = kInitialTimestamp - 10000;
+ frame.time_stamp = initial_timestamp - 10 * rtc::kNumNanosecsPerSec;
capturer2.SignalCapturedFrame(&frame);
// New timestamp should be at least 1ms in the future and not old.
@@ -586,17 +606,18 @@ TEST_F(WebRtcVideoEngine2Test, UsesSimulcastAdapterForVp8Factories) {
EXPECT_TRUE(channel->SetSend(true));
cricket::FakeVideoCapturer capturer;
- EXPECT_TRUE(channel->SetCapturer(ssrcs.front(), &capturer));
+ channel->SetSource(ssrcs.front(), &capturer);
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
EXPECT_TRUE(capturer.CaptureFrame());
- EXPECT_GT(encoder_factory.encoders().size(), 1u);
+ ASSERT_TRUE(encoder_factory.WaitForCreatedVideoEncoders(2));
// Verify that encoders are configured for simulcast through adapter
// (increasing resolution and only configured to send one stream each).
int prev_width = -1;
for (size_t i = 0; i < encoder_factory.encoders().size(); ++i) {
+ ASSERT_TRUE(encoder_factory.encoders()[i]->WaitForInitEncode());
webrtc::VideoCodec codec_settings =
encoder_factory.encoders()[i]->GetCodecSettings();
EXPECT_EQ(0, codec_settings.numberOfSimulcastStreams);
@@ -604,7 +625,7 @@ TEST_F(WebRtcVideoEngine2Test, UsesSimulcastAdapterForVp8Factories) {
prev_width = codec_settings.width;
}
- EXPECT_TRUE(channel->SetCapturer(ssrcs.front(), NULL));
+ channel->SetSource(ssrcs.front(), NULL);
channel.reset();
ASSERT_EQ(0u, encoder_factory.encoders().size());
@@ -667,12 +688,13 @@ TEST_F(WebRtcVideoEngine2Test,
// encoder adapter at a low-enough size that it'll only create a single
// encoder layer.
cricket::FakeVideoCapturer capturer;
- EXPECT_TRUE(channel->SetCapturer(ssrcs.front(), &capturer));
+ channel->SetSource(ssrcs.front(), &capturer);
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
EXPECT_TRUE(capturer.CaptureFrame());
- ASSERT_GT(encoder_factory.encoders().size(), 1u);
+ ASSERT_TRUE(encoder_factory.WaitForCreatedVideoEncoders(2));
+ ASSERT_TRUE(encoder_factory.encoders()[0]->WaitForInitEncode());
EXPECT_EQ(webrtc::kVideoCodecVP8,
encoder_factory.encoders()[0]->GetCodecSettings().codecType);
@@ -696,6 +718,7 @@ TEST_F(WebRtcVideoEngine2Test,
EXPECT_TRUE(
channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc)));
ASSERT_EQ(1u, encoder_factory.encoders().size());
+ ASSERT_TRUE(encoder_factory.encoders()[0]->WaitForInitEncode());
EXPECT_EQ(webrtc::kVideoCodecH264,
encoder_factory.encoders()[0]->GetCodecSettings().codecType);
@@ -721,15 +744,16 @@ TEST_F(WebRtcVideoEngine2Test, SimulcastDisabledForH264) {
cricket::VideoFormat format(
1280, 720, cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420);
cricket::FakeVideoCapturer capturer;
- EXPECT_TRUE(channel->SetCapturer(ssrcs[0], &capturer));
+ channel->SetSource(ssrcs[0], &capturer);
EXPECT_EQ(cricket::CS_RUNNING, capturer.Start(format));
EXPECT_TRUE(capturer.CaptureFrame());
ASSERT_EQ(1u, encoder_factory.encoders().size());
FakeWebRtcVideoEncoder* encoder = encoder_factory.encoders()[0];
+ ASSERT_TRUE(encoder_factory.encoders()[0]->WaitForInitEncode());
EXPECT_EQ(webrtc::kVideoCodecH264, encoder->GetCodecSettings().codecType);
EXPECT_EQ(1u, encoder->GetCodecSettings().numberOfSimulcastStreams);
- EXPECT_TRUE(channel->SetCapturer(ssrcs[0], nullptr));
+ channel->SetSource(ssrcs[0], nullptr);
}
// Test that external codecs are added to the end of the supported codec list.
@@ -849,24 +873,29 @@ WEBRTC_DISABLED_BASE_TEST(AdaptFramerate);
WEBRTC_BASE_TEST(SendsLowerResolutionOnSmallerFrames);
-WEBRTC_BASE_TEST(MuteStream);
-
WEBRTC_BASE_TEST(MultipleSendStreams);
TEST_F(WebRtcVideoChannel2BaseTest, SendAndReceiveVp8Vga) {
- SendAndReceive(cricket::VideoCodec(100, "VP8", 640, 400, 30, 0));
+ SendAndReceive(cricket::VideoCodec(100, "VP8", 640, 400, 30));
}
TEST_F(WebRtcVideoChannel2BaseTest, SendAndReceiveVp8Qvga) {
- SendAndReceive(cricket::VideoCodec(100, "VP8", 320, 200, 30, 0));
+ SendAndReceive(cricket::VideoCodec(100, "VP8", 320, 200, 30));
}
TEST_F(WebRtcVideoChannel2BaseTest, SendAndReceiveVp8SvcQqvga) {
- SendAndReceive(cricket::VideoCodec(100, "VP8", 160, 100, 30, 0));
+ SendAndReceive(cricket::VideoCodec(100, "VP8", 160, 100, 30));
}
TEST_F(WebRtcVideoChannel2BaseTest, TwoStreamsSendAndReceive) {
- Base::TwoStreamsSendAndReceive(kVp8Codec);
+ // Set a high bitrate to not be downscaled by VP8 due to low initial start
+ // bitrates. This currently happens at <250k, and two streams sharing 300k
+ // initially will use QVGA instead of VGA.
+ // TODO(pbos): Set up the quality scaler so that both senders reliably start
+ // at QVGA, then verify that instead.
+ cricket::VideoCodec codec = kVp8Codec;
+ codec.params[kCodecParamStartBitrate] = "1000000";
+ Base::TwoStreamsSendAndReceive(codec);
}
class WebRtcVideoChannel2Test : public WebRtcVideoEngine2Test {
@@ -1097,12 +1126,13 @@ class WebRtcVideoChannel2Test : public WebRtcVideoEngine2Test {
VideoSendParameters limited_send_params = send_parameters_;
limited_send_params.max_bandwidth_bps = global_max;
EXPECT_TRUE(channel_->SetSendParameters(limited_send_params));
- webrtc::RtpParameters parameters = channel_->GetRtpParameters(last_ssrc_);
+ webrtc::RtpParameters parameters =
+ channel_->GetRtpSendParameters(last_ssrc_);
EXPECT_EQ(1UL, parameters.encodings.size());
parameters.encodings[0].max_bitrate_bps = stream_max;
- EXPECT_TRUE(channel_->SetRtpParameters(last_ssrc_, parameters));
+ EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters));
// Read back the parameteres and verify they have the correct value
- parameters = channel_->GetRtpParameters(last_ssrc_);
+ parameters = channel_->GetRtpSendParameters(last_ssrc_);
EXPECT_EQ(1UL, parameters.encodings.size());
EXPECT_EQ(stream_max, parameters.encodings[0].max_bitrate_bps);
// Verify that the new value propagated down to the encoder
@@ -1151,11 +1181,13 @@ TEST_F(WebRtcVideoChannel2Test, RecvStreamWithSimAndRtx) {
// Receiver side.
FakeVideoReceiveStream* recv_stream = AddRecvStream(
cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs));
- ASSERT_GT(recv_stream->GetConfig().rtp.rtx.size(), 0u)
- << "No SSRCs for RTX configured by AddRecvStream.";
- EXPECT_EQ(rtx_ssrcs[0],
- recv_stream->GetConfig().rtp.rtx.begin()->second.ssrc);
- // TODO(pbos): Make sure we set the RTX for correct payloads etc.
+ EXPECT_FALSE(recv_stream->GetConfig().rtp.rtx.empty());
+ EXPECT_EQ(recv_stream->GetConfig().decoders.size(),
+ recv_stream->GetConfig().rtp.rtx.size())
+ << "RTX should be mapped for all decoders/payload types.";
+ for (const auto& kv : recv_stream->GetConfig().rtp.rtx) {
+ EXPECT_EQ(rtx_ssrcs[0], kv.second.ssrc);
+ }
}
TEST_F(WebRtcVideoChannel2Test, RecvStreamWithRtx) {
@@ -1538,7 +1570,7 @@ TEST_F(WebRtcVideoChannel2Test, ReconfiguresEncodersWhenNotSending) {
EXPECT_EQ(144u, streams[0].height);
cricket::FakeVideoCapturer capturer;
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
EXPECT_TRUE(capturer.CaptureFrame());
@@ -1550,7 +1582,7 @@ TEST_F(WebRtcVideoChannel2Test, ReconfiguresEncodersWhenNotSending) {
// No frames should have been actually put in there though.
EXPECT_EQ(0, stream->GetNumberOfSwappedFrames());
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
+ channel_->SetSource(last_ssrc_, NULL);
}
TEST_F(WebRtcVideoChannel2Test, UsesCorrectSettingsForScreencast) {
@@ -1567,7 +1599,7 @@ TEST_F(WebRtcVideoChannel2Test, UsesCorrectSettingsForScreencast) {
channel_->SetVideoSend(last_ssrc_, true, &min_bitrate_options);
cricket::FakeVideoCapturer capturer;
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
cricket::VideoFormat capture_format_hd =
capturer.GetSupportedFormats()->front();
EXPECT_EQ(1280, capture_format_hd.width);
@@ -1591,10 +1623,10 @@ TEST_F(WebRtcVideoChannel2Test, UsesCorrectSettingsForScreencast) {
EXPECT_EQ(0, encoder_config.min_transmit_bitrate_bps)
<< "Non-screenshare shouldn't use min-transmit bitrate.";
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, nullptr));
+ channel_->SetSource(last_ssrc_, nullptr);
// Removing a capturer triggers a black frame to be sent.
EXPECT_EQ(2, send_stream->GetNumberOfSwappedFrames());
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
VideoOptions screencast_options;
screencast_options.is_screencast = rtc::Optional<bool>(true);
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &screencast_options));
@@ -1614,7 +1646,7 @@ TEST_F(WebRtcVideoChannel2Test, UsesCorrectSettingsForScreencast) {
EXPECT_EQ(capture_format_hd.height, encoder_config.streams.front().height);
EXPECT_TRUE(encoder_config.streams[0].temporal_layer_thresholds_bps.empty());
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
+ channel_->SetSource(last_ssrc_, NULL);
}
TEST_F(WebRtcVideoChannel2Test, NoRecreateStreamForScreencast) {
@@ -1624,7 +1656,7 @@ TEST_F(WebRtcVideoChannel2Test, NoRecreateStreamForScreencast) {
EXPECT_TRUE(channel_->SetSend(true));
cricket::FakeVideoCapturer capturer;
- EXPECT_TRUE(channel_->SetCapturer(kSsrc, &capturer));
+ channel_->SetSource(kSsrc, &capturer);
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
EXPECT_TRUE(capturer.CaptureFrame());
@@ -1665,7 +1697,7 @@ TEST_F(WebRtcVideoChannel2Test, NoRecreateStreamForScreencast) {
EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo,
encoder_config.content_type);
- EXPECT_TRUE(channel_->SetCapturer(kSsrc, NULL));
+ channel_->SetSource(kSsrc, NULL);
}
TEST_F(WebRtcVideoChannel2Test,
@@ -1680,7 +1712,7 @@ TEST_F(WebRtcVideoChannel2Test,
options.is_screencast = rtc::Optional<bool>(true);
channel_->SetVideoSend(last_ssrc_, true, &options);
cricket::FakeVideoCapturer capturer;
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
cricket::VideoFormat capture_format_hd =
capturer.GetSupportedFormats()->front();
EXPECT_EQ(cricket::CS_RUNNING, capturer.Start(capture_format_hd));
@@ -1702,7 +1734,7 @@ TEST_F(WebRtcVideoChannel2Test,
EXPECT_EQ(kConferenceScreencastTemporalBitrateBps,
encoder_config.streams[0].temporal_layer_thresholds_bps[0]);
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
+ channel_->SetSource(last_ssrc_, NULL);
}
TEST_F(WebRtcVideoChannel2Test, SuspendBelowMinBitrateDisabledByDefault) {
@@ -1752,7 +1784,7 @@ TEST_F(WebRtcVideoChannel2Test, VerifyVp8SpecificSettings) {
cricket::FakeVideoCapturer capturer;
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
channel_->SetSend(true);
EXPECT_TRUE(capturer.CaptureFrame());
@@ -1776,9 +1808,9 @@ TEST_F(WebRtcVideoChannel2Test, VerifyVp8SpecificSettings) {
EXPECT_TRUE(vp8_settings.automaticResizeOn);
EXPECT_TRUE(vp8_settings.frameDroppingOn);
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
+ channel_->SetSource(last_ssrc_, NULL);
stream = SetUpSimulcast(true, false);
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
channel_->SetSend(true);
EXPECT_TRUE(capturer.CaptureFrame());
@@ -1809,7 +1841,7 @@ TEST_F(WebRtcVideoChannel2Test, VerifyVp8SpecificSettings) {
EXPECT_FALSE(vp8_settings.automaticResizeOn);
EXPECT_FALSE(vp8_settings.frameDroppingOn);
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
+ channel_->SetSource(last_ssrc_, NULL);
}
// Test that setting the same options doesn't result in the encoder being
@@ -1819,7 +1851,7 @@ TEST_F(WebRtcVideoChannel2Test, SetIdenticalOptionsDoesntReconfigureEncoder) {
cricket::FakeVideoCapturer capturer;
FakeVideoSendStream* send_stream = AddSendStream();
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options));
@@ -1833,7 +1865,7 @@ TEST_F(WebRtcVideoChannel2Test, SetIdenticalOptionsDoesntReconfigureEncoder) {
EXPECT_TRUE(capturer.CaptureFrame());
EXPECT_EQ(2, send_stream->num_encoder_reconfigurations());
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, nullptr));
+ channel_->SetSource(last_ssrc_, nullptr);
}
class Vp9SettingsTest : public WebRtcVideoChannel2Test {
@@ -1871,7 +1903,7 @@ TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) {
cricket::FakeVideoCapturer capturer;
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
channel_->SetSend(true);
EXPECT_TRUE(capturer.CaptureFrame());
@@ -1912,7 +1944,7 @@ TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) {
EXPECT_FALSE(vp9_settings.denoisingOn);
EXPECT_FALSE(vp9_settings.frameDroppingOn);
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
+ channel_->SetSource(last_ssrc_, NULL);
}
class Vp9SettingsTestWithFieldTrial : public Vp9SettingsTest {
@@ -1931,7 +1963,7 @@ class Vp9SettingsTestWithFieldTrial : public Vp9SettingsTest {
cricket::FakeVideoCapturer capturer;
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
channel_->SetSend(true);
EXPECT_TRUE(capturer.CaptureFrame());
@@ -1941,7 +1973,7 @@ class Vp9SettingsTestWithFieldTrial : public Vp9SettingsTest {
EXPECT_EQ(num_spatial_layers, vp9_settings.numberOfSpatialLayers);
EXPECT_EQ(num_temporal_layers, vp9_settings.numberOfTemporalLayers);
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
+ channel_->SetSource(last_ssrc_, NULL);
}
};
@@ -2006,7 +2038,7 @@ TEST_F(WebRtcVideoChannel2Test, AdaptsOnOveruseAndChangeResolution) {
AddSendStream();
cricket::FakeVideoCapturer capturer;
- ASSERT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
ASSERT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
ASSERT_TRUE(channel_->SetSend(true));
@@ -2064,7 +2096,7 @@ TEST_F(WebRtcVideoChannel2Test, AdaptsOnOveruseAndChangeResolution) {
EXPECT_EQ(1284, send_stream->GetLastWidth());
EXPECT_EQ(724, send_stream->GetLastHeight());
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
+ channel_->SetSource(last_ssrc_, NULL);
}
TEST_F(WebRtcVideoChannel2Test, PreviousAdaptationDoesNotApplyToScreenshare) {
@@ -2080,7 +2112,7 @@ TEST_F(WebRtcVideoChannel2Test, PreviousAdaptationDoesNotApplyToScreenshare) {
AddSendStream();
cricket::FakeVideoCapturer capturer;
- ASSERT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
ASSERT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
ASSERT_TRUE(channel_->SetSend(true));
@@ -2109,7 +2141,7 @@ TEST_F(WebRtcVideoChannel2Test, PreviousAdaptationDoesNotApplyToScreenshare) {
cricket::FakeVideoCapturer screen_share(true);
ASSERT_EQ(cricket::CS_RUNNING,
screen_share.Start(screen_share.GetSupportedFormats()->front()));
- ASSERT_TRUE(channel_->SetCapturer(last_ssrc_, &screen_share));
+ channel_->SetSource(last_ssrc_, &screen_share);
cricket::VideoOptions screenshare_options;
screenshare_options.is_screencast = rtc::Optional<bool>(true);
channel_->SetVideoSend(last_ssrc_, true /* enable */, &screenshare_options);
@@ -2119,14 +2151,14 @@ TEST_F(WebRtcVideoChannel2Test, PreviousAdaptationDoesNotApplyToScreenshare) {
EXPECT_EQ(724, send_stream->GetLastHeight());
// Switch back to the normal capturer. Expect the frame to be CPU adapted.
- ASSERT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
channel_->SetVideoSend(last_ssrc_, true /* enable */, &camera_options);
EXPECT_TRUE(capturer.CaptureCustomFrame(1280, 720, cricket::FOURCC_I420));
EXPECT_EQ(4, send_stream->GetNumberOfSwappedFrames());
EXPECT_EQ(1280 * 3 / 4, send_stream->GetLastWidth());
EXPECT_EQ(720 * 3 / 4, send_stream->GetLastHeight());
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
+ channel_->SetSource(last_ssrc_, NULL);
}
void WebRtcVideoChannel2Test::TestCpuAdaptation(bool enable_overuse,
@@ -2151,7 +2183,7 @@ void WebRtcVideoChannel2Test::TestCpuAdaptation(bool enable_overuse,
EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, true, &options));
cricket::FakeVideoCapturer capturer;
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
@@ -2172,7 +2204,7 @@ void WebRtcVideoChannel2Test::TestCpuAdaptation(bool enable_overuse,
EXPECT_EQ(codec.width, send_stream->GetLastWidth());
EXPECT_EQ(codec.height, send_stream->GetLastHeight());
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
+ channel_->SetSource(last_ssrc_, NULL);
return;
}
@@ -2201,7 +2233,7 @@ void WebRtcVideoChannel2Test::TestCpuAdaptation(bool enable_overuse,
EXPECT_EQ(codec.width, send_stream->GetLastWidth());
EXPECT_EQ(codec.height, send_stream->GetLastHeight());
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
+ channel_->SetSource(last_ssrc_, NULL);
}
TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeCorrectly) {
@@ -2280,7 +2312,7 @@ TEST_F(WebRtcVideoChannel2Test, SetSendCodecsWithoutFec) {
TEST_F(WebRtcVideoChannel2Test,
SetSendCodecRejectsRtxWithoutAssociatedPayloadType) {
cricket::VideoSendParameters parameters;
- cricket::VideoCodec rtx_codec(96, "rtx", 0, 0, 0, 0);
+ cricket::VideoCodec rtx_codec(96, "rtx", 0, 0, 0);
parameters.codecs.push_back(rtx_codec);
EXPECT_FALSE(channel_->SetSendParameters(parameters))
<< "RTX codec without associated payload type should be rejected.";
@@ -2332,7 +2364,7 @@ TEST_F(WebRtcVideoChannel2Test, SetSendCodecsChangesExistingStreams) {
FakeVideoSendStream* stream = AddSendStream();
cricket::FakeVideoCapturer capturer;
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
EXPECT_TRUE(capturer.CaptureFrame());
@@ -2347,7 +2379,7 @@ TEST_F(WebRtcVideoChannel2Test, SetSendCodecsChangesExistingStreams) {
streams = fake_call_->GetVideoSendStreams()[0]->GetVideoStreams();
EXPECT_EQ(kVp8Codec360p.width, streams[0].width);
EXPECT_EQ(kVp8Codec360p.height, streams[0].height);
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
+ channel_->SetSource(last_ssrc_, NULL);
}
TEST_F(WebRtcVideoChannel2Test, SetSendCodecsWithBitrates) {
@@ -2378,6 +2410,43 @@ TEST_F(WebRtcVideoChannel2Test, SetSendCodecsRejectsMaxLessThanMinBitrate) {
EXPECT_FALSE(channel_->SetSendParameters(send_parameters_));
}
+// Test that when both the codec-specific bitrate params and max_bandwidth_bps
+// are present in the same send parameters, the settings are combined correctly.
+TEST_F(WebRtcVideoChannel2Test, SetSendCodecsWithBitratesAndMaxSendBandwidth) {
+ send_parameters_.codecs[0].params[kCodecParamMinBitrate] = "100";
+ send_parameters_.codecs[0].params[kCodecParamStartBitrate] = "200";
+ send_parameters_.codecs[0].params[kCodecParamMaxBitrate] = "300";
+ send_parameters_.max_bandwidth_bps = 400000;
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+ EXPECT_EQ(100000, fake_call_->GetConfig().bitrate_config.min_bitrate_bps);
+ EXPECT_EQ(200000, fake_call_->GetConfig().bitrate_config.start_bitrate_bps);
+ // We expect max_bandwidth_bps to take priority, if set.
+ EXPECT_EQ(400000, fake_call_->GetConfig().bitrate_config.max_bitrate_bps);
+
+ // Decrease max_bandwidth_bps.
+ send_parameters_.max_bandwidth_bps = 350000;
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+ EXPECT_EQ(100000, fake_call_->GetConfig().bitrate_config.min_bitrate_bps);
+ // Since the codec isn't changing, start_bitrate_bps should be -1.
+ EXPECT_EQ(-1, fake_call_->GetConfig().bitrate_config.start_bitrate_bps);
+ EXPECT_EQ(350000, fake_call_->GetConfig().bitrate_config.max_bitrate_bps);
+
+ // Now try again with the values flipped around.
+ send_parameters_.codecs[0].params[kCodecParamMaxBitrate] = "400";
+ send_parameters_.max_bandwidth_bps = 300000;
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+ EXPECT_EQ(100000, fake_call_->GetConfig().bitrate_config.min_bitrate_bps);
+ EXPECT_EQ(200000, fake_call_->GetConfig().bitrate_config.start_bitrate_bps);
+ EXPECT_EQ(300000, fake_call_->GetConfig().bitrate_config.max_bitrate_bps);
+
+ // If we change the codec max, max_bandwidth_bps should still apply.
+ send_parameters_.codecs[0].params[kCodecParamMaxBitrate] = "350";
+ EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
+ EXPECT_EQ(100000, fake_call_->GetConfig().bitrate_config.min_bitrate_bps);
+ EXPECT_EQ(200000, fake_call_->GetConfig().bitrate_config.start_bitrate_bps);
+ EXPECT_EQ(300000, fake_call_->GetConfig().bitrate_config.max_bitrate_bps);
+}
+
TEST_F(WebRtcVideoChannel2Test,
SetMaxSendBandwidthShouldPreserveOtherBitrates) {
SetSendCodecsShouldWorkForBitrates("100", 100000, "150", 150000, "200",
@@ -2411,7 +2480,7 @@ TEST_F(WebRtcVideoChannel2Test, SetMaxSendBitrateCanIncreaseSenderBitrate) {
FakeVideoSendStream* stream = AddSendStream();
cricket::FakeVideoCapturer capturer;
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
@@ -2425,7 +2494,7 @@ TEST_F(WebRtcVideoChannel2Test, SetMaxSendBitrateCanIncreaseSenderBitrate) {
EXPECT_TRUE(capturer.CaptureFrame());
streams = stream->GetVideoStreams();
EXPECT_EQ(initial_max_bitrate_bps * 2, streams[0].max_bitrate_bps);
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, nullptr));
+ channel_->SetSource(last_ssrc_, nullptr);
}
TEST_F(WebRtcVideoChannel2Test,
@@ -2440,7 +2509,7 @@ TEST_F(WebRtcVideoChannel2Test,
// Send a frame to make sure this scales up to >1 stream (simulcast).
cricket::FakeVideoCapturer capturer;
- EXPECT_TRUE(channel_->SetCapturer(kSsrcs3[0], &capturer));
+ channel_->SetSource(kSsrcs3[0], &capturer);
EXPECT_EQ(cricket::CS_RUNNING,
capturer.Start(capturer.GetSupportedFormats()->front()));
EXPECT_TRUE(capturer.CaptureFrame());
@@ -2459,7 +2528,7 @@ TEST_F(WebRtcVideoChannel2Test,
int increased_max_bitrate_bps = GetTotalMaxBitrateBps(streams);
EXPECT_EQ(initial_max_bitrate_bps * 2, increased_max_bitrate_bps);
- EXPECT_TRUE(channel_->SetCapturer(kSsrcs3[0], nullptr));
+ channel_->SetSource(kSsrcs3[0], nullptr);
}
TEST_F(WebRtcVideoChannel2Test, SetSendCodecsWithMaxQuantization) {
@@ -2512,6 +2581,26 @@ TEST_F(WebRtcVideoChannel2Test, SetSendCodecsAcceptAllValidPayloadTypes) {
}
}
+// Test that setting the a different set of codecs but with an identical front
+// codec doesn't result in the stream being recreated.
+// This may happen when a subsequent negotiation includes fewer codecs, as a
+// result of one of the codecs being rejected.
+TEST_F(WebRtcVideoChannel2Test,
+ SetSendCodecsIdenticalFirstCodecDoesntRecreateStream) {
+ cricket::VideoSendParameters parameters1;
+ parameters1.codecs.push_back(kVp8Codec);
+ parameters1.codecs.push_back(kVp9Codec);
+ EXPECT_TRUE(channel_->SetSendParameters(parameters1));
+
+ AddSendStream();
+ EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams());
+
+ cricket::VideoSendParameters parameters2;
+ parameters2.codecs.push_back(kVp8Codec);
+ EXPECT_TRUE(channel_->SetSendParameters(parameters2));
+ EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams());
+}
+
TEST_F(WebRtcVideoChannel2Test, SetRecvCodecsWithOnlyVp8) {
cricket::VideoRecvParameters parameters;
parameters.codecs.push_back(kVp8Codec);
@@ -2522,7 +2611,7 @@ TEST_F(WebRtcVideoChannel2Test, SetRecvCodecsWithOnlyVp8) {
TEST_F(WebRtcVideoChannel2Test, SetRecvCodecsWithRtx) {
cricket::VideoRecvParameters parameters;
parameters.codecs.push_back(kVp8Codec);
- cricket::VideoCodec rtx_codec(96, "rtx", 0, 0, 0, 0);
+ cricket::VideoCodec rtx_codec(96, "rtx", 0, 0, 0);
parameters.codecs.push_back(rtx_codec);
EXPECT_FALSE(channel_->SetRecvParameters(parameters))
<< "RTX codec without associated payload should be rejected.";
@@ -2534,7 +2623,7 @@ TEST_F(WebRtcVideoChannel2Test, SetRecvCodecsWithRtx) {
parameters.codecs[1].SetParam("apt", kVp8Codec.id);
EXPECT_TRUE(channel_->SetRecvParameters(parameters));
- cricket::VideoCodec rtx_codec2(97, "rtx", 0, 0, 0, 0);
+ cricket::VideoCodec rtx_codec2(97, "rtx", 0, 0, 0);
rtx_codec2.SetParam("apt", rtx_codec.id);
parameters.codecs.push_back(rtx_codec2);
@@ -2564,7 +2653,7 @@ TEST_F(WebRtcVideoChannel2Test, SetRecvCodecsAcceptDefaultCodecs) {
TEST_F(WebRtcVideoChannel2Test, SetRecvCodecsRejectUnsupportedCodec) {
cricket::VideoRecvParameters parameters;
parameters.codecs.push_back(kVp8Codec);
- parameters.codecs.push_back(VideoCodec(101, "WTF3", 640, 400, 30, 0));
+ parameters.codecs.push_back(VideoCodec(101, "WTF3", 640, 400, 30));
EXPECT_FALSE(channel_->SetRecvParameters(parameters));
}
@@ -2589,6 +2678,7 @@ TEST_F(WebRtcVideoChannel2Test,
TEST_F(WebRtcVideoChannel2Test, SetRecvCodecsWithoutFecDisablesFec) {
cricket::VideoSendParameters send_parameters;
send_parameters.codecs.push_back(kVp8Codec);
+ send_parameters.codecs.push_back(kRedCodec);
send_parameters.codecs.push_back(kUlpfecCodec);
ASSERT_TRUE(channel_->SetSendParameters(send_parameters));
@@ -2607,6 +2697,41 @@ TEST_F(WebRtcVideoChannel2Test, SetRecvCodecsWithoutFecDisablesFec) {
<< "SetSendCodec without FEC should disable current FEC.";
}
+TEST_F(WebRtcVideoChannel2Test, SetSendParamsWithoutFecDisablesReceivingFec) {
+ FakeVideoReceiveStream* stream = AddRecvStream();
+ webrtc::VideoReceiveStream::Config config = stream->GetConfig();
+
+ EXPECT_EQ(kUlpfecCodec.id, config.rtp.fec.ulpfec_payload_type);
+
+ cricket::VideoRecvParameters recv_parameters;
+ recv_parameters.codecs.push_back(kVp8Codec);
+ recv_parameters.codecs.push_back(kRedCodec);
+ recv_parameters.codecs.push_back(kUlpfecCodec);
+ ASSERT_TRUE(channel_->SetRecvParameters(recv_parameters));
+ stream = fake_call_->GetVideoReceiveStreams()[0];
+ ASSERT_TRUE(stream != NULL);
+ config = stream->GetConfig();
+ EXPECT_EQ(kUlpfecCodec.id, config.rtp.fec.ulpfec_payload_type)
+ << "FEC should be enabled on the recieve stream.";
+
+ cricket::VideoSendParameters send_parameters;
+ send_parameters.codecs.push_back(kVp8Codec);
+ ASSERT_TRUE(channel_->SetSendParameters(send_parameters));
+ stream = fake_call_->GetVideoReceiveStreams()[0];
+ config = stream->GetConfig();
+ EXPECT_EQ(-1, config.rtp.fec.ulpfec_payload_type)
+ << "FEC should have been disabled when we know the other side won't do "
+ "FEC.";
+
+ send_parameters.codecs.push_back(kRedCodec);
+ send_parameters.codecs.push_back(kUlpfecCodec);
+ ASSERT_TRUE(channel_->SetSendParameters(send_parameters));
+ stream = fake_call_->GetVideoReceiveStreams()[0];
+ config = stream->GetConfig();
+ EXPECT_EQ(kUlpfecCodec.id, config.rtp.fec.ulpfec_payload_type)
+ << "FEC should be enabled on the recieve stream.";
+}
+
TEST_F(WebRtcVideoChannel2Test, SetSendCodecsRejectDuplicateFecPayloads) {
cricket::VideoRecvParameters parameters;
parameters.codecs.push_back(kVp8Codec);
@@ -2632,10 +2757,10 @@ TEST_F(WebRtcVideoChannel2Test,
EXPECT_TRUE(channel_->SetRecvParameters(parameters));
}
-// Test that setting the same codecs but with a different order and preference
+// Test that setting the same codecs but with a different order
// doesn't result in the stream being recreated.
TEST_F(WebRtcVideoChannel2Test,
- SetRecvCodecsDifferentOrderAndPreferenceDoesntRecreateStream) {
+ SetRecvCodecsDifferentOrderDoesntRecreateStream) {
cricket::VideoRecvParameters parameters1;
parameters1.codecs.push_back(kVp8Codec);
parameters1.codecs.push_back(kRedCodec);
@@ -2647,7 +2772,6 @@ TEST_F(WebRtcVideoChannel2Test,
cricket::VideoRecvParameters parameters2;
parameters2.codecs.push_back(kRedCodec);
parameters2.codecs.push_back(kVp8Codec);
- parameters2.codecs[1].preference += 1;
EXPECT_TRUE(channel_->SetRecvParameters(parameters2));
EXPECT_EQ(1, fake_call_->GetNumCreatedReceiveStreams());
}
@@ -2829,10 +2953,10 @@ TEST_F(WebRtcVideoChannel2Test, GetStatsTracksAdaptationStats) {
video_capturer_vga.GetSupportedFormats();
cricket::VideoFormat capture_format_vga = (*formats)[1];
EXPECT_EQ(cricket::CS_RUNNING, video_capturer_vga.Start(capture_format_vga));
- EXPECT_TRUE(channel_->SetCapturer(kSsrcs3[0], &video_capturer_vga));
+ channel_->SetSource(kSsrcs3[0], &video_capturer_vga);
EXPECT_TRUE(video_capturer_vga.CaptureFrame());
- cricket::VideoCodec send_codec(100, "VP8", 640, 480, 30, 0);
+ cricket::VideoCodec send_codec(100, "VP8", 640, 480, 30);
cricket::VideoSendParameters parameters;
parameters.codecs.push_back(send_codec);
EXPECT_TRUE(channel_->SetSendParameters(parameters));
@@ -2866,7 +2990,7 @@ TEST_F(WebRtcVideoChannel2Test, GetStatsTracksAdaptationStats) {
info.senders[0].adapt_reason);
// No capturer (no adapter). Adapt changes from old adapter should be kept.
- EXPECT_TRUE(channel_->SetCapturer(kSsrcs3[0], NULL));
+ channel_->SetSource(kSsrcs3[0], NULL);
info.Clear();
EXPECT_TRUE(channel_->GetStats(&info));
ASSERT_EQ(1U, info.senders.size());
@@ -2878,7 +3002,7 @@ TEST_F(WebRtcVideoChannel2Test, GetStatsTracksAdaptationStats) {
cricket::FakeVideoCapturer video_capturer_hd;
cricket::VideoFormat capture_format_hd = (*formats)[0];
EXPECT_EQ(cricket::CS_RUNNING, video_capturer_hd.Start(capture_format_hd));
- EXPECT_TRUE(channel_->SetCapturer(kSsrcs3[0], &video_capturer_hd));
+ channel_->SetSource(kSsrcs3[0], &video_capturer_hd);
EXPECT_TRUE(video_capturer_hd.CaptureFrame());
// Trigger overuse, HD -> adapt (OnCpuResolutionRequest downgrade) -> HD/2.
@@ -2890,7 +3014,7 @@ TEST_F(WebRtcVideoChannel2Test, GetStatsTracksAdaptationStats) {
EXPECT_EQ(3, info.senders[0].adapt_changes);
EXPECT_EQ(WebRtcVideoChannel2::ADAPTREASON_CPU, info.senders[0].adapt_reason);
- EXPECT_TRUE(channel_->SetCapturer(kSsrcs3[0], NULL));
+ channel_->SetSource(kSsrcs3[0], NULL);
}
TEST_F(WebRtcVideoChannel2Test, GetStatsTracksAdaptationAndBandwidthStats) {
@@ -2902,10 +3026,10 @@ TEST_F(WebRtcVideoChannel2Test, GetStatsTracksAdaptationAndBandwidthStats) {
video_capturer_vga.GetSupportedFormats();
cricket::VideoFormat capture_format_vga = (*formats)[1];
EXPECT_EQ(cricket::CS_RUNNING, video_capturer_vga.Start(capture_format_vga));
- EXPECT_TRUE(channel_->SetCapturer(kSsrcs3[0], &video_capturer_vga));
+ channel_->SetSource(kSsrcs3[0], &video_capturer_vga);
EXPECT_TRUE(video_capturer_vga.CaptureFrame());
- cricket::VideoCodec send_codec(100, "VP8", 640, 480, 30, 0);
+ cricket::VideoCodec send_codec(100, "VP8", 640, 480, 30);
cricket::VideoSendParameters parameters;
parameters.codecs.push_back(send_codec);
EXPECT_TRUE(channel_->SetSendParameters(parameters));
@@ -2954,7 +3078,7 @@ TEST_F(WebRtcVideoChannel2Test, GetStatsTracksAdaptationAndBandwidthStats) {
EXPECT_EQ(WebRtcVideoChannel2::ADAPTREASON_NONE,
info.senders[0].adapt_reason);
- EXPECT_TRUE(channel_->SetCapturer(kSsrcs3[0], NULL));
+ channel_->SetSource(kSsrcs3[0], NULL);
}
TEST_F(WebRtcVideoChannel2Test,
@@ -3131,7 +3255,7 @@ TEST_F(WebRtcVideoChannel2Test, DefaultReceiveStreamReconfiguresToUseRtx) {
ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size())
<< "No default receive stream created.";
FakeVideoReceiveStream* recv_stream = fake_call_->GetVideoReceiveStreams()[0];
- EXPECT_EQ(0u, recv_stream->GetConfig().rtp.rtx.size())
+ EXPECT_TRUE(recv_stream->GetConfig().rtp.rtx.empty())
<< "Default receive stream should not have configured RTX";
EXPECT_TRUE(channel_->AddRecvStream(
@@ -3139,9 +3263,13 @@ TEST_F(WebRtcVideoChannel2Test, DefaultReceiveStreamReconfiguresToUseRtx) {
ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size())
<< "AddRecvStream should've reconfigured, not added a new receiver.";
recv_stream = fake_call_->GetVideoReceiveStreams()[0];
- ASSERT_GE(2u, recv_stream->GetConfig().rtp.rtx.size());
- EXPECT_EQ(rtx_ssrcs[0],
- recv_stream->GetConfig().rtp.rtx.begin()->second.ssrc);
+ EXPECT_FALSE(recv_stream->GetConfig().rtp.rtx.empty());
+ EXPECT_EQ(recv_stream->GetConfig().decoders.size(),
+ recv_stream->GetConfig().rtp.rtx.size())
+ << "RTX should be mapped for all decoders/payload types.";
+ for (const auto& kv : recv_stream->GetConfig().rtp.rtx) {
+ EXPECT_EQ(rtx_ssrcs[0], kv.second.ssrc);
+ }
}
TEST_F(WebRtcVideoChannel2Test, RejectsAddingStreamsWithMissingSsrcsForRtx) {
@@ -3316,7 +3444,7 @@ TEST_F(WebRtcVideoChannel2Test, CanSentMaxBitrateForExistingStream) {
AddSendStream();
cricket::FakeVideoCapturer capturer;
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, &capturer));
+ channel_->SetSource(last_ssrc_, &capturer);
cricket::VideoFormat capture_format_hd =
capturer.GetSupportedFormats()->front();
EXPECT_EQ(1280, capture_format_hd.width);
@@ -3341,20 +3469,21 @@ TEST_F(WebRtcVideoChannel2Test, CanSentMaxBitrateForExistingStream) {
SetAndExpectMaxBitrate(capturer, 0, 800, 800);
SetAndExpectMaxBitrate(capturer, 0, 0, default_encoder_bitrate);
- EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
+ channel_->SetSource(last_ssrc_, NULL);
}
TEST_F(WebRtcVideoChannel2Test, CannotSetMaxBitrateForNonexistentStream) {
webrtc::RtpParameters nonexistent_parameters =
- channel_->GetRtpParameters(last_ssrc_);
+ channel_->GetRtpSendParameters(last_ssrc_);
EXPECT_EQ(0, nonexistent_parameters.encodings.size());
nonexistent_parameters.encodings.push_back(webrtc::RtpEncodingParameters());
- EXPECT_FALSE(channel_->SetRtpParameters(last_ssrc_, nonexistent_parameters));
+ EXPECT_FALSE(
+ channel_->SetRtpSendParameters(last_ssrc_, nonexistent_parameters));
}
TEST_F(WebRtcVideoChannel2Test,
- CannotSetRtpParametersWithIncorrectNumberOfEncodings) {
+ CannotSetRtpSendParametersWithIncorrectNumberOfEncodings) {
// This test verifies that setting RtpParameters succeeds only if
// the structure contains exactly one encoding.
// TODO(skvlad): Update this test when we start supporting setting parameters
@@ -3362,39 +3491,107 @@ TEST_F(WebRtcVideoChannel2Test,
AddSendStream();
// Setting RtpParameters with no encoding is expected to fail.
- webrtc::RtpParameters parameters;
- EXPECT_FALSE(channel_->SetRtpParameters(last_ssrc_, parameters));
+ webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_);
+ parameters.encodings.clear();
+ EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters));
// Setting RtpParameters with exactly one encoding should succeed.
parameters.encodings.push_back(webrtc::RtpEncodingParameters());
- EXPECT_TRUE(channel_->SetRtpParameters(last_ssrc_, parameters));
+ EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters));
// Two or more encodings should result in failure.
parameters.encodings.push_back(webrtc::RtpEncodingParameters());
- EXPECT_FALSE(channel_->SetRtpParameters(last_ssrc_, parameters));
+ EXPECT_FALSE(channel_->SetRtpSendParameters(last_ssrc_, parameters));
}
// Test that a stream will not be sending if its encoding is made
-// inactive through SetRtpParameters.
+// inactive through SetRtpSendParameters.
// TODO(deadbeef): Update this test when we start supporting setting parameters
// for each encoding individually.
-TEST_F(WebRtcVideoChannel2Test, SetRtpParametersEncodingsActive) {
+TEST_F(WebRtcVideoChannel2Test, SetRtpSendParametersEncodingsActive) {
FakeVideoSendStream* stream = AddSendStream();
EXPECT_TRUE(channel_->SetSend(true));
EXPECT_TRUE(stream->IsSending());
// Get current parameters and change "active" to false.
- webrtc::RtpParameters parameters = channel_->GetRtpParameters(last_ssrc_);
+ webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(last_ssrc_);
ASSERT_EQ(1u, parameters.encodings.size());
ASSERT_TRUE(parameters.encodings[0].active);
parameters.encodings[0].active = false;
- EXPECT_TRUE(channel_->SetRtpParameters(last_ssrc_, parameters));
+ EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters));
EXPECT_FALSE(stream->IsSending());
// Now change it back to active and verify we resume sending.
parameters.encodings[0].active = true;
- EXPECT_TRUE(channel_->SetRtpParameters(last_ssrc_, parameters));
+ EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, parameters));
EXPECT_TRUE(stream->IsSending());
}
+// Test that GetRtpSendParameters returns the currently configured codecs.
+TEST_F(WebRtcVideoChannel2Test, GetRtpSendParametersCodecs) {
+ AddSendStream();
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(kVp8Codec);
+ parameters.codecs.push_back(kVp9Codec);
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+
+ webrtc::RtpParameters rtp_parameters =
+ channel_->GetRtpSendParameters(last_ssrc_);
+ ASSERT_EQ(2u, rtp_parameters.codecs.size());
+ EXPECT_EQ(kVp8Codec.ToCodecParameters(), rtp_parameters.codecs[0]);
+ EXPECT_EQ(kVp9Codec.ToCodecParameters(), rtp_parameters.codecs[1]);
+}
+
+// Test that if we set/get parameters multiple times, we get the same results.
+TEST_F(WebRtcVideoChannel2Test, SetAndGetRtpSendParameters) {
+ AddSendStream();
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(kVp8Codec);
+ parameters.codecs.push_back(kVp9Codec);
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+
+ webrtc::RtpParameters initial_params =
+ channel_->GetRtpSendParameters(last_ssrc_);
+
+ // We should be able to set the params we just got.
+ EXPECT_TRUE(channel_->SetRtpSendParameters(last_ssrc_, initial_params));
+
+ // ... And this shouldn't change the params returned by GetRtpSendParameters.
+ EXPECT_EQ(initial_params, channel_->GetRtpSendParameters(last_ssrc_));
+}
+
+// Test that GetRtpReceiveParameters returns the currently configured codecs.
+TEST_F(WebRtcVideoChannel2Test, GetRtpReceiveParametersCodecs) {
+ AddRecvStream();
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs.push_back(kVp8Codec);
+ parameters.codecs.push_back(kVp9Codec);
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+
+ webrtc::RtpParameters rtp_parameters =
+ channel_->GetRtpReceiveParameters(last_ssrc_);
+ ASSERT_EQ(2u, rtp_parameters.codecs.size());
+ EXPECT_EQ(kVp8Codec.ToCodecParameters(), rtp_parameters.codecs[0]);
+ EXPECT_EQ(kVp9Codec.ToCodecParameters(), rtp_parameters.codecs[1]);
+}
+
+// Test that if we set/get parameters multiple times, we get the same results.
+TEST_F(WebRtcVideoChannel2Test, SetAndGetRtpReceiveParameters) {
+ AddRecvStream();
+ cricket::VideoRecvParameters parameters;
+ parameters.codecs.push_back(kVp8Codec);
+ parameters.codecs.push_back(kVp9Codec);
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+
+ webrtc::RtpParameters initial_params =
+ channel_->GetRtpReceiveParameters(last_ssrc_);
+
+ // We should be able to set the params we just got.
+ EXPECT_TRUE(channel_->SetRtpReceiveParameters(last_ssrc_, initial_params));
+
+ // ... And this shouldn't change the params returned by
+ // GetRtpReceiveParameters.
+ EXPECT_EQ(initial_params, channel_->GetRtpReceiveParameters(last_ssrc_));
+}
+
void WebRtcVideoChannel2Test::TestReceiverLocalSsrcConfiguration(
bool receiver_first) {
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
@@ -3475,7 +3672,7 @@ class WebRtcVideoChannel2SimulcastTest : public testing::Test {
// Send a full-size frame to trigger a stream reconfiguration to use all
// expected simulcast layers.
cricket::FakeVideoCapturer capturer;
- EXPECT_TRUE(channel_->SetCapturer(ssrcs.front(), &capturer));
+ channel_->SetSource(ssrcs.front(), &capturer);
EXPECT_EQ(cricket::CS_RUNNING, capturer.Start(cricket::VideoFormat(
codec.width, codec.height,
cricket::VideoFormat::FpsToInterval(30),
@@ -3532,7 +3729,7 @@ class WebRtcVideoChannel2SimulcastTest : public testing::Test {
ASSERT_EQ(1u, info.senders.size());
EXPECT_EQ(total_max_bitrate_bps, info.senders[0].preferred_bitrate);
- EXPECT_TRUE(channel_->SetCapturer(ssrcs.front(), NULL));
+ channel_->SetSource(ssrcs.front(), NULL);
}
FakeVideoSendStream* AddSendStream() {
diff --git a/chromium/third_party/webrtc/media/engine/webrtcvideoframe.cc b/chromium/third_party/webrtc/media/engine/webrtcvideoframe.cc
index 3a2d2def0e3..cda00275f70 100644
--- a/chromium/third_party/webrtc/media/engine/webrtcvideoframe.cc
+++ b/chromium/third_party/webrtc/media/engine/webrtcvideoframe.cc
@@ -22,18 +22,24 @@ using webrtc::kVPlane;
namespace cricket {
-WebRtcVideoFrame::WebRtcVideoFrame():
- time_stamp_ns_(0),
- rotation_(webrtc::kVideoRotation_0) {}
+WebRtcVideoFrame::WebRtcVideoFrame()
+ : timestamp_us_(0), rotation_(webrtc::kVideoRotation_0) {}
+
+WebRtcVideoFrame::WebRtcVideoFrame(
+ const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+ webrtc::VideoRotation rotation,
+ int64_t timestamp_us)
+ : video_frame_buffer_(buffer),
+ timestamp_us_(timestamp_us),
+ rotation_(rotation) {}
WebRtcVideoFrame::WebRtcVideoFrame(
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
int64_t time_stamp_ns,
webrtc::VideoRotation rotation)
- : video_frame_buffer_(buffer),
- time_stamp_ns_(time_stamp_ns),
- rotation_(rotation) {
-}
+ : WebRtcVideoFrame(buffer,
+ rotation,
+ time_stamp_ns / rtc::kNumNanosecsPerMicrosec) {}
WebRtcVideoFrame::~WebRtcVideoFrame() {}
@@ -47,7 +53,7 @@ bool WebRtcVideoFrame::Init(uint32_t format,
int64_t time_stamp_ns,
webrtc::VideoRotation rotation) {
return Reset(format, w, h, dw, dh, sample, sample_size,
- time_stamp_ns, rotation,
+ time_stamp_ns / rtc::kNumNanosecsPerMicrosec, rotation,
true /*apply_rotation*/);
}
@@ -55,7 +61,7 @@ bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh,
bool apply_rotation) {
return Reset(frame->fourcc, frame->width, frame->height, dw, dh,
static_cast<uint8_t*>(frame->data), frame->data_size,
- frame->time_stamp,
+ frame->time_stamp / rtc::kNumNanosecsPerMicrosec,
frame->rotation, apply_rotation);
}
@@ -73,62 +79,21 @@ int WebRtcVideoFrame::height() const {
return video_frame_buffer_ ? video_frame_buffer_->height() : 0;
}
-const uint8_t* WebRtcVideoFrame::GetYPlane() const {
- return video_frame_buffer_ ? video_frame_buffer_->data(kYPlane) : nullptr;
-}
-
-const uint8_t* WebRtcVideoFrame::GetUPlane() const {
- return video_frame_buffer_ ? video_frame_buffer_->data(kUPlane) : nullptr;
-}
-
-const uint8_t* WebRtcVideoFrame::GetVPlane() const {
- return video_frame_buffer_ ? video_frame_buffer_->data(kVPlane) : nullptr;
-}
-
-uint8_t* WebRtcVideoFrame::GetYPlane() {
- return video_frame_buffer_ ? video_frame_buffer_->MutableData(kYPlane)
- : nullptr;
-}
-
-uint8_t* WebRtcVideoFrame::GetUPlane() {
- return video_frame_buffer_ ? video_frame_buffer_->MutableData(kUPlane)
- : nullptr;
-}
-
-uint8_t* WebRtcVideoFrame::GetVPlane() {
- return video_frame_buffer_ ? video_frame_buffer_->MutableData(kVPlane)
- : nullptr;
-}
-
-int32_t WebRtcVideoFrame::GetYPitch() const {
- return video_frame_buffer_ ? video_frame_buffer_->stride(kYPlane) : 0;
-}
-
-int32_t WebRtcVideoFrame::GetUPitch() const {
- return video_frame_buffer_ ? video_frame_buffer_->stride(kUPlane) : 0;
-}
-
-int32_t WebRtcVideoFrame::GetVPitch() const {
- return video_frame_buffer_ ? video_frame_buffer_->stride(kVPlane) : 0;
-}
-
bool WebRtcVideoFrame::IsExclusive() const {
- return video_frame_buffer_->HasOneRef();
+ return video_frame_buffer_->IsMutable();
}
void* WebRtcVideoFrame::GetNativeHandle() const {
return video_frame_buffer_ ? video_frame_buffer_->native_handle() : nullptr;
}
-rtc::scoped_refptr<webrtc::VideoFrameBuffer>
-WebRtcVideoFrame::GetVideoFrameBuffer() const {
+const rtc::scoped_refptr<webrtc::VideoFrameBuffer>&
+WebRtcVideoFrame::video_frame_buffer() const {
return video_frame_buffer_;
}
VideoFrame* WebRtcVideoFrame::Copy() const {
- WebRtcVideoFrame* new_frame = new WebRtcVideoFrame(
- video_frame_buffer_, time_stamp_ns_, rotation_);
- return new_frame;
+ return new WebRtcVideoFrame(video_frame_buffer_, rotation_, timestamp_us_);
}
size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc,
@@ -147,7 +112,7 @@ bool WebRtcVideoFrame::Reset(uint32_t format,
int dh,
uint8_t* sample,
size_t sample_size,
- int64_t time_stamp_ns,
+ int64_t timestamp_us,
webrtc::VideoRotation rotation,
bool apply_rotation) {
if (!Validate(format, w, h, sample, sample_size)) {
@@ -166,8 +131,7 @@ bool WebRtcVideoFrame::Reset(uint32_t format,
new_height = dw;
}
- InitToEmptyBuffer(new_width, new_height,
- time_stamp_ns);
+ InitToEmptyBuffer(new_width, new_height);
rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation;
int horiz_crop = ((w - dw) / 2) & ~1;
@@ -178,9 +142,12 @@ bool WebRtcVideoFrame::Reset(uint32_t format,
int idh = (h < 0) ? -dh : dh;
int r = libyuv::ConvertToI420(
sample, sample_size,
- GetYPlane(), GetYPitch(),
- GetUPlane(), GetUPitch(),
- GetVPlane(), GetVPitch(),
+ video_frame_buffer_->MutableDataY(),
+ video_frame_buffer_->StrideY(),
+ video_frame_buffer_->MutableDataU(),
+ video_frame_buffer_->StrideU(),
+ video_frame_buffer_->MutableDataV(),
+ video_frame_buffer_->StrideV(),
horiz_crop, vert_crop,
w, h,
dw, idh,
@@ -192,28 +159,34 @@ bool WebRtcVideoFrame::Reset(uint32_t format,
<< " return code : " << r;
return false;
}
+ timestamp_us_ = timestamp_us;
return true;
}
-VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(
- int w, int h,
- int64_t time_stamp_ns) const {
+VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(int w,
+ int h,
+ int64_t timestamp_us) const {
WebRtcVideoFrame* frame = new WebRtcVideoFrame();
- frame->InitToEmptyBuffer(w, h, time_stamp_ns);
+ frame->InitToEmptyBuffer(w, h, rtc::kNumNanosecsPerMicrosec * timestamp_us);
return frame;
}
+void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h) {
+ video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
+ rotation_ = webrtc::kVideoRotation_0;
+}
+
void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h,
int64_t time_stamp_ns) {
video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
- time_stamp_ns_ = time_stamp_ns;
+ SetTimeStamp(time_stamp_ns);
rotation_ = webrtc::kVideoRotation_0;
}
const VideoFrame* WebRtcVideoFrame::GetCopyWithRotationApplied() const {
// If the frame is not rotated, the caller should reuse this frame instead of
// making a redundant copy.
- if (GetVideoRotation() == webrtc::kVideoRotation_0) {
+ if (rotation() == webrtc::kVideoRotation_0) {
return this;
}
@@ -231,24 +204,29 @@ const VideoFrame* WebRtcVideoFrame::GetCopyWithRotationApplied() const {
int rotated_width = orig_width;
int rotated_height = orig_height;
- if (GetVideoRotation() == webrtc::kVideoRotation_90 ||
- GetVideoRotation() == webrtc::kVideoRotation_270) {
+ if (rotation() == webrtc::kVideoRotation_90 ||
+ rotation() == webrtc::kVideoRotation_270) {
rotated_width = orig_height;
rotated_height = orig_width;
}
- rotated_frame_.reset(CreateEmptyFrame(rotated_width, rotated_height,
- GetTimeStamp()));
+ rotated_frame_.reset(
+ CreateEmptyFrame(rotated_width, rotated_height, timestamp_us_));
// TODO(guoweis): Add a function in webrtc_libyuv.cc to convert from
// VideoRotation to libyuv::RotationMode.
int ret = libyuv::I420Rotate(
- GetYPlane(), GetYPitch(), GetUPlane(), GetUPitch(), GetVPlane(),
- GetVPitch(), rotated_frame_->GetYPlane(), rotated_frame_->GetYPitch(),
- rotated_frame_->GetUPlane(), rotated_frame_->GetUPitch(),
- rotated_frame_->GetVPlane(), rotated_frame_->GetVPitch(),
+ video_frame_buffer_->DataY(), video_frame_buffer_->StrideY(),
+ video_frame_buffer_->DataU(), video_frame_buffer_->StrideU(),
+ video_frame_buffer_->DataV(), video_frame_buffer_->StrideV(),
+ rotated_frame_->video_frame_buffer()->MutableDataY(),
+ rotated_frame_->video_frame_buffer()->StrideY(),
+ rotated_frame_->video_frame_buffer()->MutableDataU(),
+ rotated_frame_->video_frame_buffer()->StrideU(),
+ rotated_frame_->video_frame_buffer()->MutableDataV(),
+ rotated_frame_->video_frame_buffer()->StrideV(),
orig_width, orig_height,
- static_cast<libyuv::RotationMode>(GetVideoRotation()));
+ static_cast<libyuv::RotationMode>(rotation()));
if (ret == 0) {
return rotated_frame_.get();
}
diff --git a/chromium/third_party/webrtc/media/engine/webrtcvideoframe.h b/chromium/third_party/webrtc/media/engine/webrtcvideoframe.h
index cb280b5e153..ee34c414a47 100644
--- a/chromium/third_party/webrtc/media/engine/webrtcvideoframe.h
+++ b/chromium/third_party/webrtc/media/engine/webrtcvideoframe.h
@@ -27,6 +27,13 @@ struct CapturedFrame;
class WebRtcVideoFrame : public VideoFrame {
public:
WebRtcVideoFrame();
+
+ // Preferred construction, with microsecond timestamp.
+ WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+ webrtc::VideoRotation rotation,
+ int64_t timestamp_us);
+
+ // TODO(nisse): Deprecate/delete.
WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
int64_t time_stamp_ns,
webrtc::VideoRotation rotation);
@@ -47,8 +54,13 @@ class WebRtcVideoFrame : public VideoFrame {
int64_t time_stamp_ns,
webrtc::VideoRotation rotation);
+ // The timestamp of the captured frame is expected to use the same
+ // timescale and epoch as rtc::Time.
+ // TODO(nisse): Consider adding a warning message, or even an RTC_DCHECK, if
+ // the time is too far off.
bool Init(const CapturedFrame* frame, int dw, int dh, bool apply_rotation);
+ void InitToEmptyBuffer(int w, int h);
void InitToEmptyBuffer(int w, int h, int64_t time_stamp_ns);
bool InitToBlack(int w, int h, int64_t time_stamp_ns);
@@ -56,27 +68,15 @@ class WebRtcVideoFrame : public VideoFrame {
int width() const override;
int height() const override;
- const uint8_t* GetYPlane() const override;
- const uint8_t* GetUPlane() const override;
- const uint8_t* GetVPlane() const override;
- uint8_t* GetYPlane() override;
- uint8_t* GetUPlane() override;
- uint8_t* GetVPlane() override;
- int32_t GetYPitch() const override;
- int32_t GetUPitch() const override;
- int32_t GetVPitch() const override;
void* GetNativeHandle() const override;
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> GetVideoFrameBuffer()
+ const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& video_frame_buffer()
const override;
- int64_t GetTimeStamp() const override { return time_stamp_ns_; }
- void SetTimeStamp(int64_t time_stamp_ns) override {
- time_stamp_ns_ = time_stamp_ns;
- }
+ /* System monotonic clock */
+ int64_t timestamp_us() const override { return timestamp_us_; }
+ void set_timestamp_us(int64_t time_us) override { timestamp_us_ = time_us; };
- webrtc::VideoRotation GetVideoRotation() const override {
- return rotation_;
- }
+ webrtc::VideoRotation rotation() const override { return rotation_; }
VideoFrame* Copy() const override;
bool IsExclusive() const override;
@@ -88,7 +88,7 @@ class WebRtcVideoFrame : public VideoFrame {
const VideoFrame* GetCopyWithRotationApplied() const override;
protected:
- void SetRotation(webrtc::VideoRotation rotation) override {
+ void set_rotation(webrtc::VideoRotation rotation) override {
rotation_ = rotation;
}
// Creates a frame from a raw sample with FourCC |format| and size |w| x |h|.
@@ -97,15 +97,15 @@ class WebRtcVideoFrame : public VideoFrame {
// |dh| is destination height, like |dw|, but must be a positive number.
// Returns whether the function succeeded or failed.
bool Reset(uint32_t format,
- int w,
- int h,
- int dw,
- int dh,
- uint8_t* sample,
- size_t sample_size,
- int64_t time_stamp_ns,
- webrtc::VideoRotation rotation,
- bool apply_rotation);
+ int w,
+ int h,
+ int dw,
+ int dh,
+ uint8_t* sample,
+ size_t sample_size,
+ int64_t timestamp_us,
+ webrtc::VideoRotation rotation,
+ bool apply_rotation);
private:
VideoFrame* CreateEmptyFrame(int w, int h,
@@ -113,7 +113,7 @@ class WebRtcVideoFrame : public VideoFrame {
// An opaque reference counted handle that stores the pixel data.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
- int64_t time_stamp_ns_;
+ int64_t timestamp_us_;
webrtc::VideoRotation rotation_;
// This is mutable as the calculation is expensive but once calculated, it
diff --git a/chromium/third_party/webrtc/media/engine/webrtcvideoframe_unittest.cc b/chromium/third_party/webrtc/media/engine/webrtcvideoframe_unittest.cc
index 4b0e988d288..7849de61b0a 100644
--- a/chromium/third_party/webrtc/media/engine/webrtcvideoframe_unittest.cc
+++ b/chromium/third_party/webrtc/media/engine/webrtcvideoframe_unittest.cc
@@ -20,7 +20,8 @@ namespace {
class WebRtcVideoTestFrame : public cricket::WebRtcVideoFrame {
public:
- using cricket::WebRtcVideoFrame::SetRotation;
+ // The ApplyRotationToFrame test needs this as a public method.
+ using cricket::WebRtcVideoFrame::set_rotation;
virtual VideoFrame* CreateEmptyFrame(int w,
int h,
@@ -47,7 +48,7 @@ class WebRtcVideoFrameTest : public VideoFrameTest<cricket::WebRtcVideoFrame> {
// Build the CapturedFrame.
cricket::CapturedFrame captured_frame;
captured_frame.fourcc = cricket::FOURCC_I420;
- captured_frame.time_stamp = 5678;
+ captured_frame.time_stamp = rtc::TimeNanos();
captured_frame.rotation = frame_rotation;
captured_frame.width = frame_width;
captured_frame.height = frame_height;
@@ -66,11 +67,12 @@ class WebRtcVideoFrameTest : public VideoFrameTest<cricket::WebRtcVideoFrame> {
apply_rotation));
// Verify the new frame.
- EXPECT_EQ(5678, frame.GetTimeStamp());
+ EXPECT_EQ(captured_frame.time_stamp / rtc::kNumNanosecsPerMicrosec,
+ frame.timestamp_us());
if (apply_rotation)
- EXPECT_EQ(webrtc::kVideoRotation_0, frame.GetVideoRotation());
+ EXPECT_EQ(webrtc::kVideoRotation_0, frame.rotation());
else
- EXPECT_EQ(frame_rotation, frame.GetVideoRotation());
+ EXPECT_EQ(frame_rotation, frame.rotation());
// If |apply_rotation| and the frame rotation is 90 or 270, width and
// height are flipped.
if (apply_rotation && (frame_rotation == webrtc::kVideoRotation_90
@@ -271,13 +273,16 @@ TEST_F(WebRtcVideoFrameTest, TextureInitialValues) {
webrtc::NativeHandleBuffer* buffer =
new rtc::RefCountedObject<webrtc::test::FakeNativeHandleBuffer>(
dummy_handle, 640, 480);
- cricket::WebRtcVideoFrame frame(buffer, 200, webrtc::kVideoRotation_0);
+ // Timestamp is converted from ns to us, so last three digits are lost.
+ cricket::WebRtcVideoFrame frame(buffer, 20000, webrtc::kVideoRotation_0);
EXPECT_EQ(dummy_handle, frame.GetNativeHandle());
EXPECT_EQ(640, frame.width());
EXPECT_EQ(480, frame.height());
- EXPECT_EQ(200, frame.GetTimeStamp());
- frame.SetTimeStamp(400);
- EXPECT_EQ(400, frame.GetTimeStamp());
+ EXPECT_EQ(20000, frame.GetTimeStamp());
+ EXPECT_EQ(20, frame.timestamp_us());
+ frame.set_timestamp_us(40);
+ EXPECT_EQ(40000, frame.GetTimeStamp());
+ EXPECT_EQ(40, frame.timestamp_us());
}
TEST_F(WebRtcVideoFrameTest, CopyTextureFrame) {
@@ -286,12 +291,14 @@ TEST_F(WebRtcVideoFrameTest, CopyTextureFrame) {
webrtc::NativeHandleBuffer* buffer =
new rtc::RefCountedObject<webrtc::test::FakeNativeHandleBuffer>(
dummy_handle, 640, 480);
- cricket::WebRtcVideoFrame frame1(buffer, 200, webrtc::kVideoRotation_0);
+ // Timestamp is converted from ns to us, so last three digits are lost.
+ cricket::WebRtcVideoFrame frame1(buffer, 20000, webrtc::kVideoRotation_0);
cricket::VideoFrame* frame2 = frame1.Copy();
EXPECT_EQ(frame1.GetNativeHandle(), frame2->GetNativeHandle());
EXPECT_EQ(frame1.width(), frame2->width());
EXPECT_EQ(frame1.height(), frame2->height());
EXPECT_EQ(frame1.GetTimeStamp(), frame2->GetTimeStamp());
+ EXPECT_EQ(frame1.timestamp_us(), frame2->timestamp_us());
delete frame2;
}
@@ -303,22 +310,22 @@ TEST_F(WebRtcVideoFrameTest, ApplyRotationToFrame) {
LoadFrame(ms.get(), cricket::FOURCC_I420, kWidth, kHeight, &applied0));
// Claim that this frame needs to be rotated for 90 degree.
- applied0.SetRotation(webrtc::kVideoRotation_90);
+ applied0.set_rotation(webrtc::kVideoRotation_90);
// Apply rotation on frame 1. Output should be different from frame 1.
WebRtcVideoTestFrame* applied90 = const_cast<WebRtcVideoTestFrame*>(
static_cast<const WebRtcVideoTestFrame*>(
applied0.GetCopyWithRotationApplied()));
EXPECT_TRUE(applied90);
- EXPECT_EQ(applied90->GetVideoRotation(), webrtc::kVideoRotation_0);
+ EXPECT_EQ(applied90->rotation(), webrtc::kVideoRotation_0);
EXPECT_FALSE(IsEqual(applied0, *applied90, 0));
// Claim the frame 2 needs to be rotated for another 270 degree. The output
// from frame 2 rotation should be the same as frame 1.
- applied90->SetRotation(webrtc::kVideoRotation_270);
+ applied90->set_rotation(webrtc::kVideoRotation_270);
const cricket::VideoFrame* applied360 =
applied90->GetCopyWithRotationApplied();
EXPECT_TRUE(applied360);
- EXPECT_EQ(applied360->GetVideoRotation(), webrtc::kVideoRotation_0);
+ EXPECT_EQ(applied360->rotation(), webrtc::kVideoRotation_0);
EXPECT_TRUE(IsEqual(applied0, *applied360, 0));
}
diff --git a/chromium/third_party/webrtc/media/engine/webrtcvideoframefactory_unittest.cc b/chromium/third_party/webrtc/media/engine/webrtcvideoframefactory_unittest.cc
index e2630734cae..197784b54a3 100644
--- a/chromium/third_party/webrtc/media/engine/webrtcvideoframefactory_unittest.cc
+++ b/chromium/third_party/webrtc/media/engine/webrtcvideoframefactory_unittest.cc
@@ -12,12 +12,12 @@
#include <memory>
-#include "webrtc/media/base/videoframe_unittest.h"
+#include "webrtc/base/gunit.h"
+#include "webrtc/media/base/videocapturer.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
#include "webrtc/media/engine/webrtcvideoframefactory.h"
-class WebRtcVideoFrameFactoryTest
- : public VideoFrameTest<cricket::WebRtcVideoFrameFactory> {
+class WebRtcVideoFrameFactoryTest : public testing::Test {
public:
WebRtcVideoFrameFactoryTest() {}
@@ -29,7 +29,7 @@ class WebRtcVideoFrameFactoryTest
captured_frame_.fourcc = cricket::FOURCC_I420;
captured_frame_.pixel_width = 1;
captured_frame_.pixel_height = 1;
- captured_frame_.time_stamp = 5678;
+ captured_frame_.time_stamp = rtc::TimeNanos();
captured_frame_.rotation = frame_rotation;
captured_frame_.width = frame_width;
captured_frame_.height = frame_height;
@@ -48,11 +48,11 @@ class WebRtcVideoFrameFactoryTest
int src_height,
bool apply_rotation) {
if (!apply_rotation) {
- EXPECT_EQ(dest_frame->GetVideoRotation(), src_rotation);
+ EXPECT_EQ(dest_frame->rotation(), src_rotation);
EXPECT_EQ(dest_frame->width(), src_width);
EXPECT_EQ(dest_frame->height(), src_height);
} else {
- EXPECT_EQ(dest_frame->GetVideoRotation(), webrtc::kVideoRotation_0);
+ EXPECT_EQ(dest_frame->rotation(), webrtc::kVideoRotation_0);
if (src_rotation == webrtc::kVideoRotation_90 ||
src_rotation == webrtc::kVideoRotation_270) {
EXPECT_EQ(dest_frame->width(), src_height);
diff --git a/chromium/third_party/webrtc/media/engine/webrtcvoe.h b/chromium/third_party/webrtc/media/engine/webrtcvoe.h
index 2f7b997dae5..238b4ce7817 100644
--- a/chromium/third_party/webrtc/media/engine/webrtcvoe.h
+++ b/chromium/third_party/webrtc/media/engine/webrtcvoe.h
@@ -11,6 +11,8 @@
#ifndef WEBRTC_MEDIA_ENGINE_WEBRTCVOE_H_
#define WEBRTC_MEDIA_ENGINE_WEBRTCVOE_H_
+#include <memory>
+
#include "webrtc/base/common.h"
#include "webrtc/media/engine/webrtccommon.h"
@@ -74,15 +76,13 @@ class VoEWrapper {
public:
VoEWrapper()
: engine_(webrtc::VoiceEngine::Create()), processing_(engine_),
- base_(engine_), codec_(engine_),
- hw_(engine_), network_(engine_),
- rtp_(engine_), volume_(engine_) {
+ base_(engine_), codec_(engine_), hw_(engine_), rtp_(engine_),
+ volume_(engine_) {
}
VoEWrapper(webrtc::VoEAudioProcessing* processing,
webrtc::VoEBase* base,
webrtc::VoECodec* codec,
webrtc::VoEHardware* hw,
- webrtc::VoENetwork* network,
webrtc::VoERTP_RTCP* rtp,
webrtc::VoEVolumeControl* volume)
: engine_(NULL),
@@ -90,7 +90,6 @@ class VoEWrapper {
base_(base),
codec_(codec),
hw_(hw),
- network_(network),
rtp_(rtp),
volume_(volume) {
}
@@ -100,7 +99,6 @@ class VoEWrapper {
webrtc::VoEBase* base() const { return base_.get(); }
webrtc::VoECodec* codec() const { return codec_.get(); }
webrtc::VoEHardware* hw() const { return hw_.get(); }
- webrtc::VoENetwork* network() const { return network_.get(); }
webrtc::VoERTP_RTCP* rtp() const { return rtp_.get(); }
webrtc::VoEVolumeControl* volume() const { return volume_.get(); }
int error() { return base_->LastError(); }
@@ -111,7 +109,6 @@ class VoEWrapper {
scoped_voe_ptr<webrtc::VoEBase> base_;
scoped_voe_ptr<webrtc::VoECodec> codec_;
scoped_voe_ptr<webrtc::VoEHardware> hw_;
- scoped_voe_ptr<webrtc::VoENetwork> network_;
scoped_voe_ptr<webrtc::VoERTP_RTCP> rtp_;
scoped_voe_ptr<webrtc::VoEVolumeControl> volume_;
};
diff --git a/chromium/third_party/webrtc/media/engine/webrtcvoiceengine.cc b/chromium/third_party/webrtc/media/engine/webrtcvoiceengine.cc
index b899470b970..09a072a5f3d 100644
--- a/chromium/third_party/webrtc/media/engine/webrtcvoiceengine.cc
+++ b/chromium/third_party/webrtc/media/engine/webrtcvoiceengine.cc
@@ -22,6 +22,7 @@
#include "webrtc/base/base64.h"
#include "webrtc/base/byteorder.h"
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/helpers.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/stringencode.h"
@@ -29,7 +30,6 @@
#include "webrtc/base/trace_event.h"
#include "webrtc/call/rtc_event_log.h"
#include "webrtc/common.h"
-#include "webrtc/media/base/audioframe.h"
#include "webrtc/media/base/audiosource.h"
#include "webrtc/media/base/mediaconstants.h"
#include "webrtc/media/base/streamparams.h"
@@ -84,6 +84,9 @@ const int kOpusBitrateFb = 32000;
const int kOpusMinBitrate = 6000;
const int kOpusMaxBitrate = 510000;
+// iSAC bitrate should be <= 56000.
+const int kIsacMaxBitrate = 56000;
+
// Default audio dscp value.
// See http://tools.ietf.org/html/rfc2474 for details.
// See also http://tools.ietf.org/html/draft-jennings-rtcweb-qos-00
@@ -250,31 +253,28 @@ class WebRtcVoiceCodecs final {
// list and add a test which verifies VoE supports the listed codecs.
static std::vector<AudioCodec> SupportedCodecs() {
std::vector<AudioCodec> result;
- for (webrtc::CodecInst voe_codec : webrtc::acm2::RentACodec::Database()) {
- // Change the sample rate of G722 to 8000 to match SDP.
- MaybeFixupG722(&voe_codec, 8000);
- // Skip uncompressed formats.
- if (IsCodec(voe_codec, kL16CodecName)) {
- continue;
- }
+ // Iterate first over our preferred codecs list, so that the results are
+ // added in order of preference.
+ for (size_t i = 0; i < arraysize(kCodecPrefs); ++i) {
+ const CodecPref* pref = &kCodecPrefs[i];
+ for (webrtc::CodecInst voe_codec : webrtc::acm2::RentACodec::Database()) {
+ // Change the sample rate of G722 to 8000 to match SDP.
+ MaybeFixupG722(&voe_codec, 8000);
+ // Skip uncompressed formats.
+ if (IsCodec(voe_codec, kL16CodecName)) {
+ continue;
+ }
- const CodecPref* pref = NULL;
- for (size_t j = 0; j < arraysize(kCodecPrefs); ++j) {
- if (IsCodec(voe_codec, kCodecPrefs[j].name) &&
- kCodecPrefs[j].clockrate == voe_codec.plfreq &&
- kCodecPrefs[j].channels == voe_codec.channels) {
- pref = &kCodecPrefs[j];
- break;
+ if (!IsCodec(voe_codec, pref->name) ||
+ pref->clockrate != voe_codec.plfreq ||
+ pref->channels != voe_codec.channels) {
+ // Not a match.
+ continue;
}
- }
- if (pref) {
- // Use the payload type that we've configured in our pref table;
- // use the offset in our pref table to determine the sort order.
- AudioCodec codec(
- pref->payload_type, voe_codec.plname, voe_codec.plfreq,
- voe_codec.rate, voe_codec.channels,
- static_cast<int>(arraysize(kCodecPrefs)) - (pref - kCodecPrefs));
+ AudioCodec codec(pref->payload_type, voe_codec.plname, voe_codec.plfreq,
+ voe_codec.rate, voe_codec.channels);
+ LOG(LS_INFO) << "Adding supported codec: " << ToString(codec);
if (IsCodec(codec, kIsacCodecName)) {
// Indicate auto-bitrate in signaling.
codec.bitrate = 0;
@@ -297,12 +297,8 @@ class WebRtcVoiceCodecs final {
// when they can be set to values other than the default.
}
result.push_back(codec);
- } else {
- LOG(LS_INFO) << "[Unused] " << ToString(voe_codec);
}
}
- // Make sure they are in local preference order.
- std::sort(result.begin(), result.end(), &AudioCodec::Preferable);
return result;
}
@@ -312,7 +308,7 @@ class WebRtcVoiceCodecs final {
// Change the sample rate of G722 to 8000 to match SDP.
MaybeFixupG722(&voe_codec, 8000);
AudioCodec codec(voe_codec.pltype, voe_codec.plname, voe_codec.plfreq,
- voe_codec.rate, voe_codec.channels, 0);
+ voe_codec.rate, voe_codec.channels);
bool multi_rate = IsCodecMultiRate(voe_codec);
// Allow arbitrary rates for ISAC to be specified.
if (multi_rate) {
@@ -356,6 +352,16 @@ class WebRtcVoiceCodecs final {
return false;
}
+ static int MaxBitrateBps(const webrtc::CodecInst& codec) {
+ for (size_t i = 0; i < arraysize(kCodecPrefs); ++i) {
+ if (IsCodec(codec, kCodecPrefs[i].name) &&
+ kCodecPrefs[i].clockrate == codec.plfreq) {
+ return kCodecPrefs[i].max_bitrate_bps;
+ }
+ }
+ return 0;
+ }
+
// If the AudioCodec param kCodecParamPTime is set, then we will set it to
// codec pacsize if it's valid, or we will pick the next smallest value we
// support.
@@ -426,6 +432,7 @@ class WebRtcVoiceCodecs final {
int payload_type;
bool is_multi_rate;
int packet_sizes_ms[kMaxNumPacketSize];
+ int max_bitrate_bps;
};
// Note: keep the supported packet sizes in ascending order.
static const CodecPref kCodecPrefs[12];
@@ -492,19 +499,19 @@ class WebRtcVoiceCodecs final {
};
const WebRtcVoiceCodecs::CodecPref WebRtcVoiceCodecs::kCodecPrefs[12] = {
- { kOpusCodecName, 48000, 2, 111, true, { 10, 20, 40, 60 } },
- { kIsacCodecName, 16000, 1, 103, true, { 30, 60 } },
- { kIsacCodecName, 32000, 1, 104, true, { 30 } },
- // G722 should be advertised as 8000 Hz because of the RFC "bug".
- { kG722CodecName, 8000, 1, 9, false, { 10, 20, 30, 40, 50, 60 } },
- { kIlbcCodecName, 8000, 1, 102, false, { 20, 30, 40, 60 } },
- { kPcmuCodecName, 8000, 1, 0, false, { 10, 20, 30, 40, 50, 60 } },
- { kPcmaCodecName, 8000, 1, 8, false, { 10, 20, 30, 40, 50, 60 } },
- { kCnCodecName, 32000, 1, 106, false, { } },
- { kCnCodecName, 16000, 1, 105, false, { } },
- { kCnCodecName, 8000, 1, 13, false, { } },
- { kRedCodecName, 8000, 1, 127, false, { } },
- { kDtmfCodecName, 8000, 1, 126, false, { } },
+ {kOpusCodecName, 48000, 2, 111, true, {10, 20, 40, 60}, kOpusMaxBitrate},
+ {kIsacCodecName, 16000, 1, 103, true, {30, 60}, kIsacMaxBitrate},
+ {kIsacCodecName, 32000, 1, 104, true, {30}, kIsacMaxBitrate},
+ // G722 should be advertised as 8000 Hz because of the RFC "bug".
+ {kG722CodecName, 8000, 1, 9, false, {10, 20, 30, 40, 50, 60}},
+ {kIlbcCodecName, 8000, 1, 102, false, {20, 30, 40, 60}},
+ {kPcmuCodecName, 8000, 1, 0, false, {10, 20, 30, 40, 50, 60}},
+ {kPcmaCodecName, 8000, 1, 8, false, {10, 20, 30, 40, 50, 60}},
+ {kCnCodecName, 32000, 1, 106, false, {}},
+ {kCnCodecName, 16000, 1, 105, false, {}},
+ {kCnCodecName, 8000, 1, 13, false, {}},
+ {kRedCodecName, 8000, 1, 127, false, {}},
+ {kDtmfCodecName, 8000, 1, 126, false, {}},
};
} // namespace {
@@ -570,6 +577,7 @@ WebRtcVoiceEngine::WebRtcVoiceEngine(webrtc::AudioDeviceModule* adm,
options.extended_filter_aec = rtc::Optional<bool>(false);
options.delay_agnostic_aec = rtc::Optional<bool>(false);
options.experimental_ns = rtc::Optional<bool>(false);
+ options.intelligibility_enhancer = rtc::Optional<bool>(false);
bool error = ApplyOptions(options);
RTC_DCHECK(error);
}
@@ -655,14 +663,14 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
// Android and in combination with Java based audio layer.
// TODO(henrika): investigate possibility to support built-in EC also
// in combination with Open SL ES audio.
- const bool built_in_aec = voe_wrapper_->hw()->BuiltInAECIsAvailable();
+ const bool built_in_aec = adm()->BuiltInAECIsAvailable();
if (built_in_aec) {
// Built-in EC exists on this device and use_delay_agnostic_aec is not
// overriding it. Enable/Disable it according to the echo_cancellation
// audio option.
const bool enable_built_in_aec =
*options.echo_cancellation && !use_delay_agnostic_aec;
- if (voe_wrapper_->hw()->EnableBuiltInAEC(enable_built_in_aec) == 0 &&
+ if (adm()->EnableBuiltInAEC(enable_built_in_aec) == 0 &&
enable_built_in_aec) {
// Disable internal software EC if built-in EC is enabled,
// i.e., replace the software EC with the built-in EC.
@@ -694,10 +702,9 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
}
if (options.auto_gain_control) {
- const bool built_in_agc = voe_wrapper_->hw()->BuiltInAGCIsAvailable();
+ const bool built_in_agc = adm()->BuiltInAGCIsAvailable();
if (built_in_agc) {
- if (voe_wrapper_->hw()->EnableBuiltInAGC(*options.auto_gain_control) ==
- 0 &&
+ if (adm()->EnableBuiltInAGC(*options.auto_gain_control) == 0 &&
*options.auto_gain_control) {
// Disable internal software AGC if built-in AGC is enabled,
// i.e., replace the software AGC with the built-in AGC.
@@ -740,12 +747,20 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
}
}
+ if (options.intelligibility_enhancer) {
+ intelligibility_enhancer_ = options.intelligibility_enhancer;
+ }
+ if (intelligibility_enhancer_ && *intelligibility_enhancer_) {
+ LOG(LS_INFO) << "Enabling NS when Intelligibility Enhancer is active.";
+ options.noise_suppression = intelligibility_enhancer_;
+ }
+
if (options.noise_suppression) {
- const bool built_in_ns = voe_wrapper_->hw()->BuiltInNSIsAvailable();
- if (built_in_ns) {
- if (voe_wrapper_->hw()->EnableBuiltInNS(*options.noise_suppression) ==
- 0 &&
- *options.noise_suppression) {
+ if (adm()->BuiltInNSIsAvailable()) {
+ bool builtin_ns =
+ *options.noise_suppression &&
+ !(intelligibility_enhancer_ && *intelligibility_enhancer_);
+ if (adm()->EnableBuiltInNS(builtin_ns) == 0 && builtin_ns) {
// Disable internal software NS if built-in NS is enabled,
// i.e., replace the software NS with the built-in NS.
options.noise_suppression = rtc::Optional<bool>(false);
@@ -838,6 +853,13 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
new webrtc::ExperimentalNs(*experimental_ns_));
}
+ if (intelligibility_enhancer_) {
+ LOG(LS_INFO) << "Intelligibility Enhancer is enabled? "
+ << *intelligibility_enhancer_;
+ config.Set<webrtc::Intelligibility>(
+ new webrtc::Intelligibility(*intelligibility_enhancer_));
+ }
+
// We check audioproc for the benefit of tests, since FakeWebRtcVoiceEngine
// returns NULL on audio_processing().
webrtc::AudioProcessing* audioproc = voe_wrapper_->base()->audio_processing();
@@ -848,16 +870,14 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
if (options.recording_sample_rate) {
LOG(LS_INFO) << "Recording sample rate is "
<< *options.recording_sample_rate;
- if (voe_wrapper_->hw()->SetRecordingSampleRate(
- *options.recording_sample_rate)) {
+ if (adm()->SetRecordingSampleRate(*options.recording_sample_rate)) {
LOG_RTCERR1(SetRecordingSampleRate, *options.recording_sample_rate);
}
}
if (options.playout_sample_rate) {
LOG(LS_INFO) << "Playout sample rate is " << *options.playout_sample_rate;
- if (voe_wrapper_->hw()->SetPlayoutSampleRate(
- *options.playout_sample_rate)) {
+ if (adm()->SetPlayoutSampleRate(*options.playout_sample_rate)) {
LOG_RTCERR1(SetPlayoutSampleRate, *options.playout_sample_rate);
}
}
@@ -1053,11 +1073,12 @@ void WebRtcVoiceEngine::StopAecDump() {
}
}
-bool WebRtcVoiceEngine::StartRtcEventLog(rtc::PlatformFile file) {
+bool WebRtcVoiceEngine::StartRtcEventLog(rtc::PlatformFile file,
+ int64_t max_size_bytes) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
webrtc::RtcEventLog* event_log = voe_wrapper_->codec()->GetEventLog();
if (event_log) {
- return event_log->StartLogging(file);
+ return event_log->StartLogging(file, max_size_bytes);
}
LOG_RTCERR0(StartRtcEventLog);
return false;
@@ -1078,16 +1099,26 @@ int WebRtcVoiceEngine::CreateVoEChannel() {
return voe_wrapper_->base()->CreateChannel(voe_config_);
}
+webrtc::AudioDeviceModule* WebRtcVoiceEngine::adm() {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(adm_);
+ return adm_;
+}
+
class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
: public AudioSource::Sink {
public:
- WebRtcAudioSendStream(int ch, webrtc::AudioTransport* voe_audio_transport,
- uint32_t ssrc, const std::string& c_name,
+ WebRtcAudioSendStream(int ch,
+ webrtc::AudioTransport* voe_audio_transport,
+ uint32_t ssrc,
+ const std::string& c_name,
const std::vector<webrtc::RtpExtension>& extensions,
- webrtc::Call* call)
+ webrtc::Call* call,
+ webrtc::Transport* send_transport)
: voe_audio_transport_(voe_audio_transport),
call_(call),
- config_(nullptr) {
+ config_(send_transport),
+ rtp_parameters_(CreateRtpParametersWithOneEncoding()) {
RTC_DCHECK_GE(ch, 0);
// TODO(solenberg): Once we're not using FakeWebRtcVoiceEngine anymore:
// RTC_DCHECK(voe_audio_transport);
@@ -1199,11 +1230,23 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
return config_.voe_channel_id;
}
+ const webrtc::RtpParameters& rtp_parameters() const {
+ return rtp_parameters_;
+ }
+
+ void SetRtpParameters(const webrtc::RtpParameters& parameters) {
+ RTC_CHECK_EQ(1UL, parameters.encodings.size());
+ rtp_parameters_ = parameters;
+ // parameters.encodings[0].active could have changed.
+ UpdateSendState();
+ }
+
private:
void UpdateSendState() {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
RTC_DCHECK(stream_);
- if (send_ && source_ != nullptr) {
+ RTC_DCHECK_EQ(1UL, rtp_parameters_.encodings.size());
+ if (send_ && source_ != nullptr && rtp_parameters_.encodings[0].active) {
stream_->Start();
} else { // !send || source_ = nullptr
stream_->Stop();
@@ -1224,6 +1267,7 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
// goes away.
AudioSource* source_ = nullptr;
bool send_ = false;
+ webrtc::RtpParameters rtp_parameters_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcAudioSendStream);
};
@@ -1236,12 +1280,14 @@ class WebRtcVoiceMediaChannel::WebRtcAudioReceiveStream {
bool use_transport_cc,
const std::string& sync_group,
const std::vector<webrtc::RtpExtension>& extensions,
- webrtc::Call* call)
+ webrtc::Call* call,
+ webrtc::Transport* rtcp_send_transport)
: call_(call), config_() {
RTC_DCHECK_GE(ch, 0);
RTC_DCHECK(call);
config_.rtp.remote_ssrc = remote_ssrc;
config_.rtp.local_ssrc = local_ssrc;
+ config_.rtcp_send_transport = rtcp_send_transport;
config_.voe_channel_id = ch;
config_.sync_group = sync_group;
RecreateAudioReceiveStream(use_transport_cc, extensions);
@@ -1360,7 +1406,7 @@ bool WebRtcVoiceMediaChannel::SetSendParameters(
}
}
- if (!SetMaxSendBandwidth(params.max_bandwidth_bps)) {
+ if (!SetMaxSendBitrate(params.max_bandwidth_bps)) {
return false;
}
return SetOptions(params.options);
@@ -1394,6 +1440,110 @@ bool WebRtcVoiceMediaChannel::SetRecvParameters(
return true;
}
+webrtc::RtpParameters WebRtcVoiceMediaChannel::GetRtpSendParameters(
+ uint32_t ssrc) const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ auto it = send_streams_.find(ssrc);
+ if (it == send_streams_.end()) {
+ LOG(LS_WARNING) << "Attempting to get RTP send parameters for stream "
+ << "with ssrc " << ssrc << " which doesn't exist.";
+ return webrtc::RtpParameters();
+ }
+
+ webrtc::RtpParameters rtp_params = it->second->rtp_parameters();
+ // Need to add the common list of codecs to the send stream-specific
+ // RTP parameters.
+ for (const AudioCodec& codec : send_codecs_) {
+ rtp_params.codecs.push_back(codec.ToCodecParameters());
+ }
+ return rtp_params;
+}
+
+bool WebRtcVoiceMediaChannel::SetRtpSendParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ if (!ValidateRtpParameters(parameters)) {
+ return false;
+ }
+ auto it = send_streams_.find(ssrc);
+ if (it == send_streams_.end()) {
+ LOG(LS_WARNING) << "Attempting to set RTP send parameters for stream "
+ << "with ssrc " << ssrc << " which doesn't exist.";
+ return false;
+ }
+
+ // TODO(deadbeef): Handle setting parameters with a list of codecs in a
+ // different order (which should change the send codec).
+ webrtc::RtpParameters current_parameters = GetRtpSendParameters(ssrc);
+ if (current_parameters.codecs != parameters.codecs) {
+ LOG(LS_ERROR) << "Using SetParameters to change the set of codecs "
+ << "is not currently supported.";
+ return false;
+ }
+
+ if (!SetChannelSendParameters(it->second->channel(), parameters)) {
+ LOG(LS_WARNING) << "Failed to set send RtpParameters.";
+ return false;
+ }
+ // Codecs are handled at the WebRtcVoiceMediaChannel level.
+ webrtc::RtpParameters reduced_params = parameters;
+ reduced_params.codecs.clear();
+ it->second->SetRtpParameters(reduced_params);
+ return true;
+}
+
+webrtc::RtpParameters WebRtcVoiceMediaChannel::GetRtpReceiveParameters(
+ uint32_t ssrc) const {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ auto it = recv_streams_.find(ssrc);
+ if (it == recv_streams_.end()) {
+ LOG(LS_WARNING) << "Attempting to get RTP receive parameters for stream "
+ << "with ssrc " << ssrc << " which doesn't exist.";
+ return webrtc::RtpParameters();
+ }
+
+ // TODO(deadbeef): Return stream-specific parameters.
+ webrtc::RtpParameters rtp_params = CreateRtpParametersWithOneEncoding();
+ for (const AudioCodec& codec : recv_codecs_) {
+ rtp_params.codecs.push_back(codec.ToCodecParameters());
+ }
+ return rtp_params;
+}
+
+bool WebRtcVoiceMediaChannel::SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
+ RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ if (!ValidateRtpParameters(parameters)) {
+ return false;
+ }
+ auto it = recv_streams_.find(ssrc);
+ if (it == recv_streams_.end()) {
+ LOG(LS_WARNING) << "Attempting to set RTP receive parameters for stream "
+ << "with ssrc " << ssrc << " which doesn't exist.";
+ return false;
+ }
+
+ webrtc::RtpParameters current_parameters = GetRtpReceiveParameters(ssrc);
+ if (current_parameters != parameters) {
+ LOG(LS_ERROR) << "Changing the RTP receive parameters is currently "
+ << "unsupported.";
+ return false;
+ }
+ return true;
+}
+
+bool WebRtcVoiceMediaChannel::ValidateRtpParameters(
+ const webrtc::RtpParameters& rtp_parameters) {
+ if (rtp_parameters.encodings.size() != 1) {
+ LOG(LS_ERROR)
+ << "Attempted to set RtpParameters without exactly one encoding";
+ return false;
+ }
+ return true;
+}
+
bool WebRtcVoiceMediaChannel::SetOptions(const AudioOptions& options) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
LOG(LS_INFO) << "Setting voice channel options: "
@@ -1588,16 +1738,14 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
// Cache the codecs in order to configure the channel created later.
for (const auto& ch : send_streams_) {
- if (!SetSendCodecs(ch.second->channel())) {
+ if (!SetSendCodecs(ch.second->channel(), ch.second->rtp_parameters())) {
return false;
}
}
// Set nack status on receive channels.
- if (!send_streams_.empty()) {
- for (const auto& kv : recv_streams_) {
- SetNack(kv.second->channel(), send_codec_spec_.nack_enabled);
- }
+ for (const auto& kv : recv_streams_) {
+ SetNack(kv.second->channel(), send_codec_spec_.nack_enabled);
}
// Check if the transport cc feedback has changed on the preferred send codec,
@@ -1611,11 +1759,14 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
}
}
+ send_codecs_ = codecs;
return true;
}
// Apply current codec settings to a single voe::Channel used for sending.
-bool WebRtcVoiceMediaChannel::SetSendCodecs(int channel) {
+bool WebRtcVoiceMediaChannel::SetSendCodecs(
+ int channel,
+ const webrtc::RtpParameters& rtp_parameters) {
// Disable VAD, FEC, and RED unless we know the other side wants them.
engine()->voe()->codec()->SetVADStatus(channel, false);
engine()->voe()->rtp()->SetNACKStatus(channel, false, 0);
@@ -1683,10 +1834,9 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(int channel) {
}
}
}
-
- if (send_bitrate_setting_) {
- SetSendBitrateInternal(send_bitrate_bps_);
- }
+ // TODO(solenberg): SetMaxSendBitrate() yields another call to SetSendCodec().
+ // Check if it is possible to fuse with the previous call in this function.
+ SetChannelSendParameters(channel, rtp_parameters);
// Set the CN payloadtype and the VAD status.
if (send_codec_spec_.cng_payload_type != -1) {
@@ -1801,9 +1951,18 @@ void WebRtcVoiceMediaChannel::SetSend(bool send) {
return;
}
- // Apply channel specific options when channel is enabled for sending.
+ // Apply channel specific options, and initialize the ADM for recording (this
+ // may take time on some platforms, e.g. Android).
if (send) {
engine()->ApplyOptions(options_);
+
+ // InitRecording() may return an error if the ADM is already recording.
+ if (!engine()->adm()->RecordingIsInitialized() &&
+ !engine()->adm()->Recording()) {
+ if (engine()->adm()->InitRecording() != 0) {
+ LOG(LS_WARNING) << "Failed to initialize recording";
+ }
+ }
}
// Change the settings on each send channel.
@@ -1839,18 +1998,11 @@ int WebRtcVoiceMediaChannel::CreateVoEChannel() {
LOG_RTCERR0(CreateVoEChannel);
return -1;
}
- if (engine()->voe()->network()->RegisterExternalTransport(id, *this) == -1) {
- LOG_RTCERR2(RegisterExternalTransport, id, this);
- engine()->voe()->base()->DeleteChannel(id);
- return -1;
- }
+
return id;
}
bool WebRtcVoiceMediaChannel::DeleteVoEChannel(int channel) {
- if (engine()->voe()->network()->DeRegisterExternalTransport(channel) == -1) {
- LOG_RTCERR1(DeRegisterExternalTransport, channel);
- }
if (engine()->voe()->base()->DeleteChannel(channel) == -1) {
LOG_RTCERR1(DeleteChannel, channel);
return false;
@@ -1881,13 +2033,16 @@ bool WebRtcVoiceMediaChannel::AddSendStream(const StreamParams& sp) {
// delete the channel in case failure happens below.
webrtc::AudioTransport* audio_transport =
engine()->voe()->base()->audio_transport();
- send_streams_.insert(std::make_pair(ssrc, new WebRtcAudioSendStream(
- channel, audio_transport, ssrc, sp.cname, send_rtp_extensions_, call_)));
+
+ WebRtcAudioSendStream* stream = new WebRtcAudioSendStream(
+ channel, audio_transport, ssrc, sp.cname, send_rtp_extensions_, call_,
+ this);
+ send_streams_.insert(std::make_pair(ssrc, stream));
// Set the current codecs to be used for the new channel. We need to do this
// after adding the channel to send_channels_, because of how max bitrate is
// currently being configured by SetSendCodec().
- if (HasSendCodec() && !SetSendCodecs(channel)) {
+ if (HasSendCodec() && !SetSendCodecs(channel, stream->rtp_parameters())) {
RemoveSendStream(ssrc);
return false;
}
@@ -2012,7 +2167,7 @@ bool WebRtcVoiceMediaChannel::AddRecvStream(const StreamParams& sp) {
ssrc, new WebRtcAudioReceiveStream(channel, ssrc, receiver_reports_ssrc_,
recv_transport_cc_enabled_,
sp.sync_label, recv_rtp_extensions_,
- call_)));
+ call_, this)));
SetNack(channel, send_codec_spec_.nack_enabled);
SetPlayout(channel, playout_);
@@ -2178,54 +2333,51 @@ void WebRtcVoiceMediaChannel::OnPacketReceived(
rtc::CopyOnWriteBuffer* packet, const rtc::PacketTime& packet_time) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
+ const webrtc::PacketTime webrtc_packet_time(packet_time.timestamp,
+ packet_time.not_before);
+ webrtc::PacketReceiver::DeliveryStatus delivery_result =
+ call_->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO,
+ packet->cdata(), packet->size(),
+ webrtc_packet_time);
+ if (delivery_result != webrtc::PacketReceiver::DELIVERY_UNKNOWN_SSRC) {
+ return;
+ }
+
+ // Create a default receive stream for this unsignalled and previously not
+ // received ssrc. If there already is a default receive stream, delete it.
+ // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5208
uint32_t ssrc = 0;
if (!GetRtpSsrc(packet->cdata(), packet->size(), &ssrc)) {
return;
}
- // If we don't have a default channel, and the SSRC is unknown, create a
- // default channel.
- if (default_recv_ssrc_ == -1 && GetReceiveChannelId(ssrc) == -1) {
- StreamParams sp;
- sp.ssrcs.push_back(ssrc);
- LOG(LS_INFO) << "Creating default receive stream for SSRC=" << ssrc << ".";
- if (!AddRecvStream(sp)) {
- LOG(LS_WARNING) << "Could not create default receive stream.";
- return;
- }
- default_recv_ssrc_ = ssrc;
- SetOutputVolume(default_recv_ssrc_, default_recv_volume_);
- if (default_sink_) {
- std::unique_ptr<webrtc::AudioSinkInterface> proxy_sink(
- new ProxySink(default_sink_.get()));
- SetRawAudioSink(default_recv_ssrc_, std::move(proxy_sink));
- }
+ if (default_recv_ssrc_ != -1) {
+ LOG(LS_INFO) << "Removing default receive stream with ssrc "
+ << default_recv_ssrc_;
+ RTC_DCHECK_NE(ssrc, default_recv_ssrc_);
+ RemoveRecvStream(default_recv_ssrc_);
+ default_recv_ssrc_ = -1;
}
- // Forward packet to Call. If the SSRC is unknown we'll return after this.
- const webrtc::PacketTime webrtc_packet_time(packet_time.timestamp,
- packet_time.not_before);
- webrtc::PacketReceiver::DeliveryStatus delivery_result =
- call_->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO,
- packet->cdata(), packet->size(), webrtc_packet_time);
- if (webrtc::PacketReceiver::DELIVERY_OK != delivery_result) {
- // If the SSRC is unknown here, route it to the default channel, if we have
- // one. See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5208
- if (default_recv_ssrc_ == -1) {
- return;
- } else {
- ssrc = default_recv_ssrc_;
- }
+ StreamParams sp;
+ sp.ssrcs.push_back(ssrc);
+ LOG(LS_INFO) << "Creating default receive stream for SSRC=" << ssrc << ".";
+ if (!AddRecvStream(sp)) {
+ LOG(LS_WARNING) << "Could not create default receive stream.";
+ return;
}
-
- // Find the channel to send this packet to. It must exist since webrtc::Call
- // was able to demux the packet.
- int channel = GetReceiveChannelId(ssrc);
- RTC_DCHECK(channel != -1);
-
- // Pass it off to the decoder.
- engine()->voe()->network()->ReceivedRTPPacket(
- channel, packet->cdata(), packet->size(), webrtc_packet_time);
+ default_recv_ssrc_ = ssrc;
+ SetOutputVolume(default_recv_ssrc_, default_recv_volume_);
+ if (default_sink_) {
+ std::unique_ptr<webrtc::AudioSinkInterface> proxy_sink(
+ new ProxySink(default_sink_.get()));
+ SetRawAudioSink(default_recv_ssrc_, std::move(proxy_sink));
+ }
+ delivery_result = call_->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO,
+ packet->cdata(),
+ packet->size(),
+ webrtc_packet_time);
+ RTC_DCHECK_NE(webrtc::PacketReceiver::DELIVERY_UNKNOWN_SSRC, delivery_result);
}
void WebRtcVoiceMediaChannel::OnRtcpReceived(
@@ -2237,44 +2389,12 @@ void WebRtcVoiceMediaChannel::OnRtcpReceived(
packet_time.not_before);
call_->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO,
packet->cdata(), packet->size(), webrtc_packet_time);
-
- // Sending channels need all RTCP packets with feedback information.
- // Even sender reports can contain attached report blocks.
- // Receiving channels need sender reports in order to create
- // correct receiver reports.
- int type = 0;
- if (!GetRtcpType(packet->cdata(), packet->size(), &type)) {
- LOG(LS_WARNING) << "Failed to parse type from received RTCP packet";
- return;
- }
-
- // If it is a sender report, find the receive channel that is listening.
- if (type == kRtcpTypeSR) {
- uint32_t ssrc = 0;
- if (!GetRtcpSsrc(packet->cdata(), packet->size(), &ssrc)) {
- return;
- }
- int recv_channel_id = GetReceiveChannelId(ssrc);
- if (recv_channel_id != -1) {
- engine()->voe()->network()->ReceivedRTCPPacket(
- recv_channel_id, packet->cdata(), packet->size());
- }
- }
-
- // SR may continue RR and any RR entry may correspond to any one of the send
- // channels. So all RTCP packets must be forwarded all send channels. VoE
- // will filter out RR internally.
- for (const auto& ch : send_streams_) {
- engine()->voe()->network()->ReceivedRTCPPacket(
- ch.second->channel(), packet->cdata(), packet->size());
- }
}
void WebRtcVoiceMediaChannel::OnNetworkRouteChanged(
const std::string& transport_name,
- const NetworkRoute& network_route) {
- // TODO(honghaiz): uncomment this once the function in call is implemented.
- // call_->OnNetworkRouteChanged(transport_name, network_route);
+ const rtc::NetworkRoute& network_route) {
+ call_->OnNetworkRouteChanged(transport_name, network_route);
}
bool WebRtcVoiceMediaChannel::MuteStream(uint32_t ssrc, bool muted) {
@@ -2311,18 +2431,37 @@ bool WebRtcVoiceMediaChannel::MuteStream(uint32_t ssrc, bool muted) {
return true;
}
-// TODO(minyue): SetMaxSendBandwidth() is subject to be renamed to
-// SetMaxSendBitrate() in future.
-bool WebRtcVoiceMediaChannel::SetMaxSendBandwidth(int bps) {
- LOG(LS_INFO) << "WebRtcVoiceMediaChannel::SetMaxSendBandwidth.";
- return SetSendBitrateInternal(bps);
+bool WebRtcVoiceMediaChannel::SetMaxSendBitrate(int bps) {
+ LOG(LS_INFO) << "WebRtcVoiceMediaChannel::SetMaxSendBitrate.";
+ max_send_bitrate_bps_ = bps;
+
+ for (const auto& kv : send_streams_) {
+ if (!SetChannelSendParameters(kv.second->channel(),
+ kv.second->rtp_parameters())) {
+ return false;
+ }
+ }
+ return true;
}
-bool WebRtcVoiceMediaChannel::SetSendBitrateInternal(int bps) {
- LOG(LS_INFO) << "WebRtcVoiceMediaChannel::SetSendBitrateInternal.";
+bool WebRtcVoiceMediaChannel::SetChannelSendParameters(
+ int channel,
+ const webrtc::RtpParameters& parameters) {
+ RTC_CHECK_EQ(1UL, parameters.encodings.size());
+ // TODO(deadbeef): Handle setting parameters with a list of codecs in a
+ // different order (which should change the send codec).
+ return SetMaxSendBitrate(
+ channel, MinPositive(max_send_bitrate_bps_,
+ parameters.encodings[0].max_bitrate_bps));
+}
- send_bitrate_setting_ = true;
- send_bitrate_bps_ = bps;
+bool WebRtcVoiceMediaChannel::SetMaxSendBitrate(int channel, int bps) {
+ // Bitrate is auto by default.
+ // TODO(bemasc): Fix this so that if SetMaxSendBandwidth(50) is followed by
+ // SetMaxSendBandwith(0), the second call removes the previous limit.
+ if (bps <= 0) {
+ return true;
+ }
if (!HasSendCodec()) {
LOG(LS_INFO) << "The send codec has not been set up yet. "
@@ -2330,24 +2469,19 @@ bool WebRtcVoiceMediaChannel::SetSendBitrateInternal(int bps) {
return true;
}
- // Bitrate is auto by default.
- // TODO(bemasc): Fix this so that if SetMaxSendBandwidth(50) is followed by
- // SetMaxSendBandwith(0), the second call removes the previous limit.
- if (bps <= 0)
- return true;
-
webrtc::CodecInst codec = send_codec_spec_.codec_inst;
bool is_multi_rate = WebRtcVoiceCodecs::IsCodecMultiRate(codec);
if (is_multi_rate) {
// If codec is multi-rate then just set the bitrate.
- codec.rate = bps;
- for (const auto& ch : send_streams_) {
- if (!SetSendCodec(ch.second->channel(), codec)) {
- LOG(LS_INFO) << "Failed to set codec " << codec.plname
- << " to bitrate " << bps << " bps.";
- return false;
- }
+ int max_bitrate_bps = WebRtcVoiceCodecs::MaxBitrateBps(codec);
+ codec.rate = std::min(bps, max_bitrate_bps);
+ LOG(LS_INFO) << "Setting codec " << codec.plname << " to bitrate " << bps
+ << " bps.";
+ if (!SetSendCodec(channel, codec)) {
+ LOG(LS_ERROR) << "Failed to set codec " << codec.plname << " to bitrate "
+ << bps << " bps.";
+ return false;
}
return true;
} else {
@@ -2355,9 +2489,9 @@ bool WebRtcVoiceMediaChannel::SetSendBitrateInternal(int bps) {
// then fail. If codec is not multi-rate and |bps| exceeds or equal the
// fixed bitrate then ignore.
if (bps < codec.rate) {
- LOG(LS_INFO) << "Failed to set codec " << codec.plname
- << " to bitrate " << bps << " bps"
- << ", requires at least " << codec.rate << " bps.";
+ LOG(LS_ERROR) << "Failed to set codec " << codec.plname << " to bitrate "
+ << bps << " bps"
+ << ", requires at least " << codec.rate << " bps.";
return false;
}
return true;
diff --git a/chromium/third_party/webrtc/media/engine/webrtcvoiceengine.h b/chromium/third_party/webrtc/media/engine/webrtcvoiceengine.h
index 396e3bbb8c0..5b435348e65 100644
--- a/chromium/third_party/webrtc/media/engine/webrtcvoiceengine.h
+++ b/chromium/third_party/webrtc/media/engine/webrtcvoiceengine.h
@@ -18,6 +18,7 @@
#include "webrtc/audio_state.h"
#include "webrtc/base/buffer.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/networkroute.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/stream.h"
@@ -84,9 +85,10 @@ class WebRtcVoiceEngine final : public webrtc::TraceCallback {
// Stops AEC dump.
void StopAecDump();
- // Starts recording an RtcEventLog using an existing file until 10 minutes
- // pass or the StopRtcEventLog function is called.
- bool StartRtcEventLog(rtc::PlatformFile file);
+ // Starts recording an RtcEventLog using an existing file until the log file
+ // reaches the maximum filesize or the StopRtcEventLog function is called.
+ // If the value of max_size_bytes is <= 0, no limit is used.
+ bool StartRtcEventLog(rtc::PlatformFile file, int64_t max_size_bytes);
// Stops recording the RtcEventLog.
void StopRtcEventLog();
@@ -103,6 +105,7 @@ class WebRtcVoiceEngine final : public webrtc::TraceCallback {
void StartAecDump(const std::string& filename);
int CreateVoEChannel();
+ webrtc::AudioDeviceModule* adm();
rtc::ThreadChecker signal_thread_checker_;
rtc::ThreadChecker worker_thread_checker_;
@@ -118,13 +121,14 @@ class WebRtcVoiceEngine final : public webrtc::TraceCallback {
bool is_dumping_aec_ = false;
webrtc::AgcConfig default_agc_config_;
- // Cache received extended_filter_aec, delay_agnostic_aec and experimental_ns
- // values, and apply them in case they are missing in the audio options. We
- // need to do this because SetExtraOptions() will revert to defaults for
- // options which are not provided.
+ // Cache received extended_filter_aec, delay_agnostic_aec, experimental_ns and
+ // intelligibility_enhancer values, and apply them in case they are missing
+ // in the audio options. We need to do this because SetExtraOptions() will
+ // revert to defaults for options which are not provided.
rtc::Optional<bool> extended_filter_aec_;
rtc::Optional<bool> delay_agnostic_aec_;
rtc::Optional<bool> experimental_ns_;
+ rtc::Optional<bool> intelligibility_enhancer_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WebRtcVoiceEngine);
};
@@ -146,6 +150,14 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel,
bool SetSendParameters(const AudioSendParameters& params) override;
bool SetRecvParameters(const AudioRecvParameters& params) override;
+ webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override;
+ bool SetRtpSendParameters(uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) override;
+ webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const override;
+ bool SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) override;
+
bool SetPlayout(bool playout) override;
bool PausePlayout();
bool ResumePlayout();
@@ -176,7 +188,7 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel,
void OnRtcpReceived(rtc::CopyOnWriteBuffer* packet,
const rtc::PacketTime& packet_time) override;
void OnNetworkRouteChanged(const std::string& transport_name,
- const NetworkRoute& network_route) override;
+ const rtc::NetworkRoute& network_route) override;
void OnReadyToSend(bool ready) override;
bool GetStats(VoiceMediaInfo* info) override;
@@ -206,10 +218,9 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel,
bool SetOptions(const AudioOptions& options);
bool SetRecvCodecs(const std::vector<AudioCodec>& codecs);
bool SetSendCodecs(const std::vector<AudioCodec>& codecs);
- bool SetSendCodecs(int channel);
+ bool SetSendCodecs(int channel, const webrtc::RtpParameters& rtp_parameters);
void SetNack(int channel, bool nack_enabled);
bool SetSendCodec(int channel, const webrtc::CodecInst& send_codec);
- bool SetMaxSendBandwidth(int bps);
bool SetLocalSource(uint32_t ssrc, AudioSource* source);
bool MuteStream(uint32_t ssrc, bool mute);
@@ -223,17 +234,22 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel,
bool IsDefaultRecvStream(uint32_t ssrc) {
return default_recv_ssrc_ == static_cast<int64_t>(ssrc);
}
- bool SetSendBitrateInternal(int bps);
+ bool SetMaxSendBitrate(int bps);
+ bool SetChannelSendParameters(int channel,
+ const webrtc::RtpParameters& parameters);
+ bool SetMaxSendBitrate(int channel, int bps);
bool HasSendCodec() const {
return send_codec_spec_.codec_inst.pltype != -1;
}
+ bool ValidateRtpParameters(const webrtc::RtpParameters& parameters);
+ void SetupRecording();
rtc::ThreadChecker worker_thread_checker_;
WebRtcVoiceEngine* const engine_ = nullptr;
+ std::vector<AudioCodec> send_codecs_;
std::vector<AudioCodec> recv_codecs_;
- bool send_bitrate_setting_ = false;
- int send_bitrate_bps_ = 0;
+ int max_send_bitrate_bps_ = 0;
AudioOptions options_;
rtc::Optional<int> dtmf_payload_type_;
bool desired_playout_ = false;
diff --git a/chromium/third_party/webrtc/media/engine/webrtcvoiceengine_unittest.cc b/chromium/third_party/webrtc/media/engine/webrtcvoiceengine_unittest.cc
index f2f7056a7d0..4446e27c162 100644
--- a/chromium/third_party/webrtc/media/engine/webrtcvoiceengine_unittest.cc
+++ b/chromium/third_party/webrtc/media/engine/webrtcvoiceengine_unittest.cc
@@ -33,16 +33,19 @@ using testing::StrictMock;
namespace {
-const cricket::AudioCodec kPcmuCodec(0, "PCMU", 8000, 64000, 1, 0);
-const cricket::AudioCodec kIsacCodec(103, "ISAC", 16000, 32000, 1, 0);
-const cricket::AudioCodec kOpusCodec(111, "opus", 48000, 64000, 2, 0);
-const cricket::AudioCodec kG722CodecVoE(9, "G722", 16000, 64000, 1, 0);
-const cricket::AudioCodec kG722CodecSdp(9, "G722", 8000, 64000, 1, 0);
-const cricket::AudioCodec kRedCodec(117, "red", 8000, 0, 1, 0);
-const cricket::AudioCodec kCn8000Codec(13, "CN", 8000, 0, 1, 0);
-const cricket::AudioCodec kCn16000Codec(105, "CN", 16000, 0, 1, 0);
-const cricket::AudioCodec kTelephoneEventCodec(106, "telephone-event", 8000, 0,
- 1, 0);
+const cricket::AudioCodec kPcmuCodec(0, "PCMU", 8000, 64000, 1);
+const cricket::AudioCodec kIsacCodec(103, "ISAC", 16000, 32000, 1);
+const cricket::AudioCodec kOpusCodec(111, "opus", 48000, 64000, 2);
+const cricket::AudioCodec kG722CodecVoE(9, "G722", 16000, 64000, 1);
+const cricket::AudioCodec kG722CodecSdp(9, "G722", 8000, 64000, 1);
+const cricket::AudioCodec kRedCodec(117, "red", 8000, 0, 1);
+const cricket::AudioCodec kCn8000Codec(13, "CN", 8000, 0, 1);
+const cricket::AudioCodec kCn16000Codec(105, "CN", 16000, 0, 1);
+const cricket::AudioCodec kTelephoneEventCodec(106,
+ "telephone-event",
+ 8000,
+ 0,
+ 1);
const uint32_t kSsrc1 = 0x99;
const uint32_t kSsrc2 = 2;
const uint32_t kSsrc3 = 3;
@@ -55,7 +58,6 @@ class FakeVoEWrapper : public cricket::VoEWrapper {
engine, // base
engine, // codec
engine, // hw
- engine, // network
engine, // rtp
engine) { // volume
}
@@ -67,6 +69,9 @@ TEST(WebRtcVoiceEngineTestStubLibrary, StartupShutdown) {
StrictMock<webrtc::test::MockAudioDeviceModule> adm;
EXPECT_CALL(adm, AddRef()).WillOnce(Return(0));
EXPECT_CALL(adm, Release()).WillOnce(Return(0));
+ EXPECT_CALL(adm, BuiltInAECIsAvailable()).WillOnce(Return(false));
+ EXPECT_CALL(adm, BuiltInAGCIsAvailable()).WillOnce(Return(false));
+ EXPECT_CALL(adm, BuiltInNSIsAvailable()).WillOnce(Return(false));
cricket::FakeWebRtcVoiceEngine voe;
EXPECT_FALSE(voe.IsInited());
{
@@ -93,6 +98,9 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
: call_(webrtc::Call::Config()), override_field_trials_(field_trials) {
EXPECT_CALL(adm_, AddRef()).WillOnce(Return(0));
EXPECT_CALL(adm_, Release()).WillOnce(Return(0));
+ EXPECT_CALL(adm_, BuiltInAECIsAvailable()).WillOnce(Return(false));
+ EXPECT_CALL(adm_, BuiltInAGCIsAvailable()).WillOnce(Return(false));
+ EXPECT_CALL(adm_, BuiltInNSIsAvailable()).WillOnce(Return(false));
engine_.reset(new cricket::WebRtcVoiceEngine(&adm_,
new FakeVoEWrapper(&voe_)));
send_parameters_.codecs.push_back(kPcmuCodec);
@@ -155,6 +163,16 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
return GetRecvStream(ssrc).GetConfig();
}
+ void SetSend(cricket::VoiceMediaChannel* channel, bool enable) {
+ ASSERT_TRUE(channel);
+ if (enable) {
+ EXPECT_CALL(adm_, RecordingIsInitialized()).WillOnce(Return(false));
+ EXPECT_CALL(adm_, Recording()).WillOnce(Return(false));
+ EXPECT_CALL(adm_, InitRecording()).WillOnce(Return(0));
+ }
+ channel->SetSend(enable);
+ }
+
void TestInsertDtmf(uint32_t ssrc, bool caller) {
EXPECT_TRUE(SetupChannel());
if (caller) {
@@ -166,7 +184,7 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
// Test we can only InsertDtmf when the other side supports telephone-event.
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
- channel_->SetSend(true);
+ SetSend(channel_, true);
EXPECT_FALSE(channel_->CanInsertDtmf());
EXPECT_FALSE(channel_->InsertDtmf(ssrc, 1, 111));
send_parameters_.codecs.push_back(kTelephoneEventCodec);
@@ -199,10 +217,10 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
// |max_bitrate| is a parameter to set to SetMaxSendBandwidth().
// |expected_result| is the expected result from SetMaxSendBandwidth().
// |expected_bitrate| is the expected audio bitrate afterward.
- void TestSendBandwidth(const cricket::AudioCodec& codec,
- int max_bitrate,
- bool expected_result,
- int expected_bitrate) {
+ void TestMaxSendBandwidth(const cricket::AudioCodec& codec,
+ int max_bitrate,
+ bool expected_result,
+ int expected_bitrate) {
cricket::AudioSendParameters parameters;
parameters.codecs.push_back(codec);
parameters.max_bandwidth_bps = max_bitrate;
@@ -214,6 +232,56 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
EXPECT_EQ(expected_bitrate, temp_codec.rate);
}
+ // Sets the per-stream maximum bitrate limit for the specified SSRC.
+ bool SetMaxBitrateForStream(int32_t ssrc, int bitrate) {
+ webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(ssrc);
+ EXPECT_EQ(1UL, parameters.encodings.size());
+
+ parameters.encodings[0].max_bitrate_bps = bitrate;
+ return channel_->SetRtpSendParameters(ssrc, parameters);
+ }
+
+ bool SetGlobalMaxBitrate(const cricket::AudioCodec& codec, int bitrate) {
+ cricket::AudioSendParameters send_parameters;
+ send_parameters.codecs.push_back(codec);
+ send_parameters.max_bandwidth_bps = bitrate;
+ return channel_->SetSendParameters(send_parameters);
+ }
+
+ int GetCodecBitrate(int32_t ssrc) {
+ cricket::WebRtcVoiceMediaChannel* media_channel =
+ static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_);
+ int channel = media_channel->GetSendChannelId(ssrc);
+ EXPECT_NE(-1, channel);
+ webrtc::CodecInst codec;
+ EXPECT_FALSE(voe_.GetSendCodec(channel, codec));
+ return codec.rate;
+ }
+
+ void SetAndExpectMaxBitrate(const cricket::AudioCodec& codec,
+ int global_max,
+ int stream_max,
+ bool expected_result,
+ int expected_codec_bitrate) {
+ // Clear the bitrate limit from the previous test case.
+ EXPECT_TRUE(SetMaxBitrateForStream(kSsrc1, -1));
+
+ // Attempt to set the requested bitrate limits.
+ EXPECT_TRUE(SetGlobalMaxBitrate(codec, global_max));
+ EXPECT_EQ(expected_result, SetMaxBitrateForStream(kSsrc1, stream_max));
+
+ // Verify that reading back the parameters gives results
+ // consistent with the Set() result.
+ webrtc::RtpParameters resulting_parameters =
+ channel_->GetRtpSendParameters(kSsrc1);
+ EXPECT_EQ(1UL, resulting_parameters.encodings.size());
+ EXPECT_EQ(expected_result ? stream_max : -1,
+ resulting_parameters.encodings[0].max_bitrate_bps);
+
+ // Verify that the codec settings have the expected bitrate.
+ EXPECT_EQ(expected_codec_bitrate, GetCodecBitrate(kSsrc1));
+ }
+
void TestSetSendRtpHeaderExtensions(const std::string& ext) {
EXPECT_TRUE(SetupSendStream());
@@ -426,20 +494,44 @@ TEST_F(WebRtcVoiceEngineTestFake, CreateChannel) {
EXPECT_TRUE(SetupChannel());
}
+// Test that we can add a send stream and that it has the correct defaults.
+TEST_F(WebRtcVoiceEngineTestFake, CreateSendStream) {
+ EXPECT_TRUE(SetupChannel());
+ EXPECT_TRUE(
+ channel_->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc1)));
+ const webrtc::AudioSendStream::Config& config = GetSendStreamConfig(kSsrc1);
+ EXPECT_EQ(kSsrc1, config.rtp.ssrc);
+ EXPECT_EQ("", config.rtp.c_name);
+ EXPECT_EQ(0u, config.rtp.extensions.size());
+ EXPECT_EQ(static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_),
+ config.send_transport);
+}
+
+// Test that we can add a receive stream and that it has the correct defaults.
+TEST_F(WebRtcVoiceEngineTestFake, CreateRecvStream) {
+ EXPECT_TRUE(SetupChannel());
+ EXPECT_TRUE(
+ channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc1)));
+ const webrtc::AudioReceiveStream::Config& config =
+ GetRecvStreamConfig(kSsrc1);
+ EXPECT_EQ(kSsrc1, config.rtp.remote_ssrc);
+ EXPECT_EQ(0xFA17FA17, config.rtp.local_ssrc);
+ EXPECT_FALSE(config.rtp.transport_cc);
+ EXPECT_EQ(0u, config.rtp.extensions.size());
+ EXPECT_EQ(static_cast<cricket::WebRtcVoiceMediaChannel*>(channel_),
+ config.rtcp_send_transport);
+ EXPECT_EQ("", config.sync_group);
+}
+
// Tests that the list of supported codecs is created properly and ordered
-// correctly
-TEST_F(WebRtcVoiceEngineTestFake, CodecPreference) {
+// correctly (such that opus appears first).
+TEST_F(WebRtcVoiceEngineTestFake, CodecOrder) {
const std::vector<cricket::AudioCodec>& codecs = engine_->codecs();
ASSERT_FALSE(codecs.empty());
EXPECT_STRCASEEQ("opus", codecs[0].name.c_str());
EXPECT_EQ(48000, codecs[0].clockrate);
EXPECT_EQ(2, codecs[0].channels);
EXPECT_EQ(64000, codecs[0].bitrate);
- int pref = codecs[0].preference;
- for (size_t i = 1; i < codecs.size(); ++i) {
- EXPECT_GT(pref, codecs[i].preference);
- pref = codecs[i].preference;
- }
}
TEST_F(WebRtcVoiceEngineTestFake, OpusSupportsTransportCc) {
@@ -523,7 +615,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetRecvCodecsUnsupportedCodec) {
EXPECT_TRUE(SetupChannel());
cricket::AudioRecvParameters parameters;
parameters.codecs.push_back(kIsacCodec);
- parameters.codecs.push_back(cricket::AudioCodec(127, "XYZ", 32000, 0, 1, 0));
+ parameters.codecs.push_back(cricket::AudioCodec(127, "XYZ", 32000, 0, 1));
EXPECT_FALSE(channel_->SetRecvParameters(parameters));
}
@@ -692,13 +784,13 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendBandwidthAuto) {
// bitrate is <= 0.
// ISAC, default bitrate == 32000.
- TestSendBandwidth(kIsacCodec, 0, true, 32000);
+ TestMaxSendBandwidth(kIsacCodec, 0, true, 32000);
// PCMU, default bitrate == 64000.
- TestSendBandwidth(kPcmuCodec, -1, true, 64000);
+ TestMaxSendBandwidth(kPcmuCodec, -1, true, 64000);
// opus, default bitrate == 64000.
- TestSendBandwidth(kOpusCodec, -1, true, 64000);
+ TestMaxSendBandwidth(kOpusCodec, -1, true, 64000);
}
TEST_F(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthMultiRateAsCaller) {
@@ -707,12 +799,16 @@ TEST_F(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthMultiRateAsCaller) {
// Test that the bitrate of a multi-rate codec is always the maximum.
// ISAC, default bitrate == 32000.
- TestSendBandwidth(kIsacCodec, 128000, true, 128000);
- TestSendBandwidth(kIsacCodec, 16000, true, 16000);
+ TestMaxSendBandwidth(kIsacCodec, 40000, true, 40000);
+ TestMaxSendBandwidth(kIsacCodec, 16000, true, 16000);
+ // Rates above the max (56000) should be capped.
+ TestMaxSendBandwidth(kIsacCodec, 100000, true, 56000);
// opus, default bitrate == 64000.
- TestSendBandwidth(kOpusCodec, 96000, true, 96000);
- TestSendBandwidth(kOpusCodec, 48000, true, 48000);
+ TestMaxSendBandwidth(kOpusCodec, 96000, true, 96000);
+ TestMaxSendBandwidth(kOpusCodec, 48000, true, 48000);
+ // Rates above the max (510000) should be capped.
+ TestMaxSendBandwidth(kOpusCodec, 600000, true, 510000);
}
TEST_F(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthFixedRateAsCaller) {
@@ -722,13 +818,13 @@ TEST_F(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthFixedRateAsCaller) {
// if it's bigger than the fixed rate.
// PCMU, fixed bitrate == 64000.
- TestSendBandwidth(kPcmuCodec, 0, true, 64000);
- TestSendBandwidth(kPcmuCodec, 1, false, 64000);
- TestSendBandwidth(kPcmuCodec, 128000, true, 64000);
- TestSendBandwidth(kPcmuCodec, 32000, false, 64000);
- TestSendBandwidth(kPcmuCodec, 64000, true, 64000);
- TestSendBandwidth(kPcmuCodec, 63999, false, 64000);
- TestSendBandwidth(kPcmuCodec, 64001, true, 64000);
+ TestMaxSendBandwidth(kPcmuCodec, 0, true, 64000);
+ TestMaxSendBandwidth(kPcmuCodec, 1, false, 64000);
+ TestMaxSendBandwidth(kPcmuCodec, 128000, true, 64000);
+ TestMaxSendBandwidth(kPcmuCodec, 32000, false, 64000);
+ TestMaxSendBandwidth(kPcmuCodec, 64000, true, 64000);
+ TestMaxSendBandwidth(kPcmuCodec, 63999, false, 64000);
+ TestMaxSendBandwidth(kPcmuCodec, 64001, true, 64000);
}
TEST_F(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthMultiRateAsCallee) {
@@ -772,6 +868,175 @@ TEST_F(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthCbr) {
EXPECT_EQ(64000, codec.rate);
}
+// Test that the per-stream bitrate limit and the global
+// bitrate limit both apply.
+TEST_F(WebRtcVoiceEngineTestFake, SetMaxBitratePerStream) {
+ EXPECT_TRUE(SetupSendStream());
+
+ // opus, default bitrate == 64000.
+ SetAndExpectMaxBitrate(kOpusCodec, 0, 0, true, 64000);
+ SetAndExpectMaxBitrate(kOpusCodec, 48000, 0, true, 48000);
+ SetAndExpectMaxBitrate(kOpusCodec, 48000, 64000, true, 48000);
+ SetAndExpectMaxBitrate(kOpusCodec, 64000, 48000, true, 48000);
+
+ // CBR codecs allow both maximums to exceed the bitrate.
+ SetAndExpectMaxBitrate(kPcmuCodec, 0, 0, true, 64000);
+ SetAndExpectMaxBitrate(kPcmuCodec, 64001, 0, true, 64000);
+ SetAndExpectMaxBitrate(kPcmuCodec, 0, 64001, true, 64000);
+ SetAndExpectMaxBitrate(kPcmuCodec, 64001, 64001, true, 64000);
+
+ // CBR codecs don't allow per stream maximums to be too low.
+ SetAndExpectMaxBitrate(kPcmuCodec, 0, 63999, false, 64000);
+ SetAndExpectMaxBitrate(kPcmuCodec, 64001, 63999, false, 64000);
+}
+
+// Test that an attempt to set RtpParameters for a stream that does not exist
+// fails.
+TEST_F(WebRtcVoiceEngineTestFake, CannotSetMaxBitrateForNonexistentStream) {
+ EXPECT_TRUE(SetupChannel());
+ webrtc::RtpParameters nonexistent_parameters =
+ channel_->GetRtpSendParameters(kSsrc1);
+ EXPECT_EQ(0, nonexistent_parameters.encodings.size());
+
+ nonexistent_parameters.encodings.push_back(webrtc::RtpEncodingParameters());
+ EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrc1, nonexistent_parameters));
+}
+
+TEST_F(WebRtcVoiceEngineTestFake,
+ CannotSetRtpSendParametersWithIncorrectNumberOfEncodings) {
+ // This test verifies that setting RtpParameters succeeds only if
+ // the structure contains exactly one encoding.
+ // TODO(skvlad): Update this test when we start supporting setting parameters
+ // for each encoding individually.
+
+ EXPECT_TRUE(SetupSendStream());
+ // Setting RtpParameters with no encoding is expected to fail.
+ webrtc::RtpParameters parameters;
+ EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrc1, parameters));
+ // Setting RtpParameters with exactly one encoding should succeed.
+ parameters.encodings.push_back(webrtc::RtpEncodingParameters());
+ EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrc1, parameters));
+ // Two or more encodings should result in failure.
+ parameters.encodings.push_back(webrtc::RtpEncodingParameters());
+ EXPECT_FALSE(channel_->SetRtpSendParameters(kSsrc1, parameters));
+}
+
+// Test that a stream will not be sending if its encoding is made
+// inactive through SetRtpSendParameters.
+TEST_F(WebRtcVoiceEngineTestFake, SetRtpParametersEncodingsActive) {
+ EXPECT_TRUE(SetupSendStream());
+ SetSend(channel_, true);
+ EXPECT_TRUE(GetSendStream(kSsrc1).IsSending());
+ // Get current parameters and change "active" to false.
+ webrtc::RtpParameters parameters = channel_->GetRtpSendParameters(kSsrc1);
+ ASSERT_EQ(1u, parameters.encodings.size());
+ ASSERT_TRUE(parameters.encodings[0].active);
+ parameters.encodings[0].active = false;
+ EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrc1, parameters));
+ EXPECT_FALSE(GetSendStream(kSsrc1).IsSending());
+
+ // Now change it back to active and verify we resume sending.
+ parameters.encodings[0].active = true;
+ EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrc1, parameters));
+ EXPECT_TRUE(GetSendStream(kSsrc1).IsSending());
+}
+
+// Test that SetRtpSendParameters configures the correct encoding channel for
+// each SSRC.
+TEST_F(WebRtcVoiceEngineTestFake, RtpParametersArePerStream) {
+ SetupForMultiSendStream();
+ // Create send streams.
+ for (uint32_t ssrc : kSsrcs4) {
+ EXPECT_TRUE(
+ channel_->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc)));
+ }
+ // Configure one stream to be limited by the stream config, another to be
+ // limited by the global max, and the third one with no per-stream limit
+ // (still subject to the global limit).
+ EXPECT_TRUE(SetGlobalMaxBitrate(kOpusCodec, 64000));
+ EXPECT_TRUE(SetMaxBitrateForStream(kSsrcs4[0], 48000));
+ EXPECT_TRUE(SetMaxBitrateForStream(kSsrcs4[1], 96000));
+ EXPECT_TRUE(SetMaxBitrateForStream(kSsrcs4[2], -1));
+
+ EXPECT_EQ(48000, GetCodecBitrate(kSsrcs4[0]));
+ EXPECT_EQ(64000, GetCodecBitrate(kSsrcs4[1]));
+ EXPECT_EQ(64000, GetCodecBitrate(kSsrcs4[2]));
+
+ // Remove the global cap; the streams should switch to their respective
+ // maximums (or remain unchanged if there was no other limit on them.)
+ EXPECT_TRUE(SetGlobalMaxBitrate(kOpusCodec, -1));
+ EXPECT_EQ(48000, GetCodecBitrate(kSsrcs4[0]));
+ EXPECT_EQ(96000, GetCodecBitrate(kSsrcs4[1]));
+ EXPECT_EQ(64000, GetCodecBitrate(kSsrcs4[2]));
+}
+
+// Test that GetRtpSendParameters returns the currently configured codecs.
+TEST_F(WebRtcVoiceEngineTestFake, GetRtpSendParametersCodecs) {
+ EXPECT_TRUE(SetupSendStream());
+ cricket::AudioSendParameters parameters;
+ parameters.codecs.push_back(kIsacCodec);
+ parameters.codecs.push_back(kPcmuCodec);
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+
+ webrtc::RtpParameters rtp_parameters = channel_->GetRtpSendParameters(kSsrc1);
+ ASSERT_EQ(2u, rtp_parameters.codecs.size());
+ EXPECT_EQ(kIsacCodec.ToCodecParameters(), rtp_parameters.codecs[0]);
+ EXPECT_EQ(kPcmuCodec.ToCodecParameters(), rtp_parameters.codecs[1]);
+}
+
+// Test that if we set/get parameters multiple times, we get the same results.
+TEST_F(WebRtcVoiceEngineTestFake, SetAndGetRtpSendParameters) {
+ EXPECT_TRUE(SetupSendStream());
+ cricket::AudioSendParameters parameters;
+ parameters.codecs.push_back(kIsacCodec);
+ parameters.codecs.push_back(kPcmuCodec);
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+
+ webrtc::RtpParameters initial_params = channel_->GetRtpSendParameters(kSsrc1);
+
+ // We should be able to set the params we just got.
+ EXPECT_TRUE(channel_->SetRtpSendParameters(kSsrc1, initial_params));
+
+ // ... And this shouldn't change the params returned by GetRtpSendParameters.
+ webrtc::RtpParameters new_params = channel_->GetRtpSendParameters(kSsrc1);
+ EXPECT_EQ(initial_params, channel_->GetRtpSendParameters(kSsrc1));
+}
+
+// Test that GetRtpReceiveParameters returns the currently configured codecs.
+TEST_F(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersCodecs) {
+ EXPECT_TRUE(SetupRecvStream());
+ cricket::AudioRecvParameters parameters;
+ parameters.codecs.push_back(kIsacCodec);
+ parameters.codecs.push_back(kPcmuCodec);
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+
+ webrtc::RtpParameters rtp_parameters =
+ channel_->GetRtpReceiveParameters(kSsrc1);
+ ASSERT_EQ(2u, rtp_parameters.codecs.size());
+ EXPECT_EQ(kIsacCodec.ToCodecParameters(), rtp_parameters.codecs[0]);
+ EXPECT_EQ(kPcmuCodec.ToCodecParameters(), rtp_parameters.codecs[1]);
+}
+
+// Test that if we set/get parameters multiple times, we get the same results.
+TEST_F(WebRtcVoiceEngineTestFake, SetAndGetRtpReceiveParameters) {
+ EXPECT_TRUE(SetupRecvStream());
+ cricket::AudioRecvParameters parameters;
+ parameters.codecs.push_back(kIsacCodec);
+ parameters.codecs.push_back(kPcmuCodec);
+ EXPECT_TRUE(channel_->SetRecvParameters(parameters));
+
+ webrtc::RtpParameters initial_params =
+ channel_->GetRtpReceiveParameters(kSsrc1);
+
+ // We should be able to set the params we just got.
+ EXPECT_TRUE(channel_->SetRtpReceiveParameters(kSsrc1, initial_params));
+
+ // ... And this shouldn't change the params returned by
+ // GetRtpReceiveParameters.
+ webrtc::RtpParameters new_params = channel_->GetRtpReceiveParameters(kSsrc1);
+ EXPECT_EQ(initial_params, channel_->GetRtpReceiveParameters(kSsrc1));
+}
+
// Test that we apply codecs properly.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecs) {
EXPECT_TRUE(SetupSendStream());
@@ -1115,7 +1380,8 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackAsCallee) {
cricket::kParamValueEmpty));
EXPECT_FALSE(voe_.GetNACK(channel_num));
EXPECT_TRUE(channel_->SetSendParameters(parameters));
- EXPECT_FALSE(voe_.GetNACK(channel_num));
+ // NACK should be enabled even with no send stream.
+ EXPECT_TRUE(voe_.GetNACK(channel_num));
EXPECT_TRUE(channel_->AddSendStream(
cricket::StreamParams::CreateLegacy(kSsrc1)));
@@ -2018,9 +2284,9 @@ TEST_F(WebRtcVoiceEngineTestFake, RecvAbsoluteSendTimeHeaderExtensions) {
TEST_F(WebRtcVoiceEngineTestFake, Send) {
EXPECT_TRUE(SetupSendStream());
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
- channel_->SetSend(true);
+ SetSend(channel_, true);
EXPECT_TRUE(GetSendStream(kSsrc1).IsSending());
- channel_->SetSend(false);
+ SetSend(channel_, false);
EXPECT_FALSE(GetSendStream(kSsrc1).IsSending());
}
@@ -2030,7 +2296,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SendStateWithAndWithoutSource) {
EXPECT_TRUE(SetupSendStream());
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
EXPECT_TRUE(channel_->SetAudioSend(kSsrc1, true, nullptr, nullptr));
- channel_->SetSend(true);
+ SetSend(channel_, true);
EXPECT_FALSE(GetSendStream(kSsrc1).IsSending());
EXPECT_TRUE(channel_->SetAudioSend(kSsrc1, true, nullptr, &fake_source_));
EXPECT_TRUE(GetSendStream(kSsrc1).IsSending());
@@ -2044,7 +2310,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SendStateWhenStreamsAreRecreated) {
EXPECT_FALSE(GetSendStream(kSsrc1).IsSending());
// Turn on sending.
- channel_->SetSend(true);
+ SetSend(channel_, true);
EXPECT_TRUE(GetSendStream(kSsrc1).IsSending());
// Changing RTP header extensions will recreate the AudioSendStream.
@@ -2054,7 +2320,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SendStateWhenStreamsAreRecreated) {
EXPECT_TRUE(GetSendStream(kSsrc1).IsSending());
// Turn off sending.
- channel_->SetSend(false);
+ SetSend(channel_, false);
EXPECT_FALSE(GetSendStream(kSsrc1).IsSending());
// Changing RTP header extensions will recreate the AudioSendStream.
@@ -2079,7 +2345,7 @@ TEST_F(WebRtcVoiceEngineTestFake, CreateAndDeleteMultipleSendStreams) {
SetupForMultiSendStream();
// Set the global state for sending.
- channel_->SetSend(true);
+ SetSend(channel_, true);
for (uint32_t ssrc : kSsrcs4) {
EXPECT_TRUE(channel_->AddSendStream(
@@ -2150,14 +2416,14 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendWithMultipleSendStreams) {
}
// Set the global state for starting sending.
- channel_->SetSend(true);
+ SetSend(channel_, true);
for (uint32_t ssrc : kSsrcs4) {
// Verify that we are in a sending state for all the send streams.
EXPECT_TRUE(GetSendStream(ssrc).IsSending());
}
// Set the global state for stopping sending.
- channel_->SetSend(false);
+ SetSend(channel_, false);
for (uint32_t ssrc : kSsrcs4) {
// Verify that we are in a stop state for all the send streams.
EXPECT_FALSE(GetSendStream(ssrc).IsSending());
@@ -2236,7 +2502,7 @@ TEST_F(WebRtcVoiceEngineTestFake, PlayoutWithMultipleStreams) {
EXPECT_TRUE(
channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrc2)));
int channel_num2 = voe_.GetLastChannel();
- channel_->SetSend(true);
+ SetSend(channel_, true);
EXPECT_TRUE(GetSendStream(kSsrc1).IsSending());
// Make sure only the new stream is played out.
@@ -2252,7 +2518,7 @@ TEST_F(WebRtcVoiceEngineTestFake, PlayoutWithMultipleStreams) {
EXPECT_TRUE(voe_.GetPlayout(channel_num3));
// Stop sending.
- channel_->SetSend(false);
+ SetSend(channel_, false);
EXPECT_FALSE(GetSendStream(kSsrc1).IsSending());
// Stop playout.
@@ -2284,17 +2550,19 @@ TEST_F(WebRtcVoiceEngineTestFake, CodianSend) {
EXPECT_EQ(0, agc_config.targetLeveldBOv);
send_parameters_.options = options_adjust_agc;
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
- channel_->SetSend(true);
+ SetSend(channel_, true);
EXPECT_TRUE(GetSendStream(kSsrc1).IsSending());
EXPECT_EQ(0, voe_.GetAgcConfig(agc_config));
EXPECT_EQ(agc_config.targetLeveldBOv, 10); // level was attenuated
- channel_->SetSend(false);
+ SetSend(channel_, false);
EXPECT_FALSE(GetSendStream(kSsrc1).IsSending());
EXPECT_EQ(0, voe_.GetAgcConfig(agc_config));
}
TEST_F(WebRtcVoiceEngineTestFake, TxAgcConfigViaOptions) {
EXPECT_TRUE(SetupSendStream());
+ EXPECT_CALL(adm_,
+ BuiltInAGCIsAvailable()).Times(2).WillRepeatedly(Return(false));
webrtc::AgcConfig agc_config;
EXPECT_EQ(0, voe_.GetAgcConfig(agc_config));
EXPECT_EQ(0, agc_config.targetLeveldBOv);
@@ -2319,16 +2587,12 @@ TEST_F(WebRtcVoiceEngineTestFake, TxAgcConfigViaOptions) {
TEST_F(WebRtcVoiceEngineTestFake, SampleRatesViaOptions) {
EXPECT_TRUE(SetupSendStream());
+ EXPECT_CALL(adm_, SetRecordingSampleRate(48000)).WillOnce(Return(0));
+ EXPECT_CALL(adm_, SetPlayoutSampleRate(44100)).WillOnce(Return(0));
send_parameters_.options.recording_sample_rate =
rtc::Optional<uint32_t>(48000);
send_parameters_.options.playout_sample_rate = rtc::Optional<uint32_t>(44100);
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
-
- unsigned int recording_sample_rate, playout_sample_rate;
- EXPECT_EQ(0, voe_.RecordingSampleRate(&recording_sample_rate));
- EXPECT_EQ(0, voe_.PlayoutSampleRate(&playout_sample_rate));
- EXPECT_EQ(48000u, recording_sample_rate);
- EXPECT_EQ(44100u, playout_sample_rate);
}
// Test that we can set the outgoing SSRC properly.
@@ -2365,7 +2629,7 @@ TEST_F(WebRtcVoiceEngineTestFake, GetStats) {
// Start sending - this affects some reported stats.
{
cricket::VoiceMediaInfo info;
- channel_->SetSend(true);
+ SetSend(channel_, true);
EXPECT_EQ(true, channel_->GetStats(&info));
VerifyVoiceSenderInfo(info.senders[0], true);
}
@@ -2420,61 +2684,77 @@ TEST_F(WebRtcVoiceEngineTestFake, Recv) {
EXPECT_TRUE(SetupChannel());
EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1)));
DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
- int channel_num = voe_.GetLastChannel();
- EXPECT_TRUE(voe_.CheckPacket(channel_num, kPcmuFrame, sizeof(kPcmuFrame)));
+
+ EXPECT_TRUE(GetRecvStream(1).VerifyLastPacket(kPcmuFrame,
+ sizeof(kPcmuFrame)));
}
// Test that we can properly receive packets on multiple streams.
TEST_F(WebRtcVoiceEngineTestFake, RecvWithMultipleStreams) {
EXPECT_TRUE(SetupChannel());
- EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1)));
- int channel_num1 = voe_.GetLastChannel();
- EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2)));
- int channel_num2 = voe_.GetLastChannel();
- EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(3)));
- int channel_num3 = voe_.GetLastChannel();
+ const uint32_t ssrc1 = 1;
+ const uint32_t ssrc2 = 2;
+ const uint32_t ssrc3 = 3;
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(ssrc1)));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(ssrc2)));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(ssrc3)));
// Create packets with the right SSRCs.
- char packets[4][sizeof(kPcmuFrame)];
+ unsigned char packets[4][sizeof(kPcmuFrame)];
for (size_t i = 0; i < arraysize(packets); ++i) {
memcpy(packets[i], kPcmuFrame, sizeof(kPcmuFrame));
rtc::SetBE32(packets[i] + 8, static_cast<uint32_t>(i));
}
- EXPECT_TRUE(voe_.CheckNoPacket(channel_num1));
- EXPECT_TRUE(voe_.CheckNoPacket(channel_num2));
- EXPECT_TRUE(voe_.CheckNoPacket(channel_num3));
+
+ const cricket::FakeAudioReceiveStream& s1 = GetRecvStream(ssrc1);
+ const cricket::FakeAudioReceiveStream& s2 = GetRecvStream(ssrc2);
+ const cricket::FakeAudioReceiveStream& s3 = GetRecvStream(ssrc3);
+
+ EXPECT_EQ(s1.received_packets(), 0);
+ EXPECT_EQ(s2.received_packets(), 0);
+ EXPECT_EQ(s3.received_packets(), 0);
DeliverPacket(packets[0], sizeof(packets[0]));
- EXPECT_TRUE(voe_.CheckNoPacket(channel_num1));
- EXPECT_TRUE(voe_.CheckNoPacket(channel_num2));
- EXPECT_TRUE(voe_.CheckNoPacket(channel_num3));
+ EXPECT_EQ(s1.received_packets(), 0);
+ EXPECT_EQ(s2.received_packets(), 0);
+ EXPECT_EQ(s3.received_packets(), 0);
DeliverPacket(packets[1], sizeof(packets[1]));
- EXPECT_TRUE(voe_.CheckPacket(channel_num1, packets[1], sizeof(packets[1])));
- EXPECT_TRUE(voe_.CheckNoPacket(channel_num2));
- EXPECT_TRUE(voe_.CheckNoPacket(channel_num3));
+ EXPECT_EQ(s1.received_packets(), 1);
+ EXPECT_TRUE(s1.VerifyLastPacket(packets[1], sizeof(packets[1])));
+ EXPECT_EQ(s2.received_packets(), 0);
+ EXPECT_EQ(s3.received_packets(), 0);
DeliverPacket(packets[2], sizeof(packets[2]));
- EXPECT_TRUE(voe_.CheckNoPacket(channel_num1));
- EXPECT_TRUE(voe_.CheckPacket(channel_num2, packets[2], sizeof(packets[2])));
- EXPECT_TRUE(voe_.CheckNoPacket(channel_num3));
+ EXPECT_EQ(s1.received_packets(), 1);
+ EXPECT_EQ(s2.received_packets(), 1);
+ EXPECT_TRUE(s2.VerifyLastPacket(packets[2], sizeof(packets[2])));
+ EXPECT_EQ(s3.received_packets(), 0);
DeliverPacket(packets[3], sizeof(packets[3]));
- EXPECT_TRUE(voe_.CheckNoPacket(channel_num1));
- EXPECT_TRUE(voe_.CheckNoPacket(channel_num2));
- EXPECT_TRUE(voe_.CheckPacket(channel_num3, packets[3], sizeof(packets[3])));
+ EXPECT_EQ(s1.received_packets(), 1);
+ EXPECT_EQ(s2.received_packets(), 1);
+ EXPECT_EQ(s3.received_packets(), 1);
+ EXPECT_TRUE(s3.VerifyLastPacket(packets[3], sizeof(packets[3])));
- EXPECT_TRUE(channel_->RemoveRecvStream(3));
- EXPECT_TRUE(channel_->RemoveRecvStream(2));
- EXPECT_TRUE(channel_->RemoveRecvStream(1));
+ EXPECT_TRUE(channel_->RemoveRecvStream(ssrc3));
+ EXPECT_TRUE(channel_->RemoveRecvStream(ssrc2));
+ EXPECT_TRUE(channel_->RemoveRecvStream(ssrc1));
}
// Test that receiving on an unsignalled stream works (default channel will be
// created).
TEST_F(WebRtcVoiceEngineTestFake, RecvUnsignalled) {
EXPECT_TRUE(SetupChannel());
+ EXPECT_EQ(0, call_.GetAudioReceiveStreams().size());
+
DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame));
- int channel_num = voe_.GetLastChannel();
- EXPECT_TRUE(voe_.CheckPacket(channel_num, kPcmuFrame, sizeof(kPcmuFrame)));
+
+ EXPECT_EQ(1, call_.GetAudioReceiveStreams().size());
+ EXPECT_TRUE(GetRecvStream(1).VerifyLastPacket(kPcmuFrame,
+ sizeof(kPcmuFrame)));
}
// Test that receiving on an unsignalled stream works (default channel will be
@@ -2482,48 +2762,61 @@ TEST_F(WebRtcVoiceEngineTestFake, RecvUnsignalled) {
// regardless of their SSRCs.
TEST_F(WebRtcVoiceEngineTestFake, RecvUnsignalledWithSsrcSwitch) {
EXPECT_TRUE(SetupChannel());
- char packet[sizeof(kPcmuFrame)];
+ unsigned char packet[sizeof(kPcmuFrame)];
memcpy(packet, kPcmuFrame, sizeof(kPcmuFrame));
- // Note that the first unknown SSRC cannot be 0, because we only support
- // creating receive streams for SSRC!=0.
- DeliverPacket(packet, sizeof(packet));
- int channel_num = voe_.GetLastChannel();
- EXPECT_TRUE(voe_.CheckPacket(channel_num, packet, sizeof(packet)));
- // Once we have the default channel, SSRC==0 will be ok.
- for (uint32_t ssrc = 0; ssrc < 10; ++ssrc) {
+ // Note that ssrc = 0 is not supported.
+ uint32_t ssrc = 1;
+ for (; ssrc < 10; ++ssrc) {
rtc::SetBE32(&packet[8], ssrc);
DeliverPacket(packet, sizeof(packet));
- EXPECT_TRUE(voe_.CheckPacket(channel_num, packet, sizeof(packet)));
+
+ // Verify we only have one default stream.
+ EXPECT_EQ(1, call_.GetAudioReceiveStreams().size());
+ EXPECT_EQ(1, GetRecvStream(ssrc).received_packets());
+ EXPECT_TRUE(GetRecvStream(ssrc).VerifyLastPacket(packet, sizeof(packet)));
}
+
+ // Sending the same ssrc again should not create a new stream.
+ --ssrc;
+ DeliverPacket(packet, sizeof(packet));
+ EXPECT_EQ(1, call_.GetAudioReceiveStreams().size());
+ EXPECT_EQ(2, GetRecvStream(ssrc).received_packets());
+ EXPECT_TRUE(GetRecvStream(ssrc).VerifyLastPacket(packet, sizeof(packet)));
}
// Test that a default channel is created even after a signalled stream has been
// added, and that this stream will get any packets for unknown SSRCs.
TEST_F(WebRtcVoiceEngineTestFake, RecvUnsignalledAfterSignalled) {
EXPECT_TRUE(SetupChannel());
- char packet[sizeof(kPcmuFrame)];
+ unsigned char packet[sizeof(kPcmuFrame)];
memcpy(packet, kPcmuFrame, sizeof(kPcmuFrame));
// Add a known stream, send packet and verify we got it.
- EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1)));
- int signalled_channel_num = voe_.GetLastChannel();
+ const uint32_t signaled_ssrc = 1;
+ rtc::SetBE32(&packet[8], signaled_ssrc);
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(signaled_ssrc)));
DeliverPacket(packet, sizeof(packet));
- EXPECT_TRUE(voe_.CheckPacket(signalled_channel_num, packet, sizeof(packet)));
+ EXPECT_TRUE(GetRecvStream(signaled_ssrc).VerifyLastPacket(
+ packet, sizeof(packet)));
// Note that the first unknown SSRC cannot be 0, because we only support
// creating receive streams for SSRC!=0.
- rtc::SetBE32(&packet[8], 7011);
+ const uint32_t unsignaled_ssrc = 7011;
+ rtc::SetBE32(&packet[8], unsignaled_ssrc);
DeliverPacket(packet, sizeof(packet));
- int channel_num = voe_.GetLastChannel();
- EXPECT_NE(channel_num, signalled_channel_num);
- EXPECT_TRUE(voe_.CheckPacket(channel_num, packet, sizeof(packet)));
- // Once we have the default channel, SSRC==0 will be ok.
- for (uint32_t ssrc = 0; ssrc < 20; ssrc += 2) {
- rtc::SetBE32(&packet[8], ssrc);
- DeliverPacket(packet, sizeof(packet));
- EXPECT_TRUE(voe_.CheckPacket(channel_num, packet, sizeof(packet)));
- }
+ EXPECT_TRUE(GetRecvStream(unsignaled_ssrc).VerifyLastPacket(
+ packet, sizeof(packet)));
+ EXPECT_EQ(2, call_.GetAudioReceiveStreams().size());
+
+ DeliverPacket(packet, sizeof(packet));
+ EXPECT_EQ(2, GetRecvStream(unsignaled_ssrc).received_packets());
+
+ rtc::SetBE32(&packet[8], signaled_ssrc);
+ DeliverPacket(packet, sizeof(packet));
+ EXPECT_EQ(2, GetRecvStream(signaled_ssrc).received_packets());
+ EXPECT_EQ(2, call_.GetAudioReceiveStreams().size());
}
// Test that we properly handle failures to add a receive stream.
@@ -2621,7 +2914,7 @@ TEST_F(WebRtcVoiceEngineTestFake, InsertDtmfOnSendStreamAsCallee) {
TEST_F(WebRtcVoiceEngineTestFake, TestSetPlayoutError) {
EXPECT_TRUE(SetupSendStream());
EXPECT_TRUE(channel_->SetSendParameters(send_parameters_));
- channel_->SetSend(true);
+ SetSend(channel_, true);
EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2)));
EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(3)));
EXPECT_TRUE(channel_->SetPlayout(true));
@@ -2632,7 +2925,12 @@ TEST_F(WebRtcVoiceEngineTestFake, TestSetPlayoutError) {
TEST_F(WebRtcVoiceEngineTestFake, SetAudioOptions) {
EXPECT_TRUE(SetupSendStream());
-
+ EXPECT_CALL(adm_,
+ BuiltInAECIsAvailable()).Times(9).WillRepeatedly(Return(false));
+ EXPECT_CALL(adm_,
+ BuiltInAGCIsAvailable()).Times(4).WillRepeatedly(Return(false));
+ EXPECT_CALL(adm_,
+ BuiltInNSIsAvailable()).Times(2).WillRepeatedly(Return(false));
bool ec_enabled;
webrtc::EcModes ec_mode;
webrtc::AecmModes aecm_mode;
@@ -2830,6 +3128,13 @@ TEST_F(WebRtcVoiceEngineTestFake, InitDoesNotOverwriteDefaultAgcConfig) {
TEST_F(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) {
EXPECT_TRUE(SetupSendStream());
+ EXPECT_CALL(adm_,
+ BuiltInAECIsAvailable()).Times(9).WillRepeatedly(Return(false));
+ EXPECT_CALL(adm_,
+ BuiltInAGCIsAvailable()).Times(9).WillRepeatedly(Return(false));
+ EXPECT_CALL(adm_,
+ BuiltInNSIsAvailable()).Times(9).WillRepeatedly(Return(false));
+
std::unique_ptr<cricket::WebRtcVoiceMediaChannel> channel1(
static_cast<cricket::WebRtcVoiceMediaChannel*>(engine_->CreateChannel(
&call_, cricket::MediaConfig(), cricket::AudioOptions())));
@@ -2890,7 +3195,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) {
EXPECT_TRUE(agc_enabled);
EXPECT_TRUE(ns_enabled);
- channel1->SetSend(true);
+ SetSend(channel1.get(), true);
voe_.GetEcStatus(ec_enabled, ec_mode);
voe_.GetAgcStatus(agc_enabled, agc_mode);
voe_.GetNsStatus(ns_enabled, ns_mode);
@@ -2898,7 +3203,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) {
EXPECT_TRUE(agc_enabled);
EXPECT_FALSE(ns_enabled);
- channel2->SetSend(true);
+ SetSend(channel2.get(), true);
voe_.GetEcStatus(ec_enabled, ec_mode);
voe_.GetAgcStatus(agc_enabled, agc_mode);
voe_.GetNsStatus(ns_enabled, ns_mode);
@@ -3262,55 +3567,55 @@ TEST(WebRtcVoiceEngineTest, StartupShutdownWithExternalADM) {
TEST(WebRtcVoiceEngineTest, HasCorrectCodecs) {
// Check codecs by name.
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(96, "OPUS", 48000, 0, 2, 0), nullptr));
+ cricket::AudioCodec(96, "OPUS", 48000, 0, 2), nullptr));
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(96, "ISAC", 16000, 0, 1, 0), nullptr));
+ cricket::AudioCodec(96, "ISAC", 16000, 0, 1), nullptr));
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(96, "ISAC", 32000, 0, 1, 0), nullptr));
+ cricket::AudioCodec(96, "ISAC", 32000, 0, 1), nullptr));
// Check that name matching is case-insensitive.
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(96, "ILBC", 8000, 0, 1, 0), nullptr));
+ cricket::AudioCodec(96, "ILBC", 8000, 0, 1), nullptr));
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(96, "iLBC", 8000, 0, 1, 0), nullptr));
+ cricket::AudioCodec(96, "iLBC", 8000, 0, 1), nullptr));
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(96, "PCMU", 8000, 0, 1, 0), nullptr));
+ cricket::AudioCodec(96, "PCMU", 8000, 0, 1), nullptr));
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(96, "PCMA", 8000, 0, 1, 0), nullptr));
+ cricket::AudioCodec(96, "PCMA", 8000, 0, 1), nullptr));
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(96, "G722", 8000, 0, 1, 0), nullptr));
+ cricket::AudioCodec(96, "G722", 8000, 0, 1), nullptr));
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(96, "red", 8000, 0, 1, 0), nullptr));
+ cricket::AudioCodec(96, "red", 8000, 0, 1), nullptr));
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(96, "CN", 32000, 0, 1, 0), nullptr));
+ cricket::AudioCodec(96, "CN", 32000, 0, 1), nullptr));
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(96, "CN", 16000, 0, 1, 0), nullptr));
+ cricket::AudioCodec(96, "CN", 16000, 0, 1), nullptr));
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(96, "CN", 8000, 0, 1, 0), nullptr));
+ cricket::AudioCodec(96, "CN", 8000, 0, 1), nullptr));
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(96, "telephone-event", 8000, 0, 1, 0), nullptr));
+ cricket::AudioCodec(96, "telephone-event", 8000, 0, 1), nullptr));
// Check codecs with an id by id.
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(0, "", 8000, 0, 1, 0), nullptr)); // PCMU
+ cricket::AudioCodec(0, "", 8000, 0, 1), nullptr)); // PCMU
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(8, "", 8000, 0, 1, 0), nullptr)); // PCMA
+ cricket::AudioCodec(8, "", 8000, 0, 1), nullptr)); // PCMA
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(9, "", 8000, 0, 1, 0), nullptr)); // G722
+ cricket::AudioCodec(9, "", 8000, 0, 1), nullptr)); // G722
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(13, "", 8000, 0, 1, 0), nullptr)); // CN
+ cricket::AudioCodec(13, "", 8000, 0, 1), nullptr)); // CN
// Check sample/bitrate matching.
EXPECT_TRUE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(0, "PCMU", 8000, 64000, 1, 0), nullptr));
+ cricket::AudioCodec(0, "PCMU", 8000, 64000, 1), nullptr));
// Check that bad codecs fail.
EXPECT_FALSE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(99, "ABCD", 0, 0, 1, 0), nullptr));
+ cricket::AudioCodec(99, "ABCD", 0, 0, 1), nullptr));
EXPECT_FALSE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(88, "", 0, 0, 1, 0), nullptr));
+ cricket::AudioCodec(88, "", 0, 0, 1), nullptr));
EXPECT_FALSE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(0, "", 0, 0, 2, 0), nullptr));
+ cricket::AudioCodec(0, "", 0, 0, 2), nullptr));
EXPECT_FALSE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(0, "", 5000, 0, 1, 0), nullptr));
+ cricket::AudioCodec(0, "", 5000, 0, 1), nullptr));
EXPECT_FALSE(cricket::WebRtcVoiceEngine::ToCodecInst(
- cricket::AudioCodec(0, "", 0, 5000, 1, 0), nullptr));
+ cricket::AudioCodec(0, "", 0, 5000, 1), nullptr));
// Verify the payload id of common audio codecs, including CN, ISAC, and G722.
cricket::WebRtcVoiceEngine engine(nullptr);
@@ -3334,8 +3639,6 @@ TEST(WebRtcVoiceEngineTest, HasCorrectCodecs) {
EXPECT_EQ(111, it->id);
ASSERT_TRUE(it->params.find("minptime") != it->params.end());
EXPECT_EQ("10", it->params.find("minptime")->second);
- ASSERT_TRUE(it->params.find("maxptime") != it->params.end());
- EXPECT_EQ("60", it->params.find("maxptime")->second);
ASSERT_TRUE(it->params.find("useinbandfec") != it->params.end());
EXPECT_EQ("1", it->params.find("useinbandfec")->second);
}
diff --git a/chromium/third_party/webrtc/media/media.gyp b/chromium/third_party/webrtc/media/media.gyp
index 621af832e87..deb730a85a8 100644
--- a/chromium/third_party/webrtc/media/media.gyp
+++ b/chromium/third_party/webrtc/media/media.gyp
@@ -15,7 +15,6 @@
'dependencies': [
'<(webrtc_root)/base/base.gyp:rtc_base_approved',
'<(webrtc_root)/common.gyp:webrtc_common',
- '<(webrtc_root)/modules/modules.gyp:video_render_module',
'<(webrtc_root)/webrtc.gyp:webrtc',
'<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:metrics_default',
@@ -30,7 +29,6 @@
],
},
'sources': [
- 'base/audioframe.h',
'base/audiosource.h',
'base/codec.cc',
'base/codec.h',
@@ -109,7 +107,6 @@
'-Wextra',
],
'cflags_cc!': [
- '-Wnon-virtual-dtor',
'-Woverloaded-virtual',
],
'msvs_disabled_warnings': [
@@ -134,7 +131,6 @@
['build_with_chromium==1', {
'dependencies': [
'<(webrtc_root)/modules/modules.gyp:video_capture',
- '<(webrtc_root)/modules/modules.gyp:video_render',
],
}, {
'defines': [
@@ -149,7 +145,6 @@
},
'dependencies': [
'<(webrtc_root)/modules/modules.gyp:video_capture_module_internal_impl',
- '<(webrtc_root)/modules/modules.gyp:video_render_module_internal_impl',
],
}],
['OS=="linux" and use_gtk==1', {
@@ -252,11 +247,6 @@
'engine/fakewebrtcvideoengine.h',
'engine/fakewebrtcvoiceengine.h',
],
- # TODO(kjellander): Make the code compile without disabling these flags.
- # See https://bugs.chromium.org/p/webrtc/issues/detail?id=3307
- 'cflags_cc!': [
- '-Wnon-virtual-dtor',
- ],
}, # target rtc_unittest_main
{
'target_name': 'rtc_media_unittests',
@@ -295,7 +285,6 @@
'-Wno-sign-compare',
],
'cflags_cc!': [
- '-Wnon-virtual-dtor',
'-Woverloaded-virtual',
],
'msvs_disabled_warnings': [
@@ -304,15 +293,6 @@
],
'conditions': [
['OS=="win"', {
- 'conditions': [
- ['use_openssl==0', {
- 'dependencies': [
- '<(DEPTH)/net/third_party/nss/ssl.gyp:libssl',
- '<(DEPTH)/third_party/nss/nss.gyp:nspr',
- '<(DEPTH)/third_party/nss/nss.gyp:nss',
- ],
- }],
- ],
'msvs_settings': {
'VCLinkerTool': {
'AdditionalDependencies': [
diff --git a/chromium/third_party/webrtc/media/sctp/sctpdataengine.cc b/chromium/third_party/webrtc/media/sctp/sctpdataengine.cc
index ba3b8f24084..958cd9a0f63 100644
--- a/chromium/third_party/webrtc/media/sctp/sctpdataengine.cc
+++ b/chromium/third_party/webrtc/media/sctp/sctpdataengine.cc
@@ -20,6 +20,7 @@
#include "usrsctplib/usrsctp.h"
#include "webrtc/base/arraysize.h"
#include "webrtc/base/copyonwritebuffer.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/base/helpers.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/safe_conversions.h"
@@ -27,8 +28,29 @@
#include "webrtc/media/base/mediaconstants.h"
#include "webrtc/media/base/streamparams.h"
+namespace cricket {
+// The biggest SCTP packet. Starting from a 'safe' wire MTU value of 1280,
+// take off 80 bytes for DTLS/TURN/TCP/IP overhead.
+static const size_t kSctpMtu = 1200;
+
+// The size of the SCTP association send buffer. 256kB, the usrsctp default.
+static const int kSendBufferSize = 262144;
+
+struct SctpInboundPacket {
+ rtc::CopyOnWriteBuffer buffer;
+ ReceiveDataParams params;
+ // The |flags| parameter is used by SCTP to distinguish notification packets
+ // from other types of packets.
+ int flags;
+};
+
namespace {
-typedef cricket::SctpDataMediaChannel::StreamSet StreamSet;
+// Set the initial value of the static SCTP Data Engines reference count.
+int g_usrsctp_usage_count = 0;
+rtc::GlobalLockPod g_usrsctp_lock_;
+
+typedef SctpDataMediaChannel::StreamSet StreamSet;
+
// Returns a comma-separated, human-readable list of the stream IDs in 's'
std::string ListStreams(const StreamSet& s) {
std::stringstream result;
@@ -85,78 +107,62 @@ std::string ListArray(const uint16_t* array, int num_elems) {
}
return result.str();
}
-} // namespace
-namespace cricket {
typedef rtc::ScopedMessageData<SctpInboundPacket> InboundPacketMessage;
typedef rtc::ScopedMessageData<rtc::CopyOnWriteBuffer> OutboundPacketMessage;
-// The biggest SCTP packet. Starting from a 'safe' wire MTU value of 1280,
-// take off 80 bytes for DTLS/TURN/TCP/IP overhead.
-static const size_t kSctpMtu = 1200;
-
-// The size of the SCTP association send buffer. 256kB, the usrsctp default.
-static const int kSendBufferSize = 262144;
enum {
MSG_SCTPINBOUNDPACKET = 1, // MessageData is SctpInboundPacket
MSG_SCTPOUTBOUNDPACKET = 2, // MessageData is rtc:Buffer
};
-struct SctpInboundPacket {
- rtc::CopyOnWriteBuffer buffer;
- ReceiveDataParams params;
- // The |flags| parameter is used by SCTP to distinguish notification packets
- // from other types of packets.
- int flags;
-};
-
// Helper for logging SCTP messages.
-static void debug_sctp_printf(const char *format, ...) {
+void DebugSctpPrintf(const char* format, ...) {
+#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
char s[255];
va_list ap;
va_start(ap, format);
vsnprintf(s, sizeof(s), format, ap);
LOG(LS_INFO) << "SCTP: " << s;
va_end(ap);
+#endif
}
// Get the PPID to use for the terminating fragment of this type.
-static SctpDataMediaChannel::PayloadProtocolIdentifier GetPpid(
- cricket::DataMessageType type) {
+SctpDataMediaChannel::PayloadProtocolIdentifier GetPpid(DataMessageType type) {
switch (type) {
default:
- case cricket::DMT_NONE:
+ case DMT_NONE:
return SctpDataMediaChannel::PPID_NONE;
- case cricket::DMT_CONTROL:
+ case DMT_CONTROL:
return SctpDataMediaChannel::PPID_CONTROL;
- case cricket::DMT_BINARY:
+ case DMT_BINARY:
return SctpDataMediaChannel::PPID_BINARY_LAST;
- case cricket::DMT_TEXT:
+ case DMT_TEXT:
return SctpDataMediaChannel::PPID_TEXT_LAST;
};
}
-static bool GetDataMediaType(
- SctpDataMediaChannel::PayloadProtocolIdentifier ppid,
- cricket::DataMessageType *dest) {
+bool GetDataMediaType(SctpDataMediaChannel::PayloadProtocolIdentifier ppid,
+ DataMessageType* dest) {
ASSERT(dest != NULL);
switch (ppid) {
case SctpDataMediaChannel::PPID_BINARY_PARTIAL:
case SctpDataMediaChannel::PPID_BINARY_LAST:
- *dest = cricket::DMT_BINARY;
+ *dest = DMT_BINARY;
return true;
case SctpDataMediaChannel::PPID_TEXT_PARTIAL:
case SctpDataMediaChannel::PPID_TEXT_LAST:
- *dest = cricket::DMT_TEXT;
+ *dest = DMT_TEXT;
return true;
case SctpDataMediaChannel::PPID_CONTROL:
- *dest = cricket::DMT_CONTROL;
+ *dest = DMT_CONTROL;
return true;
case SctpDataMediaChannel::PPID_NONE:
- *dest = cricket::DMT_NONE;
+ *dest = DMT_NONE;
return true;
default:
@@ -165,7 +171,7 @@ static bool GetDataMediaType(
}
// Log the packet in text2pcap format, if log level is at LS_VERBOSE.
-static void VerboseLogPacket(const void *data, size_t length, int direction) {
+void VerboseLogPacket(const void* data, size_t length, int direction) {
if (LOG_CHECK_LEVEL(LS_VERBOSE) && length > 0) {
char *dump_buf;
// Some downstream project uses an older version of usrsctp that expects
@@ -181,8 +187,11 @@ static void VerboseLogPacket(const void *data, size_t length, int direction) {
// This is the callback usrsctp uses when there's data to send on the network
// that has been wrapped appropriatly for the SCTP protocol.
-static int OnSctpOutboundPacket(void* addr, void* data, size_t length,
- uint8_t tos, uint8_t set_df) {
+int OnSctpOutboundPacket(void* addr,
+ void* data,
+ size_t length,
+ uint8_t tos,
+ uint8_t set_df) {
SctpDataMediaChannel* channel = static_cast<SctpDataMediaChannel*>(addr);
LOG(LS_VERBOSE) << "global OnSctpOutboundPacket():"
<< "addr: " << addr << "; length: " << length
@@ -201,10 +210,13 @@ static int OnSctpOutboundPacket(void* addr, void* data, size_t length,
// a packet has been interpreted and parsed by usrsctp and found to contain
// payload data. It is called by a usrsctp thread. It is assumed this function
// will free the memory used by 'data'.
-static int OnSctpInboundPacket(struct socket* sock, union sctp_sockstore addr,
- void* data, size_t length,
- struct sctp_rcvinfo rcv, int flags,
- void* ulp_info) {
+int OnSctpInboundPacket(struct socket* sock,
+ union sctp_sockstore addr,
+ void* data,
+ size_t length,
+ struct sctp_rcvinfo rcv,
+ int flags,
+ void* ulp_info) {
SctpDataMediaChannel* channel = static_cast<SctpDataMediaChannel*>(ulp_info);
// Post data to the channel's receiver thread (copying it).
// TODO(ldixon): Unclear if copy is needed as this method is responsible for
@@ -212,7 +224,7 @@ static int OnSctpInboundPacket(struct socket* sock, union sctp_sockstore addr,
const SctpDataMediaChannel::PayloadProtocolIdentifier ppid =
static_cast<SctpDataMediaChannel::PayloadProtocolIdentifier>(
rtc::HostToNetwork32(rcv.rcv_ppid));
- cricket::DataMessageType type = cricket::DMT_NONE;
+ DataMessageType type = DMT_NONE;
if (!GetDataMediaType(ppid, &type) && !(flags & MSG_NOTIFICATION)) {
// It's neither a notification nor a recognized data packet. Drop it.
LOG(LS_ERROR) << "Received an unknown PPID " << ppid
@@ -233,78 +245,94 @@ static int OnSctpInboundPacket(struct socket* sock, union sctp_sockstore addr,
return 1;
}
-// Set the initial value of the static SCTP Data Engines reference count.
-int SctpDataEngine::usrsctp_engines_count = 0;
-
-SctpDataEngine::SctpDataEngine() {
- if (usrsctp_engines_count == 0) {
- // First argument is udp_encapsulation_port, which is not releveant for our
- // AF_CONN use of sctp.
- usrsctp_init(0, cricket::OnSctpOutboundPacket, debug_sctp_printf);
-
- // To turn on/off detailed SCTP debugging. You will also need to have the
- // SCTP_DEBUG cpp defines flag.
- // usrsctp_sysctl_set_sctp_debug_on(SCTP_DEBUG_ALL);
-
- // TODO(ldixon): Consider turning this on/off.
- usrsctp_sysctl_set_sctp_ecn_enable(0);
-
- // This is harmless, but we should find out when the library default
- // changes.
- int send_size = usrsctp_sysctl_get_sctp_sendspace();
- if (send_size != kSendBufferSize) {
- LOG(LS_ERROR) << "Got different send size than expected: " << send_size;
- }
+void InitializeUsrSctp() {
+ LOG(LS_INFO) << __FUNCTION__;
+ // First argument is udp_encapsulation_port, which is not releveant for our
+ // AF_CONN use of sctp.
+ usrsctp_init(0, &OnSctpOutboundPacket, &DebugSctpPrintf);
- // TODO(ldixon): Consider turning this on/off.
- // This is not needed right now (we don't do dynamic address changes):
- // If SCTP Auto-ASCONF is enabled, the peer is informed automatically
- // when a new address is added or removed. This feature is enabled by
- // default.
- // usrsctp_sysctl_set_sctp_auto_asconf(0);
-
- // TODO(ldixon): Consider turning this on/off.
- // Add a blackhole sysctl. Setting it to 1 results in no ABORTs
- // being sent in response to INITs, setting it to 2 results
- // in no ABORTs being sent for received OOTB packets.
- // This is similar to the TCP sysctl.
- //
- // See: http://lakerest.net/pipermail/sctp-coders/2012-January/009438.html
- // See: http://svnweb.freebsd.org/base?view=revision&revision=229805
- // usrsctp_sysctl_set_sctp_blackhole(2);
-
- // Set the number of default outgoing streams. This is the number we'll
- // send in the SCTP INIT message. The 'appropriate default' in the
- // second paragraph of
- // http://tools.ietf.org/html/draft-ietf-rtcweb-data-channel-05#section-6.2
- // is cricket::kMaxSctpSid.
- usrsctp_sysctl_set_sctp_nr_outgoing_streams_default(
- cricket::kMaxSctpSid);
+ // To turn on/off detailed SCTP debugging. You will also need to have the
+ // SCTP_DEBUG cpp defines flag.
+ // usrsctp_sysctl_set_sctp_debug_on(SCTP_DEBUG_ALL);
+
+ // TODO(ldixon): Consider turning this on/off.
+ usrsctp_sysctl_set_sctp_ecn_enable(0);
+
+ // This is harmless, but we should find out when the library default
+ // changes.
+ int send_size = usrsctp_sysctl_get_sctp_sendspace();
+ if (send_size != kSendBufferSize) {
+ LOG(LS_ERROR) << "Got different send size than expected: " << send_size;
}
- usrsctp_engines_count++;
- cricket::DataCodec codec(kGoogleSctpDataCodecId, kGoogleSctpDataCodecName, 0);
- codec.SetParam(kCodecParamPort, kSctpDefaultPort);
- codecs_.push_back(codec);
+ // TODO(ldixon): Consider turning this on/off.
+ // This is not needed right now (we don't do dynamic address changes):
+ // If SCTP Auto-ASCONF is enabled, the peer is informed automatically
+ // when a new address is added or removed. This feature is enabled by
+ // default.
+ // usrsctp_sysctl_set_sctp_auto_asconf(0);
+
+ // TODO(ldixon): Consider turning this on/off.
+ // Add a blackhole sysctl. Setting it to 1 results in no ABORTs
+ // being sent in response to INITs, setting it to 2 results
+ // in no ABORTs being sent for received OOTB packets.
+ // This is similar to the TCP sysctl.
+ //
+ // See: http://lakerest.net/pipermail/sctp-coders/2012-January/009438.html
+ // See: http://svnweb.freebsd.org/base?view=revision&revision=229805
+ // usrsctp_sysctl_set_sctp_blackhole(2);
+
+ // Set the number of default outgoing streams. This is the number we'll
+ // send in the SCTP INIT message. The 'appropriate default' in the
+ // second paragraph of
+ // http://tools.ietf.org/html/draft-ietf-rtcweb-data-channel-05#section-6.2
+ // is kMaxSctpSid.
+ usrsctp_sysctl_set_sctp_nr_outgoing_streams_default(kMaxSctpSid);
}
-SctpDataEngine::~SctpDataEngine() {
- usrsctp_engines_count--;
- LOG(LS_VERBOSE) << "usrsctp_engines_count:" << usrsctp_engines_count;
+void UninitializeUsrSctp() {
+ LOG(LS_INFO) << __FUNCTION__;
+ // usrsctp_finish() may fail if it's called too soon after the channels are
+ // closed. Wait and try again until it succeeds for up to 3 seconds.
+ for (size_t i = 0; i < 300; ++i) {
+ if (usrsctp_finish() == 0) {
+ return;
+ }
- if (usrsctp_engines_count == 0) {
- // usrsctp_finish() may fail if it's called too soon after the channels are
- // closed. Wait and try again until it succeeds for up to 3 seconds.
- for (size_t i = 0; i < 300; ++i) {
- if (usrsctp_finish() == 0)
- return;
+ rtc::Thread::SleepMs(10);
+ }
+ LOG(LS_ERROR) << "Failed to shutdown usrsctp.";
+}
- rtc::Thread::SleepMs(10);
- }
- LOG(LS_ERROR) << "Failed to shutdown usrsctp.";
+void IncrementUsrSctpUsageCount() {
+ rtc::GlobalLockScope lock(&g_usrsctp_lock_);
+ if (!g_usrsctp_usage_count) {
+ InitializeUsrSctp();
+ }
+ ++g_usrsctp_usage_count;
+}
+
+void DecrementUsrSctpUsageCount() {
+ rtc::GlobalLockScope lock(&g_usrsctp_lock_);
+ --g_usrsctp_usage_count;
+ if (!g_usrsctp_usage_count) {
+ UninitializeUsrSctp();
}
}
+DataCodec GetSctpDataCodec() {
+ DataCodec codec(kGoogleSctpDataCodecId, kGoogleSctpDataCodecName);
+ codec.SetParam(kCodecParamPort, kSctpDefaultPort);
+ return codec;
+}
+
+} // namespace
+
+SctpDataEngine::SctpDataEngine() : codecs_(1, GetSctpDataCodec()) {}
+
+SctpDataEngine::~SctpDataEngine() {}
+
+// Called on the worker thread.
DataMediaChannel* SctpDataEngine::CreateChannel(
DataChannelType data_channel_type) {
if (data_channel_type != DCT_SCTP) {
@@ -314,7 +342,7 @@ DataMediaChannel* SctpDataEngine::CreateChannel(
}
// static
-SctpDataMediaChannel* SctpDataEngine::GetChannelFromSocket(
+SctpDataMediaChannel* SctpDataMediaChannel::GetChannelFromSocket(
struct socket* sock) {
struct sockaddr* addrs = nullptr;
int naddrs = usrsctp_getladdrs(sock, 0, &addrs);
@@ -336,8 +364,8 @@ SctpDataMediaChannel* SctpDataEngine::GetChannelFromSocket(
}
// static
-int SctpDataEngine::SendThresholdCallback(struct socket* sock,
- uint32_t sb_free) {
+int SctpDataMediaChannel::SendThresholdCallback(struct socket* sock,
+ uint32_t sb_free) {
// Fired on our I/O thread. SctpDataMediaChannel::OnPacketReceived() gets
// a packet containing acknowledgments, which goes into usrsctp_conninput,
// and then back here.
@@ -389,17 +417,19 @@ bool SctpDataMediaChannel::OpenSctpSocket() {
return false;
}
+ IncrementUsrSctpUsageCount();
+
// If kSendBufferSize isn't reflective of reality, we log an error, but we
// still have to do something reasonable here. Look up what the buffer's
// real size is and set our threshold to something reasonable.
const static int kSendThreshold = usrsctp_sysctl_get_sctp_sendspace() / 2;
- sock_ = usrsctp_socket(AF_CONN, SOCK_STREAM, IPPROTO_SCTP,
- cricket::OnSctpInboundPacket,
- &SctpDataEngine::SendThresholdCallback,
- kSendThreshold, this);
+ sock_ = usrsctp_socket(
+ AF_CONN, SOCK_STREAM, IPPROTO_SCTP, OnSctpInboundPacket,
+ &SctpDataMediaChannel::SendThresholdCallback, kSendThreshold, this);
if (!sock_) {
LOG_ERRNO(LS_ERROR) << debug_name_ << "Failed to create SCTP socket.";
+ DecrementUsrSctpUsageCount();
return false;
}
@@ -488,6 +518,8 @@ void SctpDataMediaChannel::CloseSctpSocket() {
usrsctp_close(sock_);
sock_ = NULL;
usrsctp_deregister_address(this);
+
+ DecrementUsrSctpUsageCount();
}
}
@@ -599,7 +631,7 @@ bool SctpDataMediaChannel::SendData(
return false;
}
- if (params.type != cricket::DMT_CONTROL &&
+ if (params.type != DMT_CONTROL &&
open_streams_.find(params.ssrc) == open_streams_.end()) {
LOG(LS_WARNING) << debug_name_ << "->SendData(...): "
<< "Not sending data because ssrc is unknown: "
@@ -717,7 +749,7 @@ bool SctpDataMediaChannel::AddStream(const StreamParams& stream) {
}
const uint32_t ssrc = stream.first_ssrc();
- if (ssrc >= cricket::kMaxSctpSid) {
+ if (ssrc >= kMaxSctpSid) {
LOG(LS_WARNING) << debug_name_ << "->Add(Send|Recv)Stream(...): "
<< "Not adding data stream '" << stream.id
<< "' with ssrc=" << ssrc
@@ -984,8 +1016,9 @@ void SctpDataMediaChannel::OnPacketFromSctpToNetwork(
}
bool SctpDataMediaChannel::SendQueuedStreamResets() {
- if (!sent_reset_streams_.empty() || queued_reset_streams_.empty())
+ if (!sent_reset_streams_.empty() || queued_reset_streams_.empty()) {
return true;
+ }
LOG(LS_VERBOSE) << "SendQueuedStreamResets[" << debug_name_ << "]: Sending ["
<< ListStreams(queued_reset_streams_) << "], Open: ["
diff --git a/chromium/third_party/webrtc/media/sctp/sctpdataengine.h b/chromium/third_party/webrtc/media/sctp/sctpdataengine.h
index 8eb99564ca2..f9a3b5a6547 100644
--- a/chromium/third_party/webrtc/media/sctp/sctpdataengine.h
+++ b/chromium/third_party/webrtc/media/sctp/sctpdataengine.h
@@ -25,6 +25,7 @@ enum PreservedErrno {
} // namespace cricket
#include "webrtc/base/copyonwritebuffer.h"
+#include "webrtc/base/gtest_prod_util.h"
#include "webrtc/media/base/codec.h"
#include "webrtc/media/base/mediachannel.h"
#include "webrtc/media/base/mediaengine.h"
@@ -75,19 +76,13 @@ class SctpDataMediaChannel;
class SctpDataEngine : public DataEngineInterface, public sigslot::has_slots<> {
public:
SctpDataEngine();
- virtual ~SctpDataEngine();
+ ~SctpDataEngine() override;
- virtual DataMediaChannel* CreateChannel(DataChannelType data_channel_type);
-
- virtual const std::vector<DataCodec>& data_codecs() { return codecs_; }
-
- static int SendThresholdCallback(struct socket* sock, uint32_t sb_free);
+ DataMediaChannel* CreateChannel(DataChannelType data_channel_type) override;
+ const std::vector<DataCodec>& data_codecs() override { return codecs_; }
private:
- static int usrsctp_engines_count;
- std::vector<DataCodec> codecs_;
-
- static SctpDataMediaChannel* GetChannelFromSocket(struct socket* sock);
+ const std::vector<DataCodec> codecs_;
};
// TODO(ldixon): Make into a special type of TypedMessageData.
@@ -161,11 +156,16 @@ class SctpDataMediaChannel : public DataMediaChannel,
void OnSendThresholdCallback();
// Helper for debugging.
- void set_debug_name(const std::string& debug_name) {
+ void set_debug_name_for_testing(const char* debug_name) {
debug_name_ = debug_name;
}
- const std::string& debug_name() const { return debug_name_; }
const struct socket* socket() const { return sock_; }
+
+ private:
+ FRIEND_TEST_ALL_PREFIXES(SctpDataMediaChannelTest, EngineSignalsRightChannel);
+ static int SendThresholdCallback(struct socket* sock, uint32_t sb_free);
+ static SctpDataMediaChannel* GetChannelFromSocket(struct socket* sock);
+
private:
sockaddr_conn GetSctpSockAddr(int port);
@@ -229,8 +229,8 @@ class SctpDataMediaChannel : public DataMediaChannel,
StreamSet queued_reset_streams_;
StreamSet sent_reset_streams_;
- // A human-readable name for debugging messages.
- std::string debug_name_;
+ // A static human-readable name for debugging messages.
+ const char* debug_name_;
};
} // namespace cricket
diff --git a/chromium/third_party/webrtc/media/sctp/sctpdataengine_unittest.cc b/chromium/third_party/webrtc/media/sctp/sctpdataengine_unittest.cc
index f18437dc8ed..f41691b6016 100644
--- a/chromium/third_party/webrtc/media/sctp/sctpdataengine_unittest.cc
+++ b/chromium/third_party/webrtc/media/sctp/sctpdataengine_unittest.cc
@@ -29,13 +29,14 @@
#include "webrtc/media/base/mediaconstants.h"
#include "webrtc/media/sctp/sctpdataengine.h"
+namespace cricket {
enum {
MSG_PACKET = 1,
};
// Fake NetworkInterface that sends/receives sctp packets. The one in
// webrtc/media/base/fakenetworkinterface.h only works with rtp/rtcp.
-class SctpFakeNetworkInterface : public cricket::MediaChannel::NetworkInterface,
+class SctpFakeNetworkInterface : public MediaChannel::NetworkInterface,
public rtc::MessageHandler {
public:
explicit SctpFakeNetworkInterface(rtc::Thread* thread)
@@ -43,7 +44,7 @@ class SctpFakeNetworkInterface : public cricket::MediaChannel::NetworkInterface,
dest_(NULL) {
}
- void SetDestination(cricket::DataMediaChannel* dest) { dest_ = dest; }
+ void SetDestination(DataMediaChannel* dest) { dest_ = dest; }
protected:
// Called to send raw packet down the wire (e.g. SCTP an packet).
@@ -91,7 +92,7 @@ class SctpFakeNetworkInterface : public cricket::MediaChannel::NetworkInterface,
private:
// Not owned by this class.
rtc::Thread* thread_;
- cricket::DataMediaChannel* dest_;
+ DataMediaChannel* dest_;
};
// This is essentially a buffer to hold recieved data. It stores only the last
@@ -106,11 +107,12 @@ class SctpFakeDataReceiver : public sigslot::has_slots<> {
void Clear() {
received_ = false;
last_data_ = "";
- last_params_ = cricket::ReceiveDataParams();
+ last_params_ = ReceiveDataParams();
}
- virtual void OnDataReceived(const cricket::ReceiveDataParams& params,
- const char* data, size_t length) {
+ virtual void OnDataReceived(const ReceiveDataParams& params,
+ const char* data,
+ size_t length) {
received_ = true;
last_data_ = std::string(data, length);
last_params_ = params;
@@ -118,12 +120,12 @@ class SctpFakeDataReceiver : public sigslot::has_slots<> {
bool received() const { return received_; }
std::string last_data() const { return last_data_; }
- cricket::ReceiveDataParams last_params() const { return last_params_; }
+ ReceiveDataParams last_params() const { return last_params_; }
private:
bool received_;
std::string last_data_;
- cricket::ReceiveDataParams last_params_;
+ ReceiveDataParams last_params_;
};
class SignalReadyToSendObserver : public sigslot::has_slots<> {
@@ -147,7 +149,7 @@ class SignalReadyToSendObserver : public sigslot::has_slots<> {
class SignalChannelClosedObserver : public sigslot::has_slots<> {
public:
SignalChannelClosedObserver() {}
- void BindSelf(cricket::SctpDataMediaChannel* channel) {
+ void BindSelf(SctpDataMediaChannel* channel) {
channel->SignalStreamClosedRemotely.connect(
this, &SignalChannelClosedObserver::OnStreamClosed);
}
@@ -168,12 +170,12 @@ class SignalChannelClosedObserver : public sigslot::has_slots<> {
class SignalChannelClosedReopener : public sigslot::has_slots<> {
public:
- SignalChannelClosedReopener(cricket::SctpDataMediaChannel* channel,
- cricket::SctpDataMediaChannel* peer)
+ SignalChannelClosedReopener(SctpDataMediaChannel* channel,
+ SctpDataMediaChannel* peer)
: channel_(channel), peer_(peer) {}
void OnStreamClosed(int stream) {
- cricket::StreamParams p(cricket::StreamParams::CreateLegacy(stream));
+ StreamParams p(StreamParams::CreateLegacy(stream));
channel_->AddSendStream(p);
channel_->AddRecvStream(p);
peer_->AddSendStream(p);
@@ -186,8 +188,8 @@ class SignalChannelClosedReopener : public sigslot::has_slots<> {
}
private:
- cricket::SctpDataMediaChannel* channel_;
- cricket::SctpDataMediaChannel* peer_;
+ SctpDataMediaChannel* channel_;
+ SctpDataMediaChannel* peer_;
std::vector<int> streams_;
};
@@ -200,9 +202,7 @@ class SctpDataMediaChannelTest : public testing::Test,
static void SetUpTestCase() {
}
- virtual void SetUp() {
- engine_.reset(new cricket::SctpDataEngine());
- }
+ virtual void SetUp() { engine_.reset(new SctpDataEngine()); }
void SetupConnectedChannels() {
net1_.reset(new SctpFakeNetworkInterface(rtc::Thread::Current()));
@@ -212,11 +212,11 @@ class SctpDataMediaChannelTest : public testing::Test,
chan1_ready_to_send_count_ = 0;
chan2_ready_to_send_count_ = 0;
chan1_.reset(CreateChannel(net1_.get(), recv1_.get()));
- chan1_->set_debug_name("chan1/connector");
+ chan1_->set_debug_name_for_testing("chan1/connector");
chan1_->SignalReadyToSend.connect(
this, &SctpDataMediaChannelTest::OnChan1ReadyToSend);
chan2_.reset(CreateChannel(net2_.get(), recv2_.get()));
- chan2_->set_debug_name("chan2/listener");
+ chan2_->set_debug_name_for_testing("chan2/listener");
chan2_->SignalReadyToSend.connect(
this, &SctpDataMediaChannelTest::OnChan2ReadyToSend);
// Setup two connected channels ready to send and receive.
@@ -254,7 +254,7 @@ class SctpDataMediaChannelTest : public testing::Test,
bool AddStream(int ssrc) {
bool ret = true;
- cricket::StreamParams p(cricket::StreamParams::CreateLegacy(ssrc));
+ StreamParams p(StreamParams::CreateLegacy(ssrc));
ret = ret && chan1_->AddSendStream(p);
ret = ret && chan1_->AddRecvStream(p);
ret = ret && chan2_->AddSendStream(p);
@@ -262,11 +262,10 @@ class SctpDataMediaChannelTest : public testing::Test,
return ret;
}
- cricket::SctpDataMediaChannel* CreateChannel(
- SctpFakeNetworkInterface* net, SctpFakeDataReceiver* recv) {
- cricket::SctpDataMediaChannel* channel =
- static_cast<cricket::SctpDataMediaChannel*>(engine_->CreateChannel(
- cricket::DCT_SCTP));
+ SctpDataMediaChannel* CreateChannel(SctpFakeNetworkInterface* net,
+ SctpFakeDataReceiver* recv) {
+ SctpDataMediaChannel* channel =
+ static_cast<SctpDataMediaChannel*>(engine_->CreateChannel(DCT_SCTP));
channel->SetInterface(net);
// When data is received, pass it to the SctpFakeDataReceiver.
channel->SignalDataReceived.connect(
@@ -274,11 +273,11 @@ class SctpDataMediaChannelTest : public testing::Test,
return channel;
}
- bool SendData(cricket::SctpDataMediaChannel* chan,
+ bool SendData(SctpDataMediaChannel* chan,
uint32_t ssrc,
const std::string& msg,
- cricket::SendDataResult* result) {
- cricket::SendDataParams params;
+ SendDataResult* result) {
+ SendDataParams params;
params.ssrc = ssrc;
return chan->SendData(params, rtc::CopyOnWriteBuffer(
@@ -304,21 +303,21 @@ class SctpDataMediaChannelTest : public testing::Test,
return !thread->IsQuitting();
}
- cricket::SctpDataMediaChannel* channel1() { return chan1_.get(); }
- cricket::SctpDataMediaChannel* channel2() { return chan2_.get(); }
+ SctpDataMediaChannel* channel1() { return chan1_.get(); }
+ SctpDataMediaChannel* channel2() { return chan2_.get(); }
SctpFakeDataReceiver* receiver1() { return recv1_.get(); }
SctpFakeDataReceiver* receiver2() { return recv2_.get(); }
int channel1_ready_to_send_count() { return chan1_ready_to_send_count_; }
int channel2_ready_to_send_count() { return chan2_ready_to_send_count_; }
private:
- std::unique_ptr<cricket::SctpDataEngine> engine_;
+ std::unique_ptr<SctpDataEngine> engine_;
std::unique_ptr<SctpFakeNetworkInterface> net1_;
std::unique_ptr<SctpFakeNetworkInterface> net2_;
std::unique_ptr<SctpFakeDataReceiver> recv1_;
std::unique_ptr<SctpFakeDataReceiver> recv2_;
- std::unique_ptr<cricket::SctpDataMediaChannel> chan1_;
- std::unique_ptr<cricket::SctpDataMediaChannel> chan2_;
+ std::unique_ptr<SctpDataMediaChannel> chan1_;
+ std::unique_ptr<SctpDataMediaChannel> chan2_;
int chan1_ready_to_send_count_;
int chan2_ready_to_send_count_;
@@ -345,12 +344,12 @@ TEST_F(SctpDataMediaChannelTest, SignalReadyToSend) {
channel2()->SignalReadyToSend.connect(&signal_observer_2,
&SignalReadyToSendObserver::OnSignaled);
- cricket::SendDataResult result;
+ SendDataResult result;
ASSERT_TRUE(SendData(channel1(), 1, "hello?", &result));
- EXPECT_EQ(cricket::SDR_SUCCESS, result);
+ EXPECT_EQ(SDR_SUCCESS, result);
EXPECT_TRUE_WAIT(ReceivedData(receiver2(), 1, "hello?"), 1000);
ASSERT_TRUE(SendData(channel2(), 2, "hi chan1", &result));
- EXPECT_EQ(cricket::SDR_SUCCESS, result);
+ EXPECT_EQ(SDR_SUCCESS, result);
EXPECT_TRUE_WAIT(ReceivedData(receiver1(), 2, "hi chan1"), 1000);
EXPECT_TRUE_WAIT(signal_observer_1.IsSignaled(true), 1000);
@@ -360,10 +359,10 @@ TEST_F(SctpDataMediaChannelTest, SignalReadyToSend) {
TEST_F(SctpDataMediaChannelTest, SendData) {
SetupConnectedChannels();
- cricket::SendDataResult result;
+ SendDataResult result;
LOG(LS_VERBOSE) << "chan1 sending: 'hello?' -----------------------------";
ASSERT_TRUE(SendData(channel1(), 1, "hello?", &result));
- EXPECT_EQ(cricket::SDR_SUCCESS, result);
+ EXPECT_EQ(SDR_SUCCESS, result);
EXPECT_TRUE_WAIT(ReceivedData(receiver2(), 1, "hello?"), 1000);
LOG(LS_VERBOSE) << "recv2.received=" << receiver2()->received()
<< ", recv2.last_params.ssrc="
@@ -376,7 +375,7 @@ TEST_F(SctpDataMediaChannelTest, SendData) {
LOG(LS_VERBOSE) << "chan2 sending: 'hi chan1' -----------------------------";
ASSERT_TRUE(SendData(channel2(), 2, "hi chan1", &result));
- EXPECT_EQ(cricket::SDR_SUCCESS, result);
+ EXPECT_EQ(SDR_SUCCESS, result);
EXPECT_TRUE_WAIT(ReceivedData(receiver1(), 2, "hi chan1"), 1000);
LOG(LS_VERBOSE) << "recv1.received=" << receiver1()->received()
<< ", recv1.last_params.ssrc="
@@ -392,8 +391,8 @@ TEST_F(SctpDataMediaChannelTest, SendData) {
TEST_F(SctpDataMediaChannelTest, SendDataBlocked) {
SetupConnectedChannels();
- cricket::SendDataResult result;
- cricket::SendDataParams params;
+ SendDataResult result;
+ SendDataParams params;
params.ssrc = 1;
std::vector<char> buffer(1024 * 64, 0);
@@ -401,11 +400,11 @@ TEST_F(SctpDataMediaChannelTest, SendDataBlocked) {
for (size_t i = 0; i < 100; ++i) {
channel1()->SendData(
params, rtc::CopyOnWriteBuffer(&buffer[0], buffer.size()), &result);
- if (result == cricket::SDR_BLOCK)
+ if (result == SDR_BLOCK)
break;
}
- EXPECT_EQ(cricket::SDR_BLOCK, result);
+ EXPECT_EQ(SDR_BLOCK, result);
}
TEST_F(SctpDataMediaChannelTest, ClosesRemoteStream) {
@@ -414,12 +413,12 @@ TEST_F(SctpDataMediaChannelTest, ClosesRemoteStream) {
chan_1_sig_receiver.BindSelf(channel1());
chan_2_sig_receiver.BindSelf(channel2());
- cricket::SendDataResult result;
+ SendDataResult result;
ASSERT_TRUE(SendData(channel1(), 1, "hello?", &result));
- EXPECT_EQ(cricket::SDR_SUCCESS, result);
+ EXPECT_EQ(SDR_SUCCESS, result);
EXPECT_TRUE_WAIT(ReceivedData(receiver2(), 1, "hello?"), 1000);
ASSERT_TRUE(SendData(channel2(), 2, "hi chan1", &result));
- EXPECT_EQ(cricket::SDR_SUCCESS, result);
+ EXPECT_EQ(SDR_SUCCESS, result);
EXPECT_TRUE_WAIT(ReceivedData(receiver1(), 2, "hi chan1"), 1000);
// Close channel 1. Channel 2 should notify us.
@@ -434,12 +433,12 @@ TEST_F(SctpDataMediaChannelTest, ClosesTwoRemoteStreams) {
chan_1_sig_receiver.BindSelf(channel1());
chan_2_sig_receiver.BindSelf(channel2());
- cricket::SendDataResult result;
+ SendDataResult result;
ASSERT_TRUE(SendData(channel1(), 1, "hello?", &result));
- EXPECT_EQ(cricket::SDR_SUCCESS, result);
+ EXPECT_EQ(SDR_SUCCESS, result);
EXPECT_TRUE_WAIT(ReceivedData(receiver2(), 1, "hello?"), 1000);
ASSERT_TRUE(SendData(channel2(), 2, "hi chan1", &result));
- EXPECT_EQ(cricket::SDR_SUCCESS, result);
+ EXPECT_EQ(SDR_SUCCESS, result);
EXPECT_TRUE_WAIT(ReceivedData(receiver1(), 2, "hi chan1"), 1000);
// Close two streams on one side.
@@ -457,12 +456,12 @@ TEST_F(SctpDataMediaChannelTest, ClosesStreamsOnBothSides) {
chan_1_sig_receiver.BindSelf(channel1());
chan_2_sig_receiver.BindSelf(channel2());
- cricket::SendDataResult result;
+ SendDataResult result;
ASSERT_TRUE(SendData(channel1(), 1, "hello?", &result));
- EXPECT_EQ(cricket::SDR_SUCCESS, result);
+ EXPECT_EQ(SDR_SUCCESS, result);
EXPECT_TRUE_WAIT(ReceivedData(receiver2(), 1, "hello?"), 1000);
ASSERT_TRUE(SendData(channel2(), 2, "hi chan1", &result));
- EXPECT_EQ(cricket::SDR_SUCCESS, result);
+ EXPECT_EQ(SDR_SUCCESS, result);
EXPECT_TRUE_WAIT(ReceivedData(receiver1(), 2, "hi chan1"), 1000);
// Close one stream on channel1(), while closing three streams on
@@ -484,7 +483,7 @@ TEST_F(SctpDataMediaChannelTest, EngineSignalsRightChannel) {
EXPECT_TRUE_WAIT(channel1()->socket() != NULL, 1000);
struct socket *sock = const_cast<struct socket*>(channel1()->socket());
int prior_count = channel1_ready_to_send_count();
- cricket::SctpDataEngine::SendThresholdCallback(sock, 0);
+ SctpDataMediaChannel::SendThresholdCallback(sock, 0);
EXPECT_GT(channel1_ready_to_send_count(), prior_count);
}
@@ -503,12 +502,12 @@ TEST_F(SctpDataMediaChannelTest, RefusesHighNumberedChannels) {
TEST_F(SctpDataMediaChannelTest, MAYBE_ReusesAStream) {
// Shut down channel 1, then open it up again for reuse.
SetupConnectedChannels();
- cricket::SendDataResult result;
+ SendDataResult result;
SignalChannelClosedObserver chan_2_sig_receiver;
chan_2_sig_receiver.BindSelf(channel2());
ASSERT_TRUE(SendData(channel1(), 1, "hello?", &result));
- EXPECT_EQ(cricket::SDR_SUCCESS, result);
+ EXPECT_EQ(SDR_SUCCESS, result);
EXPECT_TRUE_WAIT(ReceivedData(receiver2(), 1, "hello?"), 1000);
channel1()->RemoveSendStream(1);
@@ -518,8 +517,10 @@ TEST_F(SctpDataMediaChannelTest, MAYBE_ReusesAStream) {
// Create a new channel 1.
AddStream(1);
ASSERT_TRUE(SendData(channel1(), 1, "hi?", &result));
- EXPECT_EQ(cricket::SDR_SUCCESS, result);
+ EXPECT_EQ(SDR_SUCCESS, result);
EXPECT_TRUE_WAIT(ReceivedData(receiver2(), 1, "hi?"), 1000);
channel1()->RemoveSendStream(1);
EXPECT_TRUE_WAIT(chan_2_sig_receiver.StreamCloseCount(1) == 2, 1000);
}
+
+} // namespace cricket
diff --git a/chromium/third_party/webrtc/modules/audio_codec_speed_tests_apk.isolate b/chromium/third_party/webrtc/modules/audio_codec_speed_tests_apk.isolate
new file mode 100644
index 00000000000..2122df84730
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_codec_speed_tests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'audio_codec_speed_tests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_audio_codec_speed_tests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/audio_codec_speed_tests_apk/',
+ '<(PRODUCT_DIR)/bin/run_audio_codec_speed_tests',
+ 'audio_codec_speed_tests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/BUILD.gn b/chromium/third_party/webrtc/modules/audio_coding/BUILD.gn
index 23b5fa46da5..d1f70cf105b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_coding/BUILD.gn
@@ -39,6 +39,33 @@ if (!build_with_mozilla && !build_with_chromium) {
audio_codec_deps += [ ":red" ]
}
+source_set("audio_decoder_factory_interface") {
+ sources = [
+ "codecs/audio_decoder_factory.h",
+ "codecs/audio_format.cc",
+ "codecs/audio_format.h",
+ ]
+ configs += [ "../..:common_config" ]
+ public_configs = [ "../..:common_inherited_config" ]
+ deps = [
+ "../..:webrtc_common",
+ ]
+}
+
+source_set("builtin_audio_decoder_factory") {
+ sources = [
+ "codecs/builtin_audio_decoder_factory.cc",
+ "codecs/builtin_audio_decoder_factory.h",
+ ]
+ configs += [ "../..:common_config" ]
+ public_configs = [ "../..:common_inherited_config" ]
+ deps = [
+ "../..:webrtc_common",
+ ":audio_decoder_factory_interface",
+ ] + audio_codec_deps
+ defines = audio_codec_defines
+}
+
source_set("rent_a_codec") {
sources = [
"acm2/acm_codec_database.cc",
@@ -121,6 +148,7 @@ source_set("audio_decoder_interface") {
public_configs = [ "../..:common_inherited_config" ]
deps = [
"../..:webrtc_common",
+ "../../base:rtc_base_approved",
]
}
@@ -133,6 +161,7 @@ source_set("audio_encoder_interface") {
public_configs = [ "../..:common_inherited_config" ]
deps = [
"../..:webrtc_common",
+ "../../base:rtc_base_approved",
]
}
@@ -147,9 +176,7 @@ source_set("cng") {
sources = [
"codecs/cng/audio_encoder_cng.cc",
"codecs/cng/audio_encoder_cng.h",
- "codecs/cng/cng_helpfuns.c",
- "codecs/cng/cng_helpfuns.h",
- "codecs/cng/webrtc_cng.c",
+ "codecs/cng/webrtc_cng.cc",
"codecs/cng/webrtc_cng.h",
]
@@ -760,6 +787,8 @@ source_set("neteq") {
"neteq/buffer_level_filter.h",
"neteq/comfort_noise.cc",
"neteq/comfort_noise.h",
+ "neteq/cross_correlation.cc",
+ "neteq/cross_correlation.h",
"neteq/decision_logic.cc",
"neteq/decision_logic.h",
"neteq/decision_logic_fax.cc",
@@ -791,6 +820,8 @@ source_set("neteq") {
"neteq/neteq_impl.h",
"neteq/normal.cc",
"neteq/normal.h",
+ "neteq/packet.cc",
+ "neteq/packet.h",
"neteq/packet_buffer.cc",
"neteq/packet_buffer.h",
"neteq/payload_splitter.cc",
@@ -807,6 +838,8 @@ source_set("neteq") {
"neteq/statistics_calculator.h",
"neteq/sync_buffer.cc",
"neteq/sync_buffer.h",
+ "neteq/tick_timer.cc",
+ "neteq/tick_timer.h",
"neteq/time_stretch.cc",
"neteq/time_stretch.h",
"neteq/timestamp_scaler.cc",
@@ -822,9 +855,11 @@ source_set("neteq") {
deps = [
":audio_decoder_interface",
+ ":builtin_audio_decoder_factory",
":cng",
":g711",
":pcm16b",
+ ":rent_a_codec",
"../..:webrtc_common",
"../../common_audio",
"../../system_wrappers",
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.cc
index e4a34a72946..dc8111daf3b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receive_test_oldapi.cc
@@ -158,8 +158,11 @@ void AcmReceiveTestOldApi::Run() {
// Pull audio until time to insert packet.
while (clock_.TimeInMilliseconds() < packet->time_ms()) {
AudioFrame output_frame;
- EXPECT_EQ(0, acm_->PlayoutData10Ms(output_freq_hz_, &output_frame));
+ bool muted;
+ EXPECT_EQ(0,
+ acm_->PlayoutData10Ms(output_freq_hz_, &output_frame, &muted));
ASSERT_EQ(output_freq_hz_, output_frame.sample_rate_hz_);
+ ASSERT_FALSE(muted);
const size_t samples_per_block =
static_cast<size_t>(output_freq_hz_ * 10 / 1000);
EXPECT_EQ(samples_per_block, output_frame.samples_per_channel_);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.cc
index f8bacf8dc7d..8c07631e3a6 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.cc
@@ -26,7 +26,6 @@
#include "webrtc/modules/audio_coding/acm2/call_statistics.h"
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
@@ -133,11 +132,13 @@ int AcmReceiver::InsertPacket(const WebRtcRTPHeader& rtp_header,
return 0;
}
-int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
+int AcmReceiver::GetAudio(int desired_freq_hz,
+ AudioFrame* audio_frame,
+ bool* muted) {
// Accessing members, take the lock.
rtc::CritScope lock(&crit_sect_);
- if (neteq_->GetAudio(audio_frame) != NetEq::kOK) {
+ if (neteq_->GetAudio(audio_frame, muted) != NetEq::kOK) {
LOG(LERROR) << "AcmReceiver::GetAudio - NetEq Failed.";
return -1;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.h b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.h
index 6fec1ffdda1..f37212c067b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver.h
@@ -82,11 +82,13 @@ class AcmReceiver {
// Output:
// -audio_frame : an audio frame were output data and
// associated parameters are written to.
+ // -muted : if true, the sample data in audio_frame is not
+ // populated, and must be interpreted as all zero.
//
// Return value : 0 if OK.
// -1 if NetEq returned an error.
//
- int GetAudio(int desired_freq_hz, AudioFrame* audio_frame);
+ int GetAudio(int desired_freq_hz, AudioFrame* audio_frame, bool* muted);
//
// Adds a new codec to the NetEq codec database.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver_unittest_oldapi.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver_unittest_oldapi.cc
index c39a7cc797f..b57b7ef446e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver_unittest_oldapi.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_receiver_unittest_oldapi.cc
@@ -285,7 +285,8 @@ TEST_F(AcmReceiverTestOldApi, MAYBE_SampleRate) {
const int num_10ms_frames = codec.inst.pacsize / (codec.inst.plfreq / 100);
InsertOnePacketOfSilence(codec.id);
for (int k = 0; k < num_10ms_frames; ++k) {
- EXPECT_EQ(0, receiver_->GetAudio(kOutSampleRateHz, &frame));
+ bool muted;
+ EXPECT_EQ(0, receiver_->GetAudio(kOutSampleRateHz, &frame, &muted));
}
EXPECT_EQ(codec.inst.plfreq, receiver_->last_output_sample_rate_hz());
}
@@ -326,13 +327,15 @@ class AcmReceiverTestFaxModeOldApi : public AcmReceiverTestOldApi {
rtc::CheckedDivExact(5 * output_sample_rate_hz, 8000);
AudioFrame frame;
- EXPECT_EQ(0, receiver_->GetAudio(output_sample_rate_hz, &frame));
+ bool muted;
+ EXPECT_EQ(0, receiver_->GetAudio(output_sample_rate_hz, &frame, &muted));
// Expect timestamp = 0 before first packet is inserted.
EXPECT_EQ(0u, frame.timestamp_);
for (int i = 0; i < 5; ++i) {
InsertOnePacketOfSilence(codec.id);
for (int k = 0; k < num_10ms_frames; ++k) {
- EXPECT_EQ(0, receiver_->GetAudio(output_sample_rate_hz, &frame));
+ EXPECT_EQ(0,
+ receiver_->GetAudio(output_sample_rate_hz, &frame, &muted));
EXPECT_EQ(expected_output_ts, frame.timestamp_);
expected_output_ts += 10 * samples_per_ms;
EXPECT_EQ(10 * samples_per_ms, frame.samples_per_channel_);
@@ -340,6 +343,7 @@ class AcmReceiverTestFaxModeOldApi : public AcmReceiverTestOldApi {
EXPECT_EQ(output_channels, frame.num_channels_);
EXPECT_EQ(AudioFrame::kNormalSpeech, frame.speech_type_);
EXPECT_EQ(expected_vad_activity, frame.vad_activity_);
+ EXPECT_FALSE(muted);
}
}
}
@@ -388,8 +392,10 @@ TEST_F(AcmReceiverTestOldApi, MAYBE_PostdecodingVad) {
AudioFrame frame;
for (int n = 0; n < kNumPackets; ++n) {
InsertOnePacketOfSilence(codec.id);
- for (int k = 0; k < num_10ms_frames; ++k)
- ASSERT_EQ(0, receiver_->GetAudio(codec.inst.plfreq, &frame));
+ for (int k = 0; k < num_10ms_frames; ++k) {
+ bool muted;
+ ASSERT_EQ(0, receiver_->GetAudio(codec.inst.plfreq, &frame, &muted));
+ }
}
EXPECT_EQ(AudioFrame::kVadPassive, frame.vad_activity_);
}
@@ -417,8 +423,10 @@ TEST_F(AcmReceiverTestPostDecodeVadPassiveOldApi, MAYBE_PostdecodingVad) {
AudioFrame frame;
for (int n = 0; n < kNumPackets; ++n) {
InsertOnePacketOfSilence(codec.id);
- for (int k = 0; k < num_10ms_frames; ++k)
- ASSERT_EQ(0, receiver_->GetAudio(codec.inst.plfreq, &frame));
+ for (int k = 0; k < num_10ms_frames; ++k) {
+ bool muted;
+ ASSERT_EQ(0, receiver_->GetAudio(codec.inst.plfreq, &frame, &muted));
+ }
}
EXPECT_EQ(AudioFrame::kVadUnknown, frame.vad_activity_);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h
index cfee3530206..938e39e2e69 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/acm_send_test_oldapi.h
@@ -47,7 +47,7 @@ class AcmSendTestOldApi : public AudioPacketizationCallback,
// Returns the next encoded packet. Returns NULL if the test duration was
// exceeded. Ownership of the packet is handed over to the caller.
// Inherited from PacketSource.
- Packet* NextPacket();
+ Packet* NextPacket() override;
// Inherited from AudioPacketizationCallback.
int32_t SendData(FrameType frame_type,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.cc
index 254c2f420bf..bc7197d8e79 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.cc
@@ -29,6 +29,18 @@
namespace webrtc {
+namespace {
+
+// Adds a codec usage sample to the histogram.
+void UpdateCodecTypeHistogram(size_t codec_type) {
+ RTC_HISTOGRAM_ENUMERATION(
+ "WebRTC.Audio.Encoder.CodecType", static_cast<int>(codec_type),
+ static_cast<int>(
+ webrtc::AudioEncoder::CodecType::kMaxLoggedAudioCodecTypes));
+}
+
+} // namespace
+
namespace acm2 {
struct EncoderFactory {
@@ -104,7 +116,6 @@ void ConvertEncodedInfoToFragmentationHeader(
class RawAudioEncoderWrapper final : public AudioEncoder {
public:
RawAudioEncoderWrapper(AudioEncoder* enc) : enc_(enc) {}
- size_t MaxEncodedBytes() const override { return enc_->MaxEncodedBytes(); }
int SampleRateHz() const override { return enc_->SampleRateHz(); }
size_t NumChannels() const override { return enc_->NumChannels(); }
int RtpTimestampRateHz() const override { return enc_->RtpTimestampRateHz(); }
@@ -120,13 +131,6 @@ class RawAudioEncoderWrapper final : public AudioEncoder {
rtc::Buffer* encoded) override {
return enc_->Encode(rtp_timestamp, audio, encoded);
}
- EncodedInfo EncodeInternal(uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded) override {
- return enc_->EncodeInternal(rtp_timestamp, audio, max_encoded_bytes,
- encoded);
- }
void Reset() override { return enc_->Reset(); }
bool SetFec(bool enable) override { return enc_->SetFec(enable); }
bool SetDtx(bool enable) override { return enc_->SetDtx(enable); }
@@ -193,7 +197,9 @@ AudioCodingModuleImpl::AudioCodingModuleImpl(
first_10ms_data_(false),
first_frame_(true),
packetization_callback_(NULL),
- vad_callback_(NULL) {
+ vad_callback_(NULL),
+ codec_histogram_bins_log_(),
+ number_of_consecutive_empty_packets_(0) {
if (InitializeReceiverSafe() < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"Cannot initialize receiver");
@@ -239,6 +245,20 @@ int32_t AudioCodingModuleImpl::Encode(const InputData& input_data) {
}
previous_pltype = previous_pltype_; // Read it while we have the critsect.
+ // Log codec type to histogram once every 500 packets.
+ if (encoded_info.encoded_bytes == 0) {
+ ++number_of_consecutive_empty_packets_;
+ } else {
+ size_t codec_type = static_cast<size_t>(encoded_info.encoder_type);
+ codec_histogram_bins_log_[codec_type] +=
+ number_of_consecutive_empty_packets_ + 1;
+ number_of_consecutive_empty_packets_ = 0;
+ if (codec_histogram_bins_log_[codec_type] >= 500) {
+ codec_histogram_bins_log_[codec_type] -= 500;
+ UpdateCodecTypeHistogram(codec_type);
+ }
+ }
+
RTPFragmentationHeader my_fragmentation;
ConvertEncodedInfoToFragmentationHeader(encoded_info, &my_fragmentation);
FrameType frame_type;
@@ -727,10 +747,12 @@ int AudioCodingModuleImpl::RegisterReceiveCodecUnlocked(
AudioDecoder* isac_decoder = nullptr;
if (STR_CASE_CMP(codec.plname, "isac") == 0) {
- if (!isac_decoder_) {
- isac_decoder_ = isac_factory();
+ std::unique_ptr<AudioDecoder>& saved_isac_decoder =
+ codec.plfreq == 16000 ? isac_decoder_16k_ : isac_decoder_32k_;
+ if (!saved_isac_decoder) {
+ saved_isac_decoder = isac_factory();
}
- isac_decoder = isac_decoder_.get();
+ isac_decoder = saved_isac_decoder.get();
}
return receiver_.AddCodec(*codec_index, codec.pltype, codec.channels,
codec.plfreq, isac_decoder, codec.plname);
@@ -797,9 +819,10 @@ int AudioCodingModuleImpl::SetMaximumPlayoutDelay(int time_ms) {
// Get 10 milliseconds of raw audio data to play out.
// Automatic resample to the requested frequency.
int AudioCodingModuleImpl::PlayoutData10Ms(int desired_freq_hz,
- AudioFrame* audio_frame) {
+ AudioFrame* audio_frame,
+ bool* muted) {
// GetAudio always returns 10 ms, at the requested sample rate.
- if (receiver_.GetAudio(desired_freq_hz, audio_frame) != 0) {
+ if (receiver_.GetAudio(desired_freq_hz, audio_frame, muted) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"PlayoutData failed, RecOut Failed");
return -1;
@@ -808,6 +831,14 @@ int AudioCodingModuleImpl::PlayoutData10Ms(int desired_freq_hz,
return 0;
}
+int AudioCodingModuleImpl::PlayoutData10Ms(int desired_freq_hz,
+ AudioFrame* audio_frame) {
+ bool muted;
+ int ret = PlayoutData10Ms(desired_freq_hz, audio_frame, &muted);
+ RTC_DCHECK(!muted);
+ return ret;
+}
+
/////////////////////////////////////////
// Statistics
//
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h b/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h
index 63dfb810567..c098e62b991 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_impl.h
@@ -23,6 +23,7 @@
#include "webrtc/modules/audio_coding/acm2/acm_receiver.h"
#include "webrtc/modules/audio_coding/acm2/acm_resampler.h"
#include "webrtc/modules/audio_coding/acm2/codec_manager.h"
+#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
namespace webrtc {
@@ -163,6 +164,9 @@ class AudioCodingModuleImpl final : public AudioCodingModule {
// Get 10 milliseconds of raw audio data to play out, and
// automatic resample to the requested frequency if > 0.
+ int PlayoutData10Ms(int desired_freq_hz,
+ AudioFrame* audio_frame,
+ bool* muted) override;
int PlayoutData10Ms(int desired_freq_hz, AudioFrame* audio_frame) override;
/////////////////////////////////////////
@@ -268,7 +272,8 @@ class AudioCodingModuleImpl final : public AudioCodingModule {
// RegisterEncoder.
std::unique_ptr<AudioEncoder> encoder_stack_ GUARDED_BY(acm_crit_sect_);
- std::unique_ptr<AudioDecoder> isac_decoder_ GUARDED_BY(acm_crit_sect_);
+ std::unique_ptr<AudioDecoder> isac_decoder_16k_ GUARDED_BY(acm_crit_sect_);
+ std::unique_ptr<AudioDecoder> isac_decoder_32k_ GUARDED_BY(acm_crit_sect_);
// This is to keep track of CN instances where we can send DTMFs.
uint8_t previous_pltype_ GUARDED_BY(acm_crit_sect_);
@@ -294,6 +299,10 @@ class AudioCodingModuleImpl final : public AudioCodingModule {
AudioPacketizationCallback* packetization_callback_
GUARDED_BY(callback_crit_sect_);
ACMVADCallback* vad_callback_ GUARDED_BY(callback_crit_sect_);
+
+ int codec_histogram_bins_log_[static_cast<size_t>(
+ AudioEncoder::CodecType::kMaxLoggedAudioCodecTypes)];
+ int number_of_consecutive_empty_packets_;
};
} // namespace acm2
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest_oldapi.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest_oldapi.cc
index 6e004f9e28d..470f690ed9c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest_oldapi.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/audio_coding_module_unittest_oldapi.cc
@@ -205,7 +205,9 @@ class AudioCodingModuleTestOldApi : public ::testing::Test {
virtual void PullAudio() {
AudioFrame audio_frame;
- ASSERT_EQ(0, acm_->PlayoutData10Ms(-1, &audio_frame));
+ bool muted;
+ ASSERT_EQ(0, acm_->PlayoutData10Ms(-1, &audio_frame, &muted));
+ ASSERT_FALSE(muted);
}
virtual void InsertAudio() {
@@ -296,7 +298,9 @@ TEST_F(AudioCodingModuleTestOldApi, MAYBE_NetEqCalls) {
TEST_F(AudioCodingModuleTestOldApi, VerifyOutputFrame) {
AudioFrame audio_frame;
const int kSampleRateHz = 32000;
- EXPECT_EQ(0, acm_->PlayoutData10Ms(kSampleRateHz, &audio_frame));
+ bool muted;
+ EXPECT_EQ(0, acm_->PlayoutData10Ms(kSampleRateHz, &audio_frame, &muted));
+ ASSERT_FALSE(muted);
EXPECT_EQ(id_, audio_frame.id_);
EXPECT_EQ(0u, audio_frame.timestamp_);
EXPECT_GT(audio_frame.num_channels_, 0u);
@@ -307,7 +311,8 @@ TEST_F(AudioCodingModuleTestOldApi, VerifyOutputFrame) {
TEST_F(AudioCodingModuleTestOldApi, FailOnZeroDesiredFrequency) {
AudioFrame audio_frame;
- EXPECT_EQ(-1, acm_->PlayoutData10Ms(0, &audio_frame));
+ bool muted;
+ EXPECT_EQ(-1, acm_->PlayoutData10Ms(0, &audio_frame, &muted));
}
// Checks that the transport callback is invoked once for each speech packet.
@@ -608,7 +613,7 @@ class AcmIsacMtTestOldApi : public AudioCodingModuleMtTestOldApi {
~AcmIsacMtTestOldApi() {}
- void SetUp() {
+ void SetUp() override {
AudioCodingModuleTestOldApi::SetUp();
RegisterCodec(); // Must be called before the threads start below.
@@ -642,7 +647,7 @@ class AcmIsacMtTestOldApi : public AudioCodingModuleMtTestOldApi {
ASSERT_EQ(0, acm_->RegisterSendCodec(codec_));
}
- void InsertPacket() {
+ void InsertPacket() override {
int num_calls = packet_cb_.num_calls(); // Store locally for thread safety.
if (num_calls > last_packet_number_) {
// Get the new payload out from the callback handler.
@@ -661,7 +666,7 @@ class AcmIsacMtTestOldApi : public AudioCodingModuleMtTestOldApi {
&last_payload_vec_[0], last_payload_vec_.size(), rtp_header_));
}
- void InsertAudio() {
+ void InsertAudio() override {
// TODO(kwiberg): Use std::copy here. Might be complications because AFAICS
// this call confuses the number of samples with the number of bytes, and
// ends up copying only half of what it should.
@@ -677,7 +682,7 @@ class AcmIsacMtTestOldApi : public AudioCodingModuleMtTestOldApi {
// This method is the same as AudioCodingModuleMtTestOldApi::TestDone(), but
// here it is using the constants defined in this class (i.e., shorter test
// run).
- virtual bool TestDone() {
+ bool TestDone() override {
if (packet_cb_.num_calls() > kNumPackets) {
rtc::CritScope lock(&crit_sect_);
if (pull_audio_count_ > kNumPullCalls) {
@@ -728,7 +733,7 @@ class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi {
clock_ = fake_clock_.get();
}
- void SetUp() {
+ void SetUp() override {
AudioCodingModuleTestOldApi::SetUp();
// Set up input audio source to read from specified file, loop after 5
// seconds, and deliver blocks of 10 ms.
@@ -757,7 +762,7 @@ class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi {
codec_registration_thread_.SetPriority(rtc::kRealtimePriority);
}
- void TearDown() {
+ void TearDown() override {
AudioCodingModuleTestOldApi::TearDown();
receive_thread_.Stop();
codec_registration_thread_.Stop();
@@ -806,8 +811,13 @@ class AcmReRegisterIsacMtTestOldApi : public AudioCodingModuleTestOldApi {
// Pull audio.
for (int i = 0; i < rtc::CheckedDivExact(kPacketSizeMs, 10); ++i) {
AudioFrame audio_frame;
+ bool muted;
EXPECT_EQ(0, acm_->PlayoutData10Ms(-1 /* default output frequency */,
- &audio_frame));
+ &audio_frame, &muted));
+ if (muted) {
+ ADD_FAILURE();
+ return false;
+ }
fake_clock_->AdvanceTimeMilliseconds(10);
}
rtp_utility_->Forward(&rtp_header_);
@@ -939,34 +949,34 @@ class AcmReceiverBitExactnessOldApi : public ::testing::Test {
#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) && \
defined(WEBRTC_CODEC_ILBC) && defined(WEBRTC_CODEC_G722)
TEST_F(AcmReceiverBitExactnessOldApi, 8kHzOutput) {
- Run(8000, PlatformChecksum("d9334a99c7f185860028e6f08e5b7390",
- "946803da293ef3fa39242d3059eac491",
- "efb5a07480bad8afb184c4150f4b3f3a",
- "51717ab374871cbfa2c6977ea2aa40f3"),
+ Run(8000, PlatformChecksum("90be25dd9505005aaadf91b77ee31624",
+ "ac6dc4b5bf6d277f693889c4c916882e",
+ "a607f7d0ba98683c9c236217f86aaa6b",
+ "4a54f6ec712bda58484a388e1a332b42"),
std::vector<ExternalDecoder>());
}
TEST_F(AcmReceiverBitExactnessOldApi, 16kHzOutput) {
- Run(16000, PlatformChecksum("9ad7d5a5f3c9fac4e880a6fbfd9d3ac8",
- "4fc1b82404ae33511c1cdb385774b2a4",
- "f580bfd4e5e29f0399b61b7512d4e3b4",
- "5b2ae32c590b41d0c601179e14eaae96"),
+ Run(16000, PlatformChecksum("2c713197d41becd52c1ceecbd2b9f687",
+ "130cc2a43063c74197122e3760690e7d",
+ "cdc3d88f6d8e497d4e00c62c0e6dbb3c",
+ "83edb67c157d0e3a0fb9f7d7b1ce5dc7"),
std::vector<ExternalDecoder>());
}
TEST_F(AcmReceiverBitExactnessOldApi, 32kHzOutput) {
- Run(32000, PlatformChecksum("08e6085ccb96494b242f0ecc4c8a2dc8",
- "d1f853b1e046c67c9ee186786eaf2124",
- "fdf5166b98c43235978685e40e28fea6",
- "7f620312f2fa74a10048bbb7739d4bf3"),
+ Run(32000, PlatformChecksum("fe5851d43c13df66a7ad30fdb124e62f",
+ "309d24be4b287dc92c340f10a807a11e",
+ "c4a0e0b2e031d62c693af2a9ff4337ac",
+ "4cbfc6ab4d704f5d9b4f10406437fda9"),
std::vector<ExternalDecoder>());
}
TEST_F(AcmReceiverBitExactnessOldApi, 48kHzOutput) {
- Run(48000, PlatformChecksum("31343887b7ef70772df733d072b0dd00",
- "f6893278d75dad42ac44bff77f674b33",
- "71f89e87ee1bad594f529d6c036289ad",
- "b64c891e99eccc9ff45541ef67c9e9bf"),
+ Run(48000, PlatformChecksum("a9241f426b4bf2ac650b6d287469a550",
+ "30374fd4a932df942c1b1120e7b724ad",
+ "22242dd832824046d48db9ea8a01f84c",
+ "c7f46bf165400b266d9b57aee02d2747"),
std::vector<ExternalDecoder>());
}
@@ -1021,10 +1031,10 @@ TEST_F(AcmReceiverBitExactnessOldApi, 48kHzOutputExternalDecoder) {
std::vector<ExternalDecoder> external_decoders;
external_decoders.push_back(ed);
- Run(48000, PlatformChecksum("31343887b7ef70772df733d072b0dd00",
- "f6893278d75dad42ac44bff77f674b33",
- "71f89e87ee1bad594f529d6c036289ad",
- "b64c891e99eccc9ff45541ef67c9e9bf"),
+ Run(48000, PlatformChecksum("a9241f426b4bf2ac650b6d287469a550",
+ "30374fd4a932df942c1b1120e7b724ad",
+ "22242dd832824046d48db9ea8a01f84c",
+ "c7f46bf165400b266d9b57aee02d2747"),
external_decoders);
EXPECT_CALL(mock_decoder, Die());
@@ -1737,7 +1747,7 @@ class AcmSwitchingOutputFrequencyOldApi : public ::testing::Test,
}
// Inherited from test::AudioSink.
- bool WriteArray(const int16_t* audio, size_t num_samples) {
+ bool WriteArray(const int16_t* audio, size_t num_samples) override {
// Skip checking the first output frame, since it has a number of zeros
// due to how NetEq is initialized.
if (first_output_) {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.cc
index 81adf81a83c..f028c45f991 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.cc
@@ -113,7 +113,7 @@ bool CodecManager::RegisterEncoder(const CodecInst& send_codec) {
}
send_codec_inst_ = rtc::Optional<CodecInst>(send_codec);
- codec_stack_params_.speech_encoder.reset(); // Caller must recreate it.
+ recreate_encoder_ = true; // Caller must recreate it.
return true;
}
@@ -190,5 +190,67 @@ bool CodecManager::SetCodecFEC(bool enable_codec_fec) {
return true;
}
+bool CodecManager::MakeEncoder(RentACodec* rac, AudioCodingModule* acm) {
+ RTC_DCHECK(rac);
+ RTC_DCHECK(acm);
+
+ if (!recreate_encoder_) {
+ bool error = false;
+ // Try to re-use the speech encoder we've given to the ACM.
+ acm->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
+ if (!*encoder) {
+ // There is no existing encoder.
+ recreate_encoder_ = true;
+ return;
+ }
+
+ // Extract the speech encoder from the ACM.
+ std::unique_ptr<AudioEncoder> enc = std::move(*encoder);
+ while (true) {
+ auto sub_enc = enc->ReclaimContainedEncoders();
+ if (sub_enc.empty()) {
+ break;
+ }
+ RTC_CHECK_EQ(1u, sub_enc.size());
+
+ // Replace enc with its sub encoder. We need to put the sub encoder in
+ // a temporary first, since otherwise the old value of enc would be
+ // destroyed before the new value got assigned, which would be bad
+ // since the new value is a part of the old value.
+ auto tmp_enc = std::move(sub_enc[0]);
+ enc = std::move(tmp_enc);
+ }
+
+ // Wrap it in a new encoder stack and put it back.
+ codec_stack_params_.speech_encoder = std::move(enc);
+ *encoder = rac->RentEncoderStack(&codec_stack_params_);
+ if (!*encoder) {
+ error = true;
+ }
+ });
+ if (error) {
+ return false;
+ }
+ if (!recreate_encoder_) {
+ return true;
+ }
+ }
+
+ if (!send_codec_inst_) {
+ // We don't have the information we need to create a new speech encoder.
+ // (This is not an error.)
+ return true;
+ }
+
+ codec_stack_params_.speech_encoder = rac->RentEncoder(*send_codec_inst_);
+ auto stack = rac->RentEncoderStack(&codec_stack_params_);
+ if (!stack) {
+ return false;
+ }
+ acm->SetEncoder(std::move(stack));
+ recreate_encoder_ = false;
+ return true;
+}
+
} // namespace acm2
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.h b/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.h
index f6c6cd46d2c..b60b7e7bcbd 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/codec_manager.h
@@ -59,31 +59,13 @@ class CodecManager final {
// Uses the provided Rent-A-Codec to create a new encoder stack, if we have a
// complete specification; if so, it is then passed to set_encoder. On error,
// returns false.
- bool MakeEncoder(RentACodec* rac, AudioCodingModule* acm) {
- RTC_DCHECK(rac);
- RTC_DCHECK(acm);
- if (!codec_stack_params_.speech_encoder && send_codec_inst_) {
- // We have no speech encoder, but we have a specification for making one.
- auto enc = rac->RentEncoder(*send_codec_inst_);
- if (!enc)
- return false;
- codec_stack_params_.speech_encoder = std::move(enc);
- }
- auto stack = rac->RentEncoderStack(&codec_stack_params_);
- if (stack) {
- // Give new encoder stack to the ACM.
- acm->SetEncoder(std::move(stack));
- } else {
- // The specification was good but incomplete, so we have no encoder stack
- // to give to the ACM.
- }
- return true;
- }
+ bool MakeEncoder(RentACodec* rac, AudioCodingModule* acm);
private:
rtc::ThreadChecker thread_checker_;
rtc::Optional<CodecInst> send_codec_inst_;
RentACodec::StackParameters codec_stack_params_;
+ bool recreate_encoder_ = true; // Need to recreate encoder?
RTC_DISALLOW_COPY_AND_ASSIGN(CodecManager);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.cc b/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.cc
index 7f1e52030dc..a61f15949d7 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.cc
@@ -47,6 +47,59 @@
namespace webrtc {
namespace acm2 {
+rtc::Optional<SdpAudioFormat> RentACodec::NetEqDecoderToSdpAudioFormat(
+ NetEqDecoder nd) {
+ switch (nd) {
+ case NetEqDecoder::kDecoderPCMu:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("pcmu", 8000, 1));
+ case NetEqDecoder::kDecoderPCMa:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("pcma", 8000, 1));
+ case NetEqDecoder::kDecoderPCMu_2ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("pcmu", 8000, 2));
+ case NetEqDecoder::kDecoderPCMa_2ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("pcma", 8000, 2));
+ case NetEqDecoder::kDecoderILBC:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("ilbc", 8000, 1));
+ case NetEqDecoder::kDecoderISAC:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("isac", 16000, 1));
+ case NetEqDecoder::kDecoderISACswb:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("isac", 32000, 1));
+ case NetEqDecoder::kDecoderPCM16B:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 8000, 1));
+ case NetEqDecoder::kDecoderPCM16Bwb:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 16000, 1));
+ case NetEqDecoder::kDecoderPCM16Bswb32kHz:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 32000, 1));
+ case NetEqDecoder::kDecoderPCM16Bswb48kHz:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 48000, 1));
+ case NetEqDecoder::kDecoderPCM16B_2ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 8000, 2));
+ case NetEqDecoder::kDecoderPCM16Bwb_2ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 16000, 2));
+ case NetEqDecoder::kDecoderPCM16Bswb32kHz_2ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 32000, 2));
+ case NetEqDecoder::kDecoderPCM16Bswb48kHz_2ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 48000, 2));
+ case NetEqDecoder::kDecoderPCM16B_5ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("l16", 8000, 5));
+ case NetEqDecoder::kDecoderG722:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("g722", 8000, 1));
+ case NetEqDecoder::kDecoderG722_2ch:
+ return rtc::Optional<SdpAudioFormat>(SdpAudioFormat("g722", 8000, 2));
+ case NetEqDecoder::kDecoderOpus:
+ return rtc::Optional<SdpAudioFormat>(
+ SdpAudioFormat("opus", 48000, 2,
+ std::map<std::string, std::string>{{"stereo", "0"}}));
+ case NetEqDecoder::kDecoderOpus_2ch:
+ return rtc::Optional<SdpAudioFormat>(
+ SdpAudioFormat("opus", 48000, 2,
+ std::map<std::string, std::string>{{"stereo", "1"}}));
+
+ default:
+ return rtc::Optional<SdpAudioFormat>();
+ }
+}
+
rtc::Optional<RentACodec::CodecId> RentACodec::CodecIdByParams(
const char* payload_name,
int sampling_freq_hz,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.h b/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.h
index a4026acd283..bac37afa963 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/acm2/rent_a_codec.h
@@ -20,6 +20,7 @@
#include "webrtc/base/optional.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
+#include "webrtc/modules/audio_coding/codecs/audio_format.h"
#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
#include "webrtc/modules/audio_coding/include/audio_coding_module_typedefs.h"
#include "webrtc/typedefs.h"
@@ -135,6 +136,9 @@ class RentACodec {
kDecoderOpus_2ch,
};
+ static rtc::Optional<SdpAudioFormat> NetEqDecoderToSdpAudioFormat(
+ NetEqDecoder nd);
+
static inline size_t NumberOfCodecs() {
return static_cast<size_t>(CodecId::kNumCodecs);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_coding.gypi b/chromium/third_party/webrtc/modules/audio_coding/audio_coding.gypi
index e460853ac10..af2cb3c49dc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/audio_coding.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_coding.gypi
@@ -69,6 +69,50 @@
},
'targets': [
{
+ 'target_name': 'audio_decoder_factory_interface',
+ 'type': 'static_library',
+ 'dependencies': [
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ ],
+ 'include_dirs': [
+ '<(webrtc_root)',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '<(webrtc_root)',
+ ],
+ },
+ 'sources': [
+ 'codecs/audio_decoder_factory.h',
+ 'codecs/audio_format.cc',
+ 'codecs/audio_format.h',
+ ],
+ },
+ {
+ 'target_name': 'builtin_audio_decoder_factory',
+ 'type': 'static_library',
+ 'defines': [
+ '<@(audio_codec_defines)',
+ ],
+ 'dependencies': [
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ '<@(audio_codec_dependencies)',
+ 'audio_decoder_factory_interface',
+ ],
+ 'include_dirs': [
+ '<(webrtc_root)',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '<(webrtc_root)',
+ ],
+ },
+ 'sources': [
+ 'codecs/builtin_audio_decoder_factory.cc',
+ 'codecs/builtin_audio_decoder_factory.h',
+ ],
+ },
+ {
'target_name': 'rent_a_codec',
'type': 'static_library',
'defines': [
diff --git a/chromium/third_party/webrtc/modules/audio_coding/audio_coding_tests.gypi b/chromium/third_party/webrtc/modules/audio_coding/audio_coding_tests.gypi
index e60309a6dfa..0b1a22d8d04 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/audio_coding_tests.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/audio_coding_tests.gypi
@@ -39,17 +39,4 @@
],
},
],
- 'conditions': [
- ['OS=="android"', {
- 'targets': [
- {
- 'target_name': 'audio_codec_speed_tests_apk_target',
- 'type': 'none',
- 'dependencies': [
- '<(apk_tests_path):audio_codec_speed_tests_apk',
- ],
- },
- ],
- }],
- ],
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.cc
index d2984b97b09..442ddc1e4b8 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.cc
@@ -82,11 +82,6 @@ bool AudioDecoder::PacketHasFec(const uint8_t* encoded,
return false;
}
-CNG_dec_inst* AudioDecoder::CngDecoderInstance() {
- FATAL() << "Not a CNG decoder";
- return NULL;
-}
-
AudioDecoder::SpeechType AudioDecoder::ConvertSpeechType(int16_t type) {
switch (type) {
case 0: // TODO(hlundin): Both iSAC and Opus return 0 for speech.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.h
index 81ac8731830..580ddbf74ff 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder.h
@@ -14,7 +14,6 @@
#include <stdlib.h> // NULL
#include "webrtc/base/constructormagic.h"
-#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -94,10 +93,6 @@ class AudioDecoder {
// Returns true if the packet has FEC and false otherwise.
virtual bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const;
- // If this is a CNG decoder, return the underlying CNG_dec_inst*. If this
- // isn't a CNG decoder, don't call this method.
- virtual CNG_dec_inst* CngDecoderInstance();
-
virtual size_t Channels() const = 0;
protected:
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory.h
new file mode 100644
index 00000000000..12b97780918
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_AUDIO_DECODER_FACTORY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_AUDIO_DECODER_FACTORY_H_
+
+#include <memory>
+#include <vector>
+
+#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
+#include "webrtc/modules/audio_coding/codecs/audio_format.h"
+
+namespace webrtc {
+
+// A factory that creates AudioDecoders.
+// NOTE: This class is still under development and may change without notice.
+class AudioDecoderFactory {
+ public:
+ virtual ~AudioDecoderFactory() = default;
+
+ virtual std::vector<SdpAudioFormat> GetSupportedFormats() = 0;
+
+ virtual std::unique_ptr<AudioDecoder> MakeAudioDecoder(
+ const SdpAudioFormat& format) = 0;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_AUDIO_DECODER_FACTORY_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory_unittest.cc
new file mode 100644
index 00000000000..12a0a4047e8
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_decoder_factory_unittest.cc
@@ -0,0 +1,127 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
+
+namespace webrtc {
+
+TEST(AudioDecoderFactoryTest, CreateUnknownDecoder) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("rey", 8000, 1)));
+}
+
+TEST(AudioDecoderFactoryTest, CreatePcmu) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ // PCMu supports 8 kHz, and any number of channels.
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 8000, 0)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 8000, 1)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 8000, 2)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 8000, 3)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 16000, 1)));
+}
+
+TEST(AudioDecoderFactoryTest, CreatePcma) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ // PCMa supports 8 kHz, and any number of channels.
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("pcma", 8000, 0)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("pcma", 8000, 1)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("pcma", 8000, 2)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("pcma", 8000, 3)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("pcma", 16000, 1)));
+}
+
+TEST(AudioDecoderFactoryTest, CreateIlbc) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ // iLBC supports 8 kHz, 1 channel.
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 8000, 0)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 8000, 1)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 8000, 2)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 16000, 1)));
+
+ // iLBC actually uses a 16 kHz sample rate instead of the nominal 8 kHz.
+ // TODO(kwiberg): Uncomment this once AudioDecoder has a SampleRateHz method.
+ // std::unique_ptr<AudioDecoder> dec =
+ // adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 8000, 1));
+ // EXPECT_EQ(16000, dec->SampleRateHz());
+}
+
+TEST(AudioDecoderFactoryTest, CreateIsac) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ // iSAC supports 16 kHz, 1 channel. The float implementation additionally
+ // supports 32 kHz, 1 channel.
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("isac", 16000, 0)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("isac", 16000, 1)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("isac", 16000, 2)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("isac", 8000, 1)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("isac", 48000, 1)));
+#ifdef WEBRTC_ARCH_ARM
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("isac", 32000, 1)));
+#else
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("isac", 32000, 1)));
+#endif
+}
+
+TEST(AudioDecoderFactoryTest, CreateL16) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ // L16 supports any clock rate, any number of channels.
+ const int clockrates[] = {8000, 16000, 32000, 48000};
+ const int num_channels[] = {1, 2, 3, 4711};
+ for (int clockrate : clockrates) {
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("l16", clockrate, 0)));
+ for (int channels : num_channels) {
+ EXPECT_TRUE(
+ adf->MakeAudioDecoder(SdpAudioFormat("l16", clockrate, channels)));
+ }
+ }
+}
+
+TEST(AudioDecoderFactoryTest, CreateG722) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ // g722 supports 8 kHz, 1-2 channels.
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("g722", 8000, 0)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("g722", 8000, 1)));
+ EXPECT_TRUE(adf->MakeAudioDecoder(SdpAudioFormat("g722", 8000, 2)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("g722", 8000, 3)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("g722", 16000, 1)));
+ EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("g722", 32000, 1)));
+}
+
+TEST(AudioDecoderFactoryTest, CreateOpus) {
+ std::unique_ptr<AudioDecoderFactory> adf = CreateBuiltinAudioDecoderFactory();
+ ASSERT_TRUE(adf);
+ // Opus supports 48 kHz, 2 channels, and wants a "stereo" parameter whose
+ // value is either "0" or "1".
+ for (int hz : {8000, 16000, 32000, 48000}) {
+ for (int channels : {0, 1, 2, 3}) {
+ for (std::string stereo : {"XX", "0", "1", "2"}) {
+ std::map<std::string, std::string> params;
+ if (stereo != "XX") {
+ params["stereo"] = stereo;
+ }
+ bool good =
+ (hz == 48000 && channels == 2 && (stereo == "0" || stereo == "1"));
+ EXPECT_EQ(good, static_cast<bool>(adf->MakeAudioDecoder(SdpAudioFormat(
+ "opus", hz, channels, std::move(params)))));
+ }
+ }
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.cc
index 6f793e25314..6b7f5f893fd 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.cc
@@ -16,8 +16,13 @@
namespace webrtc {
AudioEncoder::EncodedInfo::EncodedInfo() = default;
-
+AudioEncoder::EncodedInfo::EncodedInfo(const EncodedInfo&) = default;
+AudioEncoder::EncodedInfo::EncodedInfo(EncodedInfo&&) = default;
AudioEncoder::EncodedInfo::~EncodedInfo() = default;
+AudioEncoder::EncodedInfo& AudioEncoder::EncodedInfo::operator=(
+ const EncodedInfo&) = default;
+AudioEncoder::EncodedInfo& AudioEncoder::EncodedInfo::operator=(EncodedInfo&&) =
+ default;
int AudioEncoder::RtpTimestampRateHz() const {
return SampleRateHz();
@@ -37,55 +42,6 @@ AudioEncoder::EncodedInfo AudioEncoder::Encode(
return info;
}
-AudioEncoder::EncodedInfo AudioEncoder::Encode(
- uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded) {
- return DEPRECATED_Encode(rtp_timestamp, audio, max_encoded_bytes, encoded);
-}
-
-AudioEncoder::EncodedInfo AudioEncoder::DEPRECATED_Encode(
- uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded) {
- TRACE_EVENT0("webrtc", "AudioEncoder::Encode");
- RTC_CHECK_EQ(audio.size(),
- static_cast<size_t>(NumChannels() * SampleRateHz() / 100));
- EncodedInfo info =
- EncodeInternal(rtp_timestamp, audio, max_encoded_bytes, encoded);
- RTC_CHECK_LE(info.encoded_bytes, max_encoded_bytes);
- return info;
-}
-
-AudioEncoder::EncodedInfo AudioEncoder::EncodeImpl(
- uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- rtc::Buffer* encoded)
-{
- EncodedInfo info;
- encoded->AppendData(MaxEncodedBytes(), [&] (rtc::ArrayView<uint8_t> encoded) {
- info = EncodeInternal(rtp_timestamp, audio,
- encoded.size(), encoded.data());
- return info.encoded_bytes;
- });
- return info;
-}
-
-AudioEncoder::EncodedInfo AudioEncoder::EncodeInternal(
- uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded)
-{
- rtc::Buffer temp_buffer;
- EncodedInfo info = EncodeImpl(rtp_timestamp, audio, &temp_buffer);
- RTC_DCHECK_LE(temp_buffer.size(), max_encoded_bytes);
- std::memcpy(encoded, temp_buffer.data(), info.encoded_bytes);
- return info;
-}
-
bool AudioEncoder::SetFec(bool enable) {
return !enable;
}
@@ -104,4 +60,7 @@ void AudioEncoder::SetProjectedPacketLossRate(double fraction) {}
void AudioEncoder::SetTargetBitrate(int target_bps) {}
+rtc::ArrayView<std::unique_ptr<AudioEncoder>>
+AudioEncoder::ReclaimContainedEncoders() { return nullptr; }
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.h
index 3fdee259ce7..ecc28d96a16 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder.h
@@ -25,12 +25,32 @@ namespace webrtc {
// type must have an implementation of this class.
class AudioEncoder {
public:
+ // Used for UMA logging of codec usage. The same codecs, with the
+ // same values, must be listed in
+ // src/tools/metrics/histograms/histograms.xml in chromium to log
+ // correct values.
+ enum class CodecType {
+ kOther = 0, // Codec not specified, and/or not listed in this enum
+ kOpus = 1,
+ kIsac = 2,
+ kPcmA = 3,
+ kPcmU = 4,
+ kG722 = 5,
+ kIlbc = 6,
+
+ // Number of histogram bins in the UMA logging of codec types. The
+ // total number of different codecs that are logged cannot exceed this
+ // number.
+ kMaxLoggedAudioCodecTypes
+ };
+
struct EncodedInfoLeaf {
size_t encoded_bytes = 0;
uint32_t encoded_timestamp = 0;
int payload_type = 0;
bool send_even_if_empty = false;
bool speech = true;
+ CodecType encoder_type = CodecType::kOther;
};
// This is the main struct for auxiliary encoding information. Each encoded
@@ -45,21 +65,17 @@ class AudioEncoder {
// vector.
struct EncodedInfo : public EncodedInfoLeaf {
EncodedInfo();
+ EncodedInfo(const EncodedInfo&);
+ EncodedInfo(EncodedInfo&&);
~EncodedInfo();
+ EncodedInfo& operator=(const EncodedInfo&);
+ EncodedInfo& operator=(EncodedInfo&&);
std::vector<EncodedInfoLeaf> redundant;
};
virtual ~AudioEncoder() = default;
- // Returns the maximum number of bytes that can be produced by the encoder
- // at each Encode() call. The caller can use the return value to determine
- // the size of the buffer that needs to be allocated. This value is allowed
- // to depend on encoder parameters like bitrate, frame size etc., so if
- // any of these change, the caller of Encode() is responsible for checking
- // that the buffer is large enough by calling MaxEncodedBytes() again.
- virtual size_t MaxEncodedBytes() const = 0;
-
// Returns the input sample rate in Hz and the number of input channels.
// These are constants set at instantiation time.
virtual int SampleRateHz() const = 0;
@@ -95,33 +111,6 @@ class AudioEncoder {
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded);
- // Deprecated interface to Encode (remove eventually, bug 5591). May incur a
- // copy. The encoder produces zero or more bytes of output in |encoded| and
- // returns additional encoding information. The caller is responsible for
- // making sure that |max_encoded_bytes| is not smaller than the number of
- // bytes actually produced by the encoder.
- RTC_DEPRECATED EncodedInfo Encode(uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded);
-
- EncodedInfo DEPRECATED_Encode(uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded);
-
- // Deprecated interface EncodeInternal (see bug 5591). May incur a copy.
- // Subclasses implement this to perform the actual encoding. Called by
- // Encode(). By default, this is implemented as a call to the newer
- // EncodeImpl() that accepts an rtc::Buffer instead of a raw pointer.
- // That version is protected, so see below. At least one of EncodeInternal
- // or EncodeImpl _must_ be implemented by a subclass.
- virtual EncodedInfo EncodeInternal(
- uint32_t rtp_timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded);
-
// Resets the encoder to its starting state, discarding any input that has
// been fed to the encoder but not yet emitted in a packet.
virtual void Reset() = 0;
@@ -160,15 +149,21 @@ class AudioEncoder {
// implementation does the latter).
virtual void SetTargetBitrate(int target_bps);
+ // Causes this encoder to let go of any other encoders it contains, and
+ // returns a pointer to an array where they are stored (which is required to
+ // live as long as this encoder). Unless the returned array is empty, you may
+ // not call any methods on this encoder afterwards, except for the
+ // destructor. The default implementation just returns an empty array.
+ // NOTE: This method is subject to change. Do not call or override it.
+ virtual rtc::ArrayView<std::unique_ptr<AudioEncoder>>
+ ReclaimContainedEncoders();
+
protected:
// Subclasses implement this to perform the actual encoding. Called by
- // Encode(). For compatibility reasons, this is implemented by default as a
- // call to the older interface EncodeInternal(). At least one of
- // EncodeInternal or EncodeImpl _must_ be implemented by a
- // subclass. Preferably this one.
+ // Encode().
virtual EncodedInfo EncodeImpl(uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
- rtc::Buffer* encoded);
+ rtc::Buffer* encoded) = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_AUDIO_ENCODER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder_unittest.cc
deleted file mode 100644
index 71ffcde323b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_encoder_unittest.cc
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h"
-
-using ::testing::_;
-using ::testing::Invoke;
-using ::testing::Return;
-
-namespace webrtc {
-
-TEST(AudioEncoderTest, EncodeInternalRedirectsOk) {
- const size_t kPayloadSize = 16;
- const uint8_t payload[kPayloadSize] =
- {0xf, 0xe, 0xd, 0xc, 0xb, 0xa, 0x9, 0x8,
- 0x7, 0x6, 0x5, 0x4, 0x3, 0x2, 0x1, 0x0};
-
- MockAudioEncoderDeprecated old_impl;
- MockAudioEncoder new_impl;
- MockAudioEncoderBase* impls[] = { &old_impl, &new_impl };
- for (auto* impl : impls) {
- EXPECT_CALL(*impl, Die());
- EXPECT_CALL(*impl, MaxEncodedBytes())
- .WillRepeatedly(Return(kPayloadSize * 2));
- EXPECT_CALL(*impl, NumChannels()).WillRepeatedly(Return(1));
- EXPECT_CALL(*impl, SampleRateHz()).WillRepeatedly(Return(8000));
- }
-
- EXPECT_CALL(old_impl, EncodeInternal(_, _, _, _)).WillOnce(
- Invoke(MockAudioEncoderDeprecated::CopyEncoding(payload)));
-
- EXPECT_CALL(new_impl, EncodeImpl(_, _, _)).WillOnce(
- Invoke(MockAudioEncoder::CopyEncoding(payload)));
-
- int16_t audio[80];
- uint8_t output_array[kPayloadSize * 2];
- rtc::Buffer output_buffer;
-
- AudioEncoder* old_encoder = &old_impl;
- AudioEncoder* new_encoder = &new_impl;
- auto old_info = old_encoder->Encode(0, audio, &output_buffer);
- auto new_info = new_encoder->DEPRECATED_Encode(0, audio,
- kPayloadSize * 2,
- output_array);
-
- EXPECT_EQ(old_info.encoded_bytes, kPayloadSize);
- EXPECT_EQ(new_info.encoded_bytes, kPayloadSize);
- EXPECT_EQ(output_buffer.size(), kPayloadSize);
-
- for (size_t i = 0; i != kPayloadSize; ++i) {
- EXPECT_EQ(output_buffer.data()[i], payload[i]);
- EXPECT_EQ(output_array[i], payload[i]);
- }
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.cc
new file mode 100644
index 00000000000..bb69cbdb2f7
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.cc
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/audio_format.h"
+
+namespace webrtc {
+
+SdpAudioFormat::SdpAudioFormat() = default;
+SdpAudioFormat::SdpAudioFormat(const SdpAudioFormat&) = default;
+SdpAudioFormat::SdpAudioFormat(SdpAudioFormat&&) = default;
+
+SdpAudioFormat::SdpAudioFormat(const char* name,
+ int clockrate_hz,
+ int num_channels)
+ : name(name), clockrate_hz(clockrate_hz), num_channels(num_channels) {}
+
+SdpAudioFormat::SdpAudioFormat(const char* name,
+ int clockrate_hz,
+ int num_channels,
+ Parameters&& param)
+ : name(name),
+ clockrate_hz(clockrate_hz),
+ num_channels(num_channels),
+ parameters(std::move(param)) {}
+
+SdpAudioFormat::~SdpAudioFormat() = default;
+SdpAudioFormat& SdpAudioFormat::operator=(const SdpAudioFormat&) = default;
+SdpAudioFormat& SdpAudioFormat::operator=(SdpAudioFormat&&) = default;
+
+void swap(SdpAudioFormat& a, SdpAudioFormat& b) {
+ using std::swap;
+ swap(a.name, b.name);
+ swap(a.clockrate_hz, b.clockrate_hz);
+ swap(a.num_channels, b.num_channels);
+ swap(a.parameters, b.parameters);
+}
+
+std::ostream& operator<<(std::ostream& os, const SdpAudioFormat& saf) {
+ os << "{name: " << saf.name;
+ os << ", clockrate_hz: " << saf.clockrate_hz;
+ os << ", num_channels: " << saf.num_channels;
+ os << ", parameters: {";
+ const char* sep = "";
+ for (const auto& kv : saf.parameters) {
+ os << sep << kv.first << ": " << kv.second;
+ sep = ", ";
+ }
+ os << "}}";
+ return os;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.h
new file mode 100644
index 00000000000..61c0dd9f6fa
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/audio_format.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_AUDIO_FORMAT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_AUDIO_FORMAT_H_
+
+#include <map>
+#include <ostream>
+#include <string>
+#include <utility>
+
+namespace webrtc {
+
+// SDP specification for a single audio codec.
+// NOTE: This class is still under development and may change without notice.
+struct SdpAudioFormat {
+ using Parameters = std::map<std::string, std::string>;
+
+ // TODO(kwiberg): Get rid of the default constructor when rtc::Optional no
+ // longer requires it.
+ SdpAudioFormat();
+ SdpAudioFormat(const SdpAudioFormat&);
+ SdpAudioFormat(SdpAudioFormat&&);
+ SdpAudioFormat(const char* name, int clockrate_hz, int num_channels);
+ SdpAudioFormat(const char* name,
+ int clockrate_hz,
+ int num_channels,
+ Parameters&& param);
+ ~SdpAudioFormat();
+
+ SdpAudioFormat& operator=(const SdpAudioFormat&);
+ SdpAudioFormat& operator=(SdpAudioFormat&&);
+
+ std::string name;
+ int clockrate_hz;
+ int num_channels;
+ Parameters parameters;
+ // Parameters feedback_parameters; ??
+};
+
+void swap(SdpAudioFormat& a, SdpAudioFormat& b);
+std::ostream& operator<<(std::ostream& os, const SdpAudioFormat& saf);
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_AUDIO_FORMAT_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.cc
new file mode 100644
index 00000000000..4c7445672ac
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.cc
@@ -0,0 +1,152 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
+
+#include <vector>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/optional.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
+#include "webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h"
+#ifdef WEBRTC_CODEC_G722
+#include "webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h"
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+#include "webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h"
+#endif
+#ifdef WEBRTC_CODEC_ISACFX
+#include "webrtc/modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h"
+#endif
+#ifdef WEBRTC_CODEC_ISAC
+#include "webrtc/modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h"
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+#include "webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h"
+#endif
+#include "webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h"
+
+namespace webrtc {
+
+namespace {
+
+struct NamedDecoderConstructor {
+ const char* name;
+ std::unique_ptr<AudioDecoder> (*constructor)(const SdpAudioFormat&);
+};
+
+std::unique_ptr<AudioDecoder> Unique(AudioDecoder* d) {
+ return std::unique_ptr<AudioDecoder>(d);
+}
+
+// TODO(kwiberg): These factory functions should probably be moved to each
+// decoder.
+NamedDecoderConstructor decoder_constructors[] = {
+ {"pcmu",
+ [](const SdpAudioFormat& format) {
+ return format.clockrate_hz == 8000 && format.num_channels >= 1
+ ? Unique(new AudioDecoderPcmU(format.num_channels))
+ : nullptr;
+ }},
+ {"pcma",
+ [](const SdpAudioFormat& format) {
+ return format.clockrate_hz == 8000 && format.num_channels >= 1
+ ? Unique(new AudioDecoderPcmA(format.num_channels))
+ : nullptr;
+ }},
+#ifdef WEBRTC_CODEC_ILBC
+ {"ilbc",
+ [](const SdpAudioFormat& format) {
+ return format.clockrate_hz == 8000 && format.num_channels == 1
+ ? Unique(new AudioDecoderIlbc)
+ : nullptr;
+ }},
+#endif
+#if defined(WEBRTC_CODEC_ISACFX)
+ {"isac",
+ [](const SdpAudioFormat& format) {
+ return format.clockrate_hz == 16000 && format.num_channels == 1
+ ? Unique(new AudioDecoderIsacFix)
+ : nullptr;
+ }},
+#elif defined(WEBRTC_CODEC_ISAC)
+ {"isac",
+ [](const SdpAudioFormat& format) {
+ return (format.clockrate_hz == 16000 || format.clockrate_hz == 32000) &&
+ format.num_channels == 1
+ ? Unique(new AudioDecoderIsac)
+ : nullptr;
+ }},
+#endif
+ {"l16",
+ [](const SdpAudioFormat& format) {
+ return format.num_channels >= 1
+ ? Unique(new AudioDecoderPcm16B(format.num_channels))
+ : nullptr;
+ }},
+#ifdef WEBRTC_CODEC_G722
+ {"g722",
+ [](const SdpAudioFormat& format) {
+ if (format.clockrate_hz == 8000) {
+ if (format.num_channels == 1)
+ return Unique(new AudioDecoderG722);
+ if (format.num_channels == 2)
+ return Unique(new AudioDecoderG722Stereo);
+ }
+ return Unique(nullptr);
+ }},
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ {"opus",
+ [](const SdpAudioFormat& format) {
+ rtc::Optional<int> num_channels = [&] {
+ auto stereo = format.parameters.find("stereo");
+ if (stereo != format.parameters.end()) {
+ if (stereo->second == "0") {
+ return rtc::Optional<int>(1);
+ } else if (stereo->second == "1") {
+ return rtc::Optional<int>(2);
+ }
+ }
+ return rtc::Optional<int>();
+ }();
+ return format.clockrate_hz == 48000 && format.num_channels == 2 &&
+ num_channels
+ ? Unique(new AudioDecoderOpus(*num_channels))
+ : nullptr;
+ }},
+#endif
+};
+
+class BuiltinAudioDecoderFactory : public AudioDecoderFactory {
+ public:
+ std::vector<SdpAudioFormat> GetSupportedFormats() override {
+ FATAL() << "Not implemented yet!";
+ }
+
+ std::unique_ptr<AudioDecoder> MakeAudioDecoder(
+ const SdpAudioFormat& format) override {
+ for (const auto& dc : decoder_constructors) {
+ if (STR_CASE_CMP(format.name.c_str(), dc.name) == 0) {
+ return std::unique_ptr<AudioDecoder>(dc.constructor(format));
+ }
+ }
+ return nullptr;
+ }
+};
+
+} // namespace
+
+std::unique_ptr<AudioDecoderFactory> CreateBuiltinAudioDecoderFactory() {
+ return std::unique_ptr<AudioDecoderFactory>(new BuiltinAudioDecoderFactory);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h
new file mode 100644
index 00000000000..7234c160b5c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_BUILTIN_AUDIO_DECODER_FACTORY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_BUILTIN_AUDIO_DECODER_FACTORY_H_
+
+#include <memory>
+
+#include "webrtc/modules/audio_coding/codecs/audio_decoder_factory.h"
+
+namespace webrtc {
+
+// Creates a new factory that can create the built-in types of audio decoders.
+// NOTE: This function is still under development and may change without notice.
+std::unique_ptr<AudioDecoderFactory> CreateBuiltinAudioDecoderFactory();
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_BUILTIN_AUDIO_DECODER_FACTORY_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc
index 3b48131a754..d2edcb5c265 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.cc
@@ -21,33 +21,10 @@ namespace {
const int kMaxFrameSizeMs = 60;
-std::unique_ptr<CNG_enc_inst, CngInstDeleter> CreateCngInst(
- int sample_rate_hz,
- int sid_frame_interval_ms,
- int num_cng_coefficients) {
- CNG_enc_inst* ci;
- RTC_CHECK_EQ(0, WebRtcCng_CreateEnc(&ci));
- std::unique_ptr<CNG_enc_inst, CngInstDeleter> cng_inst(ci);
- RTC_CHECK_EQ(0,
- WebRtcCng_InitEnc(cng_inst.get(), sample_rate_hz,
- sid_frame_interval_ms, num_cng_coefficients));
- return cng_inst;
-}
-
} // namespace
AudioEncoderCng::Config::Config() = default;
-
-// TODO(kwiberg): =default this when Visual Studio learns to handle it.
-AudioEncoderCng::Config::Config(Config&& c)
- : num_channels(c.num_channels),
- payload_type(c.payload_type),
- speech_encoder(std::move(c.speech_encoder)),
- vad_mode(c.vad_mode),
- sid_frame_interval_ms(c.sid_frame_interval_ms),
- num_cng_coefficients(c.num_cng_coefficients),
- vad(c.vad) {}
-
+AudioEncoderCng::Config::Config(Config&&) = default;
AudioEncoderCng::Config::~Config() = default;
bool AudioEncoderCng::Config::IsOk() const {
@@ -75,20 +52,14 @@ AudioEncoderCng::AudioEncoderCng(Config&& config)
sid_frame_interval_ms_(config.sid_frame_interval_ms),
last_frame_active_(true),
vad_(config.vad ? std::unique_ptr<Vad>(config.vad)
- : CreateVad(config.vad_mode)) {
- cng_inst_ = CreateCngInst(SampleRateHz(), sid_frame_interval_ms_,
- num_cng_coefficients_);
+ : CreateVad(config.vad_mode)),
+ cng_encoder_(new ComfortNoiseEncoder(SampleRateHz(),
+ sid_frame_interval_ms_,
+ num_cng_coefficients_)) {
}
AudioEncoderCng::~AudioEncoderCng() = default;
-size_t AudioEncoderCng::MaxEncodedBytes() const {
- const size_t max_encoded_bytes_active = speech_encoder_->MaxEncodedBytes();
- const size_t max_encoded_bytes_passive =
- rtc::CheckedDivExact(kMaxFrameSizeMs, 10) * SamplesPer10msFrame();
- return std::max(max_encoded_bytes_active, max_encoded_bytes_passive);
-}
-
int AudioEncoderCng::SampleRateHz() const {
return speech_encoder_->SampleRateHz();
}
@@ -187,8 +158,9 @@ void AudioEncoderCng::Reset() {
rtp_timestamps_.clear();
last_frame_active_ = true;
vad_->Reset();
- cng_inst_ = CreateCngInst(SampleRateHz(), sid_frame_interval_ms_,
- num_cng_coefficients_);
+ cng_encoder_.reset(
+ new ComfortNoiseEncoder(SampleRateHz(), sid_frame_interval_ms_,
+ num_cng_coefficients_));
}
bool AudioEncoderCng::SetFec(bool enable) {
@@ -215,38 +187,38 @@ void AudioEncoderCng::SetTargetBitrate(int bits_per_second) {
speech_encoder_->SetTargetBitrate(bits_per_second);
}
+rtc::ArrayView<std::unique_ptr<AudioEncoder>>
+AudioEncoderCng::ReclaimContainedEncoders() {
+ return rtc::ArrayView<std::unique_ptr<AudioEncoder>>(&speech_encoder_, 1);
+}
+
AudioEncoder::EncodedInfo AudioEncoderCng::EncodePassive(
size_t frames_to_encode,
rtc::Buffer* encoded) {
bool force_sid = last_frame_active_;
bool output_produced = false;
const size_t samples_per_10ms_frame = SamplesPer10msFrame();
- const size_t bytes_to_encode = frames_to_encode * samples_per_10ms_frame;
AudioEncoder::EncodedInfo info;
- encoded->AppendData(bytes_to_encode, [&] (rtc::ArrayView<uint8_t> encoded) {
- for (size_t i = 0; i < frames_to_encode; ++i) {
- // It's important not to pass &info.encoded_bytes directly to
- // WebRtcCng_Encode(), since later loop iterations may return zero in
- // that value, in which case we don't want to overwrite any value from
- // an earlier iteration.
- size_t encoded_bytes_tmp = 0;
- RTC_CHECK_GE(
- WebRtcCng_Encode(cng_inst_.get(),
- &speech_buffer_[i * samples_per_10ms_frame],
- samples_per_10ms_frame, encoded.data(),
- &encoded_bytes_tmp, force_sid),
- 0);
- if (encoded_bytes_tmp > 0) {
- RTC_CHECK(!output_produced);
- info.encoded_bytes = encoded_bytes_tmp;
- output_produced = true;
- force_sid = false;
- }
- }
-
- return info.encoded_bytes;
- });
+ for (size_t i = 0; i < frames_to_encode; ++i) {
+ // It's important not to pass &info.encoded_bytes directly to
+ // WebRtcCng_Encode(), since later loop iterations may return zero in
+ // that value, in which case we don't want to overwrite any value from
+ // an earlier iteration.
+ size_t encoded_bytes_tmp =
+ cng_encoder_->Encode(
+ rtc::ArrayView<const int16_t>(
+ &speech_buffer_[i * samples_per_10ms_frame],
+ samples_per_10ms_frame),
+ force_sid, encoded);
+
+ if (encoded_bytes_tmp > 0) {
+ RTC_CHECK(!output_produced);
+ info.encoded_bytes = encoded_bytes_tmp;
+ output_produced = true;
+ force_sid = false;
+ }
+ }
info.encoded_timestamp = rtp_timestamps_.front();
info.payload_type = cng_payload_type_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h
index 1384cd511ee..a895e69de44 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h
@@ -21,11 +21,6 @@
namespace webrtc {
-// Deleter for use with unique_ptr.
-struct CngInstDeleter {
- void operator()(CNG_enc_inst* ptr) const { WebRtcCng_FreeEnc(ptr); }
-};
-
class Vad;
class AudioEncoderCng final : public AudioEncoder {
@@ -52,7 +47,6 @@ class AudioEncoderCng final : public AudioEncoder {
explicit AudioEncoderCng(Config&& config);
~AudioEncoderCng() override;
- size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
size_t NumChannels() const override;
int RtpTimestampRateHz() const override;
@@ -69,6 +63,8 @@ class AudioEncoderCng final : public AudioEncoder {
void SetMaxPlaybackRate(int frequency_hz) override;
void SetProjectedPacketLossRate(double fraction) override;
void SetTargetBitrate(int target_bps) override;
+ rtc::ArrayView<std::unique_ptr<AudioEncoder>> ReclaimContainedEncoders()
+ override;
private:
EncodedInfo EncodePassive(size_t frames_to_encode,
@@ -85,7 +81,7 @@ class AudioEncoderCng final : public AudioEncoder {
std::vector<uint32_t> rtp_timestamps_;
bool last_frame_active_;
std::unique_ptr<Vad> vad_;
- std::unique_ptr<CNG_enc_inst, CngInstDeleter> cng_inst_;
+ std::unique_ptr<ComfortNoiseEncoder> cng_encoder_;
RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderCng);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc
index 8f30d783ae4..eb6c6d3607e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc
@@ -12,6 +12,7 @@
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_audio/vad/mock/mock_vad.h"
#include "webrtc/modules/audio_coding/codecs/cng/audio_encoder_cng.h"
#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h"
@@ -25,7 +26,6 @@ using ::testing::Invoke;
namespace webrtc {
namespace {
-static const size_t kMockMaxEncodedBytes = 1000;
static const size_t kMaxNumSamples = 48 * 10 * 2; // 10 ms @ 48 kHz stereo.
static const size_t kMockReturnEncodedBytes = 17;
static const int kCngPayloadType = 18;
@@ -74,8 +74,6 @@ class AudioEncoderCngTest : public ::testing::Test {
// as long as it is smaller than 10.
EXPECT_CALL(*mock_encoder_, Max10MsFramesInAPacket())
.WillOnce(Return(1u));
- EXPECT_CALL(*mock_encoder_, MaxEncodedBytes())
- .WillRepeatedly(Return(kMockMaxEncodedBytes));
}
cng_.reset(new AudioEncoderCng(std::move(config)));
}
@@ -90,8 +88,8 @@ class AudioEncoderCngTest : public ::testing::Test {
}
// Expect |num_calls| calls to the encoder, all successful. The last call
- // claims to have encoded |kMockMaxEncodedBytes| bytes, and all the preceding
- // ones 0 bytes.
+ // claims to have encoded |kMockReturnEncodedBytes| bytes, and all the
+ // preceding ones 0 bytes.
void ExpectEncodeCalls(size_t num_calls) {
InSequence s;
AudioEncoder::EncodedInfo info;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng.gypi b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng.gypi
index c020f4740d4..bbff9f8edfe 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng.gypi
@@ -18,9 +18,7 @@
'sources': [
'audio_encoder_cng.cc',
'audio_encoder_cng.h',
- 'cng_helpfuns.c',
- 'cng_helpfuns.h',
- 'webrtc_cng.c',
+ 'webrtc_cng.cc',
'webrtc_cng.h',
],
},
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.c
deleted file mode 100644
index bc08d431a69..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.c
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "cng_helpfuns.h"
-
-#include "signal_processing_library.h"
-#include "webrtc/typedefs.h"
-#include "webrtc_cng.h"
-
-/* Values in |k| are Q15, and |a| Q12. */
-void WebRtcCng_K2a16(int16_t* k, int useOrder, int16_t* a) {
- int16_t any[WEBRTC_SPL_MAX_LPC_ORDER + 1];
- int16_t *aptr, *aptr2, *anyptr;
- const int16_t *kptr;
- int m, i;
-
- kptr = k;
- *a = 4096; /* i.e., (Word16_MAX >> 3) + 1 */
- *any = *a;
- a[1] = (*k + 4) >> 3;
- for (m = 1; m < useOrder; m++) {
- kptr++;
- aptr = a;
- aptr++;
- aptr2 = &a[m];
- anyptr = any;
- anyptr++;
-
- any[m + 1] = (*kptr + 4) >> 3;
- for (i = 0; i < m; i++) {
- *anyptr++ = (*aptr++) +
- (int16_t)((((int32_t)(*aptr2--) * (int32_t) * kptr) + 16384) >> 15);
- }
-
- aptr = a;
- anyptr = any;
- for (i = 0; i < (m + 2); i++) {
- *aptr++ = *anyptr++;
- }
- }
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.h
deleted file mode 100644
index a553a7615e6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_helpfuns.h
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_CNG_HELPFUNS_H_
-#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_CNG_HELPFUNS_H_
-
-#include "webrtc/typedefs.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-void WebRtcCng_K2a16(int16_t* k, int useOrder, int16_t* a);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_CNG_HELPFUNS_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc
index 1061dca69ac..95132a96178 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/cng_unittest.cc
@@ -7,11 +7,12 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc_cng.h"
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
namespace webrtc {
@@ -21,7 +22,7 @@ enum {
kSidLongIntervalUpdate = 10000
};
-enum {
+enum : size_t {
kCNGNumParamsLow = 0,
kCNGNumParamsNormal = 8,
kCNGNumParamsHigh = WEBRTC_CNG_MAX_LPC_ORDER,
@@ -35,19 +36,13 @@ enum {
class CngTest : public ::testing::Test {
protected:
- CngTest();
virtual void SetUp();
- CNG_enc_inst* cng_enc_inst_;
- CNG_dec_inst* cng_dec_inst_;
+ void TestCngEncode(int sample_rate_hz, int quality);
+
int16_t speech_data_[640]; // Max size of CNG internal buffers.
};
-CngTest::CngTest()
- : cng_enc_inst_(NULL),
- cng_dec_inst_(NULL) {
-}
-
void CngTest::SetUp() {
FILE* input_file;
const std::string file_name =
@@ -60,289 +55,187 @@ void CngTest::SetUp() {
input_file = NULL;
}
-// Test failing Create.
-TEST_F(CngTest, CngCreateFail) {
- // Test to see that an invalid pointer is caught.
- EXPECT_EQ(-1, WebRtcCng_CreateEnc(NULL));
- EXPECT_EQ(-1, WebRtcCng_CreateDec(NULL));
-}
-
-// Test normal Create.
-TEST_F(CngTest, CngCreate) {
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_CreateDec(&cng_dec_inst_));
- EXPECT_TRUE(cng_enc_inst_ != NULL);
- EXPECT_TRUE(cng_dec_inst_ != NULL);
- // Free encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_FreeDec(cng_dec_inst_));
+void CngTest::TestCngEncode(int sample_rate_hz, int quality) {
+ const size_t num_samples_10ms = rtc::CheckedDivExact(sample_rate_hz, 100);
+ rtc::Buffer sid_data;
+
+ ComfortNoiseEncoder cng_encoder(sample_rate_hz, kSidNormalIntervalUpdate,
+ quality);
+ EXPECT_EQ(0U, cng_encoder.Encode(rtc::ArrayView<const int16_t>(
+ speech_data_, num_samples_10ms),
+ kNoSid, &sid_data));
+ EXPECT_EQ(static_cast<size_t>(quality + 1),
+ cng_encoder.Encode(
+ rtc::ArrayView<const int16_t>(speech_data_, num_samples_10ms),
+ kForceSid, &sid_data));
}
+#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
// Create CNG encoder, init with faulty values, free CNG encoder.
TEST_F(CngTest, CngInitFail) {
- // Create encoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
-
// Call with too few parameters.
- EXPECT_EQ(-1, WebRtcCng_InitEnc(cng_enc_inst_, 8000, kSidNormalIntervalUpdate,
- kCNGNumParamsLow));
- EXPECT_EQ(6130, WebRtcCng_GetErrorCodeEnc(cng_enc_inst_));
-
+ EXPECT_DEATH({ ComfortNoiseEncoder(8000, kSidNormalIntervalUpdate,
+ kCNGNumParamsLow); }, "");
// Call with too many parameters.
- EXPECT_EQ(-1, WebRtcCng_InitEnc(cng_enc_inst_, 8000, kSidNormalIntervalUpdate,
- kCNGNumParamsTooHigh));
- EXPECT_EQ(6130, WebRtcCng_GetErrorCodeEnc(cng_enc_inst_));
-
- // Free encoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
-}
-
-TEST_F(CngTest, CngEncode) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- size_t number_bytes;
-
- // Create encoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
-
- // 8 kHz, Normal number of parameters
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 8000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 80, sid_data,
- &number_bytes, kNoSid));
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 80, sid_data, &number_bytes, kForceSid));
-
- // 16 kHz, Normal number of parameters
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 16000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 160, sid_data,
- &number_bytes, kNoSid));
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 160, sid_data, &number_bytes, kForceSid));
-
- // 32 kHz, Max number of parameters
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 32000, kSidNormalIntervalUpdate,
- kCNGNumParamsHigh));
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 320, sid_data,
- &number_bytes, kNoSid));
- EXPECT_EQ(kCNGNumParamsHigh + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 320, sid_data, &number_bytes, kForceSid));
-
- // 48 kHz, Normal number of parameters
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 48000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 480, sid_data,
- &number_bytes, kNoSid));
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 480, sid_data, &number_bytes, kForceSid));
-
- // 64 kHz, Normal number of parameters
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 64000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 640, sid_data,
- &number_bytes, kNoSid));
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 640, sid_data, &number_bytes, kForceSid));
-
- // Free encoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
+ EXPECT_DEATH({ ComfortNoiseEncoder(8000, kSidNormalIntervalUpdate,
+ kCNGNumParamsTooHigh); }, "");
}
// Encode Cng with too long input vector.
TEST_F(CngTest, CngEncodeTooLong) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- size_t number_bytes;
-
- // Create and init encoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 8000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
+ rtc::Buffer sid_data;
+ // Create encoder.
+ ComfortNoiseEncoder cng_encoder(8000, kSidNormalIntervalUpdate,
+ kCNGNumParamsNormal);
// Run encoder with too much data.
- EXPECT_EQ(-1, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 641, sid_data,
- &number_bytes, kNoSid));
- EXPECT_EQ(6140, WebRtcCng_GetErrorCodeEnc(cng_enc_inst_));
+ EXPECT_DEATH(
+ cng_encoder.Encode(rtc::ArrayView<const int16_t>(speech_data_, 641),
+ kNoSid, &sid_data),
+ "");
+}
+#endif // GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
- // Free encoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
+TEST_F(CngTest, CngEncode8000) {
+ TestCngEncode(8000, kCNGNumParamsNormal);
}
-// Call encode without calling init.
-TEST_F(CngTest, CngEncodeNoInit) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- size_t number_bytes;
+TEST_F(CngTest, CngEncode16000) {
+ TestCngEncode(16000, kCNGNumParamsNormal);
+}
- // Create encoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
+TEST_F(CngTest, CngEncode32000) {
+ TestCngEncode(32000, kCNGNumParamsHigh);
+}
- // Run encoder without calling init.
- EXPECT_EQ(-1, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 640, sid_data,
- &number_bytes, kNoSid));
- EXPECT_EQ(6120, WebRtcCng_GetErrorCodeEnc(cng_enc_inst_));
+TEST_F(CngTest, CngEncode48000) {
+ TestCngEncode(48000, kCNGNumParamsNormal);
+}
- // Free encoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
+TEST_F(CngTest, CngEncode64000) {
+ TestCngEncode(64000, kCNGNumParamsNormal);
}
// Update SID parameters, for both 9 and 16 parameters.
TEST_F(CngTest, CngUpdateSid) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- size_t number_bytes;
+ rtc::Buffer sid_data;
- // Create and initialize encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_CreateDec(&cng_dec_inst_));
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 16000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- WebRtcCng_InitDec(cng_dec_inst_);
+ // Create and initialize encoder and decoder.
+ ComfortNoiseEncoder cng_encoder(16000, kSidNormalIntervalUpdate,
+ kCNGNumParamsNormal);
+ ComfortNoiseDecoder cng_decoder;
// Run normal Encode and UpdateSid.
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 160, sid_data, &number_bytes, kForceSid));
- EXPECT_EQ(0, WebRtcCng_UpdateSid(cng_dec_inst_, sid_data,
- kCNGNumParamsNormal + 1));
+ EXPECT_EQ(kCNGNumParamsNormal + 1,
+ cng_encoder.Encode(rtc::ArrayView<const int16_t>(speech_data_, 160),
+ kForceSid, &sid_data));
+ cng_decoder.UpdateSid(sid_data);
// Reinit with new length.
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 16000, kSidNormalIntervalUpdate,
- kCNGNumParamsHigh));
- WebRtcCng_InitDec(cng_dec_inst_);
+ cng_encoder.Reset(16000, kSidNormalIntervalUpdate, kCNGNumParamsHigh);
+ cng_decoder.Reset();
// Expect 0 because of unstable parameters after switching length.
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 160, sid_data,
- &number_bytes, kForceSid));
- EXPECT_EQ(kCNGNumParamsHigh + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_ + 160, 160, sid_data, &number_bytes,
- kForceSid));
- EXPECT_EQ(0, WebRtcCng_UpdateSid(cng_dec_inst_, sid_data,
- kCNGNumParamsNormal + 1));
-
- // Free encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_FreeDec(cng_dec_inst_));
+ EXPECT_EQ(0U,
+ cng_encoder.Encode(rtc::ArrayView<const int16_t>(speech_data_, 160),
+ kForceSid, &sid_data));
+ EXPECT_EQ(
+ kCNGNumParamsHigh + 1,
+ cng_encoder.Encode(rtc::ArrayView<const int16_t>(speech_data_ + 160, 160),
+ kForceSid, &sid_data));
+ cng_decoder.UpdateSid(
+ rtc::ArrayView<const uint8_t>(sid_data.data(), kCNGNumParamsNormal + 1));
}
// Update SID parameters, with wrong parameters or without calling decode.
TEST_F(CngTest, CngUpdateSidErroneous) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- size_t number_bytes;
-
- // Create encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_CreateDec(&cng_dec_inst_));
+ rtc::Buffer sid_data;
// Encode.
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 16000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 160, sid_data, &number_bytes, kForceSid));
-
- // Update Sid before initializing decoder.
- EXPECT_EQ(-1, WebRtcCng_UpdateSid(cng_dec_inst_, sid_data,
- kCNGNumParamsNormal + 1));
- EXPECT_EQ(6220, WebRtcCng_GetErrorCodeDec(cng_dec_inst_));
-
- // Initialize decoder.
- WebRtcCng_InitDec(cng_dec_inst_);
+ ComfortNoiseEncoder cng_encoder(16000, kSidNormalIntervalUpdate,
+ kCNGNumParamsNormal);
+ ComfortNoiseDecoder cng_decoder;
+ EXPECT_EQ(kCNGNumParamsNormal + 1,
+ cng_encoder.Encode(rtc::ArrayView<const int16_t>(speech_data_, 160),
+ kForceSid, &sid_data));
// First run with valid parameters, then with too many CNG parameters.
// The function will operate correctly by only reading the maximum number of
// parameters, skipping the extra.
- EXPECT_EQ(0, WebRtcCng_UpdateSid(cng_dec_inst_, sid_data,
- kCNGNumParamsNormal + 1));
- EXPECT_EQ(0, WebRtcCng_UpdateSid(cng_dec_inst_, sid_data,
- kCNGNumParamsTooHigh + 1));
-
- // Free encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_FreeDec(cng_dec_inst_));
+ EXPECT_EQ(kCNGNumParamsNormal + 1, sid_data.size());
+ cng_decoder.UpdateSid(sid_data);
+
+ // Make sure the input buffer is large enough. Since Encode() appends data, we
+ // need to set the size manually only afterwards, or the buffer will be bigger
+ // than anticipated.
+ sid_data.SetSize(kCNGNumParamsTooHigh + 1);
+ cng_decoder.UpdateSid(sid_data);
}
// Test to generate cng data, by forcing SID. Both normal and faulty condition.
TEST_F(CngTest, CngGenerate) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ rtc::Buffer sid_data;
int16_t out_data[640];
- size_t number_bytes;
- // Create and initialize encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_CreateDec(&cng_dec_inst_));
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 16000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- WebRtcCng_InitDec(cng_dec_inst_);
+ // Create and initialize encoder and decoder.
+ ComfortNoiseEncoder cng_encoder(16000, kSidNormalIntervalUpdate,
+ kCNGNumParamsNormal);
+ ComfortNoiseDecoder cng_decoder;
// Normal Encode.
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 160, sid_data, &number_bytes, kForceSid));
+ EXPECT_EQ(kCNGNumParamsNormal + 1,
+ cng_encoder.Encode(rtc::ArrayView<const int16_t>(speech_data_, 160),
+ kForceSid, &sid_data));
// Normal UpdateSid.
- EXPECT_EQ(0, WebRtcCng_UpdateSid(cng_dec_inst_, sid_data,
- kCNGNumParamsNormal + 1));
+ cng_decoder.UpdateSid(sid_data);
// Two normal Generate, one with new_period.
- EXPECT_EQ(0, WebRtcCng_Generate(cng_dec_inst_, out_data, 640, 1));
- EXPECT_EQ(0, WebRtcCng_Generate(cng_dec_inst_, out_data, 640, 0));
+ EXPECT_TRUE(cng_decoder.Generate(rtc::ArrayView<int16_t>(out_data, 640), 1));
+ EXPECT_TRUE(cng_decoder.Generate(rtc::ArrayView<int16_t>(out_data, 640), 0));
// Call Genereate with too much data.
- EXPECT_EQ(-1, WebRtcCng_Generate(cng_dec_inst_, out_data, 641, 0));
- EXPECT_EQ(6140, WebRtcCng_GetErrorCodeDec(cng_dec_inst_));
-
- // Free encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_FreeDec(cng_dec_inst_));
+ EXPECT_FALSE(cng_decoder.Generate(rtc::ArrayView<int16_t>(out_data, 641), 0));
}
// Test automatic SID.
TEST_F(CngTest, CngAutoSid) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- size_t number_bytes;
+ rtc::Buffer sid_data;
- // Create and initialize encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_CreateDec(&cng_dec_inst_));
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 16000, kSidNormalIntervalUpdate,
- kCNGNumParamsNormal));
- WebRtcCng_InitDec(cng_dec_inst_);
+ // Create and initialize encoder and decoder.
+ ComfortNoiseEncoder cng_encoder(16000, kSidNormalIntervalUpdate,
+ kCNGNumParamsNormal);
+ ComfortNoiseDecoder cng_decoder;
// Normal Encode, 100 msec, where no SID data should be generated.
for (int i = 0; i < 10; i++) {
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 160, sid_data,
- &number_bytes, kNoSid));
+ EXPECT_EQ(0U, cng_encoder.Encode(
+ rtc::ArrayView<const int16_t>(speech_data_, 160), kNoSid, &sid_data));
}
// We have reached 100 msec, and SID data should be generated.
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 160, sid_data, &number_bytes, kNoSid));
-
- // Free encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_FreeDec(cng_dec_inst_));
+ EXPECT_EQ(kCNGNumParamsNormal + 1, cng_encoder.Encode(
+ rtc::ArrayView<const int16_t>(speech_data_, 160), kNoSid, &sid_data));
}
// Test automatic SID, with very short interval.
TEST_F(CngTest, CngAutoSidShort) {
- uint8_t sid_data[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- size_t number_bytes;
+ rtc::Buffer sid_data;
- // Create and initialize encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_CreateEnc(&cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_CreateDec(&cng_dec_inst_));
- EXPECT_EQ(0, WebRtcCng_InitEnc(cng_enc_inst_, 16000, kSidShortIntervalUpdate,
- kCNGNumParamsNormal));
- WebRtcCng_InitDec(cng_dec_inst_);
+ // Create and initialize encoder and decoder.
+ ComfortNoiseEncoder cng_encoder(16000, kSidShortIntervalUpdate,
+ kCNGNumParamsNormal);
+ ComfortNoiseDecoder cng_decoder;
// First call will never generate SID, unless forced to.
- EXPECT_EQ(0, WebRtcCng_Encode(cng_enc_inst_, speech_data_, 160, sid_data,
- &number_bytes, kNoSid));
+ EXPECT_EQ(0U, cng_encoder.Encode(
+ rtc::ArrayView<const int16_t>(speech_data_, 160), kNoSid, &sid_data));
// Normal Encode, 100 msec, SID data should be generated all the time.
for (int i = 0; i < 10; i++) {
- EXPECT_EQ(kCNGNumParamsNormal + 1, WebRtcCng_Encode(
- cng_enc_inst_, speech_data_, 160, sid_data, &number_bytes, kNoSid));
+ EXPECT_EQ(kCNGNumParamsNormal + 1, cng_encoder.Encode(
+ rtc::ArrayView<const int16_t>(speech_data_, 160), kNoSid, &sid_data));
}
-
- // Free encoder and decoder memory.
- EXPECT_EQ(0, WebRtcCng_FreeEnc(cng_enc_inst_));
- EXPECT_EQ(0, WebRtcCng_FreeDec(cng_dec_inst_));
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.c
deleted file mode 100644
index 8dddc5c717d..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.c
+++ /dev/null
@@ -1,603 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc_cng.h"
-
-#include <string.h>
-#include <stdlib.h>
-
-#include "cng_helpfuns.h"
-#include "signal_processing_library.h"
-
-typedef struct WebRtcCngDecoder_ {
- uint32_t dec_seed;
- int32_t dec_target_energy;
- int32_t dec_used_energy;
- int16_t dec_target_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t dec_used_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t dec_filtstate[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t dec_filtstateLow[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t dec_Efiltstate[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t dec_EfiltstateLow[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t dec_order;
- int16_t dec_target_scale_factor; /* Q29 */
- int16_t dec_used_scale_factor; /* Q29 */
- int16_t target_scale_factor; /* Q13 */
- int16_t errorcode;
- int16_t initflag;
-} WebRtcCngDecoder;
-
-typedef struct WebRtcCngEncoder_ {
- size_t enc_nrOfCoefs;
- int enc_sampfreq;
- int16_t enc_interval;
- int16_t enc_msSinceSID;
- int32_t enc_Energy;
- int16_t enc_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int32_t enc_corrVector[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- uint32_t enc_seed;
- int16_t errorcode;
- int16_t initflag;
-} WebRtcCngEncoder;
-
-const int32_t WebRtcCng_kDbov[94] = {
- 1081109975, 858756178, 682134279, 541838517, 430397633, 341876992,
- 271562548, 215709799, 171344384, 136103682, 108110997, 85875618,
- 68213428, 54183852, 43039763, 34187699, 27156255, 21570980,
- 17134438, 13610368, 10811100, 8587562, 6821343, 5418385,
- 4303976, 3418770, 2715625, 2157098, 1713444, 1361037,
- 1081110, 858756, 682134, 541839, 430398, 341877,
- 271563, 215710, 171344, 136104, 108111, 85876,
- 68213, 54184, 43040, 34188, 27156, 21571,
- 17134, 13610, 10811, 8588, 6821, 5418,
- 4304, 3419, 2716, 2157, 1713, 1361,
- 1081, 859, 682, 542, 430, 342,
- 272, 216, 171, 136, 108, 86,
- 68, 54, 43, 34, 27, 22,
- 17, 14, 11, 9, 7, 5,
- 4, 3, 3, 2, 2, 1,
- 1, 1, 1, 1
-};
-
-const int16_t WebRtcCng_kCorrWindow[WEBRTC_CNG_MAX_LPC_ORDER] = {
- 32702, 32636, 32570, 32505, 32439, 32374,
- 32309, 32244, 32179, 32114, 32049, 31985
-};
-
-/****************************************************************************
- * WebRtcCng_CreateEnc/Dec(...)
- *
- * These functions create an instance to the specified structure
- *
- * Input:
- * - XXX_inst : Pointer to created instance that should be created
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_CreateEnc(CNG_enc_inst** cng_inst) {
- if (cng_inst != NULL) {
- *cng_inst = (CNG_enc_inst*) malloc(sizeof(WebRtcCngEncoder));
- if (*cng_inst != NULL) {
- (*(WebRtcCngEncoder**) cng_inst)->errorcode = 0;
- (*(WebRtcCngEncoder**) cng_inst)->initflag = 0;
-
- /* Needed to get the right function pointers in SPLIB. */
- WebRtcSpl_Init();
-
- return 0;
- } else {
- /* The memory could not be allocated. */
- return -1;
- }
- } else {
- /* The input pointer is invalid (NULL). */
- return -1;
- }
-}
-
-int16_t WebRtcCng_CreateDec(CNG_dec_inst** cng_inst) {
- if (cng_inst != NULL ) {
- *cng_inst = (CNG_dec_inst*) malloc(sizeof(WebRtcCngDecoder));
- if (*cng_inst != NULL ) {
- (*(WebRtcCngDecoder**) cng_inst)->errorcode = 0;
- (*(WebRtcCngDecoder**) cng_inst)->initflag = 0;
-
- /* Needed to get the right function pointers in SPLIB. */
- WebRtcSpl_Init();
-
- return 0;
- } else {
- /* The memory could not be allocated */
- return -1;
- }
- } else {
- /* The input pointer is invalid (NULL). */
- return -1;
- }
-}
-
-/****************************************************************************
- * WebRtcCng_InitEnc/Dec(...)
- *
- * This function initializes a instance
- *
- * Input:
- * - cng_inst : Instance that should be initialized
- *
- * - fs : 8000 for narrowband and 16000 for wideband
- * - interval : generate SID data every interval ms
- * - quality : TBD
- *
- * Output:
- * - cng_inst : Initialized instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcCng_InitEnc(CNG_enc_inst* cng_inst, int fs, int16_t interval,
- int16_t quality) {
- int i;
- WebRtcCngEncoder* inst = (WebRtcCngEncoder*) cng_inst;
- memset(inst, 0, sizeof(WebRtcCngEncoder));
-
- /* Check LPC order */
- if (quality > WEBRTC_CNG_MAX_LPC_ORDER || quality <= 0) {
- inst->errorcode = CNG_DISALLOWED_LPC_ORDER;
- return -1;
- }
-
- inst->enc_sampfreq = fs;
- inst->enc_interval = interval;
- inst->enc_nrOfCoefs = quality;
- inst->enc_msSinceSID = 0;
- inst->enc_seed = 7777; /* For debugging only. */
- inst->enc_Energy = 0;
- for (i = 0; i < (WEBRTC_CNG_MAX_LPC_ORDER + 1); i++) {
- inst->enc_reflCoefs[i] = 0;
- inst->enc_corrVector[i] = 0;
- }
- inst->initflag = 1;
-
- return 0;
-}
-
-void WebRtcCng_InitDec(CNG_dec_inst* cng_inst) {
- int i;
-
- WebRtcCngDecoder* inst = (WebRtcCngDecoder*) cng_inst;
-
- memset(inst, 0, sizeof(WebRtcCngDecoder));
- inst->dec_seed = 7777; /* For debugging only. */
- inst->dec_order = 5;
- inst->dec_target_scale_factor = 0;
- inst->dec_used_scale_factor = 0;
- for (i = 0; i < (WEBRTC_CNG_MAX_LPC_ORDER + 1); i++) {
- inst->dec_filtstate[i] = 0;
- inst->dec_target_reflCoefs[i] = 0;
- inst->dec_used_reflCoefs[i] = 0;
- }
- inst->dec_target_reflCoefs[0] = 0;
- inst->dec_used_reflCoefs[0] = 0;
- inst->dec_used_energy = 0;
- inst->initflag = 1;
-}
-
-/****************************************************************************
- * WebRtcCng_FreeEnc/Dec(...)
- *
- * These functions frees the dynamic memory of a specified instance
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_FreeEnc(CNG_enc_inst* cng_inst) {
- free(cng_inst);
- return 0;
-}
-
-int16_t WebRtcCng_FreeDec(CNG_dec_inst* cng_inst) {
- free(cng_inst);
- return 0;
-}
-
-/****************************************************************************
- * WebRtcCng_Encode(...)
- *
- * These functions analyzes background noise
- *
- * Input:
- * - cng_inst : Pointer to created instance
- * - speech : Signal (noise) to be analyzed
- * - nrOfSamples : Size of speech vector
- * - bytesOut : Nr of bytes to transmit, might be 0
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcCng_Encode(CNG_enc_inst* cng_inst, int16_t* speech,
- size_t nrOfSamples, uint8_t* SIDdata,
- size_t* bytesOut, int16_t forceSID) {
- WebRtcCngEncoder* inst = (WebRtcCngEncoder*) cng_inst;
-
- int16_t arCoefs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int32_t corrVector[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t refCs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t hanningW[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
- int16_t ReflBeta = 19661; /* 0.6 in q15. */
- int16_t ReflBetaComp = 13107; /* 0.4 in q15. */
- int32_t outEnergy;
- int outShifts;
- size_t i;
- int stab;
- int acorrScale;
- size_t index;
- size_t ind, factor;
- int32_t* bptr;
- int32_t blo, bhi;
- int16_t negate;
- const int16_t* aptr;
- int16_t speechBuf[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
-
- /* Check if encoder initiated. */
- if (inst->initflag != 1) {
- inst->errorcode = CNG_ENCODER_NOT_INITIATED;
- return -1;
- }
-
- /* Check framesize. */
- if (nrOfSamples > WEBRTC_CNG_MAX_OUTSIZE_ORDER) {
- inst->errorcode = CNG_DISALLOWED_FRAME_SIZE;
- return -1;
- }
-
- for (i = 0; i < nrOfSamples; i++) {
- speechBuf[i] = speech[i];
- }
-
- factor = nrOfSamples;
-
- /* Calculate energy and a coefficients. */
- outEnergy = WebRtcSpl_Energy(speechBuf, nrOfSamples, &outShifts);
- while (outShifts > 0) {
- /* We can only do 5 shifts without destroying accuracy in
- * division factor. */
- if (outShifts > 5) {
- outEnergy <<= (outShifts - 5);
- outShifts = 5;
- } else {
- factor /= 2;
- outShifts--;
- }
- }
- outEnergy = WebRtcSpl_DivW32W16(outEnergy, (int16_t)factor);
-
- if (outEnergy > 1) {
- /* Create Hanning Window. */
- WebRtcSpl_GetHanningWindow(hanningW, nrOfSamples / 2);
- for (i = 0; i < (nrOfSamples / 2); i++)
- hanningW[nrOfSamples - i - 1] = hanningW[i];
-
- WebRtcSpl_ElementwiseVectorMult(speechBuf, hanningW, speechBuf, nrOfSamples,
- 14);
-
- WebRtcSpl_AutoCorrelation(speechBuf, nrOfSamples, inst->enc_nrOfCoefs,
- corrVector, &acorrScale);
-
- if (*corrVector == 0)
- *corrVector = WEBRTC_SPL_WORD16_MAX;
-
- /* Adds the bandwidth expansion. */
- aptr = WebRtcCng_kCorrWindow;
- bptr = corrVector;
-
- /* (zzz) lpc16_1 = 17+1+820+2+2 = 842 (ordo2=700). */
- for (ind = 0; ind < inst->enc_nrOfCoefs; ind++) {
- /* The below code multiplies the 16 b corrWindow values (Q15) with
- * the 32 b corrvector (Q0) and shifts the result down 15 steps. */
- negate = *bptr < 0;
- if (negate)
- *bptr = -*bptr;
-
- blo = (int32_t) * aptr * (*bptr & 0xffff);
- bhi = ((blo >> 16) & 0xffff)
- + ((int32_t)(*aptr++) * ((*bptr >> 16) & 0xffff));
- blo = (blo & 0xffff) | ((bhi & 0xffff) << 16);
-
- *bptr = (((bhi >> 16) & 0x7fff) << 17) | ((uint32_t) blo >> 15);
- if (negate)
- *bptr = -*bptr;
- bptr++;
- }
- /* End of bandwidth expansion. */
-
- stab = WebRtcSpl_LevinsonDurbin(corrVector, arCoefs, refCs,
- inst->enc_nrOfCoefs);
-
- if (!stab) {
- /* Disregard from this frame */
- *bytesOut = 0;
- return 0;
- }
-
- } else {
- for (i = 0; i < inst->enc_nrOfCoefs; i++)
- refCs[i] = 0;
- }
-
- if (forceSID) {
- /* Read instantaneous values instead of averaged. */
- for (i = 0; i < inst->enc_nrOfCoefs; i++)
- inst->enc_reflCoefs[i] = refCs[i];
- inst->enc_Energy = outEnergy;
- } else {
- /* Average history with new values. */
- for (i = 0; i < (inst->enc_nrOfCoefs); i++) {
- inst->enc_reflCoefs[i] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
- inst->enc_reflCoefs[i], ReflBeta, 15);
- inst->enc_reflCoefs[i] += (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
- refCs[i], ReflBetaComp, 15);
- }
- inst->enc_Energy = (outEnergy >> 2) + (inst->enc_Energy >> 1)
- + (inst->enc_Energy >> 2);
- }
-
- if (inst->enc_Energy < 1) {
- inst->enc_Energy = 1;
- }
-
- if ((inst->enc_msSinceSID > (inst->enc_interval - 1)) || forceSID) {
-
- /* Search for best dbov value. */
- index = 0;
- for (i = 1; i < 93; i++) {
- /* Always round downwards. */
- if ((inst->enc_Energy - WebRtcCng_kDbov[i]) > 0) {
- index = i;
- break;
- }
- }
- if ((i == 93) && (index == 0))
- index = 94;
- SIDdata[0] = (uint8_t)index;
-
- /* Quantize coefficients with tweak for WebRtc implementation of RFC3389. */
- if (inst->enc_nrOfCoefs == WEBRTC_CNG_MAX_LPC_ORDER) {
- for (i = 0; i < inst->enc_nrOfCoefs; i++) {
- /* Q15 to Q7 with rounding. */
- SIDdata[i + 1] = ((inst->enc_reflCoefs[i] + 128) >> 8);
- }
- } else {
- for (i = 0; i < inst->enc_nrOfCoefs; i++) {
- /* Q15 to Q7 with rounding. */
- SIDdata[i + 1] = (127 + ((inst->enc_reflCoefs[i] + 128) >> 8));
- }
- }
-
- inst->enc_msSinceSID = 0;
- *bytesOut = inst->enc_nrOfCoefs + 1;
-
- inst->enc_msSinceSID +=
- (int16_t)((1000 * nrOfSamples) / inst->enc_sampfreq);
- return (int)(inst->enc_nrOfCoefs + 1);
- } else {
- inst->enc_msSinceSID +=
- (int16_t)((1000 * nrOfSamples) / inst->enc_sampfreq);
- *bytesOut = 0;
- return 0;
- }
-}
-
-/****************************************************************************
- * WebRtcCng_UpdateSid(...)
- *
- * These functions updates the CN state, when a new SID packet arrives
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- * - SID : SID packet, all headers removed
- * - length : Length in bytes of SID packet
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_UpdateSid(CNG_dec_inst* cng_inst, uint8_t* SID,
- size_t length) {
-
- WebRtcCngDecoder* inst = (WebRtcCngDecoder*) cng_inst;
- int16_t refCs[WEBRTC_CNG_MAX_LPC_ORDER];
- int32_t targetEnergy;
- int i;
-
- if (inst->initflag != 1) {
- inst->errorcode = CNG_DECODER_NOT_INITIATED;
- return -1;
- }
-
- /* Throw away reflection coefficients of higher order than we can handle. */
- if (length > (WEBRTC_CNG_MAX_LPC_ORDER + 1))
- length = WEBRTC_CNG_MAX_LPC_ORDER + 1;
-
- inst->dec_order = (int16_t)length - 1;
-
- if (SID[0] > 93)
- SID[0] = 93;
- targetEnergy = WebRtcCng_kDbov[SID[0]];
- /* Take down target energy to 75%. */
- targetEnergy = targetEnergy >> 1;
- targetEnergy += targetEnergy >> 2;
-
- inst->dec_target_energy = targetEnergy;
-
- /* Reconstruct coeffs with tweak for WebRtc implementation of RFC3389. */
- if (inst->dec_order == WEBRTC_CNG_MAX_LPC_ORDER) {
- for (i = 0; i < (inst->dec_order); i++) {
- refCs[i] = SID[i + 1] << 8; /* Q7 to Q15*/
- inst->dec_target_reflCoefs[i] = refCs[i];
- }
- } else {
- for (i = 0; i < (inst->dec_order); i++) {
- refCs[i] = (SID[i + 1] - 127) << 8; /* Q7 to Q15. */
- inst->dec_target_reflCoefs[i] = refCs[i];
- }
- }
-
- for (i = (inst->dec_order); i < WEBRTC_CNG_MAX_LPC_ORDER; i++) {
- refCs[i] = 0;
- inst->dec_target_reflCoefs[i] = refCs[i];
- }
-
- return 0;
-}
-
-/****************************************************************************
- * WebRtcCng_Generate(...)
- *
- * These functions generates CN data when needed
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- * - outData : pointer to area to write CN data
- * - nrOfSamples : How much data to generate
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_Generate(CNG_dec_inst* cng_inst, int16_t* outData,
- size_t nrOfSamples, int16_t new_period) {
- WebRtcCngDecoder* inst = (WebRtcCngDecoder*) cng_inst;
-
- size_t i;
- int16_t excitation[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
- int16_t low[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
- int16_t lpPoly[WEBRTC_CNG_MAX_LPC_ORDER + 1];
- int16_t ReflBetaStd = 26214; /* 0.8 in q15. */
- int16_t ReflBetaCompStd = 6553; /* 0.2 in q15. */
- int16_t ReflBetaNewP = 19661; /* 0.6 in q15. */
- int16_t ReflBetaCompNewP = 13107; /* 0.4 in q15. */
- int16_t Beta, BetaC, tmp1, tmp2, tmp3;
- int32_t targetEnergy;
- int16_t En;
- int16_t temp16;
-
- if (nrOfSamples > WEBRTC_CNG_MAX_OUTSIZE_ORDER) {
- inst->errorcode = CNG_DISALLOWED_FRAME_SIZE;
- return -1;
- }
-
- if (new_period) {
- inst->dec_used_scale_factor = inst->dec_target_scale_factor;
- Beta = ReflBetaNewP;
- BetaC = ReflBetaCompNewP;
- } else {
- Beta = ReflBetaStd;
- BetaC = ReflBetaCompStd;
- }
-
- /* Here we use a 0.5 weighting, should possibly be modified to 0.6. */
- tmp1 = inst->dec_used_scale_factor << 2; /* Q13->Q15 */
- tmp2 = inst->dec_target_scale_factor << 2; /* Q13->Q15 */
- tmp3 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(tmp1, Beta, 15);
- tmp3 += (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(tmp2, BetaC, 15);
- inst->dec_used_scale_factor = tmp3 >> 2; /* Q15->Q13 */
-
- inst->dec_used_energy = inst->dec_used_energy >> 1;
- inst->dec_used_energy += inst->dec_target_energy >> 1;
-
- /* Do the same for the reflection coeffs. */
- for (i = 0; i < WEBRTC_CNG_MAX_LPC_ORDER; i++) {
- inst->dec_used_reflCoefs[i] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
- inst->dec_used_reflCoefs[i], Beta, 15);
- inst->dec_used_reflCoefs[i] += (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
- inst->dec_target_reflCoefs[i], BetaC, 15);
- }
-
- /* Compute the polynomial coefficients. */
- WebRtcCng_K2a16(inst->dec_used_reflCoefs, WEBRTC_CNG_MAX_LPC_ORDER, lpPoly);
-
-
- targetEnergy = inst->dec_used_energy;
-
- /* Calculate scaling factor based on filter energy. */
- En = 8192; /* 1.0 in Q13. */
- for (i = 0; i < (WEBRTC_CNG_MAX_LPC_ORDER); i++) {
-
- /* Floating point value for reference.
- E *= 1.0 - (inst->dec_used_reflCoefs[i] / 32768.0) *
- (inst->dec_used_reflCoefs[i] / 32768.0);
- */
-
- /* Same in fixed point. */
- /* K(i).^2 in Q15. */
- temp16 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
- inst->dec_used_reflCoefs[i], inst->dec_used_reflCoefs[i], 15);
- /* 1 - K(i).^2 in Q15. */
- temp16 = 0x7fff - temp16;
- En = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(En, temp16, 15);
- }
-
- /* float scaling= sqrt(E * inst->dec_target_energy / (1 << 24)); */
-
- /* Calculate sqrt(En * target_energy / excitation energy) */
- targetEnergy = WebRtcSpl_Sqrt(inst->dec_used_energy);
-
- En = (int16_t) WebRtcSpl_Sqrt(En) << 6;
- En = (En * 3) >> 1; /* 1.5 estimates sqrt(2). */
- inst->dec_used_scale_factor = (int16_t)((En * targetEnergy) >> 12);
-
- /* Generate excitation. */
- /* Excitation energy per sample is 2.^24 - Q13 N(0,1). */
- for (i = 0; i < nrOfSamples; i++) {
- excitation[i] = WebRtcSpl_RandN(&inst->dec_seed) >> 1;
- }
-
- /* Scale to correct energy. */
- WebRtcSpl_ScaleVector(excitation, excitation, inst->dec_used_scale_factor,
- nrOfSamples, 13);
-
- /* |lpPoly| - Coefficients in Q12.
- * |excitation| - Speech samples.
- * |nst->dec_filtstate| - State preservation.
- * |outData| - Filtered speech samples. */
- WebRtcSpl_FilterAR(lpPoly, WEBRTC_CNG_MAX_LPC_ORDER + 1, excitation,
- nrOfSamples, inst->dec_filtstate, WEBRTC_CNG_MAX_LPC_ORDER,
- inst->dec_filtstateLow, WEBRTC_CNG_MAX_LPC_ORDER, outData,
- low, nrOfSamples);
-
- return 0;
-}
-
-/****************************************************************************
- * WebRtcCng_GetErrorCodeEnc/Dec(...)
- *
- * This functions can be used to check the error code of a CNG instance. When
- * a function returns -1 a error code will be set for that instance. The
- * function below extract the code of the last error that occured in the
- * specified instance.
- *
- * Input:
- * - CNG_inst : CNG enc/dec instance
- *
- * Return value : Error code
- */
-int16_t WebRtcCng_GetErrorCodeEnc(CNG_enc_inst* cng_inst) {
- /* Typecast pointer to real structure. */
- WebRtcCngEncoder* inst = (WebRtcCngEncoder*) cng_inst;
- return inst->errorcode;
-}
-
-int16_t WebRtcCng_GetErrorCodeDec(CNG_dec_inst* cng_inst) {
- /* Typecast pointer to real structure. */
- WebRtcCngDecoder* inst = (WebRtcCngDecoder*) cng_inst;
- return inst->errorcode;
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.cc
new file mode 100644
index 00000000000..b4da260dba2
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.cc
@@ -0,0 +1,442 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
+
+#include <algorithm>
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+
+namespace webrtc {
+
+namespace {
+
+const size_t kCngMaxOutsizeOrder = 640;
+
+// TODO(ossu): Rename the left-over WebRtcCng according to style guide.
+void WebRtcCng_K2a16(int16_t* k, int useOrder, int16_t* a);
+
+const int32_t WebRtcCng_kDbov[94] = {
+ 1081109975, 858756178, 682134279, 541838517, 430397633, 341876992,
+ 271562548, 215709799, 171344384, 136103682, 108110997, 85875618,
+ 68213428, 54183852, 43039763, 34187699, 27156255, 21570980,
+ 17134438, 13610368, 10811100, 8587562, 6821343, 5418385,
+ 4303976, 3418770, 2715625, 2157098, 1713444, 1361037,
+ 1081110, 858756, 682134, 541839, 430398, 341877,
+ 271563, 215710, 171344, 136104, 108111, 85876,
+ 68213, 54184, 43040, 34188, 27156, 21571,
+ 17134, 13610, 10811, 8588, 6821, 5418,
+ 4304, 3419, 2716, 2157, 1713, 1361,
+ 1081, 859, 682, 542, 430, 342,
+ 272, 216, 171, 136, 108, 86,
+ 68, 54, 43, 34, 27, 22,
+ 17, 14, 11, 9, 7, 5,
+ 4, 3, 3, 2, 2, 1,
+ 1, 1, 1, 1
+};
+
+const int16_t WebRtcCng_kCorrWindow[WEBRTC_CNG_MAX_LPC_ORDER] = {
+ 32702, 32636, 32570, 32505, 32439, 32374,
+ 32309, 32244, 32179, 32114, 32049, 31985
+};
+
+} // namespace
+
+ComfortNoiseDecoder::ComfortNoiseDecoder() {
+ /* Needed to get the right function pointers in SPLIB. */
+ WebRtcSpl_Init();
+ Reset();
+}
+
+void ComfortNoiseDecoder::Reset() {
+ dec_seed_ = 7777; /* For debugging only. */
+ dec_target_energy_ = 0;
+ dec_used_energy_ = 0;
+ for (auto& c : dec_target_reflCoefs_)
+ c = 0;
+ for (auto& c : dec_used_reflCoefs_)
+ c = 0;
+ for (auto& c : dec_filtstate_)
+ c = 0;
+ for (auto& c : dec_filtstateLow_)
+ c = 0;
+ dec_order_ = 5;
+ dec_target_scale_factor_ = 0;
+ dec_used_scale_factor_ = 0;
+}
+
+void ComfortNoiseDecoder::UpdateSid(rtc::ArrayView<const uint8_t> sid) {
+ int16_t refCs[WEBRTC_CNG_MAX_LPC_ORDER];
+ int32_t targetEnergy;
+ size_t length = sid.size();
+ /* Throw away reflection coefficients of higher order than we can handle. */
+ if (length > (WEBRTC_CNG_MAX_LPC_ORDER + 1))
+ length = WEBRTC_CNG_MAX_LPC_ORDER + 1;
+
+ dec_order_ = static_cast<uint16_t>(length - 1);
+
+ uint8_t sid0 = std::min<uint8_t>(sid[0], 93);
+ targetEnergy = WebRtcCng_kDbov[sid0];
+ /* Take down target energy to 75%. */
+ targetEnergy = targetEnergy >> 1;
+ targetEnergy += targetEnergy >> 2;
+
+ dec_target_energy_ = targetEnergy;
+
+ /* Reconstruct coeffs with tweak for WebRtc implementation of RFC3389. */
+ if (dec_order_ == WEBRTC_CNG_MAX_LPC_ORDER) {
+ for (size_t i = 0; i < (dec_order_); i++) {
+ refCs[i] = sid[i + 1] << 8; /* Q7 to Q15*/
+ dec_target_reflCoefs_[i] = refCs[i];
+ }
+ } else {
+ for (size_t i = 0; i < (dec_order_); i++) {
+ refCs[i] = (sid[i + 1] - 127) << 8; /* Q7 to Q15. */
+ dec_target_reflCoefs_[i] = refCs[i];
+ }
+ }
+
+ for (size_t i = (dec_order_); i < WEBRTC_CNG_MAX_LPC_ORDER; i++) {
+ refCs[i] = 0;
+ dec_target_reflCoefs_[i] = refCs[i];
+ }
+}
+
+bool ComfortNoiseDecoder::Generate(rtc::ArrayView<int16_t> out_data,
+ bool new_period) {
+ int16_t excitation[kCngMaxOutsizeOrder];
+ int16_t low[kCngMaxOutsizeOrder];
+ int16_t lpPoly[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int16_t ReflBetaStd = 26214; /* 0.8 in q15. */
+ int16_t ReflBetaCompStd = 6553; /* 0.2 in q15. */
+ int16_t ReflBetaNewP = 19661; /* 0.6 in q15. */
+ int16_t ReflBetaCompNewP = 13107; /* 0.4 in q15. */
+ int16_t Beta, BetaC, tmp1, tmp2, tmp3;
+ int32_t targetEnergy;
+ int16_t En;
+ int16_t temp16;
+ const size_t num_samples = out_data.size();
+
+ if (num_samples > kCngMaxOutsizeOrder) {
+ return false;
+ }
+
+ if (new_period) {
+ dec_used_scale_factor_ = dec_target_scale_factor_;
+ Beta = ReflBetaNewP;
+ BetaC = ReflBetaCompNewP;
+ } else {
+ Beta = ReflBetaStd;
+ BetaC = ReflBetaCompStd;
+ }
+
+ /* Here we use a 0.5 weighting, should possibly be modified to 0.6. */
+ tmp1 = dec_used_scale_factor_ << 2; /* Q13->Q15 */
+ tmp2 = dec_target_scale_factor_ << 2; /* Q13->Q15 */
+ tmp3 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(tmp1, Beta, 15);
+ tmp3 += (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(tmp2, BetaC, 15);
+ dec_used_scale_factor_ = tmp3 >> 2; /* Q15->Q13 */
+
+ dec_used_energy_ = dec_used_energy_ >> 1;
+ dec_used_energy_ += dec_target_energy_ >> 1;
+
+ /* Do the same for the reflection coeffs. */
+ for (size_t i = 0; i < WEBRTC_CNG_MAX_LPC_ORDER; i++) {
+ dec_used_reflCoefs_[i] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
+ dec_used_reflCoefs_[i], Beta, 15);
+ dec_used_reflCoefs_[i] += (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
+ dec_target_reflCoefs_[i], BetaC, 15);
+ }
+
+ /* Compute the polynomial coefficients. */
+ WebRtcCng_K2a16(dec_used_reflCoefs_, WEBRTC_CNG_MAX_LPC_ORDER, lpPoly);
+
+
+ targetEnergy = dec_used_energy_;
+
+ /* Calculate scaling factor based on filter energy. */
+ En = 8192; /* 1.0 in Q13. */
+ for (size_t i = 0; i < (WEBRTC_CNG_MAX_LPC_ORDER); i++) {
+ /* Floating point value for reference.
+ E *= 1.0 - (dec_used_reflCoefs_[i] / 32768.0) *
+ (dec_used_reflCoefs_[i] / 32768.0);
+ */
+
+ /* Same in fixed point. */
+ /* K(i).^2 in Q15. */
+ temp16 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
+ dec_used_reflCoefs_[i], dec_used_reflCoefs_[i], 15);
+ /* 1 - K(i).^2 in Q15. */
+ temp16 = 0x7fff - temp16;
+ En = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(En, temp16, 15);
+ }
+
+ /* float scaling= sqrt(E * dec_target_energy_ / (1 << 24)); */
+
+ /* Calculate sqrt(En * target_energy / excitation energy) */
+ targetEnergy = WebRtcSpl_Sqrt(dec_used_energy_);
+
+ En = (int16_t) WebRtcSpl_Sqrt(En) << 6;
+ En = (En * 3) >> 1; /* 1.5 estimates sqrt(2). */
+ dec_used_scale_factor_ = (int16_t)((En * targetEnergy) >> 12);
+
+ /* Generate excitation. */
+ /* Excitation energy per sample is 2.^24 - Q13 N(0,1). */
+ for (size_t i = 0; i < num_samples; i++) {
+ excitation[i] = WebRtcSpl_RandN(&dec_seed_) >> 1;
+ }
+
+ /* Scale to correct energy. */
+ WebRtcSpl_ScaleVector(excitation, excitation, dec_used_scale_factor_,
+ num_samples, 13);
+
+ /* |lpPoly| - Coefficients in Q12.
+ * |excitation| - Speech samples.
+ * |nst->dec_filtstate| - State preservation.
+ * |out_data| - Filtered speech samples. */
+ WebRtcSpl_FilterAR(lpPoly, WEBRTC_CNG_MAX_LPC_ORDER + 1, excitation,
+ num_samples, dec_filtstate_, WEBRTC_CNG_MAX_LPC_ORDER,
+ dec_filtstateLow_, WEBRTC_CNG_MAX_LPC_ORDER,
+ out_data.data(), low, num_samples);
+
+ return true;
+}
+
+ComfortNoiseEncoder::ComfortNoiseEncoder(int fs, int interval, int quality)
+ : enc_nrOfCoefs_(quality),
+ enc_sampfreq_(fs),
+ enc_interval_(interval),
+ enc_msSinceSid_(0),
+ enc_Energy_(0),
+ enc_reflCoefs_{0},
+ enc_corrVector_{0},
+ enc_seed_(7777) /* For debugging only. */ {
+ RTC_CHECK(quality <= WEBRTC_CNG_MAX_LPC_ORDER && quality > 0);
+ /* Needed to get the right function pointers in SPLIB. */
+ WebRtcSpl_Init();
+}
+
+void ComfortNoiseEncoder::Reset(int fs, int interval, int quality) {
+ RTC_CHECK(quality <= WEBRTC_CNG_MAX_LPC_ORDER && quality > 0);
+ enc_nrOfCoefs_ = quality;
+ enc_sampfreq_ = fs;
+ enc_interval_ = interval;
+ enc_msSinceSid_ = 0;
+ enc_Energy_ = 0;
+ for (auto& c : enc_reflCoefs_)
+ c = 0;
+ for (auto& c : enc_corrVector_)
+ c = 0;
+ enc_seed_ = 7777; /* For debugging only. */
+}
+
+size_t ComfortNoiseEncoder::Encode(rtc::ArrayView<const int16_t> speech,
+ bool force_sid,
+ rtc::Buffer* output) {
+ int16_t arCoefs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int32_t corrVector[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int16_t refCs[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int16_t hanningW[kCngMaxOutsizeOrder];
+ int16_t ReflBeta = 19661; /* 0.6 in q15. */
+ int16_t ReflBetaComp = 13107; /* 0.4 in q15. */
+ int32_t outEnergy;
+ int outShifts;
+ size_t i;
+ int stab;
+ int acorrScale;
+ size_t index;
+ size_t ind, factor;
+ int32_t* bptr;
+ int32_t blo, bhi;
+ int16_t negate;
+ const int16_t* aptr;
+ int16_t speechBuf[kCngMaxOutsizeOrder];
+
+ const size_t num_samples = speech.size();
+ RTC_CHECK_LE(num_samples, static_cast<size_t>(kCngMaxOutsizeOrder));
+
+ for (i = 0; i < num_samples; i++) {
+ speechBuf[i] = speech[i];
+ }
+
+ factor = num_samples;
+
+ /* Calculate energy and a coefficients. */
+ outEnergy = WebRtcSpl_Energy(speechBuf, num_samples, &outShifts);
+ while (outShifts > 0) {
+ /* We can only do 5 shifts without destroying accuracy in
+ * division factor. */
+ if (outShifts > 5) {
+ outEnergy <<= (outShifts - 5);
+ outShifts = 5;
+ } else {
+ factor /= 2;
+ outShifts--;
+ }
+ }
+ outEnergy = WebRtcSpl_DivW32W16(outEnergy, (int16_t)factor);
+
+ if (outEnergy > 1) {
+ /* Create Hanning Window. */
+ WebRtcSpl_GetHanningWindow(hanningW, num_samples / 2);
+ for (i = 0; i < (num_samples / 2); i++)
+ hanningW[num_samples - i - 1] = hanningW[i];
+
+ WebRtcSpl_ElementwiseVectorMult(speechBuf, hanningW, speechBuf, num_samples,
+ 14);
+
+ WebRtcSpl_AutoCorrelation(speechBuf, num_samples, enc_nrOfCoefs_,
+ corrVector, &acorrScale);
+
+ if (*corrVector == 0)
+ *corrVector = WEBRTC_SPL_WORD16_MAX;
+
+ /* Adds the bandwidth expansion. */
+ aptr = WebRtcCng_kCorrWindow;
+ bptr = corrVector;
+
+ /* (zzz) lpc16_1 = 17+1+820+2+2 = 842 (ordo2=700). */
+ for (ind = 0; ind < enc_nrOfCoefs_; ind++) {
+ /* The below code multiplies the 16 b corrWindow values (Q15) with
+ * the 32 b corrvector (Q0) and shifts the result down 15 steps. */
+ negate = *bptr < 0;
+ if (negate)
+ *bptr = -*bptr;
+
+ blo = (int32_t) * aptr * (*bptr & 0xffff);
+ bhi = ((blo >> 16) & 0xffff)
+ + ((int32_t)(*aptr++) * ((*bptr >> 16) & 0xffff));
+ blo = (blo & 0xffff) | ((bhi & 0xffff) << 16);
+
+ *bptr = (((bhi >> 16) & 0x7fff) << 17) | ((uint32_t) blo >> 15);
+ if (negate)
+ *bptr = -*bptr;
+ bptr++;
+ }
+ /* End of bandwidth expansion. */
+
+ stab = WebRtcSpl_LevinsonDurbin(corrVector, arCoefs, refCs,
+ enc_nrOfCoefs_);
+
+ if (!stab) {
+ /* Disregard from this frame */
+ return 0;
+ }
+
+ } else {
+ for (i = 0; i < enc_nrOfCoefs_; i++)
+ refCs[i] = 0;
+ }
+
+ if (force_sid) {
+ /* Read instantaneous values instead of averaged. */
+ for (i = 0; i < enc_nrOfCoefs_; i++)
+ enc_reflCoefs_[i] = refCs[i];
+ enc_Energy_ = outEnergy;
+ } else {
+ /* Average history with new values. */
+ for (i = 0; i < enc_nrOfCoefs_; i++) {
+ enc_reflCoefs_[i] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
+ enc_reflCoefs_[i], ReflBeta, 15);
+ enc_reflCoefs_[i] +=
+ (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(refCs[i], ReflBetaComp, 15);
+ }
+ enc_Energy_ =
+ (outEnergy >> 2) + (enc_Energy_ >> 1) + (enc_Energy_ >> 2);
+ }
+
+ if (enc_Energy_ < 1) {
+ enc_Energy_ = 1;
+ }
+
+ if ((enc_msSinceSid_ > (enc_interval_ - 1)) || force_sid) {
+ /* Search for best dbov value. */
+ index = 0;
+ for (i = 1; i < 93; i++) {
+ /* Always round downwards. */
+ if ((enc_Energy_ - WebRtcCng_kDbov[i]) > 0) {
+ index = i;
+ break;
+ }
+ }
+ if ((i == 93) && (index == 0))
+ index = 94;
+
+ const size_t output_coefs = enc_nrOfCoefs_ + 1;
+ output->AppendData(output_coefs, [&] (rtc::ArrayView<uint8_t> output) {
+ output[0] = (uint8_t)index;
+
+ /* Quantize coefficients with tweak for WebRtc implementation of
+ * RFC3389. */
+ if (enc_nrOfCoefs_ == WEBRTC_CNG_MAX_LPC_ORDER) {
+ for (i = 0; i < enc_nrOfCoefs_; i++) {
+ /* Q15 to Q7 with rounding. */
+ output[i + 1] = ((enc_reflCoefs_[i] + 128) >> 8);
+ }
+ } else {
+ for (i = 0; i < enc_nrOfCoefs_; i++) {
+ /* Q15 to Q7 with rounding. */
+ output[i + 1] = (127 + ((enc_reflCoefs_[i] + 128) >> 8));
+ }
+ }
+
+ return output_coefs;
+ });
+
+ enc_msSinceSid_ =
+ static_cast<int16_t>((1000 * num_samples) / enc_sampfreq_);
+ return output_coefs;
+ } else {
+ enc_msSinceSid_ +=
+ static_cast<int16_t>((1000 * num_samples) / enc_sampfreq_);
+ return 0;
+ }
+}
+
+namespace {
+/* Values in |k| are Q15, and |a| Q12. */
+void WebRtcCng_K2a16(int16_t* k, int useOrder, int16_t* a) {
+ int16_t any[WEBRTC_SPL_MAX_LPC_ORDER + 1];
+ int16_t* aptr;
+ int16_t* aptr2;
+ int16_t* anyptr;
+ const int16_t* kptr;
+ int m, i;
+
+ kptr = k;
+ *a = 4096; /* i.e., (Word16_MAX >> 3) + 1 */
+ *any = *a;
+ a[1] = (*k + 4) >> 3;
+ for (m = 1; m < useOrder; m++) {
+ kptr++;
+ aptr = a;
+ aptr++;
+ aptr2 = &a[m];
+ anyptr = any;
+ anyptr++;
+
+ any[m + 1] = (*kptr + 4) >> 3;
+ for (i = 0; i < m; i++) {
+ *anyptr++ =
+ (*aptr++) +
+ (int16_t)((((int32_t)(*aptr2--) * (int32_t)*kptr) + 16384) >> 15);
+ }
+
+ aptr = a;
+ anyptr = any;
+ for (i = 0; i < (m + 2); i++) {
+ *aptr++ = *anyptr++;
+ }
+ }
+}
+
+} // namespace
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h
index 64bea1e26f6..fb0a53df270 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h
@@ -12,152 +12,88 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_WEBRTC_CNG_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_WEBRTC_CNG_H_
-#include <stddef.h>
-#include "webrtc/typedefs.h"
+#include <cstddef>
-#ifdef __cplusplus
-extern "C" {
-#endif
+#include "webrtc/base/array_view.h"
+#include "webrtc/base/buffer.h"
+#include "webrtc/typedefs.h"
#define WEBRTC_CNG_MAX_LPC_ORDER 12
-#define WEBRTC_CNG_MAX_OUTSIZE_ORDER 640
-
-/* Define Error codes. */
-
-/* 6100 Encoder */
-#define CNG_ENCODER_NOT_INITIATED 6120
-#define CNG_DISALLOWED_LPC_ORDER 6130
-#define CNG_DISALLOWED_FRAME_SIZE 6140
-#define CNG_DISALLOWED_SAMPLING_FREQUENCY 6150
-/* 6200 Decoder */
-#define CNG_DECODER_NOT_INITIATED 6220
-
-typedef struct WebRtcCngEncInst CNG_enc_inst;
-typedef struct WebRtcCngDecInst CNG_dec_inst;
-
-/****************************************************************************
- * WebRtcCng_CreateEnc/Dec(...)
- *
- * These functions create an instance to the specified structure
- *
- * Input:
- * - XXX_inst : Pointer to created instance that should be created
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_CreateEnc(CNG_enc_inst** cng_inst);
-int16_t WebRtcCng_CreateDec(CNG_dec_inst** cng_inst);
-
-/****************************************************************************
- * WebRtcCng_InitEnc/Dec(...)
- *
- * This function initializes a instance
- *
- * Input:
- * - cng_inst : Instance that should be initialized
- *
- * - fs : 8000 for narrowband and 16000 for wideband
- * - interval : generate SID data every interval ms
- * - quality : Number of refl. coefs, maximum allowed is 12
- *
- * Output:
- * - cng_inst : Initialized instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcCng_InitEnc(CNG_enc_inst* cng_inst, int fs, int16_t interval,
- int16_t quality);
-void WebRtcCng_InitDec(CNG_dec_inst* cng_inst);
-
-/****************************************************************************
- * WebRtcCng_FreeEnc/Dec(...)
- *
- * These functions frees the dynamic memory of a specified instance
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_FreeEnc(CNG_enc_inst* cng_inst);
-int16_t WebRtcCng_FreeDec(CNG_dec_inst* cng_inst);
-
-/****************************************************************************
- * WebRtcCng_Encode(...)
- *
- * These functions analyzes background noise
- *
- * Input:
- * - cng_inst : Pointer to created instance
- * - speech : Signal to be analyzed
- * - nrOfSamples : Size of speech vector
- * - forceSID : not zero to force SID frame and reset
- *
- * Output:
- * - bytesOut : Nr of bytes to transmit, might be 0
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcCng_Encode(CNG_enc_inst* cng_inst, int16_t* speech,
- size_t nrOfSamples, uint8_t* SIDdata,
- size_t* bytesOut, int16_t forceSID);
-
-/****************************************************************************
- * WebRtcCng_UpdateSid(...)
- *
- * These functions updates the CN state, when a new SID packet arrives
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- * - SID : SID packet, all headers removed
- * - length : Length in bytes of SID packet
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_UpdateSid(CNG_dec_inst* cng_inst, uint8_t* SID,
- size_t length);
-
-/****************************************************************************
- * WebRtcCng_Generate(...)
- *
- * These functions generates CN data when needed
- *
- * Input:
- * - cng_inst : Pointer to created instance that should be freed
- * - outData : pointer to area to write CN data
- * - nrOfSamples : How much data to generate
- * - new_period : >0 if a new period of CNG, will reset history
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int16_t WebRtcCng_Generate(CNG_dec_inst* cng_inst, int16_t* outData,
- size_t nrOfSamples, int16_t new_period);
-
-/*****************************************************************************
- * WebRtcCng_GetErrorCodeEnc/Dec(...)
- *
- * This functions can be used to check the error code of a CNG instance. When
- * a function returns -1 a error code will be set for that instance. The
- * function below extract the code of the last error that occurred in the
- * specified instance.
- *
- * Input:
- * - CNG_inst : CNG enc/dec instance
- *
- * Return value : Error code
- */
-int16_t WebRtcCng_GetErrorCodeEnc(CNG_enc_inst* cng_inst);
-int16_t WebRtcCng_GetErrorCodeDec(CNG_dec_inst* cng_inst);
-#ifdef __cplusplus
-}
-#endif
+namespace webrtc {
+
+class ComfortNoiseDecoder {
+ public:
+ ComfortNoiseDecoder();
+ ~ComfortNoiseDecoder() = default;
+
+ ComfortNoiseDecoder(const ComfortNoiseDecoder&) = delete;
+ ComfortNoiseDecoder& operator=(const ComfortNoiseDecoder&) = delete;
+
+ void Reset();
+
+ // Updates the CN state when a new SID packet arrives.
+ // |sid| is a view of the SID packet without the headers.
+ void UpdateSid(rtc::ArrayView<const uint8_t> sid);
+
+ // Generates comfort noise.
+ // |out_data| will be filled with samples - its size determines the number of
+ // samples generated. When |new_period| is true, CNG history will be reset
+ // before any audio is generated. Returns |false| if outData is too large -
+ // currently 640 bytes (equalling 10ms at 64kHz).
+ // TODO(ossu): Specify better limits for the size of out_data. Either let it
+ // be unbounded or limit to 10ms in the current sample rate.
+ bool Generate(rtc::ArrayView<int16_t> out_data, bool new_period);
+
+ private:
+ uint32_t dec_seed_;
+ int32_t dec_target_energy_;
+ int32_t dec_used_energy_;
+ int16_t dec_target_reflCoefs_[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int16_t dec_used_reflCoefs_[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int16_t dec_filtstate_[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int16_t dec_filtstateLow_[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ uint16_t dec_order_;
+ int16_t dec_target_scale_factor_; /* Q29 */
+ int16_t dec_used_scale_factor_; /* Q29 */
+};
+
+class ComfortNoiseEncoder {
+ public:
+ // Creates a comfort noise encoder.
+ // |fs| selects sample rate: 8000 for narrowband or 16000 for wideband.
+ // |interval| sets the interval at which to generate SID data (in ms).
+ // |quality| selects the number of refl. coeffs. Maximum allowed is 12.
+ ComfortNoiseEncoder(int fs, int interval, int quality);
+ ~ComfortNoiseEncoder() = default;
+
+ ComfortNoiseEncoder(const ComfortNoiseEncoder&) = delete;
+ ComfortNoiseEncoder& operator=(const ComfortNoiseEncoder&) = delete;
+
+ // Resets the comfort noise encoder to its initial state.
+ // Parameters are set as during construction.
+ void Reset(int fs, int interval, int quality);
+
+ // Analyzes background noise from |speech| and appends coefficients to
+ // |output|. Returns the number of coefficients generated. If |force_sid| is
+ // true, a SID frame is forced and the internal sid interval counter is reset.
+ // Will fail if the input size is too large (> 640 samples, see
+ // ComfortNoiseDecoder::Generate).
+ size_t Encode(rtc::ArrayView<const int16_t> speech,
+ bool force_sid,
+ rtc::Buffer* output);
+
+ private:
+ size_t enc_nrOfCoefs_;
+ int enc_sampfreq_;
+ int16_t enc_interval_;
+ int16_t enc_msSinceSid_;
+ int32_t enc_Energy_;
+ int16_t enc_reflCoefs_[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ int32_t enc_corrVector_[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+ uint32_t enc_seed_;
+};
+
+} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_WEBRTC_CNG_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h
index 9dc3a6fd7ad..7a627e757c9 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h
@@ -12,6 +12,7 @@
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G711_AUDIO_DECODER_PCM_H_
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc
index a24b1526fd2..baa5d382d32 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc
@@ -52,10 +52,6 @@ AudioEncoderPcm::AudioEncoderPcm(const Config& config, int sample_rate_hz)
AudioEncoderPcm::~AudioEncoderPcm() = default;
-size_t AudioEncoderPcm::MaxEncodedBytes() const {
- return full_frame_samples_ * BytesPerSample();
-}
-
int AudioEncoderPcm::SampleRateHz() const {
return sample_rate_hz_;
}
@@ -93,13 +89,14 @@ AudioEncoder::EncodedInfo AudioEncoderPcm::EncodeImpl(
info.encoded_timestamp = first_timestamp_in_buffer_;
info.payload_type = payload_type_;
info.encoded_bytes =
- encoded->AppendData(MaxEncodedBytes(),
+ encoded->AppendData(full_frame_samples_ * BytesPerSample(),
[&] (rtc::ArrayView<uint8_t> encoded) {
return EncodeCall(&speech_buffer_[0],
full_frame_samples_,
encoded.data());
});
speech_buffer_.clear();
+ info.encoder_type = GetCodecType();
return info;
}
@@ -120,6 +117,10 @@ size_t AudioEncoderPcmA::BytesPerSample() const {
return 1;
}
+AudioEncoder::CodecType AudioEncoderPcmA::GetCodecType() const {
+ return AudioEncoder::CodecType::kPcmA;
+}
+
AudioEncoderPcmU::AudioEncoderPcmU(const CodecInst& codec_inst)
: AudioEncoderPcmU(CreateConfig<AudioEncoderPcmU>(codec_inst)) {}
@@ -133,4 +134,8 @@ size_t AudioEncoderPcmU::BytesPerSample() const {
return 1;
}
+AudioEncoder::CodecType AudioEncoderPcmU::GetCodecType() const {
+ return AudioEncoder::CodecType::kPcmU;
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h
index 6b3cebfb336..721344528f8 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h
@@ -35,7 +35,6 @@ class AudioEncoderPcm : public AudioEncoder {
~AudioEncoderPcm() override;
- size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
size_t NumChannels() const override;
size_t Num10MsFramesInNextPacket() const override;
@@ -56,6 +55,10 @@ class AudioEncoderPcm : public AudioEncoder {
virtual size_t BytesPerSample() const = 0;
+ // Used to set EncodedInfoLeaf::encoder_type in
+ // AudioEncoderPcm::EncodeImpl
+ virtual AudioEncoder::CodecType GetCodecType() const = 0;
+
private:
const int sample_rate_hz_;
const size_t num_channels_;
@@ -85,6 +88,8 @@ class AudioEncoderPcmA final : public AudioEncoderPcm {
size_t BytesPerSample() const override;
+ AudioEncoder::CodecType GetCodecType() const override;
+
private:
static const int kSampleRateHz = 8000;
RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcmA);
@@ -107,6 +112,8 @@ class AudioEncoderPcmU final : public AudioEncoderPcm {
size_t BytesPerSample() const override;
+ AudioEncoder::CodecType GetCodecType() const override;
+
private:
static const int kSampleRateHz = 8000;
RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcmU);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h
index 7cc2ea98773..1837ffabe29 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_DECODER_G722_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_DECODER_G722_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
typedef struct WebRtcG722DecInst G722DecInst;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc
index 9256518445d..1f3936c8eee 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.cc
@@ -60,10 +60,6 @@ AudioEncoderG722::AudioEncoderG722(const CodecInst& codec_inst)
AudioEncoderG722::~AudioEncoderG722() = default;
-size_t AudioEncoderG722::MaxEncodedBytes() const {
- return SamplesPerChannel() / 2 * num_channels_;
-}
-
int AudioEncoderG722::SampleRateHz() const {
return kSampleRateHz;
}
@@ -149,6 +145,7 @@ AudioEncoder::EncodedInfo AudioEncoderG722::EncodeImpl(
});
info.encoded_timestamp = first_timestamp_in_buffer_;
info.payload_type = payload_type_;
+ info.encoder_type = CodecType::kG722;
return info;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h
index dec87b2b7a4..ad49a865e25 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h
@@ -14,6 +14,7 @@
#include <memory>
#include "webrtc/base/buffer.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"
@@ -35,7 +36,6 @@ class AudioEncoderG722 final : public AudioEncoder {
explicit AudioEncoderG722(const CodecInst& codec_inst);
~AudioEncoderG722() override;
- size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
size_t NumChannels() const override;
int RtpTimestampRateHz() const override;
@@ -44,7 +44,7 @@ class AudioEncoderG722 final : public AudioEncoder {
int GetTargetBitrate() const override;
void Reset() override;
-protected:
+ protected:
EncodedInfo EncodeImpl(uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded) override;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h
index e890635da09..036c11fac47 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_DECODER_ILBC_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_DECODER_ILBC_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
typedef struct iLBC_decinst_t_ IlbcDecoderInstance;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc
index c7d7411c45d..ca11587dfab 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc
@@ -56,10 +56,6 @@ AudioEncoderIlbc::~AudioEncoderIlbc() {
RTC_CHECK_EQ(0, WebRtcIlbcfix_EncoderFree(encoder_));
}
-size_t AudioEncoderIlbc::MaxEncodedBytes() const {
- return RequiredOutputSizeBytes();
-}
-
int AudioEncoderIlbc::SampleRateHz() const {
return kSampleRateHz;
}
@@ -131,6 +127,7 @@ AudioEncoder::EncodedInfo AudioEncoderIlbc::EncodeImpl(
info.encoded_bytes = encoded_bytes;
info.encoded_timestamp = first_timestamp_in_buffer_;
info.payload_type = config_.payload_type;
+ info.encoder_type = CodecType::kIlbc;
return info;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h
index 27329bbc4ee..63639860f45 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h
@@ -34,7 +34,6 @@ class AudioEncoderIlbc final : public AudioEncoder {
explicit AudioEncoderIlbc(const CodecInst& codec_inst);
~AudioEncoderIlbc() override;
- size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
size_t NumChannels() const override;
size_t Num10MsFramesInNextPacket() const override;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c
index 62a686495b1..a8375afb609 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c
@@ -65,15 +65,15 @@ void WebRtcIlbcfix_GetLspPoly(
{
/* Compute f[j] = f[j] + tmp*f[j-1] + f[j-2]; */
high = (int16_t)(fPtr[-1] >> 16);
- low = (int16_t)((fPtr[-1] - ((int32_t)high << 16)) >> 1);
+ low = (int16_t)((fPtr[-1] & 0xffff) >> 1);
- tmpW32 = ((high * *lspPtr) << 2) + (((low * *lspPtr) >> 15) << 2);
+ tmpW32 = 4 * high * *lspPtr + 4 * ((low * *lspPtr) >> 15);
(*fPtr) += fPtr[-2];
(*fPtr) -= tmpW32;
fPtr--;
}
- *fPtr -= *lspPtr << 10;
+ *fPtr -= *lspPtr * (1 << 10);
fPtr+=i;
lspPtr+=2;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c
index bd101bf30ca..8b18c047b93 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c
@@ -48,7 +48,7 @@ void WebRtcIlbcfix_HpOutput(
tmpW32 = (tmpW32>>15);
tmpW32 += y[0] * ba[3]; /* (-a[1])*y[i-1] (high part) */
tmpW32 += y[2] * ba[4]; /* (-a[2])*y[i-2] (high part) */
- tmpW32 = (tmpW32<<1);
+ tmpW32 *= 2;
tmpW32 += signal[i] * ba[0]; /* b[0]*x[0] */
tmpW32 += x[0] * ba[1]; /* b[1]*x[i-1] */
@@ -77,11 +77,11 @@ void WebRtcIlbcfix_HpOutput(
} else if (tmpW32<-268435456) {
tmpW32 = WEBRTC_SPL_WORD32_MIN;
} else {
- tmpW32 <<= 3;
+ tmpW32 *= 8;
}
y[0] = (int16_t)(tmpW32 >> 16);
- y[1] = (int16_t)((tmpW32 - (y[0] << 16)) >> 1);
+ y[1] = (int16_t)((tmpW32 & 0xffff) >> 1);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/interfaces.gypi b/chromium/third_party/webrtc/modules/audio_coding/codecs/interfaces.gypi
index d4f6a4a41e6..1aba106f909 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/interfaces.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/interfaces.gypi
@@ -15,6 +15,10 @@
'audio_decoder.cc',
'audio_decoder.h',
],
+ 'dependencies': [
+ '<(webrtc_root)/base/base.gyp:rtc_base_approved',
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ ],
},
{
@@ -24,6 +28,10 @@
'audio_encoder.cc',
'audio_encoder.h',
],
+ 'dependencies': [
+ '<(webrtc_root)/base/base.gyp:rtc_base_approved',
+ '<(webrtc_root)/common.gyp:webrtc_common',
+ ],
},
],
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h
index d9d20ec0396..b1907bbb394 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h
@@ -13,6 +13,7 @@
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
#include "webrtc/modules/audio_coding/codecs/isac/locked_bandwidth_info.h"
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
index 0da8ed71d66..f1f2714ff9c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h
@@ -13,6 +13,7 @@
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
#include "webrtc/modules/audio_coding/codecs/isac/locked_bandwidth_info.h"
@@ -56,7 +57,6 @@ class AudioEncoderIsacT final : public AudioEncoder {
const rtc::scoped_refptr<LockedIsacBandwidthInfo>& bwinfo);
~AudioEncoderIsacT() override;
- size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
size_t NumChannels() const override;
size_t Num10MsFramesInNextPacket() const override;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
index 1debbeb9038..b6a1747c391 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t_impl.h
@@ -80,11 +80,6 @@ AudioEncoderIsacT<T>::~AudioEncoderIsacT() {
}
template <typename T>
-size_t AudioEncoderIsacT<T>::MaxEncodedBytes() const {
- return kSufficientEncodeBufferSizeBytes;
-}
-
-template <typename T>
int AudioEncoderIsacT<T>::SampleRateHz() const {
return T::EncSampRate(isac_state_);
}
@@ -150,6 +145,7 @@ AudioEncoder::EncodedInfo AudioEncoderIsacT<T>::EncodeImpl(
info.encoded_bytes = encoded_bytes;
info.encoded_timestamp = packet_timestamp_;
info.payload_type = config_.payload_type;
+ info.encoder_type = CodecType::kIsac;
return info;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
index fdbb2fcb0d7..001a04f39bf 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
@@ -90,7 +90,7 @@ void WebRtcIsacfix_Spec2TimeC(int16_t* inreQ7,
int32_t* outre1Q16,
int32_t* outre2Q16);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
void WebRtcIsacfix_Time2SpecNeon(int16_t* inre1Q9,
int16_t* inre2Q9,
int16_t* outre,
@@ -174,7 +174,7 @@ void WebRtcIsacfix_FilterMaLoopC(int16_t input0,
int32_t* ptr1,
int32_t* ptr2);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
int WebRtcIsacfix_AutocorrNeon(int32_t* __restrict r,
const int16_t* __restrict x,
int16_t N,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h
index 2c8c923cd33..1b87d0ea557 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h
@@ -147,7 +147,7 @@ void WebRtcIsacfix_MatrixProduct2C(const int16_t matrix0[],
const int matrix0_index_factor,
const int matrix0_index_step);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
void WebRtcIsacfix_MatrixProduct1Neon(const int16_t matrix0[],
const int32_t matrix1[],
int32_t matrix_product[],
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h
index 0e67e300ac1..d488339b31f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h
@@ -60,7 +60,7 @@ void WebRtcIsacfix_AllpassFilter2FixDec16C(
int32_t *filter_state_ch1,
int32_t *filter_state_ch2);
-#if (defined WEBRTC_DETECT_NEON) || (defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
void WebRtcIsacfix_AllpassFilter2FixDec16Neon(
int16_t *data_ch1,
int16_t *data_ch2,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc
index 0ec115414b8..4b03181e456 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc
@@ -64,11 +64,7 @@ class FilterBanksTest : public testing::Test {
TEST_F(FilterBanksTest, AllpassFilter2FixDec16Test) {
CalculateResidualEnergyTester(WebRtcIsacfix_AllpassFilter2FixDec16C);
-#ifdef WEBRTC_DETECT_NEON
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- CalculateResidualEnergyTester(WebRtcIsacfix_AllpassFilter2FixDec16Neon);
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
CalculateResidualEnergyTester(WebRtcIsacfix_AllpassFilter2FixDec16Neon);
#endif
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc
index 5cce1e9f0b2..3ed57788a1f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_unittest.cc
@@ -59,11 +59,7 @@ class FiltersTest : public testing::Test {
TEST_F(FiltersTest, AutocorrFixTest) {
FiltersTester(WebRtcIsacfix_AutocorrC);
-#ifdef WEBRTC_DETECT_NEON
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- FiltersTester(WebRtcIsacfix_AutocorrNeon);
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
FiltersTester(WebRtcIsacfix_AutocorrNeon);
#endif
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
index aba3aa0c0bf..e7905ae81fa 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
@@ -201,7 +201,7 @@ int16_t WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct *ISAC_main_inst)
* This function initializes function pointers for ARM Neon platform.
*/
-#if defined(WEBRTC_DETECT_NEON) || defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
static void WebRtcIsacfix_InitNeon(void) {
WebRtcIsacfix_AutocorrFix = WebRtcIsacfix_AutocorrNeon;
WebRtcIsacfix_FilterMaLoopFix = WebRtcIsacfix_FilterMaLoopNeon;
@@ -253,11 +253,7 @@ static void InitFunctionPointers(void) {
WebRtcIsacfix_MatrixProduct1 = WebRtcIsacfix_MatrixProduct1C;
WebRtcIsacfix_MatrixProduct2 = WebRtcIsacfix_MatrixProduct2C;
-#ifdef WEBRTC_DETECT_NEON
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- WebRtcIsacfix_InitNeon();
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
WebRtcIsacfix_InitNeon();
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c
index 18377dd370f..0d881e80442 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator_c.c
@@ -57,8 +57,6 @@ void WebRtcIsacfix_PCorr2Q32(const int16_t* in, int32_t* logcorQ8) {
ysum32 += in[PITCH_CORR_LEN2 + k - 1] * in[PITCH_CORR_LEN2 + k - 1] >>
scaling;
- // TODO(zhongwei.yao): Move this function into a separate NEON code file so
- // that WEBRTC_DETECT_NEON could take advantage of it.
#ifdef WEBRTC_HAS_NEON
{
int32_t vbuff[4];
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc
index 58d890011fe..c5cc87ffce2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_unittest.cc
@@ -179,22 +179,14 @@ class TransformTest : public testing::Test {
TEST_F(TransformTest, Time2SpecTest) {
Time2SpecTester(WebRtcIsacfix_Time2SpecC);
-#ifdef WEBRTC_DETECT_NEON
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- Time2SpecTester(WebRtcIsacfix_Time2SpecNeon);
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
Time2SpecTester(WebRtcIsacfix_Time2SpecNeon);
#endif
}
TEST_F(TransformTest, Spec2TimeTest) {
Spec2TimeTester(WebRtcIsacfix_Spec2TimeC);
-#ifdef WEBRTC_DETECT_NEON
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- Spec2TimeTester(WebRtcIsacfix_Spec2TimeNeon);
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
Spec2TimeTester(WebRtcIsacfix_Spec2TimeNeon);
#endif
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc
index 32f36c52617..276eb60e280 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc
@@ -25,10 +25,10 @@ class IsacSpeedTest : public AudioCodecSpeedTest {
IsacSpeedTest();
void SetUp() override;
void TearDown() override;
- virtual float EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
- size_t max_bytes, size_t* encoded_bytes);
- virtual float DecodeABlock(const uint8_t* bit_stream, size_t encoded_bytes,
- int16_t* out_data);
+ float EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
+ size_t max_bytes, size_t* encoded_bytes) override;
+ float DecodeABlock(const uint8_t* bit_stream, size_t encoded_bytes,
+ int16_t* out_data) override;
ISACFIX_MainStruct *ISACFIX_main_inst_;
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c
index 63e4928bd88..47bbe31b8ae 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c
@@ -214,10 +214,10 @@ int WebRtcIsac_DecHistOneStepMulti(int *data, /* output: data vector */
if (streamdata->stream_index == 0) /* first time decoder is called for this stream */
{
/* read first word from bytestream */
- streamval = *stream_ptr << 24;
- streamval |= *++stream_ptr << 16;
- streamval |= *++stream_ptr << 8;
- streamval |= *++stream_ptr;
+ streamval = (uint32_t)(*stream_ptr) << 24;
+ streamval |= (uint32_t)(*++stream_ptr) << 16;
+ streamval |= (uint32_t)(*++stream_ptr) << 8;
+ streamval |= (uint32_t)(*++stream_ptr);
} else {
streamval = streamdata->streamval;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
index c1204ad03ad..f920dc2ef8b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
@@ -162,9 +162,9 @@ static void FindInvArSpec(const int16_t* ARCoefQ12,
}
for (k = 0; k < FRAMESAMPLES / 8; k++) {
- CurveQ16[FRAMESAMPLES_QUARTER - 1 - k] = CurveQ16[k] -
- (diffQ16[k] << shftVal);
- CurveQ16[k] += diffQ16[k] << shftVal;
+ int32_t diff_q16_shifted = (int32_t)((uint32_t)(diffQ16[k]) << shftVal);
+ CurveQ16[FRAMESAMPLES_QUARTER - 1 - k] = CurveQ16[k] - diff_q16_shifted;
+ CurveQ16[k] += diff_q16_shifted;
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_decoder_factory.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_decoder_factory.h
new file mode 100644
index 00000000000..6e5737c89b8
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_decoder_factory.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_MOCK_MOCK_AUDIO_DECODER_FACTORY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_MOCK_MOCK_AUDIO_DECODER_FACTORY_H_
+
+#include <vector>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/modules/audio_coding/codecs/audio_decoder_factory.h"
+
+namespace webrtc {
+
+class MockAudioDecoderFactory : public AudioDecoderFactory {
+ public:
+ MOCK_METHOD0(GetSupportedFormats, std::vector<SdpAudioFormat>());
+ std::unique_ptr<AudioDecoder> MakeAudioDecoder(
+ const SdpAudioFormat& format) {
+ std::unique_ptr<AudioDecoder> return_value;
+ MakeAudioDecoderMock(format, &return_value);
+ return return_value;
+ }
+ MOCK_METHOD2(MakeAudioDecoderMock,
+ void(const SdpAudioFormat& format,
+ std::unique_ptr<AudioDecoder>* return_value));
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_MOCK_MOCK_AUDIO_DECODER_FACTORY_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.cc
index 52849691ac6..a674eba6607 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.cc
@@ -49,26 +49,4 @@ AudioEncoder::EncodedInfo MockAudioEncoder::CopyEncoding::operator()(
return info_;
}
-MockAudioEncoderDeprecated::CopyEncoding::CopyEncoding(
- AudioEncoder::EncodedInfo info,
- rtc::ArrayView<const uint8_t> payload)
- : info_(info), payload_(payload) { }
-
-MockAudioEncoderDeprecated::CopyEncoding::CopyEncoding(
- rtc::ArrayView<const uint8_t> payload)
- : payload_(payload) {
- info_.encoded_bytes = payload_.size();
-}
-
-AudioEncoder::EncodedInfo MockAudioEncoderDeprecated::CopyEncoding::operator()(
- uint32_t timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_bytes_encoded,
- uint8_t* encoded) {
- RTC_CHECK(encoded);
- RTC_CHECK_LE(info_.encoded_bytes, payload_.size());
- std::memcpy(encoded, payload_.data(), info_.encoded_bytes);
- return info_;
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h
index 58a1e756f97..2ffb30b708a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/mock/mock_audio_encoder.h
@@ -11,6 +11,8 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_MOCK_MOCK_AUDIO_ENCODER_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_MOCK_MOCK_AUDIO_ENCODER_H_
+#include <string>
+
#include "webrtc/base/array_view.h"
#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
@@ -18,12 +20,15 @@
namespace webrtc {
-class MockAudioEncoderBase : public AudioEncoder {
+class MockAudioEncoder : public AudioEncoder {
public:
- ~MockAudioEncoderBase() override { Die(); }
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
+ ~MockAudioEncoder() /* override */ { Die(); }
MOCK_METHOD0(Die, void());
MOCK_METHOD1(Mark, void(std::string desc));
- MOCK_CONST_METHOD0(MaxEncodedBytes, size_t());
MOCK_CONST_METHOD0(SampleRateHz, int());
MOCK_CONST_METHOD0(NumChannels, size_t());
MOCK_CONST_METHOD0(RtpTimestampRateHz, int());
@@ -39,10 +44,7 @@ class MockAudioEncoderBase : public AudioEncoder {
MOCK_METHOD1(SetTargetBitrate, void(int target_bps));
MOCK_METHOD1(SetMaxBitrate, void(int max_bps));
MOCK_METHOD1(SetMaxPayloadSize, void(int max_payload_size_bytes));
-};
-class MockAudioEncoder final : public MockAudioEncoderBase {
- public:
// Note, we explicitly chose not to create a mock for the Encode method.
MOCK_METHOD3(EncodeImpl,
EncodedInfo(uint32_t timestamp,
@@ -53,11 +55,11 @@ class MockAudioEncoder final : public MockAudioEncoderBase {
public:
// Creates a functor that will return |info| and adjust the rtc::Buffer
// given as input to it, so it is info.encoded_bytes larger.
- FakeEncoding(const AudioEncoder::EncodedInfo& info);
+ explicit FakeEncoding(const AudioEncoder::EncodedInfo& info);
// Shorthand version of the constructor above, for when only setting
// encoded_bytes in the EncodedInfo object matters.
- FakeEncoding(size_t encoded_bytes);
+ explicit FakeEncoding(size_t encoded_bytes);
AudioEncoder::EncodedInfo operator()(uint32_t timestamp,
rtc::ArrayView<const int16_t> audio,
@@ -80,41 +82,12 @@ class MockAudioEncoder final : public MockAudioEncoderBase {
// Shorthand version of the constructor above, for when you wish to append
// the whole payload and do not care about any EncodedInfo attribute other
// than encoded_bytes.
- CopyEncoding(rtc::ArrayView<const uint8_t> payload);
+ explicit CopyEncoding(rtc::ArrayView<const uint8_t> payload);
AudioEncoder::EncodedInfo operator()(uint32_t timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded);
- private:
- AudioEncoder::EncodedInfo info_;
- rtc::ArrayView<const uint8_t> payload_;
- };
-
-};
-
-class MockAudioEncoderDeprecated final : public MockAudioEncoderBase {
- public:
- // Note, we explicitly chose not to create a mock for the Encode method.
- MOCK_METHOD4(EncodeInternal,
- EncodedInfo(uint32_t timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_encoded_bytes,
- uint8_t* encoded));
- // A functor like MockAudioEncoder::CopyEncoding above, but which has the
- // deprecated Encode signature. Currently only used in one test and should be
- // removed once that backwards compatibility is.
- class CopyEncoding {
- public:
- CopyEncoding(AudioEncoder::EncodedInfo info,
- rtc::ArrayView<const uint8_t> payload);
-
- CopyEncoding(rtc::ArrayView<const uint8_t> payload);
-
- AudioEncoder::EncodedInfo operator()(uint32_t timestamp,
- rtc::ArrayView<const int16_t> audio,
- size_t max_bytes_encoded,
- uint8_t* encoded);
private:
AudioEncoder::EncodedInfo info_;
rtc::ArrayView<const uint8_t> payload_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h
index af32a84512e..be48ca988ef 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_AUDIO_DECODER_OPUS_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_AUDIO_DECODER_OPUS_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
#include "webrtc/modules/audio_coding/codecs/opus/opus_interface.h"
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
index a599e291d47..a2497c7862a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.cc
@@ -10,6 +10,8 @@
#include "webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h"
+#include <algorithm>
+
#include "webrtc/base/checks.h"
#include "webrtc/base/safe_conversions.h"
#include "webrtc/common_types.h"
@@ -100,16 +102,6 @@ AudioEncoderOpus::~AudioEncoderOpus() {
RTC_CHECK_EQ(0, WebRtcOpus_EncoderFree(inst_));
}
-size_t AudioEncoderOpus::MaxEncodedBytes() const {
- // Calculate the number of bytes we expect the encoder to produce,
- // then multiply by two to give a wide margin for error.
- const size_t bytes_per_millisecond =
- static_cast<size_t>(config_.bitrate_bps / (1000 * 8) + 1);
- const size_t approx_encoded_bytes =
- Num10msFramesPerPacket() * 10 * bytes_per_millisecond;
- return 2 * approx_encoded_bytes;
-}
-
int AudioEncoderOpus::SampleRateHz() const {
return kSampleRateHz;
}
@@ -198,7 +190,7 @@ AudioEncoder::EncodedInfo AudioEncoderOpus::EncodeImpl(
RTC_CHECK_EQ(input_buffer_.size(),
Num10msFramesPerPacket() * SamplesPer10msFrame());
- const size_t max_encoded_bytes = MaxEncodedBytes();
+ const size_t max_encoded_bytes = SufficientOutputBufferSize();
EncodedInfo info;
info.encoded_bytes =
encoded->AppendData(
@@ -220,6 +212,7 @@ AudioEncoder::EncodedInfo AudioEncoderOpus::EncodeImpl(
info.payload_type = config_.payload_type;
info.send_even_if_empty = true; // Allows Opus to send empty packets.
info.speech = (info.encoded_bytes > 0);
+ info.encoder_type = CodecType::kOpus;
return info;
}
@@ -231,6 +224,16 @@ size_t AudioEncoderOpus::SamplesPer10msFrame() const {
return rtc::CheckedDivExact(kSampleRateHz, 100) * config_.num_channels;
}
+size_t AudioEncoderOpus::SufficientOutputBufferSize() const {
+ // Calculate the number of bytes we expect the encoder to produce,
+ // then multiply by two to give a wide margin for error.
+ const size_t bytes_per_millisecond =
+ static_cast<size_t>(config_.bitrate_bps / (1000 * 8) + 1);
+ const size_t approx_encoded_bytes =
+ Num10msFramesPerPacket() * 10 * bytes_per_millisecond;
+ return 2 * approx_encoded_bytes;
+}
+
// If the given config is OK, recreate the Opus encoder instance with those
// settings, save the config, and return true. Otherwise, do nothing and return
// false.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h
index 3f11af1f9e0..8900659f48e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h
@@ -54,7 +54,6 @@ class AudioEncoderOpus final : public AudioEncoder {
explicit AudioEncoderOpus(const CodecInst& codec_inst);
~AudioEncoderOpus() override;
- size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
size_t NumChannels() const override;
size_t Num10MsFramesInNextPacket() const override;
@@ -79,7 +78,7 @@ class AudioEncoderOpus final : public AudioEncoder {
ApplicationMode application() const { return config_.application; }
bool dtx_enabled() const { return config_.dtx_enabled; }
-protected:
+ protected:
EncodedInfo EncodeImpl(uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded) override;
@@ -87,6 +86,7 @@ protected:
private:
size_t Num10msFramesPerPacket() const;
size_t SamplesPer10msFrame() const;
+ size_t SufficientOutputBufferSize() const;
bool RecreateEncoderInstance(const Config& config);
Config config_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc
index 4d1aa42c89f..7165d29c8b4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc
@@ -23,10 +23,10 @@ class OpusSpeedTest : public AudioCodecSpeedTest {
OpusSpeedTest();
void SetUp() override;
void TearDown() override;
- virtual float EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
- size_t max_bytes, size_t* encoded_bytes);
- virtual float DecodeABlock(const uint8_t* bit_stream, size_t encoded_bytes,
- int16_t* out_data);
+ float EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
+ size_t max_bytes, size_t* encoded_bytes) override;
+ float DecodeABlock(const uint8_t* bit_stream, size_t encoded_bytes,
+ int16_t* out_data) override;
WebRtcOpusEncInst* opus_encoder_;
WebRtcOpusDecInst* opus_decoder_;
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc
index f4d40223024..cafd3e851bd 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc
@@ -26,6 +26,10 @@ size_t AudioEncoderPcm16B::BytesPerSample() const {
return 2;
}
+AudioEncoder::CodecType AudioEncoderPcm16B::GetCodecType() const {
+ return CodecType::kOther;
+}
+
namespace {
AudioEncoderPcm16B::Config CreateConfig(const CodecInst& codec_inst) {
AudioEncoderPcm16B::Config config;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h
index 34a780b49de..bdc27a67e30 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_AUDIO_ENCODER_PCM16B_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_AUDIO_ENCODER_PCM16B_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h"
namespace webrtc {
@@ -38,6 +39,8 @@ class AudioEncoderPcm16B final : public AudioEncoderPcm {
size_t BytesPerSample() const override;
+ AudioEncoder::CodecType GetCodecType() const override;
+
private:
RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcm16B);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
index 4275f54103a..37fa55a4da1 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc
@@ -19,12 +19,7 @@
namespace webrtc {
AudioEncoderCopyRed::Config::Config() = default;
-
-// TODO(kwiberg): =default this when Visual Studio learns to handle it.
-AudioEncoderCopyRed::Config::Config(Config&& c)
- : payload_type(c.payload_type),
- speech_encoder(std::move(c.speech_encoder)) {}
-
+AudioEncoderCopyRed::Config::Config(Config&&) = default;
AudioEncoderCopyRed::Config::~Config() = default;
AudioEncoderCopyRed::AudioEncoderCopyRed(Config&& config)
@@ -35,10 +30,6 @@ AudioEncoderCopyRed::AudioEncoderCopyRed(Config&& config)
AudioEncoderCopyRed::~AudioEncoderCopyRed() = default;
-size_t AudioEncoderCopyRed::MaxEncodedBytes() const {
- return 2 * speech_encoder_->MaxEncodedBytes();
-}
-
int AudioEncoderCopyRed::SampleRateHz() const {
return speech_encoder_->SampleRateHz();
}
@@ -132,4 +123,9 @@ void AudioEncoderCopyRed::SetTargetBitrate(int bits_per_second) {
speech_encoder_->SetTargetBitrate(bits_per_second);
}
+rtc::ArrayView<std::unique_ptr<AudioEncoder>>
+AudioEncoderCopyRed::ReclaimContainedEncoders() {
+ return rtc::ArrayView<std::unique_ptr<AudioEncoder>>(&speech_encoder_, 1);
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
index a67ae486bb2..a08118364cc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h
@@ -15,6 +15,7 @@
#include <vector>
#include "webrtc/base/buffer.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_encoder.h"
namespace webrtc {
@@ -37,7 +38,6 @@ class AudioEncoderCopyRed final : public AudioEncoder {
~AudioEncoderCopyRed() override;
- size_t MaxEncodedBytes() const override;
int SampleRateHz() const override;
size_t NumChannels() const override;
int RtpTimestampRateHz() const override;
@@ -51,8 +51,10 @@ class AudioEncoderCopyRed final : public AudioEncoder {
void SetMaxPlaybackRate(int frequency_hz) override;
void SetProjectedPacketLossRate(double fraction) override;
void SetTargetBitrate(int target_bps) override;
+ rtc::ArrayView<std::unique_ptr<AudioEncoder>> ReclaimContainedEncoders()
+ override;
-protected:
+ protected:
EncodedInfo EncodeImpl(uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded) override;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
index c73cb9f2096..22b2ceb5f79 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc
@@ -26,7 +26,6 @@ using ::testing::MockFunction;
namespace webrtc {
namespace {
-static const size_t kMockMaxEncodedBytes = 1000;
static const size_t kMaxNumSamples = 48 * 10 * 2; // 10 ms @ 48 kHz stereo.
}
@@ -46,8 +45,6 @@ class AudioEncoderCopyRedTest : public ::testing::Test {
EXPECT_CALL(*mock_encoder_, NumChannels()).WillRepeatedly(Return(1U));
EXPECT_CALL(*mock_encoder_, SampleRateHz())
.WillRepeatedly(Return(sample_rate_hz_));
- EXPECT_CALL(*mock_encoder_, MaxEncodedBytes())
- .WillRepeatedly(Return(kMockMaxEncodedBytes));
}
void TearDown() override {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/include/audio_coding_module.h b/chromium/third_party/webrtc/modules/audio_coding/include/audio_coding_module.h
index 381e35e639b..daf9ac8ae9a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/include/audio_coding_module.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/include/audio_coding_module.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_INCLUDE_AUDIO_CODING_MODULE_H_
#define WEBRTC_MODULES_AUDIO_CODING_INCLUDE_AUDIO_CODING_MODULE_H_
+#include <memory>
#include <string>
#include <vector>
@@ -686,13 +687,24 @@ class AudioCodingModule {
// and other relevant parameters, c.f.
// module_common_types.h for the definition of
// AudioFrame.
+ // -muted : if true, the sample data in audio_frame is not
+ // populated, and must be interpreted as all zero.
//
// Return value:
// -1 if the function fails,
// 0 if the function succeeds.
//
virtual int32_t PlayoutData10Ms(int32_t desired_freq_hz,
- AudioFrame* audio_frame) = 0;
+ AudioFrame* audio_frame,
+ bool* muted) = 0;
+
+ /////////////////////////////////////////////////////////////////////////////
+ // Same as above, but without the muted parameter. This methods should not be
+ // used if enable_fast_accelerate was set to true in NetEq::Config.
+ // TODO(henrik.lundin) Remove this method when downstream dependencies are
+ // ready.
+ virtual int32_t PlayoutData10Ms(int32_t desired_freq_hz,
+ AudioFrame* audio_frame) = 0;
///////////////////////////////////////////////////////////////////////////
// Codec specific
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc
index d800cc7dbe9..762c3859837 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc
@@ -13,7 +13,6 @@
#include <assert.h>
#include "webrtc/base/checks.h"
-#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h"
#ifdef WEBRTC_CODEC_G722
#include "webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h"
@@ -36,43 +35,6 @@
namespace webrtc {
-AudioDecoderCng::AudioDecoderCng() {
- RTC_CHECK_EQ(0, WebRtcCng_CreateDec(&dec_state_));
- WebRtcCng_InitDec(dec_state_);
-}
-
-AudioDecoderCng::~AudioDecoderCng() {
- WebRtcCng_FreeDec(dec_state_);
-}
-
-void AudioDecoderCng::Reset() {
- WebRtcCng_InitDec(dec_state_);
-}
-
-int AudioDecoderCng::IncomingPacket(const uint8_t* payload,
- size_t payload_len,
- uint16_t rtp_sequence_number,
- uint32_t rtp_timestamp,
- uint32_t arrival_timestamp) {
- return -1;
-}
-
-CNG_dec_inst* AudioDecoderCng::CngDecoderInstance() {
- return dec_state_;
-}
-
-size_t AudioDecoderCng::Channels() const {
- return 1;
-}
-
-int AudioDecoderCng::DecodeInternal(const uint8_t* encoded,
- size_t encoded_len,
- int sample_rate_hz,
- int16_t* decoded,
- SpeechType* speech_type) {
- return -1;
-}
-
bool CodecSupported(NetEqDecoder codec_type) {
switch (codec_type) {
case NetEqDecoder::kDecoderPCMu:
@@ -175,67 +137,4 @@ int CodecSampleRateHz(NetEqDecoder codec_type) {
}
}
-AudioDecoder* CreateAudioDecoder(NetEqDecoder codec_type) {
- if (!CodecSupported(codec_type)) {
- return NULL;
- }
- switch (codec_type) {
- case NetEqDecoder::kDecoderPCMu:
- return new AudioDecoderPcmU(1);
- case NetEqDecoder::kDecoderPCMa:
- return new AudioDecoderPcmA(1);
- case NetEqDecoder::kDecoderPCMu_2ch:
- return new AudioDecoderPcmU(2);
- case NetEqDecoder::kDecoderPCMa_2ch:
- return new AudioDecoderPcmA(2);
-#ifdef WEBRTC_CODEC_ILBC
- case NetEqDecoder::kDecoderILBC:
- return new AudioDecoderIlbc;
-#endif
-#if defined(WEBRTC_CODEC_ISACFX)
- case NetEqDecoder::kDecoderISAC:
- return new AudioDecoderIsacFix();
-#elif defined(WEBRTC_CODEC_ISAC)
- case NetEqDecoder::kDecoderISAC:
- case NetEqDecoder::kDecoderISACswb:
- return new AudioDecoderIsac();
-#endif
- case NetEqDecoder::kDecoderPCM16B:
- case NetEqDecoder::kDecoderPCM16Bwb:
- case NetEqDecoder::kDecoderPCM16Bswb32kHz:
- case NetEqDecoder::kDecoderPCM16Bswb48kHz:
- return new AudioDecoderPcm16B(1);
- case NetEqDecoder::kDecoderPCM16B_2ch:
- case NetEqDecoder::kDecoderPCM16Bwb_2ch:
- case NetEqDecoder::kDecoderPCM16Bswb32kHz_2ch:
- case NetEqDecoder::kDecoderPCM16Bswb48kHz_2ch:
- return new AudioDecoderPcm16B(2);
- case NetEqDecoder::kDecoderPCM16B_5ch:
- return new AudioDecoderPcm16B(5);
-#ifdef WEBRTC_CODEC_G722
- case NetEqDecoder::kDecoderG722:
- return new AudioDecoderG722;
- case NetEqDecoder::kDecoderG722_2ch:
- return new AudioDecoderG722Stereo;
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- case NetEqDecoder::kDecoderOpus:
- return new AudioDecoderOpus(1);
- case NetEqDecoder::kDecoderOpus_2ch:
- return new AudioDecoderOpus(2);
-#endif
- case NetEqDecoder::kDecoderCNGnb:
- case NetEqDecoder::kDecoderCNGwb:
- case NetEqDecoder::kDecoderCNGswb32kHz:
- case NetEqDecoder::kDecoderCNGswb48kHz:
- return new AudioDecoderCng;
- case NetEqDecoder::kDecoderRED:
- case NetEqDecoder::kDecoderAVT:
- case NetEqDecoder::kDecoderArbitrary:
- default: {
- return NULL;
- }
- }
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h
index bc8bdd9626d..579ccb36f7a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h
@@ -16,7 +16,6 @@
#include "webrtc/engine_configurations.h"
#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#ifdef WEBRTC_CODEC_G722
#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"
#endif
@@ -25,38 +24,6 @@
namespace webrtc {
-// AudioDecoderCng is a special type of AudioDecoder. It inherits from
-// AudioDecoder just to fit in the DecoderDatabase. None of the class methods
-// should be used, except constructor, destructor, and accessors.
-// TODO(hlundin): Consider the possibility to create a super-class to
-// AudioDecoder that is stored in DecoderDatabase. Then AudioDecoder and a
-// specific CngDecoder class could both inherit from that class.
-class AudioDecoderCng : public AudioDecoder {
- public:
- explicit AudioDecoderCng();
- ~AudioDecoderCng() override;
- void Reset() override;
- int IncomingPacket(const uint8_t* payload,
- size_t payload_len,
- uint16_t rtp_sequence_number,
- uint32_t rtp_timestamp,
- uint32_t arrival_timestamp) override;
-
- CNG_dec_inst* CngDecoderInstance() override;
- size_t Channels() const override;
-
- protected:
- int DecodeInternal(const uint8_t* encoded,
- size_t encoded_len,
- int sample_rate_hz,
- int16_t* decoded,
- SpeechType* speech_type) override;
-
- private:
- CNG_dec_inst* dec_state_;
- RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderCng);
-};
-
using NetEqDecoder = acm2::RentACodec::NetEqDecoder;
// Returns true if |codec_type| is supported.
@@ -65,10 +32,5 @@ bool CodecSupported(NetEqDecoder codec_type);
// Returns the sample rate for |codec_type|.
int CodecSampleRateHz(NetEqDecoder codec_type);
-// Creates an AudioDecoder object of type |codec_type|. Returns NULL for for
-// unsupported codecs, and when creating an AudioDecoder is not applicable
-// (e.g., for RED and DTMF/AVT types).
-AudioDecoder* CreateAudioDecoder(NetEqDecoder codec_type);
-
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_DECODER_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc
index bd38c43903a..c80909d7b58 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc
@@ -106,7 +106,7 @@ void AudioMultiVector::PushBackFromIndex(const AudioMultiVector& append_this,
assert(num_channels_ == append_this.num_channels_);
if (num_channels_ == append_this.num_channels_) {
for (size_t i = 0; i < num_channels_; ++i) {
- channels_[i]->PushBack(&append_this[i][index], length);
+ channels_[i]->PushBack(append_this[i], length, index);
}
}
}
@@ -133,14 +133,14 @@ size_t AudioMultiVector::ReadInterleavedFromIndex(size_t start_index,
int16_t* destination) const {
RTC_DCHECK(destination);
size_t index = 0; // Number of elements written to |destination| so far.
- assert(start_index <= Size());
+ RTC_DCHECK_LE(start_index, Size());
start_index = std::min(start_index, Size());
if (length + start_index > Size()) {
length = Size() - start_index;
}
if (num_channels_ == 1) {
// Special case to avoid the nested for loop below.
- memcpy(destination, &(*this)[0][start_index], length * sizeof(int16_t));
+ (*this)[0].CopyTo(length, start_index, destination);
return length;
}
for (size_t i = 0; i < length; ++i) {
@@ -167,7 +167,7 @@ void AudioMultiVector::OverwriteAt(const AudioMultiVector& insert_this,
length = std::min(length, insert_this.Size());
if (num_channels_ == insert_this.num_channels_) {
for (size_t i = 0; i < num_channels_; ++i) {
- channels_[i]->OverwriteAt(&insert_this[i][0], length, position);
+ channels_[i]->OverwriteAt(insert_this[i], length, position);
}
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc
index 013e1d89ad9..ea737a55424 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc
@@ -15,124 +15,236 @@
#include <algorithm>
#include <memory>
+#include "webrtc/base/checks.h"
#include "webrtc/typedefs.h"
namespace webrtc {
AudioVector::AudioVector()
- : array_(new int16_t[kDefaultInitialSize]),
- first_free_ix_(0),
- capacity_(kDefaultInitialSize) {
+ : AudioVector(kDefaultInitialSize) {
+ Clear();
}
AudioVector::AudioVector(size_t initial_size)
- : array_(new int16_t[initial_size]),
- first_free_ix_(initial_size),
- capacity_(initial_size) {
- memset(array_.get(), 0, initial_size * sizeof(int16_t));
+ : array_(new int16_t[initial_size + 1]),
+ capacity_(initial_size + 1),
+ begin_index_(0),
+ end_index_(capacity_ - 1) {
+ memset(array_.get(), 0, capacity_ * sizeof(int16_t));
}
AudioVector::~AudioVector() = default;
void AudioVector::Clear() {
- first_free_ix_ = 0;
+ end_index_ = begin_index_ = 0;
}
void AudioVector::CopyTo(AudioVector* copy_to) const {
- if (copy_to) {
- copy_to->Reserve(Size());
- assert(copy_to->capacity_ >= Size());
- memcpy(copy_to->array_.get(), array_.get(), Size() * sizeof(int16_t));
- copy_to->first_free_ix_ = first_free_ix_;
+ RTC_DCHECK(copy_to);
+ copy_to->Reserve(Size());
+ CopyTo(Size(), 0, copy_to->array_.get());
+ copy_to->begin_index_ = 0;
+ copy_to->end_index_ = Size();
+}
+
+void AudioVector::CopyTo(
+ size_t length, size_t position, int16_t* copy_to) const {
+ if (length == 0)
+ return;
+ length = std::min(length, Size() - position);
+ const size_t copy_index = (begin_index_ + position) % capacity_;
+ const size_t first_chunk_length =
+ std::min(length, capacity_ - copy_index);
+ memcpy(copy_to, &array_[copy_index],
+ first_chunk_length * sizeof(int16_t));
+ const size_t remaining_length = length - first_chunk_length;
+ if (remaining_length > 0) {
+ memcpy(&copy_to[first_chunk_length], array_.get(),
+ remaining_length * sizeof(int16_t));
}
}
void AudioVector::PushFront(const AudioVector& prepend_this) {
- size_t insert_length = prepend_this.Size();
- Reserve(Size() + insert_length);
- memmove(&array_[insert_length], &array_[0], Size() * sizeof(int16_t));
- memcpy(&array_[0], &prepend_this.array_[0], insert_length * sizeof(int16_t));
- first_free_ix_ += insert_length;
+ const size_t length = prepend_this.Size();
+ if (length == 0)
+ return;
+
+ // Although the subsequent calling to PushFront does Reserve in it, it is
+ // always more efficient to do a big Reserve first.
+ Reserve(Size() + length);
+
+ const size_t first_chunk_length =
+ std::min(length, prepend_this.capacity_ - prepend_this.begin_index_);
+ const size_t remaining_length = length - first_chunk_length;
+ if (remaining_length > 0)
+ PushFront(prepend_this.array_.get(), remaining_length);
+ PushFront(&prepend_this.array_[prepend_this.begin_index_],
+ first_chunk_length);
}
void AudioVector::PushFront(const int16_t* prepend_this, size_t length) {
- // Same operation as InsertAt beginning.
- InsertAt(prepend_this, length, 0);
+ if (length == 0)
+ return;
+ Reserve(Size() + length);
+ const size_t first_chunk_length = std::min(length, begin_index_);
+ memcpy(&array_[begin_index_ - first_chunk_length],
+ &prepend_this[length - first_chunk_length],
+ first_chunk_length * sizeof(int16_t));
+ const size_t remaining_length = length - first_chunk_length;
+ if (remaining_length > 0) {
+ memcpy(&array_[capacity_ - remaining_length], prepend_this,
+ remaining_length * sizeof(int16_t));
+ }
+ begin_index_ = (begin_index_ + capacity_ - length) % capacity_;
}
void AudioVector::PushBack(const AudioVector& append_this) {
- PushBack(append_this.array_.get(), append_this.Size());
+ PushBack(append_this, append_this.Size(), 0);
+}
+
+void AudioVector::PushBack(
+ const AudioVector& append_this, size_t length, size_t position) {
+ RTC_DCHECK_LE(position, append_this.Size());
+ RTC_DCHECK_LE(length, append_this.Size() - position);
+
+ if (length == 0)
+ return;
+
+ // Although the subsequent calling to PushBack does Reserve in it, it is
+ // always more efficient to do a big Reserve first.
+ Reserve(Size() + length);
+
+ const size_t start_index =
+ (append_this.begin_index_ + position) % append_this.capacity_;
+ const size_t first_chunk_length = std::min(
+ length, append_this.capacity_ - start_index);
+ PushBack(&append_this.array_[start_index], first_chunk_length);
+
+ const size_t remaining_length = length - first_chunk_length;
+ if (remaining_length > 0)
+ PushBack(append_this.array_.get(), remaining_length);
}
void AudioVector::PushBack(const int16_t* append_this, size_t length) {
+ if (length == 0)
+ return;
Reserve(Size() + length);
- memcpy(&array_[first_free_ix_], append_this, length * sizeof(int16_t));
- first_free_ix_ += length;
+ const size_t first_chunk_length = std::min(length, capacity_ - end_index_);
+ memcpy(&array_[end_index_], append_this,
+ first_chunk_length * sizeof(int16_t));
+ const size_t remaining_length = length - first_chunk_length;
+ if (remaining_length > 0) {
+ memcpy(array_.get(), &append_this[first_chunk_length],
+ remaining_length * sizeof(int16_t));
+ }
+ end_index_ = (end_index_ + length) % capacity_;
}
void AudioVector::PopFront(size_t length) {
- if (length >= Size()) {
- // Remove all elements.
- Clear();
- } else {
- size_t remaining_samples = Size() - length;
- memmove(&array_[0], &array_[length], remaining_samples * sizeof(int16_t));
- first_free_ix_ -= length;
- }
+ if (length == 0)
+ return;
+ length = std::min(length, Size());
+ begin_index_ = (begin_index_ + length) % capacity_;
}
void AudioVector::PopBack(size_t length) {
+ if (length == 0)
+ return;
// Never remove more than what is in the array.
length = std::min(length, Size());
- first_free_ix_ -= length;
+ end_index_ = (end_index_ + capacity_ - length) % capacity_;
}
void AudioVector::Extend(size_t extra_length) {
- Reserve(Size() + extra_length);
- memset(&array_[first_free_ix_], 0, extra_length * sizeof(int16_t));
- first_free_ix_ += extra_length;
+ if (extra_length == 0)
+ return;
+ InsertZerosByPushBack(extra_length, Size());
}
void AudioVector::InsertAt(const int16_t* insert_this,
size_t length,
size_t position) {
- Reserve(Size() + length);
- // Cap the position at the current vector length, to be sure the iterator
- // does not extend beyond the end of the vector.
+ if (length == 0)
+ return;
+ // Cap the insert position at the current array length.
position = std::min(Size(), position);
- int16_t* insert_position_ptr = &array_[position];
- size_t samples_to_move = Size() - position;
- memmove(insert_position_ptr + length, insert_position_ptr,
- samples_to_move * sizeof(int16_t));
- memcpy(insert_position_ptr, insert_this, length * sizeof(int16_t));
- first_free_ix_ += length;
+
+ // When inserting to a position closer to the beginning, it is more efficient
+ // to insert by pushing front than to insert by pushing back, since less data
+ // will be moved, vice versa.
+ if (position <= Size() - position) {
+ InsertByPushFront(insert_this, length, position);
+ } else {
+ InsertByPushBack(insert_this, length, position);
+ }
}
void AudioVector::InsertZerosAt(size_t length,
size_t position) {
- Reserve(Size() + length);
- // Cap the position at the current vector length, to be sure the iterator
- // does not extend beyond the end of the vector.
- position = std::min(capacity_, position);
- int16_t* insert_position_ptr = &array_[position];
- size_t samples_to_move = Size() - position;
- memmove(insert_position_ptr + length, insert_position_ptr,
- samples_to_move * sizeof(int16_t));
- memset(insert_position_ptr, 0, length * sizeof(int16_t));
- first_free_ix_ += length;
+ if (length == 0)
+ return;
+ // Cap the insert position at the current array length.
+ position = std::min(Size(), position);
+
+ // When inserting to a position closer to the beginning, it is more efficient
+ // to insert by pushing front than to insert by pushing back, since less data
+ // will be moved, vice versa.
+ if (position <= Size() - position) {
+ InsertZerosByPushFront(length, position);
+ } else {
+ InsertZerosByPushBack(length, position);
+ }
+}
+
+void AudioVector::OverwriteAt(const AudioVector& insert_this,
+ size_t length,
+ size_t position) {
+ RTC_DCHECK_LE(length, insert_this.Size());
+ if (length == 0)
+ return;
+
+ // Cap the insert position at the current array length.
+ position = std::min(Size(), position);
+
+ // Although the subsequent calling to OverwriteAt does Reserve in it, it is
+ // always more efficient to do a big Reserve first.
+ size_t new_size = std::max(Size(), position + length);
+ Reserve(new_size);
+
+ const size_t first_chunk_length =
+ std::min(length, insert_this.capacity_ - insert_this.begin_index_);
+ OverwriteAt(&insert_this.array_[insert_this.begin_index_], first_chunk_length,
+ position);
+ const size_t remaining_length = length - first_chunk_length;
+ if (remaining_length > 0) {
+ OverwriteAt(insert_this.array_.get(), remaining_length,
+ position + first_chunk_length);
+ }
}
void AudioVector::OverwriteAt(const int16_t* insert_this,
size_t length,
size_t position) {
+ if (length == 0)
+ return;
// Cap the insert position at the current array length.
position = std::min(Size(), position);
- Reserve(position + length);
- memcpy(&array_[position], insert_this, length * sizeof(int16_t));
- if (position + length > Size()) {
- // Array was expanded.
- first_free_ix_ += position + length - Size();
+
+ size_t new_size = std::max(Size(), position + length);
+ Reserve(new_size);
+
+ const size_t overwrite_index = (begin_index_ + position) % capacity_;
+ const size_t first_chunk_length =
+ std::min(length, capacity_ - overwrite_index);
+ memcpy(&array_[overwrite_index], insert_this,
+ first_chunk_length * sizeof(int16_t));
+ const size_t remaining_length = length - first_chunk_length;
+ if (remaining_length > 0) {
+ memcpy(array_.get(), &insert_this[first_chunk_length],
+ remaining_length * sizeof(int16_t));
}
+
+ end_index_ = (begin_index_ + new_size) % capacity_;
}
void AudioVector::CrossFade(const AudioVector& append_this,
@@ -142,7 +254,7 @@ void AudioVector::CrossFade(const AudioVector& append_this,
assert(fade_length <= append_this.Size());
fade_length = std::min(fade_length, Size());
fade_length = std::min(fade_length, append_this.Size());
- size_t position = Size() - fade_length;
+ size_t position = Size() - fade_length + begin_index_;
// Cross fade the overlapping regions.
// |alpha| is the mixing factor in Q14.
// TODO(hlundin): Consider skipping +1 in the denominator to produce a
@@ -151,41 +263,132 @@ void AudioVector::CrossFade(const AudioVector& append_this,
int alpha = 16384;
for (size_t i = 0; i < fade_length; ++i) {
alpha -= alpha_step;
- array_[position + i] = (alpha * array_[position + i] +
- (16384 - alpha) * append_this[i] + 8192) >> 14;
+ array_[(position + i) % capacity_] =
+ (alpha * array_[(position + i) % capacity_] +
+ (16384 - alpha) * append_this[i] + 8192) >> 14;
}
assert(alpha >= 0); // Verify that the slope was correct.
// Append what is left of |append_this|.
size_t samples_to_push_back = append_this.Size() - fade_length;
if (samples_to_push_back > 0)
- PushBack(&append_this[fade_length], samples_to_push_back);
+ PushBack(append_this, samples_to_push_back, fade_length);
}
// Returns the number of elements in this AudioVector.
size_t AudioVector::Size() const {
- return first_free_ix_;
+ return (end_index_ + capacity_ - begin_index_) % capacity_;
}
// Returns true if this AudioVector is empty.
bool AudioVector::Empty() const {
- return first_free_ix_ == 0;
+ return begin_index_ == end_index_;
}
const int16_t& AudioVector::operator[](size_t index) const {
- return array_[index];
+ return array_[(begin_index_ + index) % capacity_];
}
int16_t& AudioVector::operator[](size_t index) {
- return array_[index];
+ return array_[(begin_index_ + index) % capacity_];
}
void AudioVector::Reserve(size_t n) {
- if (capacity_ < n) {
- std::unique_ptr<int16_t[]> temp_array(new int16_t[n]);
- memcpy(temp_array.get(), array_.get(), Size() * sizeof(int16_t));
- array_.swap(temp_array);
- capacity_ = n;
+ if (capacity_ > n)
+ return;
+ const size_t length = Size();
+ // Reserve one more sample to remove the ambiguity between empty vector and
+ // full vector. Therefore |begin_index_| == |end_index_| indicates empty
+ // vector, and |begin_index_| == (|end_index_| + 1) % capacity indicates
+ // full vector.
+ std::unique_ptr<int16_t[]> temp_array(new int16_t[n + 1]);
+ CopyTo(length, 0, temp_array.get());
+ array_.swap(temp_array);
+ begin_index_ = 0;
+ end_index_ = length;
+ capacity_ = n + 1;
+}
+
+void AudioVector::InsertByPushBack(const int16_t* insert_this,
+ size_t length,
+ size_t position) {
+ const size_t move_chunk_length = Size() - position;
+ std::unique_ptr<int16_t[]> temp_array(nullptr);
+ if (move_chunk_length > 0) {
+ // TODO(minyue): see if it is possible to avoid copying to a buffer.
+ temp_array.reset(new int16_t[move_chunk_length]);
+ CopyTo(move_chunk_length, position, temp_array.get());
+ PopBack(move_chunk_length);
+ }
+
+ Reserve(Size() + length + move_chunk_length);
+ PushBack(insert_this, length);
+ if (move_chunk_length > 0)
+ PushBack(temp_array.get(), move_chunk_length);
+}
+
+void AudioVector::InsertByPushFront(const int16_t* insert_this,
+ size_t length,
+ size_t position) {
+ std::unique_ptr<int16_t[]> temp_array(nullptr);
+ if (position > 0) {
+ // TODO(minyue): see if it is possible to avoid copying to a buffer.
+ temp_array.reset(new int16_t[position]);
+ CopyTo(position, 0, temp_array.get());
+ PopFront(position);
+ }
+
+ Reserve(Size() + length + position);
+ PushFront(insert_this, length);
+ if (position > 0)
+ PushFront(temp_array.get(), position);
+}
+
+void AudioVector::InsertZerosByPushBack(size_t length,
+ size_t position) {
+ const size_t move_chunk_length = Size() - position;
+ std::unique_ptr<int16_t[]> temp_array(nullptr);
+ if (move_chunk_length > 0) {
+ temp_array.reset(new int16_t[move_chunk_length]);
+ CopyTo(move_chunk_length, position, temp_array.get());
+ PopBack(move_chunk_length);
}
+
+ Reserve(Size() + length + move_chunk_length);
+
+ const size_t first_zero_chunk_length =
+ std::min(length, capacity_ - end_index_);
+ memset(&array_[end_index_], 0, first_zero_chunk_length * sizeof(int16_t));
+ const size_t remaining_zero_length = length - first_zero_chunk_length;
+ if (remaining_zero_length > 0)
+ memset(array_.get(), 0, remaining_zero_length * sizeof(int16_t));
+ end_index_ = (end_index_ + length) % capacity_;
+
+ if (move_chunk_length > 0)
+ PushBack(temp_array.get(), move_chunk_length);
+}
+
+void AudioVector::InsertZerosByPushFront(size_t length,
+ size_t position) {
+ std::unique_ptr<int16_t[]> temp_array(nullptr);
+ if (position > 0) {
+ temp_array.reset(new int16_t[position]);
+ CopyTo(position, 0, temp_array.get());
+ PopFront(position);
+ }
+
+ Reserve(Size() + length + position);
+
+ const size_t first_zero_chunk_length = std::min(length, begin_index_);
+ memset(&array_[begin_index_ - first_zero_chunk_length], 0,
+ first_zero_chunk_length * sizeof(int16_t));
+ const size_t remaining_zero_length = length - first_zero_chunk_length;
+ if (remaining_zero_length > 0)
+ memset(&array_[capacity_ - remaining_zero_length], 0,
+ remaining_zero_length * sizeof(int16_t));
+ begin_index_ = (begin_index_ + capacity_ - length) % capacity_;
+
+ if (position > 0)
+ PushFront(temp_array.get(), position);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h
index 15297f9bc8c..756292aa783 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h
@@ -37,6 +37,9 @@ class AudioVector {
// |copy_to| will be an exact replica of this object.
virtual void CopyTo(AudioVector* copy_to) const;
+ // Copies |length| values from |position| in this vector to |copy_to|.
+ virtual void CopyTo(size_t length, size_t position, int16_t* copy_to) const;
+
// Prepends the contents of AudioVector |prepend_this| to this object. The
// length of this object is increased with the length of |prepend_this|.
virtual void PushFront(const AudioVector& prepend_this);
@@ -48,6 +51,12 @@ class AudioVector {
// Same as PushFront but will append to the end of this object.
virtual void PushBack(const AudioVector& append_this);
+ // Appends a segment of |append_this| to the end of this object. The segment
+ // starts from |position| and has |length| samples.
+ virtual void PushBack(const AudioVector& append_this,
+ size_t length,
+ size_t position);
+
// Same as PushFront but will append to the end of this object.
virtual void PushBack(const int16_t* append_this, size_t length);
@@ -71,6 +80,15 @@ class AudioVector {
// Like InsertAt, but inserts |length| zero elements at |position|.
virtual void InsertZerosAt(size_t length, size_t position);
+ // Overwrites |length| elements of this AudioVector starting from |position|
+ // with first values in |AudioVector|. The definition of |position|
+ // is the same as for InsertAt(). If |length| and |position| are selected
+ // such that the new data extends beyond the end of the current AudioVector,
+ // the vector is extended to accommodate the new data.
+ virtual void OverwriteAt(const AudioVector& insert_this,
+ size_t length,
+ size_t position);
+
// Overwrites |length| elements of this AudioVector with values taken from the
// array |insert_this|, starting at |position|. The definition of |position|
// is the same as for InsertAt(). If |length| and |position| are selected
@@ -100,11 +118,27 @@ class AudioVector {
void Reserve(size_t n);
+ void InsertByPushBack(const int16_t* insert_this, size_t length,
+ size_t position);
+
+ void InsertByPushFront(const int16_t* insert_this, size_t length,
+ size_t position);
+
+ void InsertZerosByPushBack(size_t length, size_t position);
+
+ void InsertZerosByPushFront(size_t length, size_t position);
+
std::unique_ptr<int16_t[]> array_;
- size_t first_free_ix_; // The first index after the last sample in array_.
- // Note that this index may point outside of array_.
+
size_t capacity_; // Allocated number of samples in the array.
+ // The index of the first sample in |array_|, except when
+ // |begin_index_ == end_index_|, which indicates an empty buffer.
+ size_t begin_index_;
+
+ // The index of the sample after the last sample in |array_|.
+ size_t end_index_;
+
RTC_DISALLOW_COPY_AND_ASSIGN(AudioVector);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc
index 08009863455..cee7e586695 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc
@@ -82,14 +82,6 @@ TEST_F(AudioVectorTest, PushBackAndCopy) {
EXPECT_TRUE(vec_copy.Empty());
}
-// Try to copy to a NULL pointer. Nothing should happen.
-TEST_F(AudioVectorTest, CopyToNull) {
- AudioVector vec;
- AudioVector* vec_copy = NULL;
- vec.PushBack(array_, array_length());
- vec.CopyTo(vec_copy);
-}
-
// Test the PushBack method with another AudioVector as input argument.
TEST_F(AudioVectorTest, PushBackVector) {
static const size_t kLength = 10;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc
index 7e7a6325e97..9cfd6cb40ed 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc
@@ -17,6 +17,7 @@
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/cross_correlation.h"
#include "webrtc/modules/audio_coding/neteq/post_decode_vad.h"
namespace webrtc {
@@ -58,10 +59,7 @@ void BackgroundNoise::Update(const AudioMultiVector& input,
ChannelParameters& parameters = channel_parameters_[channel_ix];
int16_t temp_signal_array[kVecLen + kMaxLpcOrder] = {0};
int16_t* temp_signal = &temp_signal_array[kMaxLpcOrder];
- memcpy(temp_signal,
- &input[channel_ix][input.Size() - kVecLen],
- sizeof(int16_t) * kVecLen);
-
+ input[channel_ix].CopyTo(kVecLen, input.Size() - kVecLen, temp_signal);
int32_t sample_energy = CalculateAutoCorrelation(temp_signal, kVecLen,
auto_correlation);
@@ -169,15 +167,10 @@ int16_t BackgroundNoise::ScaleShift(size_t channel) const {
int32_t BackgroundNoise::CalculateAutoCorrelation(
const int16_t* signal, size_t length, int32_t* auto_correlation) const {
- int16_t signal_max = WebRtcSpl_MaxAbsValueW16(signal, length);
- int correlation_scale = kLogVecLen -
- WebRtcSpl_NormW32(signal_max * signal_max);
- correlation_scale = std::max(0, correlation_scale);
-
static const int kCorrelationStep = -1;
- WebRtcSpl_CrossCorrelation(auto_correlation, signal, signal, length,
- kMaxLpcOrder + 1, correlation_scale,
- kCorrelationStep);
+ const int correlation_scale =
+ CrossCorrelationWithAutoShift(signal, signal, length, kMaxLpcOrder + 1,
+ kCorrelationStep, auto_correlation);
// Number of shifts to normalize energy to energy/sample.
int energy_sample_shift = kLogVecLen - correlation_scale;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc
index a5b08469bea..90b02daf712 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc
@@ -14,7 +14,6 @@
#include "webrtc/base/logging.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
@@ -23,31 +22,23 @@ namespace webrtc {
void ComfortNoise::Reset() {
first_call_ = true;
- internal_error_code_ = 0;
}
int ComfortNoise::UpdateParameters(Packet* packet) {
assert(packet); // Existence is verified by caller.
// Get comfort noise decoder.
- AudioDecoder* cng_decoder = decoder_database_->GetDecoder(
- packet->header.payloadType);
- if (!cng_decoder) {
+ if (decoder_database_->SetActiveCngDecoder(packet->header.payloadType)
+ != kOK) {
delete [] packet->payload;
delete packet;
return kUnknownPayloadType;
}
- decoder_database_->SetActiveCngDecoder(packet->header.payloadType);
- CNG_dec_inst* cng_inst = cng_decoder->CngDecoderInstance();
- int16_t ret = WebRtcCng_UpdateSid(cng_inst,
- packet->payload,
- packet->payload_length);
+ ComfortNoiseDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
+ RTC_DCHECK(cng_decoder);
+ cng_decoder->UpdateSid(rtc::ArrayView<const uint8_t>(
+ packet->payload, packet->payload_length));
delete [] packet->payload;
delete packet;
- if (ret < 0) {
- internal_error_code_ = WebRtcCng_GetErrorCodeDec(cng_inst);
- LOG(LS_ERROR) << "WebRtcCng_UpdateSid produced " << internal_error_code_;
- return kInternalError;
- }
return kOK;
}
@@ -63,30 +54,31 @@ int ComfortNoise::Generate(size_t requested_length,
}
size_t number_of_samples = requested_length;
- int16_t new_period = 0;
+ bool new_period = false;
if (first_call_) {
// Generate noise and overlap slightly with old data.
number_of_samples = requested_length + overlap_length_;
- new_period = 1;
+ new_period = true;
}
output->AssertSize(number_of_samples);
// Get the decoder from the database.
- AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
+ ComfortNoiseDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
if (!cng_decoder) {
LOG(LS_ERROR) << "Unknwown payload type";
return kUnknownPayloadType;
}
- CNG_dec_inst* cng_inst = cng_decoder->CngDecoderInstance();
- // The expression &(*output)[0][0] is a pointer to the first element in
- // the first channel.
- if (WebRtcCng_Generate(cng_inst, &(*output)[0][0], number_of_samples,
- new_period) < 0) {
+
+ std::unique_ptr<int16_t[]> temp(new int16_t[number_of_samples]);
+ if (!cng_decoder->Generate(
+ rtc::ArrayView<int16_t>(temp.get(), number_of_samples),
+ new_period)) {
// Error returned.
output->Zeros(requested_length);
- internal_error_code_ = WebRtcCng_GetErrorCodeDec(cng_inst);
- LOG(LS_ERROR) << "WebRtcCng_Generate produced " << internal_error_code_;
+ LOG(LS_ERROR) <<
+ "ComfortNoiseDecoder::Genererate failed to generate comfort noise";
return kInternalError;
}
+ (*output)[0].OverwriteAt(temp.get(), number_of_samples, 0);
if (first_call_) {
// Set tapering window parameters. Values are in Q15.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h
index 1fc22586637..f877bf63efb 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h
@@ -38,8 +38,7 @@ class ComfortNoise {
first_call_(true),
overlap_length_(5 * fs_hz_ / 8000),
decoder_database_(decoder_database),
- sync_buffer_(sync_buffer),
- internal_error_code_(0) {
+ sync_buffer_(sync_buffer) {
}
// Resets the state. Should be called before each new comfort noise period.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.cc
new file mode 100644
index 00000000000..ad89ab8a139
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.cc
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/cross_correlation.h"
+
+#include <cstdlib>
+#include <limits>
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+
+namespace webrtc {
+
+// This function decides the overflow-protecting scaling and calls
+// WebRtcSpl_CrossCorrelation.
+int CrossCorrelationWithAutoShift(const int16_t* sequence_1,
+ const int16_t* sequence_2,
+ size_t sequence_1_length,
+ size_t cross_correlation_length,
+ int cross_correlation_step,
+ int32_t* cross_correlation) {
+ // Find the maximum absolute value of sequence_1 and 2.
+ const int16_t max_1 = WebRtcSpl_MaxAbsValueW16(sequence_1, sequence_1_length);
+ const int sequence_2_shift =
+ cross_correlation_step * (static_cast<int>(cross_correlation_length) - 1);
+ const int16_t* sequence_2_start =
+ sequence_2_shift >= 0 ? sequence_2 : sequence_2 + sequence_2_shift;
+ const size_t sequence_2_length =
+ sequence_1_length + std::abs(sequence_2_shift);
+ const int16_t max_2 =
+ WebRtcSpl_MaxAbsValueW16(sequence_2_start, sequence_2_length);
+
+ // In order to avoid overflow when computing the sum we should scale the
+ // samples so that (in_vector_length * max_1 * max_2) will not overflow.
+ // Expected scaling fulfills
+ // 1) sufficient:
+ // sequence_1_length * (max_1 * max_2 >> scaling) <= 0x7fffffff;
+ // 2) necessary:
+ // if (scaling > 0)
+ // sequence_1_length * (max_1 * max_2 >> (scaling - 1)) > 0x7fffffff;
+ // The following calculation fulfills 1) and almost fulfills 2).
+ // There are some corner cases that 2) is not satisfied, e.g.,
+ // max_1 = 17, max_2 = 30848, sequence_1_length = 4095, in such case,
+ // optimal scaling is 0, while the following calculation results in 1.
+ const int32_t factor = (max_1 * max_2) / (std::numeric_limits<int32_t>::max()
+ / static_cast<int32_t>(sequence_1_length));
+ const int scaling = factor == 0 ? 0 : 31 - WebRtcSpl_NormW32(factor);
+
+ WebRtcSpl_CrossCorrelation(cross_correlation, sequence_1, sequence_2,
+ sequence_1_length, cross_correlation_length,
+ scaling, cross_correlation_step);
+
+ return scaling;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.h
new file mode 100644
index 00000000000..db14141027c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/cross_correlation.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_CROSS_CORRELATION_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_CROSS_CORRELATION_H_
+
+#include "webrtc/common_types.h"
+
+namespace webrtc {
+
+// The function calculates the cross-correlation between two sequences
+// |sequence_1| and |sequence_2|. |sequence_1| is taken as reference, with
+// |sequence_1_length| as its length. |sequence_2| slides for the calculation of
+// cross-correlation. The result will be saved in |cross_correlation|.
+// |cross_correlation_length| correlation points are calculated.
+// The corresponding lag starts from 0, and increases with a step of
+// |cross_correlation_step|. The result is without normalization. To avoid
+// overflow, the result will be right shifted. The amount of shifts will be
+// returned.
+//
+// Input:
+// - sequence_1 : First sequence (reference).
+// - sequence_2 : Second sequence (sliding during calculation).
+// - sequence_1_length : Length of |sequence_1|.
+// - cross_correlation_length : Number of cross-correlations to calculate.
+// - cross_correlation_step : Step in the lag for the cross-correlation.
+//
+// Output:
+// - cross_correlation : The cross-correlation in Q(-right_shifts)
+//
+// Return:
+// Number of right shifts in cross_correlation.
+
+int CrossCorrelationWithAutoShift(const int16_t* sequence_1,
+ const int16_t* sequence_2,
+ size_t sequence_1_length,
+ size_t cross_correlation_length,
+ int cross_correlation_step,
+ int32_t* cross_correlation);
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_CROSS_CORRELATION_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc
index 39bb4662c71..545d1d62455 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc
@@ -29,26 +29,19 @@ DecisionLogic* DecisionLogic::Create(int fs_hz,
DecoderDatabase* decoder_database,
const PacketBuffer& packet_buffer,
DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter) {
+ BufferLevelFilter* buffer_level_filter,
+ const TickTimer* tick_timer) {
switch (playout_mode) {
case kPlayoutOn:
case kPlayoutStreaming:
- return new DecisionLogicNormal(fs_hz,
- output_size_samples,
- playout_mode,
- decoder_database,
- packet_buffer,
- delay_manager,
- buffer_level_filter);
+ return new DecisionLogicNormal(
+ fs_hz, output_size_samples, playout_mode, decoder_database,
+ packet_buffer, delay_manager, buffer_level_filter, tick_timer);
case kPlayoutFax:
case kPlayoutOff:
- return new DecisionLogicFax(fs_hz,
- output_size_samples,
- playout_mode,
- decoder_database,
- packet_buffer,
- delay_manager,
- buffer_level_filter);
+ return new DecisionLogicFax(
+ fs_hz, output_size_samples, playout_mode, decoder_database,
+ packet_buffer, delay_manager, buffer_level_filter, tick_timer);
}
// This line cannot be reached, but must be here to avoid compiler errors.
assert(false);
@@ -61,30 +54,34 @@ DecisionLogic::DecisionLogic(int fs_hz,
DecoderDatabase* decoder_database,
const PacketBuffer& packet_buffer,
DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter)
+ BufferLevelFilter* buffer_level_filter,
+ const TickTimer* tick_timer)
: decoder_database_(decoder_database),
packet_buffer_(packet_buffer),
delay_manager_(delay_manager),
buffer_level_filter_(buffer_level_filter),
+ tick_timer_(tick_timer),
cng_state_(kCngOff),
- generated_noise_samples_(0),
packet_length_samples_(0),
sample_memory_(0),
prev_time_scale_(false),
- timescale_hold_off_(kMinTimescaleInterval),
+ timescale_countdown_(
+ tick_timer_->GetNewCountdown(kMinTimescaleInterval + 1)),
num_consecutive_expands_(0),
playout_mode_(playout_mode) {
delay_manager_->set_streaming_mode(playout_mode_ == kPlayoutStreaming);
SetSampleRate(fs_hz, output_size_samples);
}
+DecisionLogic::~DecisionLogic() = default;
+
void DecisionLogic::Reset() {
cng_state_ = kCngOff;
- generated_noise_samples_ = 0;
+ noise_fast_forward_ = 0;
packet_length_samples_ = 0;
sample_memory_ = 0;
prev_time_scale_ = false;
- timescale_hold_off_ = 0;
+ timescale_countdown_.reset();
num_consecutive_expands_ = 0;
}
@@ -92,7 +89,8 @@ void DecisionLogic::SoftReset() {
packet_length_samples_ = 0;
sample_memory_ = 0;
prev_time_scale_ = false;
- timescale_hold_off_ = kMinTimescaleInterval;
+ timescale_countdown_ =
+ tick_timer_->GetNewCountdown(kMinTimescaleInterval + 1);
}
void DecisionLogic::SetSampleRate(int fs_hz, size_t output_size_samples) {
@@ -107,15 +105,15 @@ Operations DecisionLogic::GetDecision(const SyncBuffer& sync_buffer,
size_t decoder_frame_length,
const RTPHeader* packet_header,
Modes prev_mode,
- bool play_dtmf, bool* reset_decoder) {
+ bool play_dtmf,
+ size_t generated_noise_samples,
+ bool* reset_decoder) {
if (prev_mode == kModeRfc3389Cng ||
prev_mode == kModeCodecInternalCng ||
prev_mode == kModeExpand) {
// If last mode was CNG (or Expand, since this could be covering up for
- // a lost CNG packet), increase the |generated_noise_samples_| counter.
- generated_noise_samples_ += output_size_samples_;
- // Remember that CNG is on. This is needed if comfort noise is interrupted
- // by DTMF.
+ // a lost CNG packet), remember that CNG is on. This is needed if comfort
+ // noise is interrupted by DTMF.
if (prev_mode == kModeRfc3389Cng) {
cng_state_ = kCngRfc3389On;
} else if (prev_mode == kModeCodecInternalCng) {
@@ -139,7 +137,7 @@ Operations DecisionLogic::GetDecision(const SyncBuffer& sync_buffer,
return GetDecisionSpecialized(sync_buffer, expand, decoder_frame_length,
packet_header, prev_mode, play_dtmf,
- reset_decoder);
+ reset_decoder, generated_noise_samples);
}
void DecisionLogic::ExpandDecision(Operations operation) {
@@ -152,10 +150,6 @@ void DecisionLogic::ExpandDecision(Operations operation) {
void DecisionLogic::FilterBufferLevel(size_t buffer_size_samples,
Modes prev_mode) {
- const int elapsed_time_ms =
- static_cast<int>(output_size_samples_ / (8 * fs_mult_));
- delay_manager_->UpdateCounters(elapsed_time_ms);
-
// Do not update buffer history if currently playing CNG since it will bias
// the filtered buffer level.
if ((prev_mode != kModeRfc3389Cng) && (prev_mode != kModeCodecInternalCng)) {
@@ -170,14 +164,13 @@ void DecisionLogic::FilterBufferLevel(size_t buffer_size_samples,
int sample_memory_local = 0;
if (prev_time_scale_) {
sample_memory_local = sample_memory_;
- timescale_hold_off_ = kMinTimescaleInterval;
+ timescale_countdown_ =
+ tick_timer_->GetNewCountdown(kMinTimescaleInterval);
}
buffer_level_filter_->Update(buffer_size_packets, sample_memory_local,
packet_length_samples_);
prev_time_scale_ = false;
}
-
- timescale_hold_off_ = std::max(timescale_hold_off_ - 1, 0);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h
index 72121b7aac5..008655d1a2b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h
@@ -14,6 +14,7 @@
#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/neteq/defines.h"
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -39,7 +40,8 @@ class DecisionLogic {
DecoderDatabase* decoder_database,
const PacketBuffer& packet_buffer,
DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter);
+ BufferLevelFilter* buffer_level_filter,
+ const TickTimer* tick_timer);
// Constructor.
DecisionLogic(int fs_hz,
@@ -48,10 +50,10 @@ class DecisionLogic {
DecoderDatabase* decoder_database,
const PacketBuffer& packet_buffer,
DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter);
+ BufferLevelFilter* buffer_level_filter,
+ const TickTimer* tick_timer);
- // Destructor.
- virtual ~DecisionLogic() {}
+ virtual ~DecisionLogic();
// Resets object to a clean state.
void Reset();
@@ -79,6 +81,7 @@ class DecisionLogic {
const RTPHeader* packet_header,
Modes prev_mode,
bool play_dtmf,
+ size_t generated_noise_samples,
bool* reset_decoder);
// These methods test the |cng_state_| for different conditions.
@@ -101,10 +104,7 @@ class DecisionLogic {
// Accessors and mutators.
void set_sample_memory(int32_t value) { sample_memory_ = value; }
- size_t generated_noise_samples() const { return generated_noise_samples_; }
- void set_generated_noise_samples(size_t value) {
- generated_noise_samples_ = value;
- }
+ size_t noise_fast_forward() const { return noise_fast_forward_; }
size_t packet_length_samples() const { return packet_length_samples_; }
void set_packet_length_samples(size_t value) {
packet_length_samples_ = value;
@@ -113,8 +113,8 @@ class DecisionLogic {
NetEqPlayoutMode playout_mode() const { return playout_mode_; }
protected:
- // The value 6 sets maximum time-stretch rate to about 100 ms/s.
- static const int kMinTimescaleInterval = 6;
+ // The value 5 sets maximum time-stretch rate to about 100 ms/s.
+ static const int kMinTimescaleInterval = 5;
enum CngState {
kCngOff,
@@ -138,7 +138,8 @@ class DecisionLogic {
const RTPHeader* packet_header,
Modes prev_mode,
bool play_dtmf,
- bool* reset_decoder) = 0;
+ bool* reset_decoder,
+ size_t generated_noise_samples) = 0;
// Updates the |buffer_level_filter_| with the current buffer level
// |buffer_size_packets|.
@@ -148,15 +149,16 @@ class DecisionLogic {
const PacketBuffer& packet_buffer_;
DelayManager* delay_manager_;
BufferLevelFilter* buffer_level_filter_;
+ const TickTimer* tick_timer_;
int fs_mult_;
size_t output_size_samples_;
CngState cng_state_; // Remember if comfort noise is interrupted by other
// event (e.g., DTMF).
- size_t generated_noise_samples_;
+ size_t noise_fast_forward_ = 0;
size_t packet_length_samples_;
int sample_memory_;
bool prev_time_scale_;
- int timescale_hold_off_;
+ std::unique_ptr<TickTimer::Countdown> timescale_countdown_;
int num_consecutive_expands_;
const NetEqPlayoutMode playout_mode_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc
index ddea64425f2..aace402a7de 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc
@@ -26,7 +26,8 @@ Operations DecisionLogicFax::GetDecisionSpecialized(
const RTPHeader* packet_header,
Modes prev_mode,
bool play_dtmf,
- bool* reset_decoder) {
+ bool* reset_decoder,
+ size_t generated_noise_samples) {
assert(playout_mode_ == kPlayoutFax || playout_mode_ == kPlayoutOff);
uint32_t target_timestamp = sync_buffer.end_timestamp();
uint32_t available_timestamp = 0;
@@ -37,7 +38,7 @@ Operations DecisionLogicFax::GetDecisionSpecialized(
decoder_database_->IsComfortNoise(packet_header->payloadType);
}
if (is_cng_packet) {
- if (static_cast<int32_t>((generated_noise_samples_ + target_timestamp)
+ if (static_cast<int32_t>((generated_noise_samples + target_timestamp)
- available_timestamp) >= 0) {
// Time to play this packet now.
return kRfc3389Cng;
@@ -70,13 +71,13 @@ Operations DecisionLogicFax::GetDecisionSpecialized(
} else if (target_timestamp == available_timestamp) {
return kNormal;
} else {
- if (static_cast<int32_t>((generated_noise_samples_ + target_timestamp)
+ if (static_cast<int32_t>((generated_noise_samples + target_timestamp)
- available_timestamp) >= 0) {
return kNormal;
} else {
// If currently playing comfort noise, continue with that. Do not
- // increase the timestamp counter since generated_noise_samples_ will
- // be increased.
+ // increase the timestamp counter since generated_noise_stopwatch_ in
+ // NetEqImpl will take care of the time-keeping.
if (cng_state_ == kCngRfc3389On) {
return kRfc3389CngNoPacket;
} else if (cng_state_ == kCngInternalOn) {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h
index 204dcc168a3..6958f908b1c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h
@@ -28,11 +28,16 @@ class DecisionLogicFax : public DecisionLogic {
DecoderDatabase* decoder_database,
const PacketBuffer& packet_buffer,
DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter)
- : DecisionLogic(fs_hz, output_size_samples, playout_mode,
- decoder_database, packet_buffer, delay_manager,
- buffer_level_filter) {
- }
+ BufferLevelFilter* buffer_level_filter,
+ const TickTimer* tick_timer)
+ : DecisionLogic(fs_hz,
+ output_size_samples,
+ playout_mode,
+ decoder_database,
+ packet_buffer,
+ delay_manager,
+ buffer_level_filter,
+ tick_timer) {}
protected:
// Returns the operation that should be done next. |sync_buffer| and |expand|
@@ -50,7 +55,8 @@ class DecisionLogicFax : public DecisionLogic {
const RTPHeader* packet_header,
Modes prev_mode,
bool play_dtmf,
- bool* reset_decoder) override;
+ bool* reset_decoder,
+ size_t generated_noise_samples) override;
private:
RTC_DISALLOW_COPY_AND_ASSIGN(DecisionLogicFax);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc
index 0252d1cdfaf..37a75d7f5ad 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc
@@ -31,7 +31,8 @@ Operations DecisionLogicNormal::GetDecisionSpecialized(
const RTPHeader* packet_header,
Modes prev_mode,
bool play_dtmf,
- bool* reset_decoder) {
+ bool* reset_decoder,
+ size_t generated_noise_samples) {
assert(playout_mode_ == kPlayoutOn || playout_mode_ == kPlayoutStreaming);
// Guard for errors, to avoid getting stuck in error mode.
if (prev_mode == kModeError) {
@@ -52,7 +53,8 @@ Operations DecisionLogicNormal::GetDecisionSpecialized(
}
if (is_cng_packet) {
- return CngOperation(prev_mode, target_timestamp, available_timestamp);
+ return CngOperation(prev_mode, target_timestamp, available_timestamp,
+ generated_noise_samples);
}
// Handle the case with no packet at all available (except maybe DTMF).
@@ -76,7 +78,8 @@ Operations DecisionLogicNormal::GetDecisionSpecialized(
available_timestamp, target_timestamp, five_seconds_samples)) {
return FuturePacketAvailable(sync_buffer, expand, decoder_frame_length,
prev_mode, target_timestamp,
- available_timestamp, play_dtmf);
+ available_timestamp, play_dtmf,
+ generated_noise_samples);
} else {
// This implies that available_timestamp < target_timestamp, which can
// happen when a new stream or codec is received. Signal for a reset.
@@ -86,10 +89,11 @@ Operations DecisionLogicNormal::GetDecisionSpecialized(
Operations DecisionLogicNormal::CngOperation(Modes prev_mode,
uint32_t target_timestamp,
- uint32_t available_timestamp) {
+ uint32_t available_timestamp,
+ size_t generated_noise_samples) {
// Signed difference between target and available timestamp.
int32_t timestamp_diff = static_cast<int32_t>(
- static_cast<uint32_t>(generated_noise_samples_ + target_timestamp) -
+ static_cast<uint32_t>(generated_noise_samples + target_timestamp) -
available_timestamp);
int32_t optimal_level_samp = static_cast<int32_t>(
(delay_manager_->TargetLevel() * packet_length_samples_) >> 8);
@@ -97,9 +101,9 @@ Operations DecisionLogicNormal::CngOperation(Modes prev_mode,
if (excess_waiting_time_samp > optimal_level_samp / 2) {
// The waiting time for this packet will be longer than 1.5
- // times the wanted buffer delay. Advance the clock to cut
+ // times the wanted buffer delay. Apply fast-forward to cut the
// waiting time down to the optimal.
- generated_noise_samples_ += excess_waiting_time_samp;
+ noise_fast_forward_ += excess_waiting_time_samp;
timestamp_diff += excess_waiting_time_samp;
}
@@ -109,6 +113,7 @@ Operations DecisionLogicNormal::CngOperation(Modes prev_mode,
return kRfc3389CngNoPacket;
} else {
// Otherwise, go for the CNG packet now.
+ noise_fast_forward_ = 0;
return kRfc3389Cng;
}
}
@@ -153,7 +158,8 @@ Operations DecisionLogicNormal::FuturePacketAvailable(
Modes prev_mode,
uint32_t target_timestamp,
uint32_t available_timestamp,
- bool play_dtmf) {
+ bool play_dtmf,
+ size_t generated_noise_samples) {
// Required packet is not available, but a future packet is.
// Check if we should continue with an ongoing expand because the new packet
// is too far into the future.
@@ -184,7 +190,7 @@ Operations DecisionLogicNormal::FuturePacketAvailable(
// safety precaution), but make sure that the number of samples in buffer
// is no higher than 4 times the optimal level. (Note that TargetLevel()
// is in Q8.)
- if (static_cast<uint32_t>(generated_noise_samples_ + target_timestamp) >=
+ if (static_cast<uint32_t>(generated_noise_samples + target_timestamp) >=
available_timestamp ||
cur_size_samples >
((delay_manager_->TargetLevel() * packet_length_samples_) >> 8) *
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h
index 7465906a381..aa0edf3152a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h
@@ -28,11 +28,16 @@ class DecisionLogicNormal : public DecisionLogic {
DecoderDatabase* decoder_database,
const PacketBuffer& packet_buffer,
DelayManager* delay_manager,
- BufferLevelFilter* buffer_level_filter)
- : DecisionLogic(fs_hz, output_size_samples, playout_mode,
- decoder_database, packet_buffer, delay_manager,
- buffer_level_filter) {
- }
+ BufferLevelFilter* buffer_level_filter,
+ const TickTimer* tick_timer)
+ : DecisionLogic(fs_hz,
+ output_size_samples,
+ playout_mode,
+ decoder_database,
+ packet_buffer,
+ delay_manager,
+ buffer_level_filter,
+ tick_timer) {}
protected:
static const int kAllowMergeWithoutExpandMs = 20; // 20 ms.
@@ -54,7 +59,8 @@ class DecisionLogicNormal : public DecisionLogic {
const RTPHeader* packet_header,
Modes prev_mode,
bool play_dtmf,
- bool* reset_decoder) override;
+ bool* reset_decoder,
+ size_t generated_noise_samples) override;
// Returns the operation to do given that the expected packet is not
// available, but a packet further into the future is at hand.
@@ -65,7 +71,8 @@ class DecisionLogicNormal : public DecisionLogic {
Modes prev_mode,
uint32_t target_timestamp,
uint32_t available_timestamp,
- bool play_dtmf);
+ bool play_dtmf,
+ size_t generated_noise_samples);
// Returns the operation to do given that the expected packet is available.
virtual Operations ExpectedPacketAvailable(Modes prev_mode, bool play_dtmf);
@@ -77,12 +84,16 @@ class DecisionLogicNormal : public DecisionLogic {
private:
// Returns the operation given that the next available packet is a comfort
// noise payload (RFC 3389 only, not codec-internal).
- Operations CngOperation(Modes prev_mode, uint32_t target_timestamp,
- uint32_t available_timestamp);
+ Operations CngOperation(Modes prev_mode,
+ uint32_t target_timestamp,
+ uint32_t available_timestamp,
+ size_t generated_noise_samples);
// Checks if enough time has elapsed since the last successful timescale
// operation was done (i.e., accelerate or preemptive expand).
- bool TimescaleAllowed() const { return timescale_hold_off_ == 0; }
+ bool TimescaleAllowed() const {
+ return !timescale_countdown_ || timescale_countdown_->Finished();
+ }
// Checks if the current (filtered) buffer level is under the target level.
bool UnderTargetLevel() const;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc
index 499f9464347..ebb366890b3 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc
@@ -11,45 +11,42 @@
// Unit tests for DecisionLogic class and derived classes.
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_decoder_factory.h"
#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
namespace webrtc {
TEST(DecisionLogic, CreateAndDestroy) {
int fs_hz = 8000;
int output_size_samples = fs_hz / 100; // Samples per 10 ms.
- DecoderDatabase decoder_database;
- PacketBuffer packet_buffer(10);
- DelayPeakDetector delay_peak_detector;
- DelayManager delay_manager(240, &delay_peak_detector);
+ DecoderDatabase decoder_database(
+ std::unique_ptr<MockAudioDecoderFactory>(new MockAudioDecoderFactory));
+ TickTimer tick_timer;
+ PacketBuffer packet_buffer(10, &tick_timer);
+ DelayPeakDetector delay_peak_detector(&tick_timer);
+ DelayManager delay_manager(240, &delay_peak_detector, &tick_timer);
BufferLevelFilter buffer_level_filter;
- DecisionLogic* logic = DecisionLogic::Create(fs_hz, output_size_samples,
- kPlayoutOn, &decoder_database,
- packet_buffer, &delay_manager,
- &buffer_level_filter);
+ DecisionLogic* logic = DecisionLogic::Create(
+ fs_hz, output_size_samples, kPlayoutOn, &decoder_database, packet_buffer,
+ &delay_manager, &buffer_level_filter, &tick_timer);
delete logic;
- logic = DecisionLogic::Create(fs_hz, output_size_samples,
- kPlayoutStreaming,
- &decoder_database,
- packet_buffer, &delay_manager,
- &buffer_level_filter);
+ logic = DecisionLogic::Create(
+ fs_hz, output_size_samples, kPlayoutStreaming, &decoder_database,
+ packet_buffer, &delay_manager, &buffer_level_filter, &tick_timer);
delete logic;
- logic = DecisionLogic::Create(fs_hz, output_size_samples,
- kPlayoutFax,
- &decoder_database,
- packet_buffer, &delay_manager,
- &buffer_level_filter);
+ logic = DecisionLogic::Create(
+ fs_hz, output_size_samples, kPlayoutFax, &decoder_database, packet_buffer,
+ &delay_manager, &buffer_level_filter, &tick_timer);
delete logic;
- logic = DecisionLogic::Create(fs_hz, output_size_samples,
- kPlayoutOff,
- &decoder_database,
- packet_buffer, &delay_manager,
- &buffer_level_filter);
+ logic = DecisionLogic::Create(
+ fs_hz, output_size_samples, kPlayoutOff, &decoder_database, packet_buffer,
+ &delay_manager, &buffer_level_filter, &tick_timer);
delete logic;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc
index 92d4bab1e4a..4fddf75ce26 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc
@@ -19,13 +19,39 @@
namespace webrtc {
-DecoderDatabase::DecoderDatabase()
- : active_decoder_(-1), active_cng_decoder_(-1) {}
+DecoderDatabase::DecoderDatabase(
+ std::unique_ptr<AudioDecoderFactory> decoder_factory)
+ : active_decoder_type_(-1),
+ active_cng_decoder_type_(-1),
+ decoder_factory_(std::move(decoder_factory)) {}
-DecoderDatabase::~DecoderDatabase() {}
+DecoderDatabase::~DecoderDatabase() = default;
-DecoderDatabase::DecoderInfo::~DecoderInfo() {
- if (!external) delete decoder;
+DecoderDatabase::DecoderInfo::DecoderInfo(NetEqDecoder ct,
+ const std::string& nm,
+ int fs,
+ AudioDecoder* ext_dec)
+ : codec_type(ct),
+ name(nm),
+ fs_hz(fs),
+ external_decoder(ext_dec),
+ audio_format_(acm2::RentACodec::NetEqDecoderToSdpAudioFormat(ct)) {}
+
+DecoderDatabase::DecoderInfo::DecoderInfo(DecoderInfo&&) = default;
+DecoderDatabase::DecoderInfo::~DecoderInfo() = default;
+
+AudioDecoder* DecoderDatabase::DecoderInfo::GetDecoder(
+ AudioDecoderFactory* factory) {
+ if (external_decoder) {
+ RTC_DCHECK(!decoder_);
+ return external_decoder;
+ }
+ RTC_DCHECK(audio_format_);
+ if (!decoder_) {
+ decoder_ = factory->MakeAudioDecoder(*audio_format_);
+ }
+ RTC_DCHECK(decoder_) << "Failed to create: " << *audio_format_;
+ return decoder_.get();
}
bool DecoderDatabase::Empty() const { return decoders_.empty(); }
@@ -34,8 +60,8 @@ int DecoderDatabase::Size() const { return static_cast<int>(decoders_.size()); }
void DecoderDatabase::Reset() {
decoders_.clear();
- active_decoder_ = -1;
- active_cng_decoder_ = -1;
+ active_decoder_type_ = -1;
+ active_cng_decoder_type_ = -1;
}
int DecoderDatabase::RegisterPayload(uint8_t rtp_payload_type,
@@ -48,8 +74,9 @@ int DecoderDatabase::RegisterPayload(uint8_t rtp_payload_type,
return kCodecNotSupported;
}
const int fs_hz = CodecSampleRateHz(codec_type);
- DecoderInfo info(codec_type, name, fs_hz, NULL, false);
- auto ret = decoders_.insert(std::make_pair(rtp_payload_type, info));
+ DecoderInfo info(codec_type, name, fs_hz, nullptr);
+ auto ret =
+ decoders_.insert(std::make_pair(rtp_payload_type, std::move(info)));
if (ret.second == false) {
// Database already contains a decoder with type |rtp_payload_type|.
return kDecoderExists;
@@ -75,8 +102,8 @@ int DecoderDatabase::InsertExternal(uint8_t rtp_payload_type,
return kInvalidPointer;
}
std::pair<DecoderMap::iterator, bool> ret;
- DecoderInfo info(codec_type, codec_name, fs_hz, decoder, true);
- ret = decoders_.insert(std::make_pair(rtp_payload_type, info));
+ DecoderInfo info(codec_type, codec_name, fs_hz, decoder);
+ ret = decoders_.insert(std::make_pair(rtp_payload_type, std::move(info)));
if (ret.second == false) {
// Database already contains a decoder with type |rtp_payload_type|.
return kDecoderExists;
@@ -89,11 +116,11 @@ int DecoderDatabase::Remove(uint8_t rtp_payload_type) {
// No decoder with that |rtp_payload_type|.
return kDecoderNotFound;
}
- if (active_decoder_ == rtp_payload_type) {
- active_decoder_ = -1; // No active decoder.
+ if (active_decoder_type_ == rtp_payload_type) {
+ active_decoder_type_ = -1; // No active decoder.
}
- if (active_cng_decoder_ == rtp_payload_type) {
- active_cng_decoder_ = -1; // No active CNG decoder.
+ if (active_cng_decoder_type_ == rtp_payload_type) {
+ active_cng_decoder_type_ = -1; // No active CNG decoder.
}
return kOK;
}
@@ -122,7 +149,8 @@ uint8_t DecoderDatabase::GetRtpPayloadType(
}
AudioDecoder* DecoderDatabase::GetDecoder(uint8_t rtp_payload_type) {
- if (IsDtmf(rtp_payload_type) || IsRed(rtp_payload_type)) {
+ if (IsDtmf(rtp_payload_type) || IsRed(rtp_payload_type) ||
+ IsComfortNoise(rtp_payload_type)) {
// These are not real decoders.
return NULL;
}
@@ -132,13 +160,7 @@ AudioDecoder* DecoderDatabase::GetDecoder(uint8_t rtp_payload_type) {
return NULL;
}
DecoderInfo* info = &(*it).second;
- if (!info->decoder) {
- // Create the decoder object.
- AudioDecoder* decoder = CreateAudioDecoder(info->codec_type);
- assert(decoder); // Should not be able to have an unsupported codec here.
- info->decoder = decoder;
- }
- return info->decoder;
+ return info->GetDecoder(decoder_factory_.get());
}
bool DecoderDatabase::IsType(uint8_t rtp_payload_type,
@@ -152,14 +174,16 @@ bool DecoderDatabase::IsType(uint8_t rtp_payload_type,
}
bool DecoderDatabase::IsComfortNoise(uint8_t rtp_payload_type) const {
- if (IsType(rtp_payload_type, NetEqDecoder::kDecoderCNGnb) ||
- IsType(rtp_payload_type, NetEqDecoder::kDecoderCNGwb) ||
- IsType(rtp_payload_type, NetEqDecoder::kDecoderCNGswb32kHz) ||
- IsType(rtp_payload_type, NetEqDecoder::kDecoderCNGswb48kHz)) {
- return true;
- } else {
+ DecoderMap::const_iterator it = decoders_.find(rtp_payload_type);
+ if (it == decoders_.end()) {
+ // Decoder not found.
return false;
}
+ const auto& type = it->second.codec_type;
+ return type == NetEqDecoder::kDecoderCNGnb
+ || type == NetEqDecoder::kDecoderCNGwb
+ || type == NetEqDecoder::kDecoderCNGswb32kHz
+ || type == NetEqDecoder::kDecoderCNGswb48kHz;
}
bool DecoderDatabase::IsDtmf(uint8_t rtp_payload_type) const {
@@ -178,37 +202,33 @@ int DecoderDatabase::SetActiveDecoder(uint8_t rtp_payload_type,
// Decoder not found.
return kDecoderNotFound;
}
+ RTC_CHECK(!IsComfortNoise(rtp_payload_type));
assert(new_decoder);
*new_decoder = false;
- if (active_decoder_ < 0) {
+ if (active_decoder_type_ < 0) {
// This is the first active decoder.
*new_decoder = true;
- } else if (active_decoder_ != rtp_payload_type) {
+ } else if (active_decoder_type_ != rtp_payload_type) {
// Moving from one active decoder to another. Delete the first one.
- DecoderMap::iterator it = decoders_.find(active_decoder_);
+ DecoderMap::iterator it = decoders_.find(active_decoder_type_);
if (it == decoders_.end()) {
// Decoder not found. This should not be possible.
assert(false);
return kDecoderNotFound;
}
- if (!(*it).second.external) {
- // Delete the AudioDecoder object, unless it is an externally created
- // decoder.
- delete (*it).second.decoder;
- (*it).second.decoder = NULL;
- }
+ it->second.DropDecoder();
*new_decoder = true;
}
- active_decoder_ = rtp_payload_type;
+ active_decoder_type_ = rtp_payload_type;
return kOK;
}
AudioDecoder* DecoderDatabase::GetActiveDecoder() {
- if (active_decoder_ < 0) {
+ if (active_decoder_type_ < 0) {
// No active decoder.
return NULL;
}
- return GetDecoder(active_decoder_);
+ return GetDecoder(active_decoder_type_);
}
int DecoderDatabase::SetActiveCngDecoder(uint8_t rtp_payload_type) {
@@ -218,31 +238,32 @@ int DecoderDatabase::SetActiveCngDecoder(uint8_t rtp_payload_type) {
// Decoder not found.
return kDecoderNotFound;
}
- if (active_cng_decoder_ >= 0 && active_cng_decoder_ != rtp_payload_type) {
+ if (active_cng_decoder_type_ >= 0 &&
+ active_cng_decoder_type_ != rtp_payload_type) {
// Moving from one active CNG decoder to another. Delete the first one.
- DecoderMap::iterator it = decoders_.find(active_cng_decoder_);
+ DecoderMap::iterator it = decoders_.find(active_cng_decoder_type_);
if (it == decoders_.end()) {
// Decoder not found. This should not be possible.
assert(false);
return kDecoderNotFound;
}
- if (!(*it).second.external) {
- // Delete the AudioDecoder object, unless it is an externally created
- // decoder.
- delete (*it).second.decoder;
- (*it).second.decoder = NULL;
- }
+ // The CNG decoder should never be provided externally.
+ RTC_CHECK(!it->second.external_decoder);
+ active_cng_decoder_.reset();
}
- active_cng_decoder_ = rtp_payload_type;
+ active_cng_decoder_type_ = rtp_payload_type;
return kOK;
}
-AudioDecoder* DecoderDatabase::GetActiveCngDecoder() {
- if (active_cng_decoder_ < 0) {
+ComfortNoiseDecoder* DecoderDatabase::GetActiveCngDecoder() {
+ if (active_cng_decoder_type_ < 0) {
// No active CNG decoder.
return NULL;
}
- return GetDecoder(active_cng_decoder_);
+ if (!active_cng_decoder_) {
+ active_cng_decoder_.reset(new ComfortNoiseDecoder);
+ }
+ return active_cng_decoder_.get();
}
int DecoderDatabase::CheckPayloadTypes(const PacketList& packet_list) const {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h
index 01ff0c9fdb3..3a40e08c8a6 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h
@@ -12,10 +12,14 @@
#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECODER_DATABASE_H_
#include <map>
+#include <memory>
#include <string>
#include "webrtc/base/constructormagic.h"
#include "webrtc/common_types.h" // NULL
+#include "webrtc/modules/audio_coding/codecs/audio_decoder_factory.h"
+#include "webrtc/modules/audio_coding/codecs/audio_format.h"
+#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
#include "webrtc/modules/audio_coding/neteq/packet.h"
#include "webrtc/typedefs.h"
@@ -34,37 +38,38 @@ class DecoderDatabase {
kInvalidPointer = -6
};
- // Struct used to store decoder info in the database.
- struct DecoderInfo {
- DecoderInfo() = default;
- DecoderInfo(NetEqDecoder ct, int fs, AudioDecoder* dec, bool ext)
- : DecoderInfo(ct, "", fs, dec, ext) {}
+ // Class that stores decoder info in the database.
+ class DecoderInfo {
+ public:
DecoderInfo(NetEqDecoder ct,
const std::string& nm,
int fs,
- AudioDecoder* dec,
- bool ext)
- : codec_type(ct),
- name(nm),
- fs_hz(fs),
- rtp_sample_rate_hz(fs),
- decoder(dec),
- external(ext) {}
+ AudioDecoder* ext_dec);
+ DecoderInfo(DecoderInfo&&);
~DecoderInfo();
- NetEqDecoder codec_type = NetEqDecoder::kDecoderArbitrary;
- std::string name;
- int fs_hz = 8000;
- int rtp_sample_rate_hz = 8000;
- AudioDecoder* decoder = nullptr;
- bool external = false;
+ // Get the AudioDecoder object, creating it first if necessary.
+ AudioDecoder* GetDecoder(AudioDecoderFactory* factory);
+
+ // Delete the AudioDecoder object, unless it's external. (This means we can
+ // always recreate it later if we need it.)
+ void DropDecoder() { decoder_.reset(); }
+
+ const NetEqDecoder codec_type;
+ const std::string name;
+ const int fs_hz;
+ AudioDecoder* const external_decoder;
+
+ private:
+ const rtc::Optional<SdpAudioFormat> audio_format_;
+ std::unique_ptr<AudioDecoder> decoder_;
};
// Maximum value for 8 bits, and an invalid RTP payload type (since it is
// only 7 bits).
static const uint8_t kRtpPayloadTypeError = 0xFF;
- DecoderDatabase();
+ DecoderDatabase(std::unique_ptr<AudioDecoderFactory> decoder_factory);
virtual ~DecoderDatabase();
@@ -142,7 +147,7 @@ class DecoderDatabase {
// Returns the current active comfort noise decoder, or NULL if no active
// comfort noise decoder exists.
- virtual AudioDecoder* GetActiveCngDecoder();
+ virtual ComfortNoiseDecoder* GetActiveCngDecoder();
// Returns kOK if all packets in |packet_list| carry payload types that are
// registered in the database. Otherwise, returns kDecoderNotFound.
@@ -152,8 +157,10 @@ class DecoderDatabase {
typedef std::map<uint8_t, DecoderInfo> DecoderMap;
DecoderMap decoders_;
- int active_decoder_;
- int active_cng_decoder_;
+ int active_decoder_type_;
+ int active_cng_decoder_type_;
+ std::unique_ptr<ComfortNoiseDecoder> active_cng_decoder_;
+ const std::unique_ptr<AudioDecoderFactory> decoder_factory_;
RTC_DISALLOW_COPY_AND_ASSIGN(DecoderDatabase);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc
index 85aaef11431..91ca606d65b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc
@@ -19,17 +19,21 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h"
+#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
+#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_decoder_factory.h"
namespace webrtc {
TEST(DecoderDatabase, CreateAndDestroy) {
- DecoderDatabase db;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase db(std::move(factory));
EXPECT_EQ(0, db.Size());
EXPECT_TRUE(db.Empty());
}
TEST(DecoderDatabase, InsertAndRemove) {
- DecoderDatabase db;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase db(std::move(factory));
const uint8_t kPayloadType = 0;
const std::string kCodecName = "Robert\'); DROP TABLE Students;";
EXPECT_EQ(
@@ -43,7 +47,8 @@ TEST(DecoderDatabase, InsertAndRemove) {
}
TEST(DecoderDatabase, GetDecoderInfo) {
- DecoderDatabase db;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase db(std::move(factory));
const uint8_t kPayloadType = 0;
const std::string kCodecName = "Robert\'); DROP TABLE Students;";
EXPECT_EQ(
@@ -53,16 +58,16 @@ TEST(DecoderDatabase, GetDecoderInfo) {
info = db.GetDecoderInfo(kPayloadType);
ASSERT_TRUE(info != NULL);
EXPECT_EQ(NetEqDecoder::kDecoderPCMu, info->codec_type);
- EXPECT_EQ(NULL, info->decoder);
+ EXPECT_EQ(nullptr, info->external_decoder);
EXPECT_EQ(8000, info->fs_hz);
EXPECT_EQ(kCodecName, info->name);
- EXPECT_FALSE(info->external);
info = db.GetDecoderInfo(kPayloadType + 1); // Other payload type.
EXPECT_TRUE(info == NULL); // Should not be found.
}
TEST(DecoderDatabase, GetRtpPayloadType) {
- DecoderDatabase db;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase db(std::move(factory));
const uint8_t kPayloadType = 0;
const std::string kCodecName = "Robert\'); DROP TABLE Students;";
EXPECT_EQ(
@@ -76,7 +81,7 @@ TEST(DecoderDatabase, GetRtpPayloadType) {
}
TEST(DecoderDatabase, GetDecoder) {
- DecoderDatabase db;
+ DecoderDatabase db(CreateBuiltinAudioDecoderFactory());
const uint8_t kPayloadType = 0;
const std::string kCodecName = "Robert\'); DROP TABLE Students;";
EXPECT_EQ(DecoderDatabase::kOK,
@@ -87,7 +92,8 @@ TEST(DecoderDatabase, GetDecoder) {
}
TEST(DecoderDatabase, TypeTests) {
- DecoderDatabase db;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase db(std::move(factory));
const uint8_t kPayloadTypePcmU = 0;
const uint8_t kPayloadTypeCng = 13;
const uint8_t kPayloadTypeDtmf = 100;
@@ -122,7 +128,8 @@ TEST(DecoderDatabase, TypeTests) {
}
TEST(DecoderDatabase, ExternalDecoder) {
- DecoderDatabase db;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase db(std::move(factory));
const uint8_t kPayloadType = 0;
const std::string kCodecName = "Robert\'); DROP TABLE Students;";
MockAudioDecoder decoder;
@@ -139,9 +146,8 @@ TEST(DecoderDatabase, ExternalDecoder) {
ASSERT_TRUE(info != NULL);
EXPECT_EQ(NetEqDecoder::kDecoderPCMu, info->codec_type);
EXPECT_EQ(kCodecName, info->name);
- EXPECT_EQ(&decoder, info->decoder);
+ EXPECT_EQ(&decoder, info->external_decoder);
EXPECT_EQ(8000, info->fs_hz);
- EXPECT_TRUE(info->external);
// Expect not to delete the decoder when removing it from the database, since
// it was declared externally.
EXPECT_CALL(decoder, Die()).Times(0);
@@ -152,7 +158,8 @@ TEST(DecoderDatabase, ExternalDecoder) {
}
TEST(DecoderDatabase, CheckPayloadTypes) {
- DecoderDatabase db;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase db(std::move(factory));
// Load a number of payloads into the database. Payload types are 0, 1, ...,
// while the decoder type is the same for all payload types (this does not
// matter for the test).
@@ -196,7 +203,7 @@ TEST(DecoderDatabase, CheckPayloadTypes) {
// Test the methods for setting and getting active speech and CNG decoders.
TEST(DecoderDatabase, IF_ISAC(ActiveDecoders)) {
- DecoderDatabase db;
+ DecoderDatabase db(CreateBuiltinAudioDecoderFactory());
// Load payload types.
ASSERT_EQ(DecoderDatabase::kOK,
db.RegisterPayload(0, NetEqDecoder::kDecoderPCMu, "pcmu"));
@@ -233,8 +240,8 @@ TEST(DecoderDatabase, IF_ISAC(ActiveDecoders)) {
// Set active CNG codec.
EXPECT_EQ(DecoderDatabase::kOK, db.SetActiveCngDecoder(13));
- decoder = db.GetActiveCngDecoder();
- ASSERT_FALSE(decoder == NULL); // Should get a decoder here.
+ ComfortNoiseDecoder* cng = db.GetActiveCngDecoder();
+ ASSERT_FALSE(cng == NULL); // Should get a decoder here.
// Remove the active CNG decoder, and verify that the active becomes NULL.
EXPECT_EQ(DecoderDatabase::kOK, db.Remove(13));
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc
index af49f00f8af..84bda7cf699 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc
@@ -24,12 +24,13 @@
namespace webrtc {
DelayManager::DelayManager(size_t max_packets_in_buffer,
- DelayPeakDetector* peak_detector)
+ DelayPeakDetector* peak_detector,
+ const TickTimer* tick_timer)
: first_packet_received_(false),
max_packets_in_buffer_(max_packets_in_buffer),
iat_vector_(kMaxIat + 1, 0),
iat_factor_(0),
- packet_iat_count_ms_(0),
+ tick_timer_(tick_timer),
base_target_level_(4), // In Q0 domain.
target_level_(base_target_level_ << 8), // In Q8 domain.
packet_len_ms_(0),
@@ -41,7 +42,6 @@ DelayManager::DelayManager(size_t max_packets_in_buffer,
maximum_delay_ms_(target_level_),
iat_cumulative_sum_(0),
max_iat_cumulative_sum_(0),
- max_timer_ms_(0),
peak_detector_(*peak_detector),
last_pack_cng_or_dtmf_(1) {
assert(peak_detector); // Should never be NULL.
@@ -79,7 +79,7 @@ int DelayManager::Update(uint16_t sequence_number,
if (!first_packet_received_) {
// Prepare for next packet arrival.
- packet_iat_count_ms_ = 0;
+ packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch();
last_seq_no_ = sequence_number;
last_timestamp_ = timestamp;
first_packet_received_ = true;
@@ -106,7 +106,7 @@ int DelayManager::Update(uint16_t sequence_number,
// Calculate inter-arrival time (IAT) in integer "packet times"
// (rounding down). This is the value used as index to the histogram
// vector |iat_vector_|.
- int iat_packets = packet_iat_count_ms_ / packet_len_ms;
+ int iat_packets = packet_iat_stopwatch_->ElapsedMs() / packet_len_ms;
if (streaming_mode_) {
UpdateCumulativeSums(packet_len_ms, sequence_number);
@@ -137,7 +137,7 @@ int DelayManager::Update(uint16_t sequence_number,
} // End if (packet_len_ms > 0).
// Prepare for next packet arrival.
- packet_iat_count_ms_ = 0;
+ packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch();
last_seq_no_ = sequence_number;
last_timestamp_ = timestamp;
return 0;
@@ -147,7 +147,8 @@ void DelayManager::UpdateCumulativeSums(int packet_len_ms,
uint16_t sequence_number) {
// Calculate IAT in Q8, including fractions of a packet (i.e., more
// accurate than |iat_packets|.
- int iat_packets_q8 = (packet_iat_count_ms_ << 8) / packet_len_ms;
+ int iat_packets_q8 =
+ (packet_iat_stopwatch_->ElapsedMs() << 8) / packet_len_ms;
// Calculate cumulative sum IAT with sequence number compensation. The sum
// is zero if there is no clock-drift.
iat_cumulative_sum_ += (iat_packets_q8 -
@@ -159,9 +160,9 @@ void DelayManager::UpdateCumulativeSums(int packet_len_ms,
if (iat_cumulative_sum_ > max_iat_cumulative_sum_) {
// Found a new maximum.
max_iat_cumulative_sum_ = iat_cumulative_sum_;
- max_timer_ms_ = 0;
+ max_iat_stopwatch_ = tick_timer_->GetNewStopwatch();
}
- if (max_timer_ms_ > kMaxStreamingPeakPeriodMs) {
+ if (max_iat_stopwatch_->ElapsedMs() > kMaxStreamingPeakPeriodMs) {
// Too long since the last maximum was observed; decrease max value.
max_iat_cumulative_sum_ -= kCumulativeSumDrift;
}
@@ -299,7 +300,7 @@ int DelayManager::SetPacketAudioLength(int length_ms) {
}
packet_len_ms_ = length_ms;
peak_detector_.SetPacketAudioLength(packet_len_ms_);
- packet_iat_count_ms_ = 0;
+ packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch();
last_pack_cng_or_dtmf_ = 1; // TODO(hlundin): Legacy. Remove?
return 0;
}
@@ -311,8 +312,8 @@ void DelayManager::Reset() {
peak_detector_.Reset();
ResetHistogram(); // Resets target levels too.
iat_factor_ = 0; // Adapt the histogram faster for the first few packets.
- packet_iat_count_ms_ = 0;
- max_timer_ms_ = 0;
+ packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch();
+ max_iat_stopwatch_ = tick_timer_->GetNewStopwatch();
iat_cumulative_sum_ = 0;
max_iat_cumulative_sum_ = 0;
last_pack_cng_or_dtmf_ = 1;
@@ -340,14 +341,10 @@ bool DelayManager::PeakFound() const {
return peak_detector_.peak_found();
}
-void DelayManager::UpdateCounters(int elapsed_time_ms) {
- packet_iat_count_ms_ += elapsed_time_ms;
- peak_detector_.IncrementCounter(elapsed_time_ms);
- max_timer_ms_ += elapsed_time_ms;
+void DelayManager::ResetPacketIatCount() {
+ packet_iat_stopwatch_ = tick_timer_->GetNewStopwatch();
}
-void DelayManager::ResetPacketIatCount() { packet_iat_count_ms_ = 0; }
-
// Note that |low_limit| and |higher_limit| are not assigned to
// |minimum_delay_ms_| and |maximum_delay_ms_| defined by the client of this
// class. They are computed from |target_level_| and used for decision making.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h
index 785fced15df..6f3c14aea9d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h
@@ -13,10 +13,12 @@
#include <string.h> // Provide access to size_t.
+#include <memory>
#include <vector>
#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -32,7 +34,9 @@ class DelayManager {
// buffer can hold no more than |max_packets_in_buffer| packets (i.e., this
// is the number of packet slots in the buffer). Supply a PeakDetector
// object to the DelayManager.
- DelayManager(size_t max_packets_in_buffer, DelayPeakDetector* peak_detector);
+ DelayManager(size_t max_packets_in_buffer,
+ DelayPeakDetector* peak_detector,
+ const TickTimer* tick_timer);
virtual ~DelayManager();
@@ -75,10 +79,6 @@ class DelayManager {
// DelayPeakDetector object.
virtual bool PeakFound() const;
- // Notifies the counters in DelayManager and DelayPeakDetector that
- // |elapsed_time_ms| have elapsed.
- virtual void UpdateCounters(int elapsed_time_ms);
-
// Reset the inter-arrival time counter to 0.
virtual void ResetPacketIatCount();
@@ -135,7 +135,9 @@ class DelayManager {
const size_t max_packets_in_buffer_; // Capacity of the packet buffer.
IATVector iat_vector_; // Histogram of inter-arrival times.
int iat_factor_; // Forgetting factor for updating the IAT histogram (Q15).
- int packet_iat_count_ms_; // Milliseconds elapsed since last packet.
+ const TickTimer* tick_timer_;
+ // Time elapsed since last packet.
+ std::unique_ptr<TickTimer::Stopwatch> packet_iat_stopwatch_;
int base_target_level_; // Currently preferred buffer level before peak
// detection and streaming mode (Q0).
// TODO(turajs) change the comment according to the implementation of
@@ -153,7 +155,8 @@ class DelayManager {
int maximum_delay_ms_; // Externally set maximum allowed delay.
int iat_cumulative_sum_; // Cumulative sum of delta inter-arrival times.
int max_iat_cumulative_sum_; // Max of |iat_cumulative_sum_|.
- int max_timer_ms_; // Time elapsed since maximum was observed.
+ // Time elapsed since maximum was observed.
+ std::unique_ptr<TickTimer::Stopwatch> max_iat_stopwatch_;
DelayPeakDetector& peak_detector_;
int last_pack_cng_or_dtmf_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc
index f231c3da301..3290e9cca68 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc
@@ -39,21 +39,19 @@ class DelayManagerTest : public ::testing::Test {
void IncreaseTime(int inc_ms);
DelayManager* dm_;
+ TickTimer tick_timer_;
MockDelayPeakDetector detector_;
uint16_t seq_no_;
uint32_t ts_;
};
DelayManagerTest::DelayManagerTest()
- : dm_(NULL),
- seq_no_(0x1234),
- ts_(0x12345678) {
-}
+ : dm_(NULL), detector_(&tick_timer_), seq_no_(0x1234), ts_(0x12345678) {}
void DelayManagerTest::SetUp() {
EXPECT_CALL(detector_, Reset())
.Times(1);
- dm_ = new DelayManager(kMaxNumberOfPackets, &detector_);
+ dm_ = new DelayManager(kMaxNumberOfPackets, &detector_, &tick_timer_);
}
void DelayManagerTest::SetPacketAudioLength(int lengt_ms) {
@@ -69,9 +67,7 @@ void DelayManagerTest::InsertNextPacket() {
void DelayManagerTest::IncreaseTime(int inc_ms) {
for (int t = 0; t < inc_ms; t += kTimeStepMs) {
- EXPECT_CALL(detector_, IncrementCounter(kTimeStepMs))
- .Times(1);
- dm_->UpdateCounters(kTimeStepMs);
+ tick_timer_.Increment();
}
}
void DelayManagerTest::TearDown() {
@@ -115,13 +111,6 @@ TEST_F(DelayManagerTest, PeakFound) {
EXPECT_FALSE(dm_->PeakFound());
}
-TEST_F(DelayManagerTest, UpdateCounters) {
- // Expect DelayManager to pass on the counter update to the detector.
- EXPECT_CALL(detector_, IncrementCounter(kTimeStepMs))
- .Times(1);
- dm_->UpdateCounters(kTimeStepMs);
-}
-
TEST_F(DelayManagerTest, UpdateNormal) {
SetPacketAudioLength(kFrameSizeMs);
// First packet arrival.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc
index 712c7788aca..ce9133bdaed 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc
@@ -12,6 +12,9 @@
#include <algorithm> // max
+#include "webrtc/base/checks.h"
+#include "webrtc/base/safe_conversions.h"
+
namespace webrtc {
// The DelayPeakDetector keeps track of severe inter-arrival times, called
@@ -23,14 +26,15 @@ namespace webrtc {
DelayPeakDetector::~DelayPeakDetector() = default;
-DelayPeakDetector::DelayPeakDetector()
- : peak_found_(false),
- peak_detection_threshold_(0),
- peak_period_counter_ms_(-1) {
+DelayPeakDetector::DelayPeakDetector(const TickTimer* tick_timer)
+ : peak_found_(false),
+ peak_detection_threshold_(0),
+ tick_timer_(tick_timer) {
+ RTC_DCHECK(!peak_period_stopwatch_);
}
void DelayPeakDetector::Reset() {
- peak_period_counter_ms_ = -1; // Indicate that next peak is the first.
+ peak_period_stopwatch_.reset();
peak_found_ = false;
peak_history_.clear();
}
@@ -55,38 +59,40 @@ int DelayPeakDetector::MaxPeakHeight() const {
return max_height;
}
-int DelayPeakDetector::MaxPeakPeriod() const {
- int max_period = -1; // Returns -1 for an empty history.
- std::list<Peak>::const_iterator it;
- for (it = peak_history_.begin(); it != peak_history_.end(); ++it) {
- max_period = std::max(max_period, it->period_ms);
+uint64_t DelayPeakDetector::MaxPeakPeriod() const {
+ auto max_period_element = std::max_element(
+ peak_history_.begin(), peak_history_.end(),
+ [](Peak a, Peak b) { return a.period_ms < b.period_ms; });
+ if (max_period_element == peak_history_.end()) {
+ return 0; // |peak_history_| is empty.
}
- return max_period;
+ RTC_DCHECK_GT(max_period_element->period_ms, 0u);
+ return max_period_element->period_ms;
}
bool DelayPeakDetector::Update(int inter_arrival_time, int target_level) {
if (inter_arrival_time > target_level + peak_detection_threshold_ ||
inter_arrival_time > 2 * target_level) {
// A delay peak is observed.
- if (peak_period_counter_ms_ == -1) {
+ if (!peak_period_stopwatch_) {
// This is the first peak. Reset the period counter.
- peak_period_counter_ms_ = 0;
- } else if (peak_period_counter_ms_ <= kMaxPeakPeriodMs) {
+ peak_period_stopwatch_ = tick_timer_->GetNewStopwatch();
+ } else if (peak_period_stopwatch_->ElapsedMs() <= kMaxPeakPeriodMs) {
// This is not the first peak, and the period is valid.
// Store peak data in the vector.
Peak peak_data;
- peak_data.period_ms = peak_period_counter_ms_;
+ peak_data.period_ms = peak_period_stopwatch_->ElapsedMs();
peak_data.peak_height_packets = inter_arrival_time;
peak_history_.push_back(peak_data);
while (peak_history_.size() > kMaxNumPeaks) {
// Delete the oldest data point.
peak_history_.pop_front();
}
- peak_period_counter_ms_ = 0;
- } else if (peak_period_counter_ms_ <= 2 * kMaxPeakPeriodMs) {
+ peak_period_stopwatch_ = tick_timer_->GetNewStopwatch();
+ } else if (peak_period_stopwatch_->ElapsedMs() <= 2 * kMaxPeakPeriodMs) {
// Invalid peak due to too long period. Reset period counter and start
// looking for next peak.
- peak_period_counter_ms_ = 0;
+ peak_period_stopwatch_ = tick_timer_->GetNewStopwatch();
} else {
// More than 2 times the maximum period has elapsed since the last peak
// was registered. It seams that the network conditions have changed.
@@ -97,16 +103,10 @@ bool DelayPeakDetector::Update(int inter_arrival_time, int target_level) {
return CheckPeakConditions();
}
-void DelayPeakDetector::IncrementCounter(int inc_ms) {
- if (peak_period_counter_ms_ >= 0) {
- peak_period_counter_ms_ += inc_ms;
- }
-}
-
bool DelayPeakDetector::CheckPeakConditions() {
size_t s = peak_history_.size();
if (s >= kMinPeaksToTrigger &&
- peak_period_counter_ms_ <= 2 * MaxPeakPeriod()) {
+ peak_period_stopwatch_->ElapsedMs() <= 2 * MaxPeakPeriod()) {
peak_found_ = true;
} else {
peak_found_ = false;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h
index 69433b45248..f57d3bd71e5 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h
@@ -14,14 +14,16 @@
#include <string.h> // size_t
#include <list>
+#include <memory>
#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
namespace webrtc {
class DelayPeakDetector {
public:
- DelayPeakDetector();
+ DelayPeakDetector(const TickTimer* tick_timer);
virtual ~DelayPeakDetector();
virtual void Reset();
@@ -37,20 +39,15 @@ class DelayPeakDetector {
// delay peaks have been observed recently. The unit is number of packets.
virtual int MaxPeakHeight() const;
- // Calculates and returns the maximum delay peak distance in ms.
- // Returns -1 if no delay peaks have been observed recently.
- virtual int MaxPeakPeriod() const;
+ // Calculates and returns the maximum delay peak distance in ms (strictly
+ // larger than 0), or 0 if no delay peaks have been observed recently.
+ virtual uint64_t MaxPeakPeriod() const;
// Updates the DelayPeakDetector with a new inter-arrival time (in packets)
// and the current target buffer level (needed to decide if a peak is observed
// or not). Returns true if peak-mode is active, false if not.
virtual bool Update(int inter_arrival_time, int target_level);
- // Increments the |peak_period_counter_ms_| with |inc_ms|. Only increments
- // the counter if it is non-negative. A negative denotes that no peak has
- // been observed.
- virtual void IncrementCounter(int inc_ms);
-
private:
static const size_t kMaxNumPeaks = 8;
static const size_t kMinPeaksToTrigger = 2;
@@ -58,7 +55,7 @@ class DelayPeakDetector {
static const int kMaxPeakPeriodMs = 10000;
typedef struct {
- int period_ms;
+ uint64_t period_ms;
int peak_height_packets;
} Peak;
@@ -67,7 +64,8 @@ class DelayPeakDetector {
std::list<Peak> peak_history_;
bool peak_found_;
int peak_detection_threshold_;
- int peak_period_counter_ms_;
+ const TickTimer* tick_timer_;
+ std::unique_ptr<TickTimer::Stopwatch> peak_period_stopwatch_;
RTC_DISALLOW_COPY_AND_ASSIGN(DelayPeakDetector);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc
index c40f3991b04..32b36b25ef4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc
@@ -17,22 +17,25 @@
namespace webrtc {
TEST(DelayPeakDetector, CreateAndDestroy) {
- DelayPeakDetector* detector = new DelayPeakDetector();
+ TickTimer tick_timer;
+ DelayPeakDetector* detector = new DelayPeakDetector(&tick_timer);
EXPECT_FALSE(detector->peak_found());
delete detector;
}
TEST(DelayPeakDetector, EmptyHistory) {
- DelayPeakDetector detector;
+ TickTimer tick_timer;
+ DelayPeakDetector detector(&tick_timer);
EXPECT_EQ(-1, detector.MaxPeakHeight());
- EXPECT_EQ(-1, detector.MaxPeakPeriod());
+ EXPECT_EQ(0u, detector.MaxPeakPeriod());
}
// Inject a series of packet arrivals into the detector. Three of the packets
// have suffered delays. After the third delay peak, peak-mode is expected to
// start. This should then continue until it is disengaged due to lack of peaks.
TEST(DelayPeakDetector, TriggerPeakMode) {
- DelayPeakDetector detector;
+ TickTimer tick_timer;
+ DelayPeakDetector detector(&tick_timer);
const int kPacketSizeMs = 30;
detector.SetPacketAudioLength(kPacketSizeMs);
@@ -52,7 +55,7 @@ TEST(DelayPeakDetector, TriggerPeakMode) {
// Third delay peak. Trigger peak-mode after this packet.
arrival_times_ms[400] += kPeakDelayMs;
// The second peak period is the longest, 200 packets.
- const int kWorstPeakPeriod = 200 * kPacketSizeMs;
+ const uint64_t kWorstPeakPeriod = 200 * kPacketSizeMs;
int peak_mode_start_ms = arrival_times_ms[400];
// Expect to disengage after no peaks are observed for two period times.
int peak_mode_end_ms = peak_mode_start_ms + 2 * kWorstPeakPeriod;
@@ -74,7 +77,7 @@ TEST(DelayPeakDetector, TriggerPeakMode) {
}
++next;
}
- detector.IncrementCounter(10);
+ tick_timer.Increment();
time += 10; // Increase time 10 ms.
}
}
@@ -83,7 +86,8 @@ TEST(DelayPeakDetector, TriggerPeakMode) {
// 2, in order to raise the bar for delay peaks to inter-arrival times > 4.
// The delay pattern has peaks with delay = 3, thus should not trigger.
TEST(DelayPeakDetector, DoNotTriggerPeakMode) {
- DelayPeakDetector detector;
+ TickTimer tick_timer;
+ DelayPeakDetector detector(&tick_timer);
const int kPacketSizeMs = 30;
detector.SetPacketAudioLength(kPacketSizeMs);
@@ -114,7 +118,7 @@ TEST(DelayPeakDetector, DoNotTriggerPeakMode) {
EXPECT_FALSE(detector.Update(iat_packets, kTargetBufferLevel));
++next;
}
- detector.IncrementCounter(10);
+ tick_timer.Increment();
time += 10; // Increase time 10 ms.
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc
index 4188914c86c..32756650942 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc
@@ -80,6 +80,22 @@ int DspHelper::RampSignal(int16_t* signal,
return RampSignal(signal, length, factor, increment, signal);
}
+int DspHelper::RampSignal(AudioVector* signal,
+ size_t start_index,
+ size_t length,
+ int factor,
+ int increment) {
+ int factor_q20 = (factor << 6) + 32;
+ // TODO(hlundin): Add 32 to factor_q20 when converting back to Q14?
+ for (size_t i = start_index; i < start_index + length; ++i) {
+ (*signal)[i] = (factor * (*signal)[i] + 8192) >> 14;
+ factor_q20 += increment;
+ factor_q20 = std::max(factor_q20, 0); // Never go negative.
+ factor = std::min(factor_q20 >> 6, 16384);
+ }
+ return factor;
+}
+
int DspHelper::RampSignal(AudioMultiVector* signal,
size_t start_index,
size_t length,
@@ -94,7 +110,7 @@ int DspHelper::RampSignal(AudioMultiVector* signal,
// Loop over the channels, starting at the same |factor| each time.
for (size_t channel = 0; channel < signal->Channels(); ++channel) {
end_factor =
- RampSignal(&(*signal)[channel][start_index], length, factor, increment);
+ RampSignal(&(*signal)[channel], start_index, length, factor, increment);
}
return end_factor;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h
index 269c2eb0f25..23543fe383e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h
@@ -67,6 +67,13 @@ class DspHelper {
// Same as above, but processes |length| samples from |signal|, starting at
// |start_index|.
+ static int RampSignal(AudioVector* signal,
+ size_t start_index,
+ size_t length,
+ int factor,
+ int increment);
+
+ // Same as above, but for an AudioMultiVector.
static int RampSignal(AudioMultiVector* signal,
size_t start_index,
size_t length,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc
index ef7af46597e..963f4bdb6c0 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc
@@ -19,6 +19,7 @@
#include "webrtc/base/safe_conversions.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/cross_correlation.h"
#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
#include "webrtc/modules/audio_coding/neteq/random_vector.h"
#include "webrtc/modules/audio_coding/neteq/statistics_calculator.h"
@@ -111,25 +112,33 @@ int Expand::Process(AudioMultiVector* output) {
// Use only expand_vector0.
assert(expansion_vector_position + temp_length <=
parameters.expand_vector0.Size());
- memcpy(voiced_vector_storage,
- &parameters.expand_vector0[expansion_vector_position],
- sizeof(int16_t) * temp_length);
+ parameters.expand_vector0.CopyTo(temp_length, expansion_vector_position,
+ voiced_vector_storage);
} else if (current_lag_index_ == 1) {
+ std::unique_ptr<int16_t[]> temp_0(new int16_t[temp_length]);
+ parameters.expand_vector0.CopyTo(temp_length, expansion_vector_position,
+ temp_0.get());
+ std::unique_ptr<int16_t[]> temp_1(new int16_t[temp_length]);
+ parameters.expand_vector1.CopyTo(temp_length, expansion_vector_position,
+ temp_1.get());
// Mix 3/4 of expand_vector0 with 1/4 of expand_vector1.
- WebRtcSpl_ScaleAndAddVectorsWithRound(
- &parameters.expand_vector0[expansion_vector_position], 3,
- &parameters.expand_vector1[expansion_vector_position], 1, 2,
- voiced_vector_storage, temp_length);
+ WebRtcSpl_ScaleAndAddVectorsWithRound(temp_0.get(), 3, temp_1.get(), 1, 2,
+ voiced_vector_storage, temp_length);
} else if (current_lag_index_ == 2) {
// Mix 1/2 of expand_vector0 with 1/2 of expand_vector1.
assert(expansion_vector_position + temp_length <=
parameters.expand_vector0.Size());
assert(expansion_vector_position + temp_length <=
parameters.expand_vector1.Size());
- WebRtcSpl_ScaleAndAddVectorsWithRound(
- &parameters.expand_vector0[expansion_vector_position], 1,
- &parameters.expand_vector1[expansion_vector_position], 1, 1,
- voiced_vector_storage, temp_length);
+
+ std::unique_ptr<int16_t[]> temp_0(new int16_t[temp_length]);
+ parameters.expand_vector0.CopyTo(temp_length, expansion_vector_position,
+ temp_0.get());
+ std::unique_ptr<int16_t[]> temp_1(new int16_t[temp_length]);
+ parameters.expand_vector1.CopyTo(temp_length, expansion_vector_position,
+ temp_1.get());
+ WebRtcSpl_ScaleAndAddVectorsWithRound(temp_0.get(), 1, temp_1.get(), 1, 1,
+ voiced_vector_storage, temp_length);
}
// Get tapering window parameters. Values are in Q15.
@@ -298,8 +307,7 @@ int Expand::Process(AudioMultiVector* output) {
} else {
assert(output->Size() == current_lag);
}
- memcpy(&(*output)[channel_ix][0], temp_data,
- sizeof(temp_data[0]) * current_lag);
+ (*output)[channel_ix].OverwriteAt(temp_data, current_lag, 0);
}
// Increase call number and cap it.
@@ -326,6 +334,17 @@ void Expand::SetParametersForMergeAfterExpand() {
stop_muting_ = true;
}
+bool Expand::Muted() const {
+ if (first_expand_ || stop_muting_)
+ return false;
+ RTC_DCHECK(channel_parameters_);
+ for (size_t ch = 0; ch < num_channels_; ++ch) {
+ if (channel_parameters_[ch].mute_factor != 0)
+ return false;
+ }
+ return true;
+}
+
size_t Expand::overlap_length() const {
return overlap_length_;
}
@@ -372,19 +391,20 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
size_t fs_mult_lpc_analysis_len = fs_mult * kLpcAnalysisLength;
const size_t signal_length = static_cast<size_t>(256 * fs_mult);
- const int16_t* audio_history =
- &(*sync_buffer_)[0][sync_buffer_->Size() - signal_length];
+
+ const size_t audio_history_position = sync_buffer_->Size() - signal_length;
+ std::unique_ptr<int16_t[]> audio_history(new int16_t[signal_length]);
+ (*sync_buffer_)[0].CopyTo(signal_length, audio_history_position,
+ audio_history.get());
// Initialize.
InitializeForAnExpandPeriod();
// Calculate correlation in downsampled domain (4 kHz sample rate).
- int correlation_scale;
size_t correlation_length = 51; // TODO(hlundin): Legacy bit-exactness.
// If it is decided to break bit-exactness |correlation_length| should be
// initialized to the return value of Correlation().
- Correlation(audio_history, signal_length, correlation_vector,
- &correlation_scale);
+ Correlation(audio_history.get(), signal_length, correlation_vector);
// Find peaks in correlation vector.
DspHelper::PeakDetection(correlation_vector, correlation_length,
@@ -455,7 +475,7 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
&audio_history[signal_length - correlation_length - start_index
- correlation_lags],
correlation_length + start_index + correlation_lags - 1);
- correlation_scale = (31 - WebRtcSpl_NormW32(signal_max * signal_max)) +
+ int correlation_scale = (31 - WebRtcSpl_NormW32(signal_max * signal_max)) +
(31 - WebRtcSpl_NormW32(static_cast<int32_t>(correlation_length))) - 31;
correlation_scale = std::max(0, correlation_scale);
@@ -541,12 +561,14 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
parameters.expand_vector1.Extend(
expansion_length - parameters.expand_vector1.Size());
}
- WebRtcSpl_AffineTransformVector(&parameters.expand_vector1[0],
+ std::unique_ptr<int16_t[]> temp_1(new int16_t[expansion_length]);
+ WebRtcSpl_AffineTransformVector(temp_1.get(),
const_cast<int16_t*>(vector2),
amplitude_ratio,
4096,
13,
expansion_length);
+ parameters.expand_vector1.OverwriteAt(temp_1.get(), expansion_length, 0);
} else {
// Energy change constraint not fulfilled. Only use last vector.
parameters.expand_vector0.Clear();
@@ -582,13 +604,6 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
}
// Calculate the LPC and the gain of the filters.
- // Calculate scale value needed for auto-correlation.
- correlation_scale = WebRtcSpl_MaxAbsValueW16(
- &(audio_history[signal_length - fs_mult_lpc_analysis_len]),
- fs_mult_lpc_analysis_len);
-
- correlation_scale = std::min(16 - WebRtcSpl_NormW32(correlation_scale), 0);
- correlation_scale = std::max(correlation_scale * 2 + 7, 0);
// Calculate kUnvoicedLpcOrder + 1 lags of the auto-correlation function.
size_t temp_index = signal_length - fs_mult_lpc_analysis_len -
@@ -601,11 +616,9 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
memcpy(&temp_signal[kUnvoicedLpcOrder],
&audio_history[temp_index + kUnvoicedLpcOrder],
sizeof(int16_t) * fs_mult_lpc_analysis_len);
- WebRtcSpl_CrossCorrelation(auto_correlation,
- &temp_signal[kUnvoicedLpcOrder],
- &temp_signal[kUnvoicedLpcOrder],
- fs_mult_lpc_analysis_len, kUnvoicedLpcOrder + 1,
- correlation_scale, -1);
+ CrossCorrelationWithAutoShift(
+ &temp_signal[kUnvoicedLpcOrder], &temp_signal[kUnvoicedLpcOrder],
+ fs_mult_lpc_analysis_len, kUnvoicedLpcOrder + 1, -1, auto_correlation);
delete [] temp_signal;
// Verify that variance is positive.
@@ -766,8 +779,7 @@ Expand::ChannelParameters::ChannelParameters()
void Expand::Correlation(const int16_t* input,
size_t input_length,
- int16_t* output,
- int* output_scale) const {
+ int16_t* output) const {
// Set parameters depending on sample rate.
const int16_t* filter_coefficients;
size_t num_coefficients;
@@ -814,13 +826,11 @@ void Expand::Correlation(const int16_t* input,
downsampled_input, norm_shift);
int32_t correlation[kNumCorrelationLags];
- static const int kCorrelationShift = 6;
- WebRtcSpl_CrossCorrelation(
- correlation,
+ CrossCorrelationWithAutoShift(
&downsampled_input[kDownsampledLength - kCorrelationLength],
&downsampled_input[kDownsampledLength - kCorrelationLength
- kCorrelationStartLag],
- kCorrelationLength, kNumCorrelationLags, kCorrelationShift, -1);
+ kCorrelationLength, kNumCorrelationLags, -1, correlation);
// Normalize and move data from 32-bit to 16-bit vector.
int32_t max_correlation = WebRtcSpl_MaxAbsValueW32(correlation,
@@ -829,8 +839,6 @@ void Expand::Correlation(const int16_t* input,
std::max(18 - WebRtcSpl_NormW32(max_correlation), 0));
WebRtcSpl_VectorBitShiftW32ToW16(output, kNumCorrelationLags, correlation,
norm_shift2);
- // Total scale factor (right shifts) of correlation value.
- *output_scale = 2 * norm_shift + kCorrelationShift + norm_shift2;
}
void Expand::UpdateLagIndex() {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h
index 7f61bf3b18c..0feba3693a1 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h
@@ -62,6 +62,10 @@ class Expand {
return channel_parameters_[channel].mute_factor;
}
+ // Returns true if expansion has been faded down to zero amplitude (for all
+ // channels); false otherwise.
+ bool Muted() const;
+
// Accessors and mutators.
virtual size_t overlap_length() const;
size_t max_lag() const { return max_lag_; }
@@ -120,12 +124,10 @@ class Expand {
// Calculate the auto-correlation of |input|, with length |input_length|
// samples. The correlation is calculated from a downsampled version of
- // |input|, and is written to |output|. The scale factor is written to
- // |output_scale|.
+ // |input|, and is written to |output|.
void Correlation(const int16_t* input,
size_t input_length,
- int16_t* output,
- int* output_scale) const;
+ int16_t* output) const;
void UpdateLagIndex();
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc
index 1441704102d..f19487ab17d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc
@@ -93,8 +93,9 @@ class ExpandTest : public ::testing::Test {
ASSERT_TRUE(input_file_.Seek(speech_start_samples));
// Pre-load the sync buffer with speech data.
- ASSERT_TRUE(
- input_file_.Read(sync_buffer_.Size(), &sync_buffer_.Channel(0)[0]));
+ std::unique_ptr<int16_t[]> temp(new int16_t[sync_buffer_.Size()]);
+ ASSERT_TRUE(input_file_.Read(sync_buffer_.Size(), temp.get()));
+ sync_buffer_.Channel(0).OverwriteAt(temp.get(), sync_buffer_.Size(), 0);
ASSERT_EQ(1u, num_channels_) << "Fix: Must populate all channels.";
}
@@ -169,6 +170,37 @@ TEST_F(ExpandTest, CheckOutageStatsAfterReset) {
statistics_.last_outage_duration_ms());
}
+namespace {
+// Runs expand until Muted() returns true. Times out after 1000 calls.
+void ExpandUntilMuted(size_t num_channels, Expand* expand) {
+ EXPECT_FALSE(expand->Muted()) << "Instance is muted from the start";
+ AudioMultiVector output(num_channels);
+ int num_calls = 0;
+ while (!expand->Muted()) {
+ ASSERT_LT(num_calls++, 1000) << "Test timed out";
+ EXPECT_EQ(0, expand->Process(&output));
+ }
+}
+} // namespace
+
+// Verifies that Muted() returns true after a long expand period. Also verifies
+// that Muted() is reset to false after calling Reset(),
+// SetParametersForMergeAfterExpand() and SetParametersForNormalAfterExpand().
+TEST_F(ExpandTest, Muted) {
+ ExpandUntilMuted(num_channels_, &expand_);
+ expand_.Reset();
+ EXPECT_FALSE(expand_.Muted()); // Should be back to unmuted.
+
+ ExpandUntilMuted(num_channels_, &expand_);
+ expand_.SetParametersForMergeAfterExpand();
+ EXPECT_FALSE(expand_.Muted()); // Should be back to unmuted.
+
+ expand_.Reset(); // Must reset in order to start a new expand period.
+ ExpandUntilMuted(num_channels_, &expand_);
+ expand_.SetParametersForNormalAfterExpand();
+ EXPECT_FALSE(expand_.Muted()); // Should be back to unmuted.
+}
+
// TODO(hlundin): Write more tests.
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/include/neteq.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/include/neteq.h
index 89b0c543244..3a9de1d2606 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/include/neteq.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/include/neteq.h
@@ -93,6 +93,7 @@ class NetEq {
BackgroundNoiseMode background_noise_mode;
NetEqPlayoutMode playout_mode;
bool enable_fast_accelerate;
+ bool enable_muted_state = false;
};
enum ReturnCodes {
@@ -161,8 +162,12 @@ class NetEq {
// |num_channels_|, |sample_rate_hz_|, |samples_per_channel_|, and
// |vad_activity_| are updated upon success. If an error is returned, some
// fields may not have been updated.
+ // If muted state is enabled (through Config::enable_muted_state), |muted|
+ // may be set to true after a prolonged expand period. When this happens, the
+ // |data_| in |audio_frame| is not written, but should be interpreted as being
+ // all zeros.
// Returns kOK on success, or kFail in case of an error.
- virtual int GetAudio(AudioFrame* audio_frame) = 0;
+ virtual int GetAudio(AudioFrame* audio_frame, bool* muted) = 0;
// Associates |rtp_payload_type| with |codec| and |codec_name|, and stores the
// information in the codec database. Returns 0 on success, -1 on failure.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc
index 9aed91f7887..299682f60d4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc
@@ -18,6 +18,7 @@
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/cross_correlation.h"
#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
#include "webrtc/modules/audio_coding/neteq/expand.h"
#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
@@ -38,6 +39,8 @@ Merge::Merge(int fs_hz,
assert(num_channels_ > 0);
}
+Merge::~Merge() = default;
+
size_t Merge::Process(int16_t* input, size_t input_length,
int16_t* external_mute_factor_array,
AudioMultiVector* output) {
@@ -60,13 +63,16 @@ size_t Merge::Process(int16_t* input, size_t input_length,
size_t best_correlation_index = 0;
size_t output_length = 0;
+ std::unique_ptr<int16_t[]> input_channel(
+ new int16_t[input_length_per_channel]);
+ std::unique_ptr<int16_t[]> expanded_channel(new int16_t[expanded_length]);
for (size_t channel = 0; channel < num_channels_; ++channel) {
- int16_t* input_channel = &input_vector[channel][0];
- int16_t* expanded_channel = &expanded_[channel][0];
- int16_t expanded_max, input_max;
+ input_vector[channel].CopyTo(
+ input_length_per_channel, 0, input_channel.get());
+ expanded_[channel].CopyTo(expanded_length, 0, expanded_channel.get());
+
int16_t new_mute_factor = SignalScaling(
- input_channel, input_length_per_channel, expanded_channel,
- &expanded_max, &input_max);
+ input_channel.get(), input_length_per_channel, expanded_channel.get());
// Adjust muting factor (product of "main" muting factor and expand muting
// factor).
@@ -84,18 +90,16 @@ size_t Merge::Process(int16_t* input, size_t input_length,
// Downsample, correlate, and find strongest correlation period for the
// master (i.e., first) channel only.
// Downsample to 4kHz sample rate.
- Downsample(input_channel, input_length_per_channel, expanded_channel,
- expanded_length);
+ Downsample(input_channel.get(), input_length_per_channel,
+ expanded_channel.get(), expanded_length);
// Calculate the lag of the strongest correlation period.
best_correlation_index = CorrelateAndPeakSearch(
- expanded_max, input_max, old_length,
- input_length_per_channel, expand_period);
+ old_length, input_length_per_channel, expand_period);
}
- static const int kTempDataSize = 3600;
- int16_t temp_data[kTempDataSize]; // TODO(hlundin) Remove this.
- int16_t* decoded_output = temp_data + best_correlation_index;
+ temp_data_.resize(input_length_per_channel + best_correlation_index);
+ int16_t* decoded_output = temp_data_.data() + best_correlation_index;
// Mute the new decoded data if needed (and unmute it linearly).
// This is the overlapping part of expanded_signal.
@@ -109,7 +113,7 @@ size_t Merge::Process(int16_t* input, size_t input_length,
// and so on.
int increment = 4194 / fs_mult_;
*external_mute_factor =
- static_cast<int16_t>(DspHelper::RampSignal(input_channel,
+ static_cast<int16_t>(DspHelper::RampSignal(input_channel.get(),
interpolation_length,
*external_mute_factor,
increment));
@@ -129,10 +133,10 @@ size_t Merge::Process(int16_t* input, size_t input_length,
int16_t increment =
static_cast<int16_t>(16384 / (interpolation_length + 1)); // In Q14.
int16_t mute_factor = 16384 - increment;
- memmove(temp_data, expanded_channel,
+ memmove(temp_data_.data(), expanded_channel.get(),
sizeof(int16_t) * best_correlation_index);
DspHelper::CrossFade(&expanded_channel[best_correlation_index],
- input_channel, interpolation_length,
+ input_channel.get(), interpolation_length,
&mute_factor, increment, decoded_output);
output_length = best_correlation_index + input_length_per_channel;
@@ -142,8 +146,7 @@ size_t Merge::Process(int16_t* input, size_t input_length,
} else {
assert(output->Size() == output_length);
}
- memcpy(&(*output)[channel][0], temp_data,
- sizeof(temp_data[0]) * output_length);
+ (*output)[channel].OverwriteAt(temp_data_.data(), output_length, 0);
}
// Copy back the first part of the data to |sync_buffer_| and remove it from
@@ -204,29 +207,26 @@ size_t Merge::GetExpandedSignal(size_t* old_length, size_t* expand_period) {
}
int16_t Merge::SignalScaling(const int16_t* input, size_t input_length,
- const int16_t* expanded_signal,
- int16_t* expanded_max, int16_t* input_max) const {
+ const int16_t* expanded_signal) const {
// Adjust muting factor if new vector is more or less of the BGN energy.
const size_t mod_input_length =
std::min(static_cast<size_t>(64 * fs_mult_), input_length);
- *expanded_max = WebRtcSpl_MaxAbsValueW16(expanded_signal, mod_input_length);
- *input_max = WebRtcSpl_MaxAbsValueW16(input, mod_input_length);
-
- // Calculate energy of expanded signal.
- // |log_fs_mult| is log2(fs_mult_), but is not exact for 48000 Hz.
- int log_fs_mult = 30 - WebRtcSpl_NormW32(fs_mult_);
- int expanded_shift = 6 + log_fs_mult
- - WebRtcSpl_NormW32(*expanded_max * *expanded_max);
- expanded_shift = std::max(expanded_shift, 0);
+ const int16_t expanded_max =
+ WebRtcSpl_MaxAbsValueW16(expanded_signal, mod_input_length);
+ int32_t factor = (expanded_max * expanded_max) /
+ (std::numeric_limits<int32_t>::max() /
+ static_cast<int32_t>(mod_input_length));
+ const int expanded_shift = factor == 0 ? 0 : 31 - WebRtcSpl_NormW32(factor);
int32_t energy_expanded = WebRtcSpl_DotProductWithScale(expanded_signal,
expanded_signal,
mod_input_length,
expanded_shift);
// Calculate energy of input signal.
- int input_shift = 6 + log_fs_mult -
- WebRtcSpl_NormW32(*input_max * *input_max);
- input_shift = std::max(input_shift, 0);
+ const int16_t input_max = WebRtcSpl_MaxAbsValueW16(input, mod_input_length);
+ factor = (input_max * input_max) / (std::numeric_limits<int32_t>::max() /
+ static_cast<int32_t>(mod_input_length));
+ const int input_shift = factor == 0 ? 0 : 31 - WebRtcSpl_NormW32(factor);
int32_t energy_input = WebRtcSpl_DotProductWithScale(input, input,
mod_input_length,
input_shift);
@@ -307,22 +307,17 @@ void Merge::Downsample(const int16_t* input, size_t input_length,
}
}
-size_t Merge::CorrelateAndPeakSearch(int16_t expanded_max, int16_t input_max,
- size_t start_position, size_t input_length,
+size_t Merge::CorrelateAndPeakSearch(size_t start_position, size_t input_length,
size_t expand_period) const {
// Calculate correlation without any normalization.
const size_t max_corr_length = kMaxCorrelationLength;
size_t stop_position_downsamp =
std::min(max_corr_length, expand_->max_lag() / (fs_mult_ * 2) + 1);
- int correlation_shift = 0;
- if (expanded_max * input_max > 26843546) {
- correlation_shift = 3;
- }
int32_t correlation[kMaxCorrelationLength];
- WebRtcSpl_CrossCorrelation(correlation, input_downsampled_,
- expanded_downsampled_, kInputDownsampLength,
- stop_position_downsamp, correlation_shift, 1);
+ CrossCorrelationWithAutoShift(input_downsampled_, expanded_downsampled_,
+ kInputDownsampLength, stop_position_downsamp, 1,
+ correlation);
// Normalize correlation to 14 bits and copy to a 16-bit array.
const size_t pad_length = expand_->overlap_length() - 1;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h
index a168502c271..48f09a16727 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h
@@ -37,7 +37,7 @@ class Merge {
size_t num_channels,
Expand* expand,
SyncBuffer* sync_buffer);
- virtual ~Merge() {}
+ virtual ~Merge();
// The main method to produce the audio data. The decoded data is supplied in
// |input|, having |input_length| samples in total for all channels
@@ -69,11 +69,10 @@ class Merge {
// of samples that were taken from the |sync_buffer_|.
size_t GetExpandedSignal(size_t* old_length, size_t* expand_period);
- // Analyzes |input| and |expanded_signal| to find maximum values. Returns
- // a muting factor (Q14) to be used on the new data.
+ // Analyzes |input| and |expanded_signal| and returns muting factor (Q14) to
+ // be used on the new data.
int16_t SignalScaling(const int16_t* input, size_t input_length,
- const int16_t* expanded_signal,
- int16_t* expanded_max, int16_t* input_max) const;
+ const int16_t* expanded_signal) const;
// Downsamples |input| (|input_length| samples) and |expanded_signal| to
// 4 kHz sample rate. The downsampled signals are written to
@@ -84,8 +83,7 @@ class Merge {
// Calculates cross-correlation between |input_downsampled_| and
// |expanded_downsampled_|, and finds the correlation maximum. The maximizing
// lag is returned.
- size_t CorrelateAndPeakSearch(int16_t expanded_max, int16_t input_max,
- size_t start_position, size_t input_length,
+ size_t CorrelateAndPeakSearch(size_t start_position, size_t input_length,
size_t expand_period) const;
const int fs_mult_; // fs_hz_ / 8000.
@@ -95,6 +93,7 @@ class Merge {
int16_t expanded_downsampled_[kExpandDownsampLength];
int16_t input_downsampled_[kInputDownsampLength];
AudioMultiVector expanded_;
+ std::vector<int16_t> temp_data_;
RTC_DISALLOW_COPY_AND_ASSIGN(Merge);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
index 1b4a3c9da5b..60ae0f6501e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
@@ -21,6 +21,7 @@ namespace webrtc {
class MockDecoderDatabase : public DecoderDatabase {
public:
+ MockDecoderDatabase() : DecoderDatabase(nullptr) {}
virtual ~MockDecoderDatabase() { Die(); }
MOCK_METHOD0(Die, void());
MOCK_CONST_METHOD0(Empty,
@@ -59,7 +60,7 @@ class MockDecoderDatabase : public DecoderDatabase {
MOCK_METHOD1(SetActiveCngDecoder,
int(uint8_t rtp_payload_type));
MOCK_METHOD0(GetActiveCngDecoder,
- AudioDecoder*());
+ ComfortNoiseDecoder*());
MOCK_CONST_METHOD1(CheckPayloadTypes,
int(const PacketList& packet_list));
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h
index 6fb85854d77..7ceea70621f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h
@@ -20,8 +20,9 @@ namespace webrtc {
class MockDelayManager : public DelayManager {
public:
MockDelayManager(size_t max_packets_in_buffer,
- DelayPeakDetector* peak_detector)
- : DelayManager(max_packets_in_buffer, peak_detector) {}
+ DelayPeakDetector* peak_detector,
+ const TickTimer* tick_timer)
+ : DelayManager(max_packets_in_buffer, peak_detector, tick_timer) {}
virtual ~MockDelayManager() { Die(); }
MOCK_METHOD0(Die, void());
MOCK_CONST_METHOD0(iat_vector,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h
index fa5cd7ed061..5564fba312c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h
@@ -19,15 +19,16 @@ namespace webrtc {
class MockDelayPeakDetector : public DelayPeakDetector {
public:
+ MockDelayPeakDetector(const TickTimer* tick_timer)
+ : DelayPeakDetector(tick_timer) {}
virtual ~MockDelayPeakDetector() { Die(); }
MOCK_METHOD0(Die, void());
MOCK_METHOD0(Reset, void());
MOCK_METHOD1(SetPacketAudioLength, void(int length_ms));
MOCK_METHOD0(peak_found, bool());
MOCK_CONST_METHOD0(MaxPeakHeight, int());
- MOCK_CONST_METHOD0(MaxPeakPeriod, int());
+ MOCK_CONST_METHOD0(MaxPeakPeriod, uint64_t());
MOCK_METHOD2(Update, bool(int inter_arrival_time, int target_level));
- MOCK_METHOD1(IncrementCounter, void(int inc_ms));
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
index 97e54d83a5e..6bb95901d8c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
@@ -19,8 +19,8 @@ namespace webrtc {
class MockPacketBuffer : public PacketBuffer {
public:
- MockPacketBuffer(size_t max_number_of_packets)
- : PacketBuffer(max_number_of_packets) {}
+ MockPacketBuffer(size_t max_number_of_packets, const TickTimer* tick_timer)
+ : PacketBuffer(max_number_of_packets, tick_timer) {}
virtual ~MockPacketBuffer() { Die(); }
MOCK_METHOD0(Die, void());
MOCK_METHOD0(Flush,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc
index c31dbdc1a3c..2d1ce724cab 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc
@@ -10,21 +10,10 @@
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
+#include <memory>
#include <sstream>
-#include "webrtc/modules/audio_coding/neteq/accelerate.h"
-#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
-#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
-#include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"
-#include "webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h"
-#include "webrtc/modules/audio_coding/neteq/expand.h"
#include "webrtc/modules/audio_coding/neteq/neteq_impl.h"
-#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
-#include "webrtc/modules/audio_coding/neteq/payload_splitter.h"
-#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
-#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
namespace webrtc {
@@ -37,41 +26,16 @@ std::string NetEq::Config::ToString() const {
<< ", max_packets_in_buffer=" << max_packets_in_buffer
<< ", background_noise_mode=" << background_noise_mode
<< ", playout_mode=" << playout_mode
- << ", enable_fast_accelerate=" << enable_fast_accelerate;
+ << ", enable_fast_accelerate="
+ << (enable_fast_accelerate ? " true": "false")
+ << ", enable_muted_state=" << (enable_muted_state ? " true": "false");
return ss.str();
}
// Creates all classes needed and inject them into a new NetEqImpl object.
// Return the new object.
NetEq* NetEq::Create(const NetEq::Config& config) {
- BufferLevelFilter* buffer_level_filter = new BufferLevelFilter;
- DecoderDatabase* decoder_database = new DecoderDatabase;
- DelayPeakDetector* delay_peak_detector = new DelayPeakDetector;
- DelayManager* delay_manager =
- new DelayManager(config.max_packets_in_buffer, delay_peak_detector);
- delay_manager->SetMaximumDelay(config.max_delay_ms);
- DtmfBuffer* dtmf_buffer = new DtmfBuffer(config.sample_rate_hz);
- DtmfToneGenerator* dtmf_tone_generator = new DtmfToneGenerator;
- PacketBuffer* packet_buffer = new PacketBuffer(config.max_packets_in_buffer);
- PayloadSplitter* payload_splitter = new PayloadSplitter;
- TimestampScaler* timestamp_scaler = new TimestampScaler(*decoder_database);
- AccelerateFactory* accelerate_factory = new AccelerateFactory;
- ExpandFactory* expand_factory = new ExpandFactory;
- PreemptiveExpandFactory* preemptive_expand_factory =
- new PreemptiveExpandFactory;
- return new NetEqImpl(config,
- buffer_level_filter,
- decoder_database,
- delay_manager,
- delay_peak_detector,
- dtmf_buffer,
- dtmf_tone_generator,
- packet_buffer,
- payload_splitter,
- timestamp_scaler,
- accelerate_factory,
- expand_factory,
- preemptive_expand_factory);
+ return new NetEqImpl(config, NetEqImpl::Dependencies(config));
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi
index ead9586f5ce..e92567eef5b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi
@@ -51,6 +51,8 @@
'dependencies': [
'<@(neteq_dependencies)',
'<(webrtc_root)/common.gyp:webrtc_common',
+ 'builtin_audio_decoder_factory',
+ 'rent_a_codec',
],
'defines': [
'<@(neteq_defines)',
@@ -73,6 +75,8 @@
'buffer_level_filter.h',
'comfort_noise.cc',
'comfort_noise.h',
+ 'cross_correlation.cc',
+ 'cross_correlation.h',
'decision_logic.cc',
'decision_logic.h',
'decision_logic_fax.cc',
@@ -105,6 +109,8 @@
'statistics_calculator.h',
'normal.cc',
'normal.h',
+ 'packet.cc',
+ 'packet.h',
'packet_buffer.cc',
'packet_buffer.h',
'payload_splitter.cc',
@@ -119,6 +125,8 @@
'rtcp.h',
'sync_buffer.cc',
'sync_buffer.h',
+ 'tick_timer.cc',
+ 'tick_timer.h',
'timestamp_scaler.cc',
'timestamp_scaler.h',
'time_stretch.cc',
@@ -206,19 +214,6 @@
],
}, # neteq_unittest_tools
], # targets
- 'conditions': [
- ['OS=="android"', {
- 'targets': [
- {
- 'target_name': 'audio_decoder_unittests_apk_target',
- 'type': 'none',
- 'dependencies': [
- '<(apk_tests_path):audio_decoder_unittests_apk',
- ],
- },
- ],
- }],
- ],
}], # include_tests
], # conditions
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
index 50c24a3b73a..25fa1a7365c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
@@ -189,7 +189,9 @@ class NetEqExternalVsInternalDecoderTest : public NetEqExternalDecoderUnitTest,
void GetAndVerifyOutput() override {
// Get audio from internal decoder instance.
- EXPECT_EQ(NetEq::kOK, neteq_internal_->GetAudio(&output_internal_));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_internal_->GetAudio(&output_internal_, &muted));
+ ASSERT_FALSE(muted);
EXPECT_EQ(1u, output_internal_.num_channels_);
EXPECT_EQ(static_cast<size_t>(kOutputLengthMs * sample_rate_hz_ / 1000),
output_internal_.samples_per_channel_);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc
index db37e716d66..7f8661bae89 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc
@@ -14,6 +14,7 @@
#include <memory.h> // memset
#include <algorithm>
+#include <vector>
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
@@ -21,6 +22,7 @@
#include "webrtc/base/trace_event.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
+#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
#include "webrtc/modules/audio_coding/neteq/accelerate.h"
#include "webrtc/modules/audio_coding/neteq/background_noise.h"
#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
@@ -42,6 +44,7 @@
#include "webrtc/modules/audio_coding/neteq/post_decode_vad.h"
#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
#include "webrtc/modules/include/module_common_types.h"
@@ -52,33 +55,43 @@
namespace webrtc {
+NetEqImpl::Dependencies::Dependencies(const NetEq::Config& config)
+ : tick_timer(new TickTimer),
+ buffer_level_filter(new BufferLevelFilter),
+ decoder_database(new DecoderDatabase(CreateBuiltinAudioDecoderFactory())),
+ delay_peak_detector(new DelayPeakDetector(tick_timer.get())),
+ delay_manager(new DelayManager(config.max_packets_in_buffer,
+ delay_peak_detector.get(),
+ tick_timer.get())),
+ dtmf_buffer(new DtmfBuffer(config.sample_rate_hz)),
+ dtmf_tone_generator(new DtmfToneGenerator),
+ packet_buffer(
+ new PacketBuffer(config.max_packets_in_buffer, tick_timer.get())),
+ payload_splitter(new PayloadSplitter),
+ timestamp_scaler(new TimestampScaler(*decoder_database)),
+ accelerate_factory(new AccelerateFactory),
+ expand_factory(new ExpandFactory),
+ preemptive_expand_factory(new PreemptiveExpandFactory) {}
+
+NetEqImpl::Dependencies::~Dependencies() = default;
+
NetEqImpl::NetEqImpl(const NetEq::Config& config,
- BufferLevelFilter* buffer_level_filter,
- DecoderDatabase* decoder_database,
- DelayManager* delay_manager,
- DelayPeakDetector* delay_peak_detector,
- DtmfBuffer* dtmf_buffer,
- DtmfToneGenerator* dtmf_tone_generator,
- PacketBuffer* packet_buffer,
- PayloadSplitter* payload_splitter,
- TimestampScaler* timestamp_scaler,
- AccelerateFactory* accelerate_factory,
- ExpandFactory* expand_factory,
- PreemptiveExpandFactory* preemptive_expand_factory,
+ Dependencies&& deps,
bool create_components)
- : buffer_level_filter_(buffer_level_filter),
- decoder_database_(decoder_database),
- delay_manager_(delay_manager),
- delay_peak_detector_(delay_peak_detector),
- dtmf_buffer_(dtmf_buffer),
- dtmf_tone_generator_(dtmf_tone_generator),
- packet_buffer_(packet_buffer),
- payload_splitter_(payload_splitter),
- timestamp_scaler_(timestamp_scaler),
+ : tick_timer_(std::move(deps.tick_timer)),
+ buffer_level_filter_(std::move(deps.buffer_level_filter)),
+ decoder_database_(std::move(deps.decoder_database)),
+ delay_manager_(std::move(deps.delay_manager)),
+ delay_peak_detector_(std::move(deps.delay_peak_detector)),
+ dtmf_buffer_(std::move(deps.dtmf_buffer)),
+ dtmf_tone_generator_(std::move(deps.dtmf_tone_generator)),
+ packet_buffer_(std::move(deps.packet_buffer)),
+ payload_splitter_(std::move(deps.payload_splitter)),
+ timestamp_scaler_(std::move(deps.timestamp_scaler)),
vad_(new PostDecodeVad()),
- expand_factory_(expand_factory),
- accelerate_factory_(accelerate_factory),
- preemptive_expand_factory_(preemptive_expand_factory),
+ expand_factory_(std::move(deps.expand_factory)),
+ accelerate_factory_(std::move(deps.accelerate_factory)),
+ preemptive_expand_factory_(std::move(deps.preemptive_expand_factory)),
last_mode_(kModeNormal),
decoded_buffer_length_(kMaxFrameSize),
decoded_buffer_(new int16_t[decoded_buffer_length_]),
@@ -95,7 +108,8 @@ NetEqImpl::NetEqImpl(const NetEq::Config& config,
background_noise_mode_(config.background_noise_mode),
playout_mode_(config.playout_mode),
enable_fast_accelerate_(config.enable_fast_accelerate),
- nack_enabled_(false) {
+ nack_enabled_(false),
+ enable_muted_state_(config.enable_muted_state) {
LOG(LS_INFO) << "NetEq config: " << config.ToString();
int fs = config.sample_rate_hz;
if (fs != 8000 && fs != 16000 && fs != 32000 && fs != 48000) {
@@ -103,6 +117,7 @@ NetEqImpl::NetEqImpl(const NetEq::Config& config,
"Changing to 8000 Hz.";
fs = 8000;
}
+ delay_manager_->SetMaximumDelay(config.max_delay_ms);
fs_hz_ = fs;
fs_mult_ = fs / 8000;
last_output_sample_rate_hz_ = fs;
@@ -191,10 +206,10 @@ void SetAudioFrameActivityAndType(bool vad_enabled,
}
} // namespace
-int NetEqImpl::GetAudio(AudioFrame* audio_frame) {
+int NetEqImpl::GetAudio(AudioFrame* audio_frame, bool* muted) {
TRACE_EVENT0("webrtc", "NetEqImpl::GetAudio");
rtc::CritScope lock(&crit_sect_);
- int error = GetAudioInternal(audio_frame);
+ int error = GetAudioInternal(audio_frame, muted);
RTC_DCHECK_EQ(
audio_frame->sample_rate_hz_,
rtc::checked_cast<int>(audio_frame->samples_per_channel_ * 100));
@@ -487,6 +502,11 @@ const SyncBuffer* NetEqImpl::sync_buffer_for_test() const {
return sync_buffer_.get();
}
+Operations NetEqImpl::last_operation_for_test() const {
+ rtc::CritScope lock(&crit_sect_);
+ return last_operation_;
+}
+
// Methods below this line are private.
int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
@@ -532,7 +552,8 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
packet->header.numCSRCs = 0;
packet->payload_length = payload.size();
packet->primary = true;
- packet->waiting_time = 0;
+ // Waiting time will be set upon inserting the packet in the buffer.
+ RTC_DCHECK(!packet->waiting_time);
packet->payload = new uint8_t[packet->payload_length];
packet->sync_packet = is_sync_packet;
if (!packet->payload) {
@@ -664,13 +685,15 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
}
}
- // Update bandwidth estimate, if the packet is not sync-packet.
- if (!packet_list.empty() && !packet_list.front()->sync_packet) {
+ // Update bandwidth estimate, if the packet is not sync-packet nor comfort
+ // noise.
+ if (!packet_list.empty() && !packet_list.front()->sync_packet &&
+ !decoder_database_->IsComfortNoise(main_header.payloadType)) {
// The list can be empty here if we got nothing but DTMF payloads.
AudioDecoder* decoder =
decoder_database_->GetDecoder(main_header.payloadType);
assert(decoder); // Should always get a valid object, since we have
- // already checked that the payload types are known.
+ // already checked that the payload types are known.
decoder->IncomingPacket(packet_list.front()->payload,
packet_list.front()->payload_length,
packet_list.front()->header.sequenceNumber,
@@ -728,14 +751,18 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
const RTPHeader* rtp_header = packet_buffer_->NextRtpHeader();
assert(rtp_header);
int payload_type = rtp_header->payloadType;
- AudioDecoder* decoder = decoder_database_->GetDecoder(payload_type);
- assert(decoder); // Payloads are already checked to be valid.
+ size_t channels = 1;
+ if (!decoder_database_->IsComfortNoise(payload_type)) {
+ AudioDecoder* decoder = decoder_database_->GetDecoder(payload_type);
+ assert(decoder); // Payloads are already checked to be valid.
+ channels = decoder->Channels();
+ }
const DecoderDatabase::DecoderInfo* decoder_info =
decoder_database_->GetDecoderInfo(payload_type);
assert(decoder_info);
if (decoder_info->fs_hz != fs_hz_ ||
- decoder->Channels() != algorithm_buffer_->Channels()) {
- SetSampleRateAndChannels(decoder_info->fs_hz, decoder->Channels());
+ channels != algorithm_buffer_->Channels()) {
+ SetSampleRateAndChannels(decoder_info->fs_hz, channels);
}
if (nack_enabled_) {
RTC_DCHECK(nack_);
@@ -783,11 +810,32 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
return 0;
}
-int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame) {
+int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame, bool* muted) {
PacketList packet_list;
DtmfEvent dtmf_event;
Operations operation;
bool play_dtmf;
+ *muted = false;
+ tick_timer_->Increment();
+ stats_.IncreaseCounter(output_size_samples_, fs_hz_);
+
+ // Check for muted state.
+ if (enable_muted_state_ && expand_->Muted() && packet_buffer_->Empty()) {
+ RTC_DCHECK_EQ(last_mode_, kModeExpand);
+ playout_timestamp_ += static_cast<uint32_t>(output_size_samples_);
+ audio_frame->sample_rate_hz_ = fs_hz_;
+ audio_frame->samples_per_channel_ = output_size_samples_;
+ audio_frame->timestamp_ =
+ first_packet_
+ ? 0
+ : timestamp_scaler_->ToExternal(playout_timestamp_) -
+ static_cast<uint32_t>(audio_frame->samples_per_channel_);
+ audio_frame->num_channels_ = sync_buffer_->Channels();
+ stats_.ExpandedNoiseSamples(output_size_samples_);
+ *muted = true;
+ return 0;
+ }
+
int return_value = GetDecision(&operation, &packet_list, &dtmf_event,
&play_dtmf);
if (return_value != 0) {
@@ -806,6 +854,11 @@ int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame) {
vad_->Update(decoded_buffer_.get(), static_cast<size_t>(length), speech_type,
sid_frame_available, fs_hz_);
+ if (sid_frame_available || speech_type == AudioDecoder::kComfortNoise) {
+ // Start a new stopwatch since we are decoding a new CNG packet.
+ generated_noise_stopwatch_ = tick_timer_->GetNewStopwatch();
+ }
+
algorithm_buffer_->Clear();
switch (operation) {
case kNormal: {
@@ -884,6 +937,7 @@ int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame) {
return kInvalidOperation;
}
} // End of switch.
+ last_operation_ = operation;
if (return_value < 0) {
return return_value;
}
@@ -978,6 +1032,12 @@ int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame) {
: timestamp_scaler_->ToExternal(playout_timestamp_) -
static_cast<uint32_t>(audio_frame->samples_per_channel_);
+ if (!(last_mode_ == kModeRfc3389Cng ||
+ last_mode_ == kModeCodecInternalCng ||
+ last_mode_ == kModeExpand)) {
+ generated_noise_stopwatch_.reset();
+ }
+
if (decode_return_value) return decode_return_value;
return return_value;
}
@@ -990,10 +1050,6 @@ int NetEqImpl::GetDecision(Operations* operation,
*play_dtmf = false;
*operation = kUndefined;
- // Increment time counters.
- packet_buffer_->IncrementWaitingTimes();
- stats_.IncreaseCounter(output_size_samples_, fs_hz_);
-
assert(sync_buffer_.get());
uint32_t end_timestamp = sync_buffer_->end_timestamp();
if (!new_codec_) {
@@ -1002,14 +1058,22 @@ int NetEqImpl::GetDecision(Operations* operation,
}
const RTPHeader* header = packet_buffer_->NextRtpHeader();
+ RTC_DCHECK(!generated_noise_stopwatch_ ||
+ generated_noise_stopwatch_->ElapsedTicks() >= 1);
+ uint64_t generated_noise_samples =
+ generated_noise_stopwatch_
+ ? (generated_noise_stopwatch_->ElapsedTicks() - 1) *
+ output_size_samples_ +
+ decision_logic_->noise_fast_forward()
+ : 0;
+
if (decision_logic_->CngRfc3389On() || last_mode_ == kModeRfc3389Cng) {
// Because of timestamp peculiarities, we have to "manually" disallow using
// a CNG packet with the same timestamp as the one that was last played.
// This can happen when using redundancy and will cause the timing to shift.
while (header && decoder_database_->IsComfortNoise(header->payloadType) &&
(end_timestamp >= header->timestamp ||
- end_timestamp + decision_logic_->generated_noise_samples() >
- header->timestamp)) {
+ end_timestamp + generated_noise_samples > header->timestamp)) {
// Don't use this packet, discard it.
if (packet_buffer_->DiscardNextPacket() != PacketBuffer::kOK) {
assert(false); // Must be ok by design.
@@ -1037,7 +1101,7 @@ int NetEqImpl::GetDecision(Operations* operation,
// Check if it is time to play a DTMF event.
if (dtmf_buffer_->GetEvent(
static_cast<uint32_t>(
- end_timestamp + decision_logic_->generated_noise_samples()),
+ end_timestamp + generated_noise_samples),
dtmf_event)) {
*play_dtmf = true;
}
@@ -1045,13 +1109,14 @@ int NetEqImpl::GetDecision(Operations* operation,
// Get instruction.
assert(sync_buffer_.get());
assert(expand_.get());
- *operation = decision_logic_->GetDecision(*sync_buffer_,
- *expand_,
- decoder_frame_length_,
- header,
- last_mode_,
- *play_dtmf,
- &reset_decoder_);
+ generated_noise_samples =
+ generated_noise_stopwatch_
+ ? generated_noise_stopwatch_->ElapsedTicks() * output_size_samples_ +
+ decision_logic_->noise_fast_forward()
+ : 0;
+ *operation = decision_logic_->GetDecision(
+ *sync_buffer_, *expand_, decoder_frame_length_, header, last_mode_,
+ *play_dtmf, generated_noise_samples, &reset_decoder_);
// Check if we already have enough samples in the |sync_buffer_|. If so,
// change decision to normal, unless the decision was merge, accelerate, or
@@ -1124,15 +1189,19 @@ int NetEqImpl::GetDecision(Operations* operation,
// TODO(hlundin): Write test for this.
// Update timestamp.
timestamp_ = end_timestamp;
- if (decision_logic_->generated_noise_samples() > 0 &&
- last_mode_ != kModeDtmf) {
+ const uint64_t generated_noise_samples =
+ generated_noise_stopwatch_
+ ? generated_noise_stopwatch_->ElapsedTicks() *
+ output_size_samples_ +
+ decision_logic_->noise_fast_forward()
+ : 0;
+ if (generated_noise_samples > 0 && last_mode_ != kModeDtmf) {
// Make a jump in timestamp due to the recently played comfort noise.
uint32_t timestamp_jump =
- static_cast<uint32_t>(decision_logic_->generated_noise_samples());
+ static_cast<uint32_t>(generated_noise_samples);
sync_buffer_->IncreaseEndTimestamp(timestamp_jump);
timestamp_ += timestamp_jump;
}
- decision_logic_->set_generated_noise_samples(0);
return 0;
}
case kAccelerate:
@@ -1215,9 +1284,6 @@ int NetEqImpl::GetDecision(Operations* operation,
// We are about to decode and use a non-CNG packet.
decision_logic_->SetCngOff();
}
- // Reset CNG timestamp as a new packet will be delivered.
- // (Also if this is a CNG packet, since playedOutTS is updated.)
- decision_logic_->set_generated_noise_samples(0);
extracted_samples = ExtractPackets(required_samples, packet_list);
if (extracted_samples < 0) {
@@ -1297,7 +1363,7 @@ int NetEqImpl::Decode(PacketList* packet_list, Operations* operation,
decoder->Reset();
// Reset comfort noise decoder.
- AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
+ ComfortNoiseDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
if (cng_decoder)
cng_decoder->Reset();
@@ -1550,6 +1616,12 @@ int NetEqImpl::DoExpand(bool play_dtmf) {
if (!play_dtmf) {
dtmf_tone_generator_->Reset();
}
+
+ if (!generated_noise_stopwatch_) {
+ // Start a new stopwatch since we may be covering for a lost CNG packet.
+ generated_noise_stopwatch_ = tick_timer_->GetNewStopwatch();
+ }
+
return 0;
}
@@ -1920,8 +1992,7 @@ int NetEqImpl::ExtractPackets(size_t required_samples,
return -1;
}
stats_.PacketsDiscarded(discard_count);
- // Store waiting time in ms; packets->waiting_time is in "output blocks".
- stats_.StoreWaitingTime(packet->waiting_time * kOutputSizeMs);
+ stats_.StoreWaitingTime(packet->waiting_time->ElapsedMs());
assert(packet->payload_length > 0);
packet_list->push_back(packet); // Store packet in list.
@@ -1955,7 +2026,7 @@ int NetEqImpl::ExtractPackets(size_t required_samples,
stats_.SecondaryDecodedSamples(packet_duration);
}
}
- } else {
+ } else if (!decoder_database_->IsComfortNoise(packet->header.payloadType)) {
LOG(LS_WARNING) << "Unknown payload type "
<< static_cast<int>(packet->header.payloadType);
assert(false);
@@ -2023,7 +2094,7 @@ void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) {
mute_factor_array_[i] = 16384; // 1.0 in Q14.
}
- AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
+ ComfortNoiseDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
if (cng_decoder)
cng_decoder->Reset();
@@ -2094,11 +2165,9 @@ NetEqImpl::OutputType NetEqImpl::LastOutputType() {
}
void NetEqImpl::CreateDecisionLogic() {
- decision_logic_.reset(DecisionLogic::Create(fs_hz_, output_size_samples_,
- playout_mode_,
- decoder_database_.get(),
- *packet_buffer_.get(),
- delay_manager_.get(),
- buffer_level_filter_.get()));
+ decision_logic_.reset(DecisionLogic::Create(
+ fs_hz_, output_size_samples_, playout_mode_, decoder_database_.get(),
+ *packet_buffer_.get(), delay_manager_.get(), buffer_level_filter_.get(),
+ tick_timer_.get()));
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h
index 75055a7b47f..cc5550411f2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h
@@ -24,6 +24,7 @@
#include "webrtc/modules/audio_coding/neteq/random_vector.h"
#include "webrtc/modules/audio_coding/neteq/rtcp.h"
#include "webrtc/modules/audio_coding/neteq/statistics_calculator.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -65,21 +66,33 @@ class NetEqImpl : public webrtc::NetEq {
kVadPassive
};
- // Creates a new NetEqImpl object. The object will assume ownership of all
- // injected dependencies, and will delete them when done.
+ struct Dependencies {
+ // The constructor populates the Dependencies struct with the default
+ // implementations of the objects. They can all be replaced by the user
+ // before sending the struct to the NetEqImpl constructor. However, there
+ // are dependencies between some of the classes inside the struct, so
+ // swapping out one may make it necessary to re-create another one.
+ explicit Dependencies(const NetEq::Config& config);
+ ~Dependencies();
+
+ std::unique_ptr<TickTimer> tick_timer;
+ std::unique_ptr<BufferLevelFilter> buffer_level_filter;
+ std::unique_ptr<DecoderDatabase> decoder_database;
+ std::unique_ptr<DelayPeakDetector> delay_peak_detector;
+ std::unique_ptr<DelayManager> delay_manager;
+ std::unique_ptr<DtmfBuffer> dtmf_buffer;
+ std::unique_ptr<DtmfToneGenerator> dtmf_tone_generator;
+ std::unique_ptr<PacketBuffer> packet_buffer;
+ std::unique_ptr<PayloadSplitter> payload_splitter;
+ std::unique_ptr<TimestampScaler> timestamp_scaler;
+ std::unique_ptr<AccelerateFactory> accelerate_factory;
+ std::unique_ptr<ExpandFactory> expand_factory;
+ std::unique_ptr<PreemptiveExpandFactory> preemptive_expand_factory;
+ };
+
+ // Creates a new NetEqImpl object.
NetEqImpl(const NetEq::Config& config,
- BufferLevelFilter* buffer_level_filter,
- DecoderDatabase* decoder_database,
- DelayManager* delay_manager,
- DelayPeakDetector* delay_peak_detector,
- DtmfBuffer* dtmf_buffer,
- DtmfToneGenerator* dtmf_tone_generator,
- PacketBuffer* packet_buffer,
- PayloadSplitter* payload_splitter,
- TimestampScaler* timestamp_scaler,
- AccelerateFactory* accelerate_factory,
- ExpandFactory* expand_factory,
- PreemptiveExpandFactory* preemptive_expand_factory,
+ Dependencies&& deps,
bool create_components = true);
~NetEqImpl() override;
@@ -104,7 +117,7 @@ class NetEqImpl : public webrtc::NetEq {
int InsertSyncPacket(const WebRtcRTPHeader& rtp_header,
uint32_t receive_timestamp) override;
- int GetAudio(AudioFrame* audio_frame) override;
+ int GetAudio(AudioFrame* audio_frame, bool* muted) override;
int RegisterPayloadType(NetEqDecoder codec,
const std::string& codec_name,
@@ -191,12 +204,15 @@ class NetEqImpl : public webrtc::NetEq {
// This accessor method is only intended for testing purposes.
const SyncBuffer* sync_buffer_for_test() const;
+ Operations last_operation_for_test() const;
protected:
static const int kOutputSizeMs = 10;
- static const size_t kMaxFrameSize = 2880; // 60 ms @ 48 kHz.
+ static const size_t kMaxFrameSize = 5760; // 120 ms @ 48 kHz.
// TODO(hlundin): Provide a better value for kSyncBufferSize.
- static const size_t kSyncBufferSize = 2 * kMaxFrameSize;
+ // Current value is kMaxFrameSize + 60 ms * 48 kHz, which is enough for
+ // calculating correlations of current frame against history.
+ static const size_t kSyncBufferSize = kMaxFrameSize + 60 * 48;
// Inserts a new packet into NetEq. This is used by the InsertPacket method
// above. Returns 0 on success, otherwise an error code.
@@ -209,7 +225,7 @@ class NetEqImpl : public webrtc::NetEq {
// Delivers 10 ms of audio data. The data is written to |audio_frame|.
// Returns 0 on success, otherwise an error code.
- int GetAudioInternal(AudioFrame* audio_frame)
+ int GetAudioInternal(AudioFrame* audio_frame, bool* muted)
EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Provides a decision to the GetAudioInternal method. The decision what to
@@ -328,6 +344,7 @@ class NetEqImpl : public webrtc::NetEq {
virtual void CreateDecisionLogic() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
rtc::CriticalSection crit_sect_;
+ const std::unique_ptr<TickTimer> tick_timer_ GUARDED_BY(crit_sect_);
const std::unique_ptr<BufferLevelFilter> buffer_level_filter_
GUARDED_BY(crit_sect_);
const std::unique_ptr<DecoderDatabase> decoder_database_
@@ -369,6 +386,7 @@ class NetEqImpl : public webrtc::NetEq {
size_t output_size_samples_ GUARDED_BY(crit_sect_);
size_t decoder_frame_length_ GUARDED_BY(crit_sect_);
Modes last_mode_ GUARDED_BY(crit_sect_);
+ Operations last_operation_ GUARDED_BY(crit_sect_);
std::unique_ptr<int16_t[]> mute_factor_array_ GUARDED_BY(crit_sect_);
size_t decoded_buffer_length_ GUARDED_BY(crit_sect_);
std::unique_ptr<int16_t[]> decoded_buffer_ GUARDED_BY(crit_sect_);
@@ -387,8 +405,11 @@ class NetEqImpl : public webrtc::NetEq {
bool enable_fast_accelerate_ GUARDED_BY(crit_sect_);
std::unique_ptr<Nack> nack_ GUARDED_BY(crit_sect_);
bool nack_enabled_ GUARDED_BY(crit_sect_);
+ const bool enable_muted_state_ GUARDED_BY(crit_sect_);
AudioFrame::VADActivity last_vad_activity_ GUARDED_BY(crit_sect_) =
AudioFrame::kVadPassive;
+ std::unique_ptr<TickTimer::Stopwatch> generated_noise_stopwatch_
+ GUARDED_BY(crit_sect_);
private:
RTC_DISALLOW_COPY_AND_ASSIGN(NetEqImpl);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
index 561c0459bfb..43db87f4fa7 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
#include "webrtc/modules/audio_coding/neteq/neteq_impl.h"
@@ -54,111 +56,82 @@ int DeletePacketsAndReturnOk(PacketList* packet_list) {
class NetEqImplTest : public ::testing::Test {
protected:
- NetEqImplTest()
- : neteq_(NULL),
- config_(),
- mock_buffer_level_filter_(NULL),
- buffer_level_filter_(NULL),
- use_mock_buffer_level_filter_(true),
- mock_decoder_database_(NULL),
- decoder_database_(NULL),
- use_mock_decoder_database_(true),
- mock_delay_peak_detector_(NULL),
- delay_peak_detector_(NULL),
- use_mock_delay_peak_detector_(true),
- mock_delay_manager_(NULL),
- delay_manager_(NULL),
- use_mock_delay_manager_(true),
- mock_dtmf_buffer_(NULL),
- dtmf_buffer_(NULL),
- use_mock_dtmf_buffer_(true),
- mock_dtmf_tone_generator_(NULL),
- dtmf_tone_generator_(NULL),
- use_mock_dtmf_tone_generator_(true),
- mock_packet_buffer_(NULL),
- packet_buffer_(NULL),
- use_mock_packet_buffer_(true),
- mock_payload_splitter_(NULL),
- payload_splitter_(NULL),
- use_mock_payload_splitter_(true),
- timestamp_scaler_(NULL) {
- config_.sample_rate_hz = 8000;
- }
+ NetEqImplTest() { config_.sample_rate_hz = 8000; }
void CreateInstance() {
+ NetEqImpl::Dependencies deps(config_);
+
+ // Get a local pointer to NetEq's TickTimer object.
+ tick_timer_ = deps.tick_timer.get();
+
if (use_mock_buffer_level_filter_) {
- mock_buffer_level_filter_ = new MockBufferLevelFilter;
- buffer_level_filter_ = mock_buffer_level_filter_;
- } else {
- buffer_level_filter_ = new BufferLevelFilter;
+ std::unique_ptr<MockBufferLevelFilter> mock(new MockBufferLevelFilter);
+ mock_buffer_level_filter_ = mock.get();
+ deps.buffer_level_filter = std::move(mock);
}
+ buffer_level_filter_ = deps.buffer_level_filter.get();
+
if (use_mock_decoder_database_) {
- mock_decoder_database_ = new MockDecoderDatabase;
+ std::unique_ptr<MockDecoderDatabase> mock(new MockDecoderDatabase);
+ mock_decoder_database_ = mock.get();
EXPECT_CALL(*mock_decoder_database_, GetActiveCngDecoder())
.WillOnce(ReturnNull());
- decoder_database_ = mock_decoder_database_;
- } else {
- decoder_database_ = new DecoderDatabase;
+ deps.decoder_database = std::move(mock);
}
+ decoder_database_ = deps.decoder_database.get();
+
if (use_mock_delay_peak_detector_) {
- mock_delay_peak_detector_ = new MockDelayPeakDetector;
+ std::unique_ptr<MockDelayPeakDetector> mock(
+ new MockDelayPeakDetector(tick_timer_));
+ mock_delay_peak_detector_ = mock.get();
EXPECT_CALL(*mock_delay_peak_detector_, Reset()).Times(1);
- delay_peak_detector_ = mock_delay_peak_detector_;
- } else {
- delay_peak_detector_ = new DelayPeakDetector;
+ deps.delay_peak_detector = std::move(mock);
}
+ delay_peak_detector_ = deps.delay_peak_detector.get();
+
if (use_mock_delay_manager_) {
- mock_delay_manager_ = new MockDelayManager(config_.max_packets_in_buffer,
- delay_peak_detector_);
+ std::unique_ptr<MockDelayManager> mock(new MockDelayManager(
+ config_.max_packets_in_buffer, delay_peak_detector_, tick_timer_));
+ mock_delay_manager_ = mock.get();
EXPECT_CALL(*mock_delay_manager_, set_streaming_mode(false)).Times(1);
- delay_manager_ = mock_delay_manager_;
- } else {
- delay_manager_ =
- new DelayManager(config_.max_packets_in_buffer, delay_peak_detector_);
+ deps.delay_manager = std::move(mock);
}
+ delay_manager_ = deps.delay_manager.get();
+
if (use_mock_dtmf_buffer_) {
- mock_dtmf_buffer_ = new MockDtmfBuffer(config_.sample_rate_hz);
- dtmf_buffer_ = mock_dtmf_buffer_;
- } else {
- dtmf_buffer_ = new DtmfBuffer(config_.sample_rate_hz);
+ std::unique_ptr<MockDtmfBuffer> mock(
+ new MockDtmfBuffer(config_.sample_rate_hz));
+ mock_dtmf_buffer_ = mock.get();
+ deps.dtmf_buffer = std::move(mock);
}
+ dtmf_buffer_ = deps.dtmf_buffer.get();
+
if (use_mock_dtmf_tone_generator_) {
- mock_dtmf_tone_generator_ = new MockDtmfToneGenerator;
- dtmf_tone_generator_ = mock_dtmf_tone_generator_;
- } else {
- dtmf_tone_generator_ = new DtmfToneGenerator;
+ std::unique_ptr<MockDtmfToneGenerator> mock(new MockDtmfToneGenerator);
+ mock_dtmf_tone_generator_ = mock.get();
+ deps.dtmf_tone_generator = std::move(mock);
}
+ dtmf_tone_generator_ = deps.dtmf_tone_generator.get();
+
if (use_mock_packet_buffer_) {
- mock_packet_buffer_ = new MockPacketBuffer(config_.max_packets_in_buffer);
- packet_buffer_ = mock_packet_buffer_;
- } else {
- packet_buffer_ = new PacketBuffer(config_.max_packets_in_buffer);
+ std::unique_ptr<MockPacketBuffer> mock(
+ new MockPacketBuffer(config_.max_packets_in_buffer, tick_timer_));
+ mock_packet_buffer_ = mock.get();
+ deps.packet_buffer = std::move(mock);
}
+ packet_buffer_ = deps.packet_buffer.get();
+
if (use_mock_payload_splitter_) {
- mock_payload_splitter_ = new MockPayloadSplitter;
- payload_splitter_ = mock_payload_splitter_;
- } else {
- payload_splitter_ = new PayloadSplitter;
+ std::unique_ptr<MockPayloadSplitter> mock(new MockPayloadSplitter);
+ mock_payload_splitter_ = mock.get();
+ deps.payload_splitter = std::move(mock);
}
- timestamp_scaler_ = new TimestampScaler(*decoder_database_);
- AccelerateFactory* accelerate_factory = new AccelerateFactory;
- ExpandFactory* expand_factory = new ExpandFactory;
- PreemptiveExpandFactory* preemptive_expand_factory =
- new PreemptiveExpandFactory;
-
- neteq_ = new NetEqImpl(config_,
- buffer_level_filter_,
- decoder_database_,
- delay_manager_,
- delay_peak_detector_,
- dtmf_buffer_,
- dtmf_tone_generator_,
- packet_buffer_,
- payload_splitter_,
- timestamp_scaler_,
- accelerate_factory,
- expand_factory,
- preemptive_expand_factory);
+ payload_splitter_ = deps.payload_splitter.get();
+
+ deps.timestamp_scaler = std::unique_ptr<TimestampScaler>(
+ new TimestampScaler(*deps.decoder_database.get()));
+
+ neteq_.reset(new NetEqImpl(config_, std::move(deps)));
ASSERT_TRUE(neteq_ != NULL);
}
@@ -196,36 +169,35 @@ class NetEqImplTest : public ::testing::Test {
if (use_mock_packet_buffer_) {
EXPECT_CALL(*mock_packet_buffer_, Die()).Times(1);
}
- delete neteq_;
}
- NetEqImpl* neteq_;
+ std::unique_ptr<NetEqImpl> neteq_;
NetEq::Config config_;
- MockBufferLevelFilter* mock_buffer_level_filter_;
- BufferLevelFilter* buffer_level_filter_;
- bool use_mock_buffer_level_filter_;
- MockDecoderDatabase* mock_decoder_database_;
- DecoderDatabase* decoder_database_;
- bool use_mock_decoder_database_;
- MockDelayPeakDetector* mock_delay_peak_detector_;
- DelayPeakDetector* delay_peak_detector_;
- bool use_mock_delay_peak_detector_;
- MockDelayManager* mock_delay_manager_;
- DelayManager* delay_manager_;
- bool use_mock_delay_manager_;
- MockDtmfBuffer* mock_dtmf_buffer_;
- DtmfBuffer* dtmf_buffer_;
- bool use_mock_dtmf_buffer_;
- MockDtmfToneGenerator* mock_dtmf_tone_generator_;
- DtmfToneGenerator* dtmf_tone_generator_;
- bool use_mock_dtmf_tone_generator_;
- MockPacketBuffer* mock_packet_buffer_;
- PacketBuffer* packet_buffer_;
- bool use_mock_packet_buffer_;
- MockPayloadSplitter* mock_payload_splitter_;
- PayloadSplitter* payload_splitter_;
- bool use_mock_payload_splitter_;
- TimestampScaler* timestamp_scaler_;
+ TickTimer* tick_timer_ = nullptr;
+ MockBufferLevelFilter* mock_buffer_level_filter_ = nullptr;
+ BufferLevelFilter* buffer_level_filter_ = nullptr;
+ bool use_mock_buffer_level_filter_ = true;
+ MockDecoderDatabase* mock_decoder_database_ = nullptr;
+ DecoderDatabase* decoder_database_ = nullptr;
+ bool use_mock_decoder_database_ = true;
+ MockDelayPeakDetector* mock_delay_peak_detector_ = nullptr;
+ DelayPeakDetector* delay_peak_detector_ = nullptr;
+ bool use_mock_delay_peak_detector_ = true;
+ MockDelayManager* mock_delay_manager_ = nullptr;
+ DelayManager* delay_manager_ = nullptr;
+ bool use_mock_delay_manager_ = true;
+ MockDtmfBuffer* mock_dtmf_buffer_ = nullptr;
+ DtmfBuffer* dtmf_buffer_ = nullptr;
+ bool use_mock_dtmf_buffer_ = true;
+ MockDtmfToneGenerator* mock_dtmf_tone_generator_ = nullptr;
+ DtmfToneGenerator* dtmf_tone_generator_ = nullptr;
+ bool use_mock_dtmf_tone_generator_ = true;
+ MockPacketBuffer* mock_packet_buffer_ = nullptr;
+ PacketBuffer* packet_buffer_ = nullptr;
+ bool use_mock_packet_buffer_ = true;
+ MockPayloadSplitter* mock_payload_splitter_ = nullptr;
+ PayloadSplitter* payload_splitter_ = nullptr;
+ bool use_mock_payload_splitter_ = true;
};
@@ -301,8 +273,8 @@ TEST_F(NetEqImplTest, InsertPacket) {
.WillRepeatedly(Return(&mock_decoder));
EXPECT_CALL(*mock_decoder_database_, IsComfortNoise(kPayloadType))
.WillRepeatedly(Return(false)); // This is not CNG.
- DecoderDatabase::DecoderInfo info;
- info.codec_type = NetEqDecoder::kDecoderPCMu;
+ DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderPCMu, "", 8000,
+ nullptr);
EXPECT_CALL(*mock_decoder_database_, GetDecoderInfo(kPayloadType))
.WillRepeatedly(Return(&info));
@@ -356,6 +328,9 @@ TEST_F(NetEqImplTest, InsertPacket) {
}
// Expectations for payload splitter.
+ EXPECT_CALL(*mock_payload_splitter_, SplitFec(_, _))
+ .Times(2)
+ .WillRepeatedly(Return(PayloadSplitter::kOK));
EXPECT_CALL(*mock_payload_splitter_, SplitAudio(_, _))
.Times(2)
.WillRepeatedly(Return(PayloadSplitter::kOK));
@@ -466,7 +441,9 @@ TEST_F(NetEqImplTest, VerifyTimestampPropagation) {
// Pull audio once.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
AudioFrame output;
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
+ ASSERT_FALSE(muted);
ASSERT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);
@@ -521,6 +498,8 @@ TEST_F(NetEqImplTest, ReorderedPacket) {
EXPECT_CALL(mock_decoder, Channels()).WillRepeatedly(Return(1));
EXPECT_CALL(mock_decoder, IncomingPacket(_, kPayloadLengthBytes, _, _, _))
.WillRepeatedly(Return(0));
+ EXPECT_CALL(mock_decoder, PacketDuration(_, kPayloadLengthBytes))
+ .WillRepeatedly(Return(kPayloadLengthSamples));
int16_t dummy_output[kPayloadLengthSamples] = {0};
// The below expectation will make the mock decoder write
// |kPayloadLengthSamples| zeros to the output array, and mark it as speech.
@@ -541,7 +520,8 @@ TEST_F(NetEqImplTest, ReorderedPacket) {
// Pull audio once.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
AudioFrame output;
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);
@@ -569,7 +549,7 @@ TEST_F(NetEqImplTest, ReorderedPacket) {
Return(kPayloadLengthSamples)));
// Pull audio once.
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);
@@ -609,7 +589,8 @@ TEST_F(NetEqImplTest, FirstPacketUnknown) {
// Pull audio once.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
AudioFrame output;
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
ASSERT_LE(output.samples_per_channel_, kMaxOutputSize);
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
@@ -630,7 +611,7 @@ TEST_F(NetEqImplTest, FirstPacketUnknown) {
// Pull audio repeatedly and make sure we get normal output, that is not PLC.
for (size_t i = 0; i < 3; ++i) {
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
ASSERT_LE(output.samples_per_channel_, kMaxOutputSize);
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
@@ -734,7 +715,8 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
50 * kSampleRateKhz, 10 * kSampleRateKhz
};
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
rtc::Optional<uint32_t> last_timestamp = neteq_->GetPlayoutTimestamp();
ASSERT_TRUE(last_timestamp);
@@ -756,7 +738,7 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
ASSERT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(expected_type[i - 1], output.speech_type_);
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
SCOPED_TRACE("");
verify_timestamp(neteq_->GetPlayoutTimestamp(), i);
}
@@ -772,7 +754,7 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
ASSERT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(expected_type[i - 1], output.speech_type_);
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
SCOPED_TRACE("");
verify_timestamp(neteq_->GetPlayoutTimestamp(), i);
}
@@ -786,7 +768,7 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
TEST_F(NetEqImplTest, UnsupportedDecoder) {
UseNoMocks();
CreateInstance();
- static const size_t kNetEqMaxFrameSize = 2880; // 60 ms @ 48 kHz.
+ static const size_t kNetEqMaxFrameSize = 5760; // 120 ms @ 48 kHz.
static const size_t kChannels = 2;
const uint8_t kPayloadType = 17; // Just an arbitrary number.
@@ -796,7 +778,7 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
const size_t kPayloadLengthSamples =
static_cast<size_t>(10 * kSampleRateHz / 1000); // 10 ms.
const size_t kPayloadLengthBytes = 1;
- uint8_t payload[kPayloadLengthBytes]= {0};
+ uint8_t payload[kPayloadLengthBytes] = {0};
int16_t dummy_output[kPayloadLengthSamples * kChannels] = {0};
WebRtcRTPHeader rtp_header;
rtp_header.header.payloadType = kPayloadType;
@@ -806,11 +788,15 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
class MockAudioDecoder : public AudioDecoder {
public:
- void Reset() override {}
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
+ void Reset() /* override */ {}
MOCK_CONST_METHOD2(PacketDuration, int(const uint8_t*, size_t));
MOCK_METHOD5(DecodeInternal, int(const uint8_t*, size_t, int, int16_t*,
SpeechType*));
- size_t Channels() const override { return kChannels; }
+ size_t Channels() const /* override */ { return kChannels; }
} decoder_;
const uint8_t kFirstPayloadValue = 1;
@@ -860,9 +846,10 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
neteq_->InsertPacket(rtp_header, payload, kReceiveTime));
AudioFrame output;
+ bool muted;
// First call to GetAudio will try to decode the "faulty" packet.
// Expect kFail return value...
- EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&output, &muted));
// ... and kOtherDecoderError error code.
EXPECT_EQ(NetEq::kOtherDecoderError, neteq_->LastError());
// Output size and number of channels should be correct.
@@ -872,7 +859,7 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
// Second call to GetAudio will decode the packet that is ok. No errors are
// expected.
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(kExpectedOutputSize, output.samples_per_channel_ * kChannels);
EXPECT_EQ(kChannels, output.num_channels_);
}
@@ -965,7 +952,8 @@ TEST_F(NetEqImplTest, DecodedPayloadTooShort) {
// Pull audio once.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
AudioFrame output;
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);
@@ -1057,13 +1045,14 @@ TEST_F(NetEqImplTest, DecodingError) {
// Pull audio.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
AudioFrame output;
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);
// Pull audio again. Decoder fails.
- EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(NetEq::kDecoderErrorCode, neteq_->LastError());
EXPECT_EQ(kDecoderErrorCode, neteq_->LastDecoderError());
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
@@ -1072,13 +1061,13 @@ TEST_F(NetEqImplTest, DecodingError) {
// returned.
// Pull audio again, should continue an expansion.
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kPLC, output.speech_type_);
// Pull audio again, should behave normal.
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);
@@ -1166,13 +1155,14 @@ TEST_F(NetEqImplTest, DecodingErrorDuringInternalCng) {
// Pull audio.
const size_t kMaxOutputSize = static_cast<size_t>(10 * kSampleRateHz / 1000);
AudioFrame output;
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kCNG, output.speech_type_);
// Pull audio again. Decoder fails.
- EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(NetEq::kDecoderErrorCode, neteq_->LastError());
EXPECT_EQ(kDecoderErrorCode, neteq_->LastDecoderError());
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
@@ -1181,7 +1171,7 @@ TEST_F(NetEqImplTest, DecodingErrorDuringInternalCng) {
// returned.
// Pull audio again, should resume codec CNG.
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output));
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
EXPECT_EQ(kMaxOutputSize, output.samples_per_channel_);
EXPECT_EQ(1u, output.num_channels_);
EXPECT_EQ(AudioFrame::kCNG, output.speech_type_);
@@ -1198,4 +1188,235 @@ TEST_F(NetEqImplTest, InitialLastOutputSampleRate) {
EXPECT_EQ(48000, neteq_->last_output_sample_rate_hz());
}
+TEST_F(NetEqImplTest, TickTimerIncrement) {
+ UseNoMocks();
+ CreateInstance();
+ ASSERT_TRUE(tick_timer_);
+ EXPECT_EQ(0u, tick_timer_->ticks());
+ AudioFrame output;
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
+ EXPECT_EQ(1u, tick_timer_->ticks());
+}
+
+class Decoder120ms : public AudioDecoder {
+ public:
+ Decoder120ms(SpeechType speech_type)
+ : next_value_(1),
+ speech_type_(speech_type) {}
+
+ int DecodeInternal(const uint8_t* encoded,
+ size_t encoded_len,
+ int sample_rate_hz,
+ int16_t* decoded,
+ SpeechType* speech_type) override {
+ size_t decoded_len =
+ rtc::CheckedDivExact(sample_rate_hz, 1000) * 120 * Channels();
+ for (size_t i = 0; i < decoded_len; ++i) {
+ decoded[i] = next_value_++;
+ }
+ *speech_type = speech_type_;
+ return decoded_len;
+ }
+
+ void Reset() override { next_value_ = 1; }
+ size_t Channels() const override { return 2; }
+
+ private:
+ int16_t next_value_;
+ SpeechType speech_type_;
+};
+
+class NetEqImplTest120ms : public NetEqImplTest {
+ protected:
+ NetEqImplTest120ms() : NetEqImplTest() {}
+ virtual ~NetEqImplTest120ms() {}
+
+ void CreateInstanceNoMocks() {
+ UseNoMocks();
+ CreateInstance();
+ }
+
+ void CreateInstanceWithDelayManagerMock() {
+ UseNoMocks();
+ use_mock_delay_manager_ = true;
+ CreateInstance();
+ }
+
+ uint32_t timestamp_diff_between_packets() const {
+ return rtc::CheckedDivExact(kSamplingFreq_, 1000u) * 120;
+ }
+
+ uint32_t first_timestamp() const { return 10u; }
+
+ void GetFirstPacket() {
+ bool muted;
+ for (int i = 0; i < 12; i++) {
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_FALSE(muted);
+ }
+ }
+
+ void InsertPacket(uint32_t timestamp) {
+ WebRtcRTPHeader rtp_header;
+ rtp_header.header.payloadType = kPayloadType;
+ rtp_header.header.sequenceNumber = sequence_number_;
+ rtp_header.header.timestamp = timestamp;
+ rtp_header.header.ssrc = 15;
+ const size_t kPayloadLengthBytes = 1; // This can be arbitrary.
+ uint8_t payload[kPayloadLengthBytes] = {0};
+ EXPECT_EQ(NetEq::kOK, neteq_->InsertPacket(rtp_header, payload, 10));
+ sequence_number_++;
+ }
+
+ void Register120msCodec(AudioDecoder::SpeechType speech_type) {
+ decoder_.reset(new Decoder120ms(speech_type));
+ ASSERT_EQ(2u, decoder_->Channels());
+ EXPECT_EQ(NetEq::kOK, neteq_->RegisterExternalDecoder(
+ decoder_.get(), NetEqDecoder::kDecoderOpus_2ch,
+ "120ms codec", kPayloadType, kSamplingFreq_));
+ }
+
+ std::unique_ptr<Decoder120ms> decoder_;
+ AudioFrame output_;
+ const uint32_t kPayloadType = 17;
+ const uint32_t kSamplingFreq_ = 48000;
+ uint16_t sequence_number_ = 1;
+};
+
+TEST_F(NetEqImplTest120ms, AudioRepetition) {
+ config_.playout_mode = kPlayoutFax;
+ CreateInstanceNoMocks();
+ Register120msCodec(AudioDecoder::kSpeech);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kAudioRepetition, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, AlternativePlc) {
+ config_.playout_mode = kPlayoutOff;
+ CreateInstanceNoMocks();
+ Register120msCodec(AudioDecoder::kSpeech);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kAlternativePlc, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, CodecInternalCng) {
+ CreateInstanceNoMocks();
+ Register120msCodec(AudioDecoder::kComfortNoise);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kCodecInternalCng, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, Normal) {
+ CreateInstanceNoMocks();
+ Register120msCodec(AudioDecoder::kSpeech);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+
+ EXPECT_EQ(kNormal, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, Merge) {
+ CreateInstanceWithDelayManagerMock();
+
+ Register120msCodec(AudioDecoder::kSpeech);
+ InsertPacket(first_timestamp());
+
+ GetFirstPacket();
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+
+ InsertPacket(first_timestamp() + 2 * timestamp_diff_between_packets());
+
+ // Delay manager reports a target level which should cause a Merge.
+ EXPECT_CALL(*mock_delay_manager_, TargetLevel()).WillOnce(Return(-10));
+
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kMerge, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, Expand) {
+ CreateInstanceNoMocks();
+ Register120msCodec(AudioDecoder::kSpeech);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kExpand, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, FastAccelerate) {
+ CreateInstanceWithDelayManagerMock();
+ Register120msCodec(AudioDecoder::kSpeech);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+ InsertPacket(first_timestamp() + timestamp_diff_between_packets());
+
+ // Delay manager report buffer limit which should cause a FastAccelerate.
+ EXPECT_CALL(*mock_delay_manager_, BufferLimits(_, _))
+ .Times(1)
+ .WillOnce(DoAll(SetArgPointee<0>(0), SetArgPointee<1>(0)));
+
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kFastAccelerate, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, PreemptiveExpand) {
+ CreateInstanceWithDelayManagerMock();
+ Register120msCodec(AudioDecoder::kSpeech);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+
+ InsertPacket(first_timestamp() + timestamp_diff_between_packets());
+
+ // Delay manager report buffer limit which should cause a PreemptiveExpand.
+ EXPECT_CALL(*mock_delay_manager_, BufferLimits(_, _))
+ .Times(1)
+ .WillOnce(DoAll(SetArgPointee<0>(100), SetArgPointee<1>(100)));
+
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kPreemptiveExpand, neteq_->last_operation_for_test());
+}
+
+TEST_F(NetEqImplTest120ms, Accelerate) {
+ CreateInstanceWithDelayManagerMock();
+ Register120msCodec(AudioDecoder::kSpeech);
+
+ InsertPacket(first_timestamp());
+ GetFirstPacket();
+
+ InsertPacket(first_timestamp() + timestamp_diff_between_packets());
+
+ // Delay manager report buffer limit which should cause a Accelerate.
+ EXPECT_CALL(*mock_delay_manager_, BufferLimits(_, _))
+ .Times(1)
+ .WillOnce(DoAll(SetArgPointee<0>(1), SetArgPointee<1>(2)));
+
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_, &muted));
+ EXPECT_EQ(kAccelerate, neteq_->last_operation_for_test());
+}
+
}// namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
index 770ebd57835..1a77abcd505 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_network_stats_unittest.cc
@@ -24,31 +24,36 @@ using ::testing::Return;
class MockAudioDecoder final : public AudioDecoder {
public:
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
static const int kPacketDuration = 960; // 48 kHz * 20 ms
explicit MockAudioDecoder(size_t num_channels)
: num_channels_(num_channels), fec_enabled_(false) {
}
- ~MockAudioDecoder() override { Die(); }
+ ~MockAudioDecoder() /* override */ { Die(); }
MOCK_METHOD0(Die, void());
MOCK_METHOD0(Reset, void());
int PacketDuration(const uint8_t* encoded,
- size_t encoded_len) const override {
+ size_t encoded_len) const /* override */ {
return kPacketDuration;
}
int PacketDurationRedundant(const uint8_t* encoded,
- size_t encoded_len) const override {
+ size_t encoded_len) const /* override */ {
return kPacketDuration;
}
- bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const override {
+ bool PacketHasFec(
+ const uint8_t* encoded, size_t encoded_len) const /* override */ {
return fec_enabled_;
}
- size_t Channels() const override { return num_channels_; }
+ size_t Channels() const /* override */ { return num_channels_; }
void set_fec_enabled(bool enable_fec) { fec_enabled_ = enable_fec; }
@@ -60,7 +65,7 @@ class MockAudioDecoder final : public AudioDecoder {
size_t encoded_len,
int /*sample_rate_hz*/,
int16_t* decoded,
- SpeechType* speech_type) override {
+ SpeechType* speech_type) /* override */ {
*speech_type = kSpeech;
memset(decoded, 0, sizeof(int16_t) * kPacketDuration * Channels());
return kPacketDuration * Channels();
@@ -70,7 +75,7 @@ class MockAudioDecoder final : public AudioDecoder {
size_t encoded_len,
int sample_rate_hz,
int16_t* decoded,
- SpeechType* speech_type) override {
+ SpeechType* speech_type) /* override */ {
return DecodeInternal(encoded, encoded_len, sample_rate_hz, decoded,
speech_type);
}
@@ -294,7 +299,3 @@ TEST(NetEqNetworkStatsTest, NoiseExpansionTest) {
} // namespace test
} // namespace webrtc
-
-
-
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc
index 4ee17d2a446..e1a9922b0b4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc
@@ -212,11 +212,14 @@ class NetEqStereoTest : public ::testing::TestWithParam<TestParameters> {
} while (Lost()); // If lost, immediately read the next packet.
}
// Get audio from mono instance.
- EXPECT_EQ(NetEq::kOK, neteq_mono_->GetAudio(&output_));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_mono_->GetAudio(&output_, &muted));
+ ASSERT_FALSE(muted);
EXPECT_EQ(1u, output_.num_channels_);
EXPECT_EQ(output_size_samples_, output_.samples_per_channel_);
// Get audio from multi-channel instance.
- ASSERT_EQ(NetEq::kOK, neteq_->GetAudio(&output_multi_channel_));
+ ASSERT_EQ(NetEq::kOK, neteq_->GetAudio(&output_multi_channel_, &muted));
+ ASSERT_FALSE(muted);
EXPECT_EQ(num_channels_, output_multi_channel_.num_channels_);
EXPECT_EQ(output_size_samples_,
output_multi_channel_.samples_per_channel_);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi
index f02d3deee9b..bb316e8a81d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi
@@ -14,9 +14,12 @@
'target_name': 'rtc_event_log_source',
'type': 'static_library',
'dependencies': [
- '<(webrtc_root)/webrtc.gyp:rtc_event_log',
+ '<(webrtc_root)/webrtc.gyp:rtc_event_log_parser',
'<(webrtc_root)/webrtc.gyp:rtc_event_log_proto',
],
+ 'export_dependent_settings': [
+ '<(webrtc_root)/webrtc.gyp:rtc_event_log_parser',
+ ],
'sources': [
'tools/rtc_event_log_source.h',
'tools/rtc_event_log_source.cc',
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
index b6efe7d7d6b..cf8e5b474c2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
@@ -8,10 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-/*
- * This file includes unit tests for NetEQ.
- */
-
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
#include <math.h>
@@ -26,6 +22,8 @@
#include "gflags/gflags.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/sha1digest.h"
+#include "webrtc/base/stringencode.h"
#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
#include "webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h"
#include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"
@@ -45,6 +43,23 @@ DEFINE_bool(gen_ref, false, "Generate reference files.");
namespace {
+const std::string& PlatformChecksum(const std::string& checksum_general,
+ const std::string& checksum_android,
+ const std::string& checksum_win_32,
+ const std::string& checksum_win_64) {
+#ifdef WEBRTC_ANDROID
+ return checksum_android;
+#elif WEBRTC_WIN
+ #ifdef WEBRTC_ARCH_64_BITS
+ return checksum_win_64;
+ #else
+ return checksum_win_32;
+ #endif // WEBRTC_ARCH_64_BITS
+#else
+ return checksum_general;
+#endif // WEBRTC_WIN
+}
+
bool IsAllZero(const int16_t* buf, size_t buf_length) {
bool all_zero = true;
for (size_t n = 0; n < buf_length && all_zero; ++n)
@@ -89,186 +104,141 @@ void Convert(const webrtc::RtcpStatistics& stats_raw,
stats->set_jitter(stats_raw.jitter);
}
-void WriteMessage(FILE* file, const std::string& message) {
+void AddMessage(FILE* file, rtc::MessageDigest* digest,
+ const std::string& message) {
int32_t size = message.length();
- ASSERT_EQ(1u, fwrite(&size, sizeof(size), 1, file));
- if (size <= 0)
- return;
- ASSERT_EQ(static_cast<size_t>(size),
- fwrite(message.data(), sizeof(char), size, file));
+ if (file)
+ ASSERT_EQ(1u, fwrite(&size, sizeof(size), 1, file));
+ digest->Update(&size, sizeof(size));
+
+ if (file)
+ ASSERT_EQ(static_cast<size_t>(size),
+ fwrite(message.data(), sizeof(char), size, file));
+ digest->Update(message.data(), sizeof(char) * size);
}
-void ReadMessage(FILE* file, std::string* message) {
- int32_t size;
- ASSERT_EQ(1u, fread(&size, sizeof(size), 1, file));
- if (size <= 0)
- return;
- std::unique_ptr<char[]> buffer(new char[size]);
- ASSERT_EQ(static_cast<size_t>(size),
- fread(buffer.get(), sizeof(char), size, file));
- message->assign(buffer.get(), size);
-}
#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
+void LoadDecoders(webrtc::NetEq* neteq) {
+ // Load PCMu.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderPCMu,
+ "pcmu", 0));
+ // Load PCMa.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderPCMa,
+ "pcma", 8));
+#ifdef WEBRTC_CODEC_ILBC
+ // Load iLBC.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderILBC,
+ "ilbc", 102));
+#endif
+#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
+ // Load iSAC.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderISAC,
+ "isac", 103));
+#endif
+#ifdef WEBRTC_CODEC_ISAC
+ // Load iSAC SWB.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderISACswb,
+ "isac-swb", 104));
+#endif
+#ifdef WEBRTC_CODEC_OPUS
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderOpus,
+ "opus", 111));
+#endif
+ // Load PCM16B nb.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderPCM16B,
+ "pcm16-nb", 93));
+ // Load PCM16B wb.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(
+ webrtc::NetEqDecoder::kDecoderPCM16Bwb, "pcm16-wb", 94));
+ // Load PCM16B swb32.
+ ASSERT_EQ(
+ 0, neteq->RegisterPayloadType(
+ webrtc::NetEqDecoder::kDecoderPCM16Bswb32kHz, "pcm16-swb32", 95));
+ // Load CNG 8 kHz.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderCNGnb,
+ "cng-nb", 13));
+ // Load CNG 16 kHz.
+ ASSERT_EQ(0, neteq->RegisterPayloadType(webrtc::NetEqDecoder::kDecoderCNGwb,
+ "cng-wb", 98));
+}
} // namespace
namespace webrtc {
-class RefFiles {
+class ResultSink {
public:
- RefFiles(const std::string& input_file, const std::string& output_file);
- ~RefFiles();
- template<class T> void ProcessReference(const T& test_results);
- template<typename T, size_t n> void ProcessReference(
- const T (&test_results)[n],
- size_t length);
- template<typename T, size_t n> void WriteToFile(
- const T (&test_results)[n],
- size_t length);
- template<typename T, size_t n> void ReadFromFileAndCompare(
+ explicit ResultSink(const std::string& output_file);
+ ~ResultSink();
+
+ template<typename T, size_t n> void AddResult(
const T (&test_results)[n],
size_t length);
- void WriteToFile(const NetEqNetworkStatistics& stats);
- void ReadFromFileAndCompare(const NetEqNetworkStatistics& stats);
- void WriteToFile(const RtcpStatistics& stats);
- void ReadFromFileAndCompare(const RtcpStatistics& stats);
- FILE* input_fp_;
+ void AddResult(const NetEqNetworkStatistics& stats);
+ void AddResult(const RtcpStatistics& stats);
+
+ void VerifyChecksum(const std::string& ref_check_sum);
+
+ private:
FILE* output_fp_;
+ std::unique_ptr<rtc::MessageDigest> digest_;
};
-RefFiles::RefFiles(const std::string &input_file,
- const std::string &output_file)
- : input_fp_(NULL),
- output_fp_(NULL) {
- if (!input_file.empty()) {
- input_fp_ = fopen(input_file.c_str(), "rb");
- EXPECT_TRUE(input_fp_ != NULL);
- }
+ResultSink::ResultSink(const std::string &output_file)
+ : output_fp_(nullptr),
+ digest_(new rtc::Sha1Digest()) {
if (!output_file.empty()) {
output_fp_ = fopen(output_file.c_str(), "wb");
EXPECT_TRUE(output_fp_ != NULL);
}
}
-RefFiles::~RefFiles() {
- if (input_fp_) {
- EXPECT_EQ(EOF, fgetc(input_fp_)); // Make sure that we reached the end.
- fclose(input_fp_);
- }
- if (output_fp_) fclose(output_fp_);
-}
-
-template<class T>
-void RefFiles::ProcessReference(const T& test_results) {
- WriteToFile(test_results);
- ReadFromFileAndCompare(test_results);
+ResultSink::~ResultSink() {
+ if (output_fp_)
+ fclose(output_fp_);
}
template<typename T, size_t n>
-void RefFiles::ProcessReference(const T (&test_results)[n], size_t length) {
- WriteToFile(test_results, length);
- ReadFromFileAndCompare(test_results, length);
-}
-
-template<typename T, size_t n>
-void RefFiles::WriteToFile(const T (&test_results)[n], size_t length) {
+void ResultSink::AddResult(const T (&test_results)[n], size_t length) {
if (output_fp_) {
ASSERT_EQ(length, fwrite(&test_results, sizeof(T), length, output_fp_));
}
+ digest_->Update(&test_results, sizeof(T) * length);
}
-template<typename T, size_t n>
-void RefFiles::ReadFromFileAndCompare(const T (&test_results)[n],
- size_t length) {
- if (input_fp_) {
- // Read from ref file.
- T* ref = new T[length];
- ASSERT_EQ(length, fread(ref, sizeof(T), length, input_fp_));
- // Compare
- ASSERT_EQ(0, memcmp(&test_results, ref, sizeof(T) * length));
- delete [] ref;
- }
-}
-
-void RefFiles::WriteToFile(const NetEqNetworkStatistics& stats_raw) {
+void ResultSink::AddResult(const NetEqNetworkStatistics& stats_raw) {
#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
- if (!output_fp_)
- return;
neteq_unittest::NetEqNetworkStatistics stats;
Convert(stats_raw, &stats);
std::string stats_string;
ASSERT_TRUE(stats.SerializeToString(&stats_string));
- WriteMessage(output_fp_, stats_string);
+ AddMessage(output_fp_, digest_.get(), stats_string);
#else
FAIL() << "Writing to reference file requires Proto Buffer.";
#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
}
-void RefFiles::ReadFromFileAndCompare(
- const NetEqNetworkStatistics& stats) {
+void ResultSink::AddResult(const RtcpStatistics& stats_raw) {
#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
- if (!input_fp_)
- return;
-
- std::string stats_string;
- ReadMessage(input_fp_, &stats_string);
- neteq_unittest::NetEqNetworkStatistics ref_stats;
- ASSERT_TRUE(ref_stats.ParseFromString(stats_string));
-
- // Compare
- ASSERT_EQ(stats.current_buffer_size_ms, ref_stats.current_buffer_size_ms());
- ASSERT_EQ(stats.preferred_buffer_size_ms,
- ref_stats.preferred_buffer_size_ms());
- ASSERT_EQ(stats.jitter_peaks_found, ref_stats.jitter_peaks_found());
- ASSERT_EQ(stats.packet_loss_rate, ref_stats.packet_loss_rate());
- ASSERT_EQ(stats.packet_discard_rate, ref_stats.packet_discard_rate());
- ASSERT_EQ(stats.expand_rate, ref_stats.expand_rate());
- ASSERT_EQ(stats.preemptive_rate, ref_stats.preemptive_rate());
- ASSERT_EQ(stats.accelerate_rate, ref_stats.accelerate_rate());
- ASSERT_EQ(stats.clockdrift_ppm, ref_stats.clockdrift_ppm());
- ASSERT_EQ(stats.added_zero_samples, ref_stats.added_zero_samples());
- ASSERT_EQ(stats.secondary_decoded_rate, ref_stats.secondary_decoded_rate());
- ASSERT_LE(stats.speech_expand_rate, ref_stats.expand_rate());
-#else
- FAIL() << "Reading from reference file requires Proto Buffer.";
-#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
-}
-
-void RefFiles::WriteToFile(const RtcpStatistics& stats_raw) {
-#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
- if (!output_fp_)
- return;
neteq_unittest::RtcpStatistics stats;
Convert(stats_raw, &stats);
std::string stats_string;
ASSERT_TRUE(stats.SerializeToString(&stats_string));
- WriteMessage(output_fp_, stats_string);
+ AddMessage(output_fp_, digest_.get(), stats_string);
#else
FAIL() << "Writing to reference file requires Proto Buffer.";
#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
}
-void RefFiles::ReadFromFileAndCompare(const RtcpStatistics& stats) {
-#ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT
- if (!input_fp_)
- return;
- std::string stats_string;
- ReadMessage(input_fp_, &stats_string);
- neteq_unittest::RtcpStatistics ref_stats;
- ASSERT_TRUE(ref_stats.ParseFromString(stats_string));
-
- // Compare
- ASSERT_EQ(stats.fraction_lost, ref_stats.fraction_lost());
- ASSERT_EQ(stats.cumulative_lost, ref_stats.cumulative_lost());
- ASSERT_EQ(stats.extended_max_sequence_number,
- ref_stats.extended_max_sequence_number());
- ASSERT_EQ(stats.jitter, ref_stats.jitter());
-#else
- FAIL() << "Reading from reference file requires Proto Buffer.";
-#endif // WEBRTC_NETEQ_UNITTEST_BITEXACT
+void ResultSink::VerifyChecksum(const std::string& checksum) {
+ std::vector<char> buffer;
+ buffer.resize(digest_->Size());
+ digest_->Finish(&buffer[0], buffer.size());
+ const std::string result = rtc::hex_encode(&buffer[0], digest_->Size());
+ EXPECT_EQ(checksum, result);
}
class NetEqDecodingTest : public ::testing::Test {
@@ -286,14 +256,14 @@ class NetEqDecodingTest : public ::testing::Test {
virtual void SetUp();
virtual void TearDown();
void SelectDecoders(NetEqDecoder* used_codec);
- void LoadDecoders();
void OpenInputFile(const std::string &rtp_file);
void Process();
void DecodeAndCompare(const std::string& rtp_file,
- const std::string& ref_file,
- const std::string& stat_ref_file,
- const std::string& rtcp_ref_file);
+ const std::string& output_checksum,
+ const std::string& network_stats_checksum,
+ const std::string& rtcp_stats_checksum,
+ bool gen_ref);
static void PopulateRtpInfo(int frame_index,
int timestamp,
@@ -350,56 +320,13 @@ void NetEqDecodingTest::SetUp() {
ASSERT_EQ(0, neteq_->NetworkStatistics(&stat));
algorithmic_delay_ms_ = stat.current_buffer_size_ms;
ASSERT_TRUE(neteq_);
- LoadDecoders();
+ LoadDecoders(neteq_);
}
void NetEqDecodingTest::TearDown() {
delete neteq_;
}
-void NetEqDecodingTest::LoadDecoders() {
- // Load PCMu.
- ASSERT_EQ(0,
- neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCMu, "pcmu", 0));
- // Load PCMa.
- ASSERT_EQ(0,
- neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCMa, "pcma", 8));
-#ifdef WEBRTC_CODEC_ILBC
- // Load iLBC.
- ASSERT_EQ(
- 0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderILBC, "ilbc", 102));
-#endif
-#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
- // Load iSAC.
- ASSERT_EQ(
- 0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderISAC, "isac", 103));
-#endif
-#ifdef WEBRTC_CODEC_ISAC
- // Load iSAC SWB.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderISACswb,
- "isac-swb", 104));
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderOpus,
- "opus", 111));
-#endif
- // Load PCM16B nb.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCM16B,
- "pcm16-nb", 93));
- // Load PCM16B wb.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCM16Bwb,
- "pcm16-wb", 94));
- // Load PCM16B swb32.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderPCM16Bswb32kHz,
- "pcm16-swb32", 95));
- // Load CNG 8 kHz.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderCNGnb,
- "cng-nb", 13));
- // Load CNG 16 kHz.
- ASSERT_EQ(0, neteq_->RegisterPayloadType(NetEqDecoder::kDecoderCNGwb,
- "cng-wb", 98));
-}
-
void NetEqDecodingTest::OpenInputFile(const std::string &rtp_file) {
rtp_source_.reset(test::RtpFileSource::Create(rtp_file));
}
@@ -426,7 +353,9 @@ void NetEqDecodingTest::Process() {
}
// Get audio from NetEq.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ bool muted;
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
+ ASSERT_FALSE(muted);
ASSERT_TRUE((out_frame_.samples_per_channel_ == kBlockSize8kHz) ||
(out_frame_.samples_per_channel_ == kBlockSize16kHz) ||
(out_frame_.samples_per_channel_ == kBlockSize32kHz) ||
@@ -438,29 +367,25 @@ void NetEqDecodingTest::Process() {
sim_clock_ += kTimeStepMs;
}
-void NetEqDecodingTest::DecodeAndCompare(const std::string& rtp_file,
- const std::string& ref_file,
- const std::string& stat_ref_file,
- const std::string& rtcp_ref_file) {
+void NetEqDecodingTest::DecodeAndCompare(
+ const std::string& rtp_file,
+ const std::string& output_checksum,
+ const std::string& network_stats_checksum,
+ const std::string& rtcp_stats_checksum,
+ bool gen_ref) {
OpenInputFile(rtp_file);
- std::string ref_out_file = "";
- if (ref_file.empty()) {
- ref_out_file = webrtc::test::OutputPath() + "neteq_universal_ref.pcm";
- }
- RefFiles ref_files(ref_file, ref_out_file);
+ std::string ref_out_file =
+ gen_ref ? webrtc::test::OutputPath() + "neteq_universal_ref.pcm" : "";
+ ResultSink output(ref_out_file);
- std::string stat_out_file = "";
- if (stat_ref_file.empty()) {
- stat_out_file = webrtc::test::OutputPath() + "neteq_network_stats.dat";
- }
- RefFiles network_stat_files(stat_ref_file, stat_out_file);
+ std::string stat_out_file =
+ gen_ref ? webrtc::test::OutputPath() + "neteq_network_stats.dat" : "";
+ ResultSink network_stats(stat_out_file);
- std::string rtcp_out_file = "";
- if (rtcp_ref_file.empty()) {
- rtcp_out_file = webrtc::test::OutputPath() + "neteq_rtcp_stats.dat";
- }
- RefFiles rtcp_stat_files(rtcp_ref_file, rtcp_out_file);
+ std::string rtcp_out_file =
+ gen_ref ? webrtc::test::OutputPath() + "neteq_rtcp_stats.dat" : "";
+ ResultSink rtcp_stats(rtcp_out_file);
packet_.reset(rtp_source_->NextPacket());
int i = 0;
@@ -469,25 +394,33 @@ void NetEqDecodingTest::DecodeAndCompare(const std::string& rtp_file,
ss << "Lap number " << i++ << " in DecodeAndCompare while loop";
SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
ASSERT_NO_FATAL_FAILURE(Process());
- ASSERT_NO_FATAL_FAILURE(ref_files.ProcessReference(
+ ASSERT_NO_FATAL_FAILURE(output.AddResult(
out_frame_.data_, out_frame_.samples_per_channel_));
// Query the network statistics API once per second
if (sim_clock_ % 1000 == 0) {
// Process NetworkStatistics.
- NetEqNetworkStatistics network_stats;
- ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
- ASSERT_NO_FATAL_FAILURE(
- network_stat_files.ProcessReference(network_stats));
+ NetEqNetworkStatistics current_network_stats;
+ ASSERT_EQ(0, neteq_->NetworkStatistics(&current_network_stats));
+ ASSERT_NO_FATAL_FAILURE(network_stats.AddResult(current_network_stats));
+
// Compare with CurrentDelay, which should be identical.
- EXPECT_EQ(network_stats.current_buffer_size_ms, neteq_->CurrentDelayMs());
+ EXPECT_EQ(current_network_stats.current_buffer_size_ms,
+ neteq_->CurrentDelayMs());
// Process RTCPstat.
- RtcpStatistics rtcp_stats;
- neteq_->GetRtcpStatistics(&rtcp_stats);
- ASSERT_NO_FATAL_FAILURE(rtcp_stat_files.ProcessReference(rtcp_stats));
+ RtcpStatistics current_rtcp_stats;
+ neteq_->GetRtcpStatistics(&current_rtcp_stats);
+ ASSERT_NO_FATAL_FAILURE(rtcp_stats.AddResult(current_rtcp_stats));
}
}
+
+ SCOPED_TRACE("Check output audio.");
+ output.VerifyChecksum(output_checksum);
+ SCOPED_TRACE("Check network stats.");
+ network_stats.VerifyChecksum(network_stats_checksum);
+ SCOPED_TRACE("Check rtcp stats.");
+ rtcp_stats.VerifyChecksum(rtcp_stats_checksum);
}
void NetEqDecodingTest::PopulateRtpInfo(int frame_index,
@@ -525,31 +458,30 @@ void NetEqDecodingTest::PopulateCng(int frame_index,
TEST_F(NetEqDecodingTest, MAYBE_TestBitExactness) {
const std::string input_rtp_file =
webrtc::test::ResourcePath("audio_coding/neteq_universal_new", "rtp");
- // Note that neteq4_universal_ref.pcm and neteq4_universal_ref_win_32.pcm
- // are identical. The latter could have been removed, but if clients still
- // have a copy of the file, the test will fail.
- const std::string input_ref_file =
- webrtc::test::ResourcePath("audio_coding/neteq4_universal_ref", "pcm");
-#if defined(_MSC_VER) && (_MSC_VER >= 1700)
- // For Visual Studio 2012 and later, we will have to use the generic reference
- // file, rather than the windows-specific one.
- const std::string network_stat_ref_file = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq4_network_stats.dat";
-#else
- const std::string network_stat_ref_file =
- webrtc::test::ResourcePath("audio_coding/neteq4_network_stats", "dat");
-#endif
- const std::string rtcp_stat_ref_file =
- webrtc::test::ResourcePath("audio_coding/neteq4_rtcp_stats", "dat");
-
- if (FLAGS_gen_ref) {
- DecodeAndCompare(input_rtp_file, "", "", "");
- } else {
- DecodeAndCompare(input_rtp_file,
- input_ref_file,
- network_stat_ref_file,
- rtcp_stat_ref_file);
- }
+
+ const std::string output_checksum = PlatformChecksum(
+ "472ebe1126f41fdb6b5c63c87f625a52e7604e49",
+ "d2a6b6ff54b340cf9f961c7f07768d86b3761073",
+ "472ebe1126f41fdb6b5c63c87f625a52e7604e49",
+ "f9749813dbc3fb59dae761de518fec65b8407c5b");
+
+ const std::string network_stats_checksum = PlatformChecksum(
+ "2cf380a05ee07080bd72471e8ec7777a39644ec9",
+ "01be67dc4c3b8e74743a45cbd8684c0535dec9ad",
+ "2cf380a05ee07080bd72471e8ec7777a39644ec9",
+ "2cf380a05ee07080bd72471e8ec7777a39644ec9");
+
+ const std::string rtcp_stats_checksum = PlatformChecksum(
+ "b8880bf9fed2487efbddcb8d94b9937a29ae521d",
+ "f3f7b3d3e71d7e635240b5373b57df6a7e4ce9d4",
+ "b8880bf9fed2487efbddcb8d94b9937a29ae521d",
+ "b8880bf9fed2487efbddcb8d94b9937a29ae521d");
+
+ DecodeAndCompare(input_rtp_file,
+ output_checksum,
+ network_stats_checksum,
+ rtcp_stats_checksum,
+ FLAGS_gen_ref);
}
#if !defined(WEBRTC_IOS) && !defined(WEBRTC_ANDROID) && \
@@ -562,26 +494,30 @@ TEST_F(NetEqDecodingTest, MAYBE_TestBitExactness) {
TEST_F(NetEqDecodingTest, MAYBE_TestOpusBitExactness) {
const std::string input_rtp_file =
webrtc::test::ResourcePath("audio_coding/neteq_opus", "rtp");
- const std::string input_ref_file =
- // The pcm files were generated by using Opus v1.1.2 to decode the RTC
- // file generated by Opus v1.1
- webrtc::test::ResourcePath("audio_coding/neteq4_opus_ref", "pcm");
- const std::string network_stat_ref_file =
- // The network stats file was generated when using Opus v1.1.2 to decode
- // the RTC file generated by Opus v1.1
- webrtc::test::ResourcePath("audio_coding/neteq4_opus_network_stats",
- "dat");
- const std::string rtcp_stat_ref_file =
- webrtc::test::ResourcePath("audio_coding/neteq4_opus_rtcp_stats", "dat");
-
- if (FLAGS_gen_ref) {
- DecodeAndCompare(input_rtp_file, "", "", "");
- } else {
- DecodeAndCompare(input_rtp_file,
- input_ref_file,
- network_stat_ref_file,
- rtcp_stat_ref_file);
- }
+
+ const std::string output_checksum = PlatformChecksum(
+ "19ad24b4a1eb7a9620e6da09f98c49aa5792ade4",
+ "19ad24b4a1eb7a9620e6da09f98c49aa5792ade4",
+ "19ad24b4a1eb7a9620e6da09f98c49aa5792ade4",
+ "19ad24b4a1eb7a9620e6da09f98c49aa5792ade4");
+
+ const std::string network_stats_checksum = PlatformChecksum(
+ "6eab76efbde753d4dde38983445ca16b4ce59b39",
+ "6eab76efbde753d4dde38983445ca16b4ce59b39",
+ "6eab76efbde753d4dde38983445ca16b4ce59b39",
+ "6eab76efbde753d4dde38983445ca16b4ce59b39");
+
+ const std::string rtcp_stats_checksum = PlatformChecksum(
+ "e37c797e3de6a64dda88c9ade7a013d022a2e1e0",
+ "e37c797e3de6a64dda88c9ade7a013d022a2e1e0",
+ "e37c797e3de6a64dda88c9ade7a013d022a2e1e0",
+ "e37c797e3de6a64dda88c9ade7a013d022a2e1e0");
+
+ DecodeAndCompare(input_rtp_file,
+ output_checksum,
+ network_stats_checksum,
+ rtcp_stats_checksum,
+ FLAGS_gen_ref);
}
// Use fax mode to avoid time-scaling. This is to simplify the testing of
@@ -610,7 +546,8 @@ TEST_F(NetEqDecodingTestFaxMode, TestFrameWaitingTimeStatistics) {
}
// Pull out all data.
for (size_t i = 0; i < num_frames; ++i) {
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ bool muted;
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
}
@@ -651,7 +588,8 @@ TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimeNegative) {
}
// Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ bool muted;
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
}
@@ -678,7 +616,8 @@ TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimePositive) {
}
// Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ bool muted;
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
}
@@ -699,6 +638,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
const size_t kPayloadBytes = kSamples * 2;
double next_input_time_ms = 0.0;
double t_ms;
+ bool muted;
// Insert speech for 5 seconds.
const int kSpeechDurationMs = 5000;
@@ -715,7 +655,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
next_input_time_ms += static_cast<double>(kFrameSizeMs) * drift_factor;
}
// Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
}
@@ -744,7 +684,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
next_input_time_ms += static_cast<double>(kCngPeriodMs) * drift_factor;
}
// Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
}
@@ -757,7 +697,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
const double loop_end_time = t_ms + network_freeze_ms;
for (; t_ms < loop_end_time; t_ms += 10) {
// Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
EXPECT_EQ(AudioFrame::kCNG, out_frame_.speech_type_);
}
@@ -769,7 +709,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
if (pull_once && next_input_time_ms >= pull_time_ms) {
pull_once = false;
// Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
EXPECT_EQ(AudioFrame::kCNG, out_frame_.speech_type_);
t_ms += 10;
@@ -803,7 +743,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
next_input_time_ms += kFrameSizeMs * drift_factor;
}
// Pull out data once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
// Increase clock.
t_ms += 10;
@@ -931,7 +871,9 @@ TEST_F(NetEqDecodingTest, MAYBE_DecoderError) {
for (size_t i = 0; i < AudioFrame::kMaxDataSizeSamples; ++i) {
out_frame_.data_[i] = 1;
}
- EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&out_frame_));
+ bool muted;
+ EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&out_frame_, &muted));
+ ASSERT_FALSE(muted);
// Verify that there is a decoder error to check.
EXPECT_EQ(NetEq::kDecoderErrorCode, neteq_->LastError());
@@ -968,7 +910,9 @@ TEST_F(NetEqDecodingTest, GetAudioBeforeInsertPacket) {
for (size_t i = 0; i < AudioFrame::kMaxDataSizeSamples; ++i) {
out_frame_.data_[i] = 1;
}
- EXPECT_EQ(0, neteq_->GetAudio(&out_frame_));
+ bool muted;
+ EXPECT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
+ ASSERT_FALSE(muted);
// Verify that the first block of samples is set to 0.
static const int kExpectedOutputLength =
kInitSampleRateHz / 100; // 10 ms at initial sample rate.
@@ -1020,6 +964,7 @@ class NetEqBgnTest : public NetEqDecodingTest {
rtp_info.header.payloadType = payload_type;
uint32_t receive_timestamp = 0;
+ bool muted;
for (int n = 0; n < 10; ++n) { // Insert few packets and get audio.
auto block = input.GetNextBlock();
ASSERT_EQ(expected_samples_per_channel, block.size());
@@ -1031,7 +976,7 @@ class NetEqBgnTest : public NetEqDecodingTest {
payload, enc_len_bytes),
receive_timestamp));
output.Reset();
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(1u, output.num_channels_);
ASSERT_EQ(expected_samples_per_channel, output.samples_per_channel_);
ASSERT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);
@@ -1047,7 +992,7 @@ class NetEqBgnTest : public NetEqDecodingTest {
// Get audio without inserting packets, expecting PLC and PLC-to-CNG. Pull
// one frame without checking speech-type. This is the first frame pulled
// without inserting any packet, and might not be labeled as PLC.
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(1u, output.num_channels_);
ASSERT_EQ(expected_samples_per_channel, output.samples_per_channel_);
@@ -1062,7 +1007,8 @@ class NetEqBgnTest : public NetEqDecodingTest {
for (int n = 0; n < kFadingThreshold + kNumPlcToCngTestFrames; ++n) {
output.Reset();
memset(output.data_, 1, sizeof(output.data_)); // Set to non-zero.
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
+ ASSERT_FALSE(muted);
ASSERT_EQ(1u, output.num_channels_);
ASSERT_EQ(expected_samples_per_channel, output.samples_per_channel_);
if (output.speech_type_ == AudioFrame::kPLCCNG) {
@@ -1236,9 +1182,10 @@ TEST_F(NetEqDecodingTest, SyncPacketDecode) {
// Insert some packets which decode to noise. We are not interested in
// actual decoded values.
uint32_t receive_timestamp = 0;
+ bool muted;
for (int n = 0; n < 100; ++n) {
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, receive_timestamp));
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(kBlockSize16kHz, output.samples_per_channel_);
ASSERT_EQ(1u, output.num_channels_);
@@ -1254,7 +1201,8 @@ TEST_F(NetEqDecodingTest, SyncPacketDecode) {
// Insert sync-packets, the decoded sequence should be all-zero.
for (int n = 0; n < kNumSyncPackets; ++n) {
ASSERT_EQ(0, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
+ ASSERT_FALSE(muted);
ASSERT_EQ(kBlockSize16kHz, output.samples_per_channel_);
ASSERT_EQ(1u, output.num_channels_);
if (n > algorithmic_frame_delay) {
@@ -1270,7 +1218,8 @@ TEST_F(NetEqDecodingTest, SyncPacketDecode) {
// network statistics would show some packet loss.
for (int n = 0; n <= algorithmic_frame_delay + 10; ++n) {
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, receive_timestamp));
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
+ ASSERT_FALSE(muted);
if (n >= algorithmic_frame_delay + 1) {
// Expect that this frame contain samples from regular RTP.
EXPECT_TRUE(IsAllNonZero(
@@ -1306,9 +1255,10 @@ TEST_F(NetEqDecodingTest, SyncPacketBufferSizeAndOverridenByNetworkPackets) {
// actual decoded values.
uint32_t receive_timestamp = 0;
int algorithmic_frame_delay = algorithmic_delay_ms_ / 10 + 1;
+ bool muted;
for (int n = 0; n < algorithmic_frame_delay; ++n) {
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, receive_timestamp));
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(kBlockSize16kHz, output.samples_per_channel_);
ASSERT_EQ(1u, output.num_channels_);
rtp_info.header.sequenceNumber++;
@@ -1345,7 +1295,8 @@ TEST_F(NetEqDecodingTest, SyncPacketBufferSizeAndOverridenByNetworkPackets) {
// Decode.
for (int n = 0; n < kNumSyncPackets; ++n) {
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
+ ASSERT_FALSE(muted);
ASSERT_EQ(kBlockSize16kHz, output.samples_per_channel_);
ASSERT_EQ(1u, output.num_channels_);
EXPECT_TRUE(IsAllNonZero(
@@ -1412,7 +1363,8 @@ void NetEqDecodingTest::WrapTest(uint16_t start_seq_no,
}
// Pull out data once.
AudioFrame output;
- ASSERT_EQ(0, neteq_->GetAudio(&output));
+ bool muted;
+ ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
ASSERT_EQ(kBlockSize16kHz, output.samples_per_channel_);
ASSERT_EQ(1u, output.num_channels_);
@@ -1468,6 +1420,7 @@ void NetEqDecodingTest::DuplicateCng() {
// correct.
uint8_t payload[kPayloadBytes] = {0};
WebRtcRTPHeader rtp_info;
+ bool muted;
for (int i = 0; i < 3; ++i) {
PopulateRtpInfo(seq_no, timestamp, &rtp_info);
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
@@ -1475,7 +1428,7 @@ void NetEqDecodingTest::DuplicateCng() {
timestamp += kSamples;
// Pull audio once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
}
// Verify speech output.
@@ -1492,7 +1445,7 @@ void NetEqDecodingTest::DuplicateCng() {
rtp_info, rtc::ArrayView<const uint8_t>(payload, payload_len), 0));
// Pull audio once and make sure CNG is played.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
EXPECT_EQ(AudioFrame::kCNG, out_frame_.speech_type_);
EXPECT_FALSE(PlayoutTimestamp()); // Returns empty value during CNG.
@@ -1508,7 +1461,7 @@ void NetEqDecodingTest::DuplicateCng() {
// Pull audio until we have played |kCngPeriodMs| of CNG. Start at 10 ms since
// we have already pulled out CNG once.
for (int cng_time_ms = 10; cng_time_ms < kCngPeriodMs; cng_time_ms += 10) {
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
EXPECT_EQ(AudioFrame::kCNG, out_frame_.speech_type_);
EXPECT_FALSE(PlayoutTimestamp()); // Returns empty value during CNG.
@@ -1523,7 +1476,7 @@ void NetEqDecodingTest::DuplicateCng() {
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
// Pull audio once and verify that the output is speech again.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_);
rtc::Optional<uint32_t> playout_timestamp = PlayoutTimestamp();
@@ -1561,7 +1514,8 @@ TEST_F(NetEqDecodingTest, CngFirst) {
timestamp += kCngPeriodSamples;
// Pull audio once and make sure CNG is played.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ bool muted;
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
EXPECT_EQ(AudioFrame::kCNG, out_frame_.speech_type_);
@@ -1573,10 +1527,261 @@ TEST_F(NetEqDecodingTest, CngFirst) {
timestamp += kSamples;
// Pull audio once.
- ASSERT_EQ(0, neteq_->GetAudio(&out_frame_));
+ ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
}
// Verify speech output.
EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_);
}
+
+class NetEqDecodingTestWithMutedState : public NetEqDecodingTest {
+ public:
+ NetEqDecodingTestWithMutedState() : NetEqDecodingTest() {
+ config_.enable_muted_state = true;
+ }
+
+ protected:
+ static constexpr size_t kSamples = 10 * 16;
+ static constexpr size_t kPayloadBytes = kSamples * 2;
+
+ void InsertPacket(uint32_t rtp_timestamp) {
+ uint8_t payload[kPayloadBytes] = {0};
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(0, rtp_timestamp, &rtp_info);
+ EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
+ }
+
+ bool GetAudioReturnMuted() {
+ bool muted;
+ EXPECT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
+ return muted;
+ }
+
+ void GetAudioUntilMuted() {
+ while (!GetAudioReturnMuted()) {
+ ASSERT_LT(counter_++, 1000) << "Test timed out";
+ }
+ }
+
+ void GetAudioUntilNormal() {
+ bool muted = false;
+ while (out_frame_.speech_type_ != AudioFrame::kNormalSpeech) {
+ EXPECT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
+ ASSERT_LT(counter_++, 1000) << "Test timed out";
+ }
+ EXPECT_FALSE(muted);
+ }
+
+ int counter_ = 0;
+};
+
+// Verifies that NetEq goes in and out of muted state as expected.
+TEST_F(NetEqDecodingTestWithMutedState, MutedState) {
+ // Insert one speech packet.
+ InsertPacket(0);
+ // Pull out audio once and expect it not to be muted.
+ EXPECT_FALSE(GetAudioReturnMuted());
+ // Pull data until faded out.
+ GetAudioUntilMuted();
+
+ // Verify that output audio is not written during muted mode. Other parameters
+ // should be correct, though.
+ AudioFrame new_frame;
+ for (auto& d : new_frame.data_) {
+ d = 17;
+ }
+ bool muted;
+ EXPECT_EQ(0, neteq_->GetAudio(&new_frame, &muted));
+ EXPECT_TRUE(muted);
+ for (auto d : new_frame.data_) {
+ EXPECT_EQ(17, d);
+ }
+ EXPECT_EQ(out_frame_.timestamp_ + out_frame_.samples_per_channel_,
+ new_frame.timestamp_);
+ EXPECT_EQ(out_frame_.samples_per_channel_, new_frame.samples_per_channel_);
+ EXPECT_EQ(out_frame_.sample_rate_hz_, new_frame.sample_rate_hz_);
+ EXPECT_EQ(out_frame_.num_channels_, new_frame.num_channels_);
+ EXPECT_EQ(out_frame_.speech_type_, new_frame.speech_type_);
+ EXPECT_EQ(out_frame_.vad_activity_, new_frame.vad_activity_);
+
+ // Insert new data. Timestamp is corrected for the time elapsed since the last
+ // packet. Verify that normal operation resumes.
+ InsertPacket(kSamples * counter_);
+ GetAudioUntilNormal();
+
+ NetEqNetworkStatistics stats;
+ EXPECT_EQ(0, neteq_->NetworkStatistics(&stats));
+ // NetEqNetworkStatistics::expand_rate tells the fraction of samples that were
+ // concealment samples, in Q14 (16384 = 100%) .The vast majority should be
+ // concealment samples in this test.
+ EXPECT_GT(stats.expand_rate, 14000);
+ // And, it should be greater than the speech_expand_rate.
+ EXPECT_GT(stats.expand_rate, stats.speech_expand_rate);
+}
+
+// Verifies that NetEq goes out of muted state when given a delayed packet.
+TEST_F(NetEqDecodingTestWithMutedState, MutedStateDelayedPacket) {
+ // Insert one speech packet.
+ InsertPacket(0);
+ // Pull out audio once and expect it not to be muted.
+ EXPECT_FALSE(GetAudioReturnMuted());
+ // Pull data until faded out.
+ GetAudioUntilMuted();
+ // Insert new data. Timestamp is only corrected for the half of the time
+ // elapsed since the last packet. That is, the new packet is delayed. Verify
+ // that normal operation resumes.
+ InsertPacket(kSamples * counter_ / 2);
+ GetAudioUntilNormal();
+}
+
+// Verifies that NetEq goes out of muted state when given a future packet.
+TEST_F(NetEqDecodingTestWithMutedState, MutedStateFuturePacket) {
+ // Insert one speech packet.
+ InsertPacket(0);
+ // Pull out audio once and expect it not to be muted.
+ EXPECT_FALSE(GetAudioReturnMuted());
+ // Pull data until faded out.
+ GetAudioUntilMuted();
+ // Insert new data. Timestamp is over-corrected for the time elapsed since the
+ // last packet. That is, the new packet is too early. Verify that normal
+ // operation resumes.
+ InsertPacket(kSamples * counter_ * 2);
+ GetAudioUntilNormal();
+}
+
+// Verifies that NetEq goes out of muted state when given an old packet.
+TEST_F(NetEqDecodingTestWithMutedState, MutedStateOldPacket) {
+ // Insert one speech packet.
+ InsertPacket(0);
+ // Pull out audio once and expect it not to be muted.
+ EXPECT_FALSE(GetAudioReturnMuted());
+ // Pull data until faded out.
+ GetAudioUntilMuted();
+
+ EXPECT_NE(AudioFrame::kNormalSpeech, out_frame_.speech_type_);
+ // Insert packet which is older than the first packet.
+ InsertPacket(kSamples * (counter_ - 1000));
+ EXPECT_FALSE(GetAudioReturnMuted());
+ EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_);
+}
+
+class NetEqDecodingTestTwoInstances : public NetEqDecodingTest {
+ public:
+ NetEqDecodingTestTwoInstances() : NetEqDecodingTest() {}
+
+ void SetUp() override {
+ NetEqDecodingTest::SetUp();
+ config2_ = config_;
+ }
+
+ void CreateSecondInstance() {
+ neteq2_.reset(NetEq::Create(config2_));
+ ASSERT_TRUE(neteq2_);
+ LoadDecoders(neteq2_.get());
+ }
+
+ protected:
+ std::unique_ptr<NetEq> neteq2_;
+ NetEq::Config config2_;
+};
+
+namespace {
+::testing::AssertionResult AudioFramesEqualExceptData(const AudioFrame& a,
+ const AudioFrame& b) {
+ if (a.timestamp_ != b.timestamp_)
+ return ::testing::AssertionFailure() << "timestamp_ diff (" << a.timestamp_
+ << " != " << b.timestamp_ << ")";
+ if (a.sample_rate_hz_ != b.sample_rate_hz_)
+ return ::testing::AssertionFailure() << "sample_rate_hz_ diff ("
+ << a.sample_rate_hz_
+ << " != " << b.sample_rate_hz_ << ")";
+ if (a.samples_per_channel_ != b.samples_per_channel_)
+ return ::testing::AssertionFailure()
+ << "samples_per_channel_ diff (" << a.samples_per_channel_
+ << " != " << b.samples_per_channel_ << ")";
+ if (a.num_channels_ != b.num_channels_)
+ return ::testing::AssertionFailure() << "num_channels_ diff ("
+ << a.num_channels_
+ << " != " << b.num_channels_ << ")";
+ if (a.speech_type_ != b.speech_type_)
+ return ::testing::AssertionFailure() << "speech_type_ diff ("
+ << a.speech_type_
+ << " != " << b.speech_type_ << ")";
+ if (a.vad_activity_ != b.vad_activity_)
+ return ::testing::AssertionFailure() << "vad_activity_ diff ("
+ << a.vad_activity_
+ << " != " << b.vad_activity_ << ")";
+ return ::testing::AssertionSuccess();
+}
+
+::testing::AssertionResult AudioFramesEqual(const AudioFrame& a,
+ const AudioFrame& b) {
+ ::testing::AssertionResult res = AudioFramesEqualExceptData(a, b);
+ if (!res)
+ return res;
+ if (memcmp(
+ a.data_, b.data_,
+ a.samples_per_channel_ * a.num_channels_ * sizeof(a.data_[0])) != 0) {
+ return ::testing::AssertionFailure() << "data_ diff";
+ }
+ return ::testing::AssertionSuccess();
+}
+
+} // namespace
+
+TEST_F(NetEqDecodingTestTwoInstances, CompareMutedStateOnOff) {
+ ASSERT_FALSE(config_.enable_muted_state);
+ config2_.enable_muted_state = true;
+ CreateSecondInstance();
+
+ // Insert one speech packet into both NetEqs.
+ const size_t kSamples = 10 * 16;
+ const size_t kPayloadBytes = kSamples * 2;
+ uint8_t payload[kPayloadBytes] = {0};
+ WebRtcRTPHeader rtp_info;
+ PopulateRtpInfo(0, 0, &rtp_info);
+ EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
+ EXPECT_EQ(0, neteq2_->InsertPacket(rtp_info, payload, 0));
+
+ AudioFrame out_frame1, out_frame2;
+ bool muted;
+ for (int i = 0; i < 1000; ++i) {
+ std::ostringstream ss;
+ ss << "i = " << i;
+ SCOPED_TRACE(ss.str()); // Print out the loop iterator on failure.
+ EXPECT_EQ(0, neteq_->GetAudio(&out_frame1, &muted));
+ EXPECT_FALSE(muted);
+ EXPECT_EQ(0, neteq2_->GetAudio(&out_frame2, &muted));
+ if (muted) {
+ EXPECT_TRUE(AudioFramesEqualExceptData(out_frame1, out_frame2));
+ } else {
+ EXPECT_TRUE(AudioFramesEqual(out_frame1, out_frame2));
+ }
+ }
+ EXPECT_TRUE(muted);
+
+ // Insert new data. Timestamp is corrected for the time elapsed since the last
+ // packet.
+ PopulateRtpInfo(0, kSamples * 1000, &rtp_info);
+ EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload, 0));
+ EXPECT_EQ(0, neteq2_->InsertPacket(rtp_info, payload, 0));
+
+ int counter = 0;
+ while (out_frame1.speech_type_ != AudioFrame::kNormalSpeech) {
+ ASSERT_LT(counter++, 1000) << "Test timed out";
+ std::ostringstream ss;
+ ss << "counter = " << counter;
+ SCOPED_TRACE(ss.str()); // Print out the loop iterator on failure.
+ EXPECT_EQ(0, neteq_->GetAudio(&out_frame1, &muted));
+ EXPECT_FALSE(muted);
+ EXPECT_EQ(0, neteq2_->GetAudio(&out_frame2, &muted));
+ if (muted) {
+ EXPECT_TRUE(AudioFramesEqualExceptData(out_frame1, out_frame2));
+ } else {
+ EXPECT_TRUE(AudioFramesEqual(out_frame1, out_frame2));
+ }
+ }
+ EXPECT_FALSE(muted);
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc
index 9bddfe77657..f99b3f200f4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc
@@ -16,7 +16,6 @@
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
-#include "webrtc/modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
#include "webrtc/modules/audio_coding/neteq/background_noise.h"
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
@@ -43,7 +42,6 @@ int Normal::Process(const int16_t* input,
return 0;
}
output->PushBackInterleaved(input, length);
- int16_t* signal = &(*output)[0][0];
const int fs_mult = fs_hz_ / 8000;
assert(fs_mult > 0);
@@ -64,24 +62,26 @@ int Normal::Process(const int16_t* input,
expand_->Process(&expanded);
expand_->Reset();
+ size_t length_per_channel = length / output->Channels();
+ std::unique_ptr<int16_t[]> signal(new int16_t[length_per_channel]);
for (size_t channel_ix = 0; channel_ix < output->Channels(); ++channel_ix) {
// Adjust muting factor (main muting factor times expand muting factor).
external_mute_factor_array[channel_ix] = static_cast<int16_t>(
(external_mute_factor_array[channel_ix] *
expand_->MuteFactor(channel_ix)) >> 14);
- int16_t* signal = &(*output)[channel_ix][0];
- size_t length_per_channel = length / output->Channels();
+ (*output)[channel_ix].CopyTo(length_per_channel, 0, signal.get());
+
// Find largest absolute value in new data.
int16_t decoded_max =
- WebRtcSpl_MaxAbsValueW16(signal, length_per_channel);
+ WebRtcSpl_MaxAbsValueW16(signal.get(), length_per_channel);
// Adjust muting factor if needed (to BGN level).
size_t energy_length =
std::min(static_cast<size_t>(fs_mult * 64), length_per_channel);
int scaling = 6 + fs_shift
- WebRtcSpl_NormW32(decoded_max * decoded_max);
scaling = std::max(scaling, 0); // |scaling| should always be >= 0.
- int32_t energy = WebRtcSpl_DotProductWithScale(signal, signal,
+ int32_t energy = WebRtcSpl_DotProductWithScale(signal.get(), signal.get(),
energy_length, scaling);
int32_t scaled_energy_length =
static_cast<int32_t>(energy_length >> scaling);
@@ -149,19 +149,18 @@ int Normal::Process(const int16_t* input,
int16_t cng_output[kCngLength];
// Reset mute factor and start up fresh.
external_mute_factor_array[0] = 16384;
- AudioDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
+ ComfortNoiseDecoder* cng_decoder = decoder_database_->GetActiveCngDecoder();
if (cng_decoder) {
// Generate long enough for 32kHz.
- if (WebRtcCng_Generate(cng_decoder->CngDecoderInstance(), cng_output,
- kCngLength, 0) < 0) {
+ if (!cng_decoder->Generate(cng_output, 0)) {
// Error returned; set return vector to all zeros.
memset(cng_output, 0, sizeof(cng_output));
}
} else {
// If no CNG instance is defined, just copy from the decoded data.
// (This will result in interpolating the decoded with itself.)
- memcpy(cng_output, signal, fs_mult * 8 * sizeof(int16_t));
+ (*output)[0].CopyTo(fs_mult * 8, 0, cng_output);
}
// Interpolate the CNG into the new vector.
// (NB/WB/SWB32/SWB48 8/16/32/48 samples.)
@@ -171,8 +170,8 @@ int Normal::Process(const int16_t* input,
for (size_t i = 0; i < static_cast<size_t>(8 * fs_mult); i++) {
// TODO(hlundin): Add 16 instead of 8 for correct rounding. Keeping 8 now
// for legacy bit-exactness.
- signal[i] =
- (fraction * signal[i] + (32 - fraction) * cng_output[i] + 8) >> 5;
+ (*output)[0][i] = (fraction * (*output)[0][i] +
+ (32 - fraction) * cng_output[i] + 8) >> 5;
fraction += increment;
}
} else if (external_mute_factor_array[0] < 16384) {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc
index f98e99a82d8..5e1fc131e50 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc
@@ -27,9 +27,20 @@
#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
using ::testing::_;
+using ::testing::Invoke;
namespace webrtc {
+namespace {
+
+int ExpandProcess120ms(AudioMultiVector* output) {
+ AudioMultiVector dummy_audio(1, 11520u);
+ dummy_audio.CopyTo(output);
+ return 0;
+}
+
+} // namespace
+
TEST(Normal, CreateAndDestroy) {
MockDecoderDatabase db;
int fs = 8000;
@@ -121,6 +132,45 @@ TEST(Normal, InputLengthAndChannelsDoNotMatch) {
EXPECT_CALL(expand, Die()); // Called when |expand| goes out of scope.
}
+TEST(Normal, LastModeExpand120msPacket) {
+ WebRtcSpl_Init();
+ MockDecoderDatabase db;
+ const int kFs = 48000;
+ const size_t kPacketsizeBytes = 11520u;
+ const size_t kChannels = 1;
+ BackgroundNoise bgn(kChannels);
+ SyncBuffer sync_buffer(kChannels, 1000);
+ RandomVector random_vector;
+ StatisticsCalculator statistics;
+ MockExpand expand(&bgn, &sync_buffer, &random_vector, &statistics, kFs,
+ kChannels);
+ Normal normal(kFs, &db, bgn, &expand);
+
+ int16_t input[kPacketsizeBytes] = {0};
+
+ std::unique_ptr<int16_t[]> mute_factor_array(new int16_t[kChannels]);
+ for (size_t i = 0; i < kChannels; ++i) {
+ mute_factor_array[i] = 16384;
+ }
+
+ AudioMultiVector output(kChannels);
+
+ EXPECT_CALL(expand, SetParametersForNormalAfterExpand());
+ EXPECT_CALL(expand, Process(_)).WillOnce(Invoke(ExpandProcess120ms));
+ EXPECT_CALL(expand, Reset());
+ EXPECT_EQ(static_cast<int>(kPacketsizeBytes),
+ normal.Process(input,
+ kPacketsizeBytes,
+ kModeExpand,
+ mute_factor_array.get(),
+ &output));
+
+ EXPECT_EQ(kPacketsizeBytes, output.Size());
+
+ EXPECT_CALL(db, Die()); // Called when |db| goes out of scope.
+ EXPECT_CALL(expand, Die()); // Called when |expand| goes out of scope.
+}
+
// TODO(hlundin): Write more tests.
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI_android.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.cc
index c62a62f39aa..8a19fe4d592 100644
--- a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI_android.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.cc
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -8,8 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-int main(int argc, char* argv[]) {
- // TODO(leozwang): Video render test app is not ready on android,
- // make it dummy test now, will add android specific tests
- return 0;
-}
+#include "webrtc/modules/audio_coding/neteq/packet.h"
+
+namespace webrtc {
+
+Packet::Packet() = default;
+
+Packet::~Packet() = default;
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h
index 64b325e027a..d6f64c7e088 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h
@@ -12,7 +12,9 @@
#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_PACKET_H_
#include <list>
+#include <memory>
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/typedefs.h"
@@ -21,20 +23,15 @@ namespace webrtc {
// Struct for holding RTP packets.
struct Packet {
RTPHeader header;
- uint8_t* payload; // Datagram excluding RTP header and header extension.
- size_t payload_length;
- bool primary; // Primary, i.e., not redundant payload.
- int waiting_time;
- bool sync_packet;
+ // Datagram excluding RTP header and header extension.
+ uint8_t* payload = nullptr;
+ size_t payload_length = 0;
+ bool primary = true; // Primary, i.e., not redundant payload.
+ bool sync_packet = false;
+ std::unique_ptr<TickTimer::Stopwatch> waiting_time;
- // Constructor.
- Packet()
- : payload(NULL),
- payload_length(0),
- primary(true),
- waiting_time(0),
- sync_packet(false) {
- }
+ Packet();
+ ~Packet();
// Comparison operators. Establish a packet ordering based on (1) timestamp,
// (2) sequence number, (3) regular packet vs sync-packet and (4) redundancy.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc
index c89de12318b..f1b898e34cf 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc
@@ -19,6 +19,7 @@
#include "webrtc/base/logging.h"
#include "webrtc/modules/audio_coding/codecs/audio_decoder.h"
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
namespace webrtc {
@@ -37,8 +38,9 @@ class NewTimestampIsLarger {
const Packet* new_packet_;
};
-PacketBuffer::PacketBuffer(size_t max_number_of_packets)
- : max_number_of_packets_(max_number_of_packets) {}
+PacketBuffer::PacketBuffer(size_t max_number_of_packets,
+ const TickTimer* tick_timer)
+ : max_number_of_packets_(max_number_of_packets), tick_timer_(tick_timer) {}
// Destructor. All packets in the buffer will be destroyed.
PacketBuffer::~PacketBuffer() {
@@ -65,6 +67,8 @@ int PacketBuffer::InsertPacket(Packet* packet) {
int return_val = kOK;
+ packet->waiting_time = tick_timer_->GetNewStopwatch();
+
if (buffer_.size() >= max_number_of_packets_) {
// Buffer is full. Flush it.
Flush();
@@ -268,13 +272,6 @@ size_t PacketBuffer::NumSamplesInBuffer(DecoderDatabase* decoder_database,
return num_samples;
}
-void PacketBuffer::IncrementWaitingTimes(int inc) {
- PacketList::iterator it;
- for (it = buffer_.begin(); it != buffer_.end(); ++it) {
- (*it)->waiting_time += inc;
- }
-}
-
bool PacketBuffer::DeleteFirstPacket(PacketList* packet_list) {
if (packet_list->empty()) {
return false;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h
index 03c11e61b6e..6867b4cb37e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h
@@ -17,8 +17,8 @@
namespace webrtc {
-// Forward declaration.
class DecoderDatabase;
+class TickTimer;
// This is the actual buffer holding the packets before decoding.
class PacketBuffer {
@@ -34,7 +34,7 @@ class PacketBuffer {
// Constructor creates a buffer which can hold a maximum of
// |max_number_of_packets| packets.
- PacketBuffer(size_t max_number_of_packets);
+ PacketBuffer(size_t max_number_of_packets, const TickTimer* tick_timer);
// Deletes all packets in the buffer before destroying the buffer.
virtual ~PacketBuffer();
@@ -116,10 +116,6 @@ class PacketBuffer {
virtual size_t NumSamplesInBuffer(DecoderDatabase* decoder_database,
size_t last_decoded_length) const;
- // Increase the waiting time counter for every packet in the buffer by |inc|.
- // The default value for |inc| is 1.
- virtual void IncrementWaitingTimes(int inc = 1);
-
virtual void BufferStat(int* num_packets, int* max_num_packets) const;
// Static method that properly deletes the first packet, and its payload
@@ -148,6 +144,7 @@ class PacketBuffer {
private:
size_t max_number_of_packets_;
PacketList buffer_;
+ const TickTimer* tick_timer_;
RTC_DISALLOW_COPY_AND_ASSIGN(PacketBuffer);
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
index 435b6c848dc..da353010857 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
@@ -16,6 +16,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
#include "webrtc/modules/audio_coding/neteq/packet.h"
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
using ::testing::Return;
using ::testing::_;
@@ -80,13 +81,15 @@ struct PacketsToInsert {
// Start of test definitions.
TEST(PacketBuffer, CreateAndDestroy) {
- PacketBuffer* buffer = new PacketBuffer(10); // 10 packets.
+ TickTimer tick_timer;
+ PacketBuffer* buffer = new PacketBuffer(10, &tick_timer); // 10 packets.
EXPECT_TRUE(buffer->Empty());
delete buffer;
}
TEST(PacketBuffer, InsertPacket) {
- PacketBuffer buffer(10); // 10 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(10, &tick_timer); // 10 packets.
PacketGenerator gen(17u, 4711u, 0, 10);
const int payload_len = 100;
@@ -107,7 +110,8 @@ TEST(PacketBuffer, InsertPacket) {
// Test to flush buffer.
TEST(PacketBuffer, FlushBuffer) {
- PacketBuffer buffer(10); // 10 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(10, &tick_timer); // 10 packets.
PacketGenerator gen(0, 0, 0, 10);
const int payload_len = 10;
@@ -127,7 +131,8 @@ TEST(PacketBuffer, FlushBuffer) {
// Test to fill the buffer over the limits, and verify that it flushes.
TEST(PacketBuffer, OverfillBuffer) {
- PacketBuffer buffer(10); // 10 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(10, &tick_timer); // 10 packets.
PacketGenerator gen(0, 0, 0, 10);
// Insert 10 small packets; should be ok.
@@ -156,7 +161,8 @@ TEST(PacketBuffer, OverfillBuffer) {
// Test inserting a list of packets.
TEST(PacketBuffer, InsertPacketList) {
- PacketBuffer buffer(10); // 10 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(10, &tick_timer); // 10 packets.
PacketGenerator gen(0, 0, 0, 10);
PacketList list;
const int payload_len = 10;
@@ -192,7 +198,8 @@ TEST(PacketBuffer, InsertPacketList) {
// Expecting the buffer to flush.
// TODO(hlundin): Remove this test when legacy operation is no longer needed.
TEST(PacketBuffer, InsertPacketListChangePayloadType) {
- PacketBuffer buffer(10); // 10 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(10, &tick_timer); // 10 packets.
PacketGenerator gen(0, 0, 0, 10);
PacketList list;
const int payload_len = 10;
@@ -230,7 +237,8 @@ TEST(PacketBuffer, InsertPacketListChangePayloadType) {
}
TEST(PacketBuffer, ExtractOrderRedundancy) {
- PacketBuffer buffer(100); // 100 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(100, &tick_timer); // 100 packets.
const int kPackets = 18;
const int kFrameSize = 10;
const int kPayloadLength = 10;
@@ -289,7 +297,8 @@ TEST(PacketBuffer, ExtractOrderRedundancy) {
}
TEST(PacketBuffer, DiscardPackets) {
- PacketBuffer buffer(100); // 100 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(100, &tick_timer); // 100 packets.
const uint16_t start_seq_no = 17;
const uint32_t start_ts = 4711;
const uint32_t ts_increment = 10;
@@ -318,7 +327,8 @@ TEST(PacketBuffer, DiscardPackets) {
}
TEST(PacketBuffer, Reordering) {
- PacketBuffer buffer(100); // 100 packets.
+ TickTimer tick_timer;
+ PacketBuffer buffer(100, &tick_timer); // 100 packets.
const uint16_t start_seq_no = 17;
const uint32_t start_ts = 4711;
const uint32_t ts_increment = 10;
@@ -373,8 +383,9 @@ TEST(PacketBuffer, Failures) {
const uint32_t ts_increment = 10;
int payload_len = 100;
PacketGenerator gen(start_seq_no, start_ts, 0, ts_increment);
+ TickTimer tick_timer;
- PacketBuffer* buffer = new PacketBuffer(100); // 100 packets.
+ PacketBuffer* buffer = new PacketBuffer(100, &tick_timer); // 100 packets.
Packet* packet = NULL;
EXPECT_EQ(PacketBuffer::kInvalidPacket, buffer->InsertPacket(packet));
packet = gen.NextPacket(payload_len);
@@ -404,7 +415,7 @@ TEST(PacketBuffer, Failures) {
// Insert packet list of three packets, where the second packet has an invalid
// payload. Expect first packet to be inserted, and the remaining two to be
// discarded.
- buffer = new PacketBuffer(100); // 100 packets.
+ buffer = new PacketBuffer(100, &tick_timer); // 100 packets.
PacketList list;
list.push_back(gen.NextPacket(payload_len)); // Valid packet.
packet = gen.NextPacket(payload_len);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc
index 85307181341..530e9d064dc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc
@@ -12,6 +12,7 @@
#include <assert.h>
+#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
@@ -143,8 +144,9 @@ int PayloadSplitter::SplitFec(PacketList* packet_list,
// Not an FEC packet.
AudioDecoder* decoder = decoder_database->GetDecoder(payload_type);
- // decoder should not return NULL.
- assert(decoder != NULL);
+ // decoder should not return NULL, except for comfort noise payloads which
+ // are handled separately.
+ assert(decoder != NULL || decoder_database->IsComfortNoise(payload_type));
if (!decoder ||
!decoder->PacketHasFec(packet->payload, packet->payload_length)) {
++it;
@@ -167,8 +169,9 @@ int PayloadSplitter::SplitFec(PacketList* packet_list,
memcpy(new_packet->payload, packet->payload, packet->payload_length);
new_packet->payload_length = packet->payload_length;
new_packet->primary = false;
- new_packet->waiting_time = packet->waiting_time;
new_packet->sync_packet = packet->sync_packet;
+ // Waiting time should not be set here.
+ RTC_DCHECK(!packet->waiting_time);
packet_list->insert(it, new_packet);
break;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc
index a68e8d68a98..63772452da6 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc
@@ -18,6 +18,8 @@
#include <utility> // pair
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h"
+#include "webrtc/modules/audio_coding/codecs/mock/mock_audio_decoder_factory.h"
#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
#include "webrtc/modules/audio_coding/neteq/packet.h"
@@ -309,7 +311,8 @@ TEST(RedPayloadSplitter, CheckRedPayloads) {
// Use a real DecoderDatabase object here instead of a mock, since it is
// easier to just register the payload types and let the actual implementation
// do its job.
- DecoderDatabase decoder_database;
+ std::unique_ptr<MockAudioDecoderFactory> factory(new MockAudioDecoderFactory);
+ DecoderDatabase decoder_database(std::move(factory));
decoder_database.RegisterPayload(0, NetEqDecoder::kDecoderCNGnb, "cng-nb");
decoder_database.RegisterPayload(1, NetEqDecoder::kDecoderPCMu, "pcmu");
decoder_database.RegisterPayload(2, NetEqDecoder::kDecoderAVT, "avt");
@@ -372,33 +375,33 @@ TEST(AudioPayloadSplitter, NonSplittable) {
// codec types.
// Use scoped pointers to avoid having to delete them later.
std::unique_ptr<DecoderDatabase::DecoderInfo> info0(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderISAC, 16000, NULL,
- false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderISAC, "", 16000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(0))
.WillRepeatedly(Return(info0.get()));
std::unique_ptr<DecoderDatabase::DecoderInfo> info1(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderISACswb, 32000,
- NULL, false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderISACswb, "", 32000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(1))
.WillRepeatedly(Return(info1.get()));
std::unique_ptr<DecoderDatabase::DecoderInfo> info2(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderRED, 8000, NULL,
- false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderRED, "", 8000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(2))
.WillRepeatedly(Return(info2.get()));
std::unique_ptr<DecoderDatabase::DecoderInfo> info3(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderAVT, 8000, NULL,
- false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderAVT, "", 8000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(3))
.WillRepeatedly(Return(info3.get()));
std::unique_ptr<DecoderDatabase::DecoderInfo> info4(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderCNGnb, 8000, NULL,
- false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderCNGnb, "", 8000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(4))
.WillRepeatedly(Return(info4.get()));
std::unique_ptr<DecoderDatabase::DecoderInfo> info5(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderArbitrary, 8000,
- NULL, false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderArbitrary, "",
+ 8000, nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(5))
.WillRepeatedly(Return(info5.get()));
@@ -536,7 +539,7 @@ TEST_P(SplitBySamplesTest, PayloadSizes) {
// Use scoped pointers to avoid having to delete them later.
// (Sample rate is set to 8000 Hz, but does not matter.)
std::unique_ptr<DecoderDatabase::DecoderInfo> info(
- new DecoderDatabase::DecoderInfo(decoder_type_, 8000, NULL, false));
+ new DecoderDatabase::DecoderInfo(decoder_type_, "", 8000, nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
.WillRepeatedly(Return(info.get()));
@@ -623,8 +626,8 @@ TEST_P(SplitIlbcTest, NumFrames) {
// codec types.
// Use scoped pointers to avoid having to delete them later.
std::unique_ptr<DecoderDatabase::DecoderInfo> info(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderILBC, 8000, NULL,
- false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderILBC, "", 8000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
.WillRepeatedly(Return(info.get()));
@@ -687,8 +690,8 @@ TEST(IlbcPayloadSplitter, TooLargePayload) {
MockDecoderDatabase decoder_database;
std::unique_ptr<DecoderDatabase::DecoderInfo> info(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderILBC, 8000, NULL,
- false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderILBC, "", 8000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
.WillRepeatedly(Return(info.get()));
@@ -719,8 +722,8 @@ TEST(IlbcPayloadSplitter, UnevenPayload) {
MockDecoderDatabase decoder_database;
std::unique_ptr<DecoderDatabase::DecoderInfo> info(
- new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderILBC, 8000, NULL,
- false));
+ new DecoderDatabase::DecoderInfo(NetEqDecoder::kDecoderILBC, "", 8000,
+ nullptr));
EXPECT_CALL(decoder_database, GetDecoderInfo(kPayloadType))
.WillRepeatedly(Return(info.get()));
@@ -743,7 +746,7 @@ TEST(IlbcPayloadSplitter, UnevenPayload) {
TEST(FecPayloadSplitter, MixedPayload) {
PacketList packet_list;
- DecoderDatabase decoder_database;
+ DecoderDatabase decoder_database(CreateBuiltinAudioDecoderFactory());
decoder_database.RegisterPayload(0, NetEqDecoder::kDecoderOpus, "opus");
decoder_database.RegisterPayload(1, NetEqDecoder::kDecoderPCMu, "pcmu");
@@ -798,7 +801,7 @@ TEST(FecPayloadSplitter, MixedPayload) {
TEST(FecPayloadSplitter, EmbedFecInRed) {
PacketList packet_list;
- DecoderDatabase decoder_database;
+ DecoderDatabase decoder_database(CreateBuiltinAudioDecoderFactory());
const int kTimestampOffset = 20 * 48; // 20 ms * 48 kHz.
uint8_t payload_types[] = {0, 0};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/test/RTPencode.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/test/RTPencode.cc
index 45586ee111c..149f2826582 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/test/RTPencode.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/test/RTPencode.cc
@@ -265,7 +265,7 @@ GSMFR_encinst_t* GSMFRenc_inst[2];
#endif
#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
-CNG_enc_inst* CNGenc_inst[2];
+webrtc::ComfortNoiseEncoder *CNG_encoder[2];
#endif
#ifdef CODEC_SPEEX_8
SPEEX_encinst_t* SPEEX8enc_inst[2];
@@ -928,18 +928,8 @@ int NetEQTest_init_coders(webrtc::NetEqDecoder coder,
#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
- ok = WebRtcCng_CreateEnc(&CNGenc_inst[k]);
- if (ok != 0) {
- printf("Error: Couldn't allocate memory for CNG encoding instance\n");
- exit(0);
- }
if (sampfreq <= 16000) {
- ok = WebRtcCng_InitEnc(CNGenc_inst[k], sampfreq, 200, 5);
- if (ok == -1) {
- printf("Error: Initialization of CNG struct failed. Error code %d\n",
- WebRtcCng_GetErrorCodeEnc(CNGenc_inst[k]));
- exit(0);
- }
+ CNG_encoder[k] = new webrtc::ComfortNoiseEncoder(sampfreq, 200, 5);
}
#endif
@@ -1461,7 +1451,8 @@ int NetEQTest_free_coders(webrtc::NetEqDecoder coder, size_t numChannels) {
WebRtcVad_Free(VAD_inst[k]);
#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
- WebRtcCng_FreeEnc(CNGenc_inst[k]);
+ delete CNG_encoder[k];
+ CNG_encoder[k] = nullptr;
#endif
switch (coder) {
@@ -1600,7 +1591,7 @@ size_t NetEQTest_encode(webrtc::NetEqDecoder coder,
size_t numChannels) {
size_t cdlen = 0;
int16_t* tempdata;
- static int first_cng = 1;
+ static bool first_cng = true;
size_t tempLen;
*vad = 1;
@@ -1608,9 +1599,9 @@ size_t NetEQTest_encode(webrtc::NetEqDecoder coder,
if (useVAD) {
*vad = 0;
- size_t sampleRate_10 = static_cast<size_t>(10 * sampleRate / 1000);
- size_t sampleRate_20 = static_cast<size_t>(20 * sampleRate / 1000);
- size_t sampleRate_30 = static_cast<size_t>(30 * sampleRate / 1000);
+ const size_t sampleRate_10 = static_cast<size_t>(10 * sampleRate / 1000);
+ const size_t sampleRate_20 = static_cast<size_t>(20 * sampleRate / 1000);
+ const size_t sampleRate_30 = static_cast<size_t>(30 * sampleRate / 1000);
for (size_t k = 0; k < numChannels; k++) {
tempLen = frameLen;
tempdata = &indata[k * frameLen];
@@ -1642,16 +1633,22 @@ size_t NetEQTest_encode(webrtc::NetEqDecoder coder,
if (!*vad) {
// all channels are silent
+ rtc::Buffer workaround;
cdlen = 0;
for (size_t k = 0; k < numChannels; k++) {
- WebRtcCng_Encode(CNGenc_inst[k], &indata[k * frameLen],
- (frameLen <= 640 ? frameLen : 640) /* max 640 */,
- encoded, &tempLen, first_cng);
+ workaround.Clear();
+ tempLen = CNG_encoder[k]->Encode(
+ rtc::ArrayView<const int16_t>(
+ &indata[k * frameLen],
+ (frameLen <= 640 ? frameLen : 640) /* max 640 */),
+ first_cng,
+ &workaround);
+ memcpy(encoded, workaround.data(), tempLen);
encoded += tempLen;
cdlen += tempLen;
}
*vad = 0;
- first_cng = 0;
+ first_cng = false;
return (cdlen);
}
}
@@ -1734,7 +1731,7 @@ size_t NetEQTest_encode(webrtc::NetEqDecoder coder,
} // end for
- first_cng = 1;
+ first_cng = true;
return (totalLen);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc
index 2ebd1927bc4..62bfc1b3cb5 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_isac_quality_test.cc
@@ -43,8 +43,8 @@ class NetEqIsacQualityTest : public NetEqQualityTest {
NetEqIsacQualityTest();
void SetUp() override;
void TearDown() override;
- virtual int EncodeBlock(int16_t* in_data, size_t block_size_samples,
- rtc::Buffer* payload, size_t max_bytes);
+ int EncodeBlock(int16_t* in_data, size_t block_size_samples,
+ rtc::Buffer* payload, size_t max_bytes) override;
private:
ISACFIX_MainStruct* isac_encoder_;
int bit_rate_kbps_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc
index baa0d67aded..a6117a4c5b6 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc
@@ -103,8 +103,8 @@ class NetEqOpusQualityTest : public NetEqQualityTest {
NetEqOpusQualityTest();
void SetUp() override;
void TearDown() override;
- virtual int EncodeBlock(int16_t* in_data, size_t block_size_samples,
- rtc::Buffer* payload, size_t max_bytes);
+ int EncodeBlock(int16_t* in_data, size_t block_size_samples,
+ rtc::Buffer* payload, size_t max_bytes) override;
private:
WebRtcOpusEncInst* opus_encoder_;
OpusRepacketizer* repacketizer_;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.cc
new file mode 100644
index 00000000000..4a1b9b7b1fe
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.cc
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
+
+namespace webrtc {
+
+TickTimer::Stopwatch::Stopwatch(const TickTimer& ticktimer)
+ : ticktimer_(ticktimer), starttick_(ticktimer.ticks()) {}
+
+TickTimer::Countdown::Countdown(const TickTimer& ticktimer,
+ uint64_t ticks_to_count)
+ : stopwatch_(ticktimer.GetNewStopwatch()),
+ ticks_to_count_(ticks_to_count) {}
+
+TickTimer::Countdown::~Countdown() = default;
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.h
new file mode 100644
index 00000000000..8f17f435967
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer.h
@@ -0,0 +1,110 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TICK_TIMER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TICK_TIMER_H_
+
+#include <memory>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Implements a time counter. The counter is advanced with the Increment()
+// methods, and is queried with the ticks() accessor. It is assumed that one
+// "tick" och the counter corresponds to 10 ms.
+// A TickTimer object can provide two types of associated time-measuring
+// objects: Stopwatch and Countdown.
+class TickTimer {
+ public:
+ // Stopwatch measures time elapsed since it was started, by querying the
+ // associated TickTimer for the current time. The intended use is to request a
+ // new Stopwatch object from a TickTimer object with the GetNewStopwatch()
+ // method. Note: since the Stopwatch object contains a reference to the
+ // TickTimer it is associated with, it cannot outlive the TickTimer.
+ class Stopwatch {
+ public:
+ explicit Stopwatch(const TickTimer& ticktimer);
+
+ uint64_t ElapsedTicks() const { return ticktimer_.ticks() - starttick_; }
+
+ uint64_t ElapsedMs() const {
+ const uint64_t elapsed_ticks = ticktimer_.ticks() - starttick_;
+ const int ms_per_tick = ticktimer_.ms_per_tick();
+ return elapsed_ticks < UINT64_MAX / ms_per_tick
+ ? elapsed_ticks * ms_per_tick
+ : UINT64_MAX;
+ }
+
+ private:
+ const TickTimer& ticktimer_;
+ const uint64_t starttick_;
+ };
+
+ // Countdown counts down from a given start value with each tick of the
+ // associated TickTimer, until zero is reached. The Finished() method will
+ // return true if zero has been reached, false otherwise. The intended use is
+ // to request a new Countdown object from a TickTimer object with the
+ // GetNewCountdown() method. Note: since the Countdown object contains a
+ // reference to the TickTimer it is associated with, it cannot outlive the
+ // TickTimer.
+ class Countdown {
+ public:
+ Countdown(const TickTimer& ticktimer, uint64_t ticks_to_count);
+
+ ~Countdown();
+
+ bool Finished() const {
+ return stopwatch_->ElapsedTicks() >= ticks_to_count_;
+ }
+
+ private:
+ const std::unique_ptr<Stopwatch> stopwatch_;
+ const uint64_t ticks_to_count_;
+ };
+
+ TickTimer() : TickTimer(10) {}
+ explicit TickTimer(int ms_per_tick) : ms_per_tick_(ms_per_tick) {
+ RTC_DCHECK_GT(ms_per_tick_, 0);
+ }
+
+ void Increment() { ++ticks_; }
+
+ // Mainly intended for testing.
+ void Increment(uint64_t x) { ticks_ += x; }
+
+ uint64_t ticks() const { return ticks_; }
+
+ int ms_per_tick() const { return ms_per_tick_; }
+
+ // Returns a new Stopwatch object, based on the current TickTimer. Note that
+ // the new Stopwatch object contains a reference to the current TickTimer,
+ // and must therefore not outlive the TickTimer.
+ std::unique_ptr<Stopwatch> GetNewStopwatch() const {
+ return std::unique_ptr<Stopwatch>(new Stopwatch(*this));
+ }
+
+ // Returns a new Countdown object, based on the current TickTimer. Note that
+ // the new Countdown object contains a reference to the current TickTimer,
+ // and must therefore not outlive the TickTimer.
+ std::unique_ptr<Countdown> GetNewCountdown(uint64_t ticks_to_count) const {
+ return std::unique_ptr<Countdown>(new Countdown(*this, ticks_to_count));
+ }
+
+ private:
+ uint64_t ticks_ = 0;
+ const int ms_per_tick_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(TickTimer);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TICK_TIMER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer_unittest.cc
new file mode 100644
index 00000000000..55edcf5b292
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tick_timer_unittest.cc
@@ -0,0 +1,135 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "webrtc/modules/audio_coding/neteq/tick_timer.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace webrtc {
+
+// Verify that the default value for ms_per_tick is 10.
+TEST(TickTimer, DefaultMsPerTick) {
+ TickTimer tt;
+ EXPECT_EQ(10, tt.ms_per_tick());
+}
+
+TEST(TickTimer, CustomMsPerTick) {
+ TickTimer tt(17);
+ EXPECT_EQ(17, tt.ms_per_tick());
+}
+
+TEST(TickTimer, Increment) {
+ TickTimer tt;
+ EXPECT_EQ(0u, tt.ticks());
+ tt.Increment();
+ EXPECT_EQ(1u, tt.ticks());
+
+ for (int i = 0; i < 17; ++i) {
+ tt.Increment();
+ }
+ EXPECT_EQ(18u, tt.ticks());
+
+ tt.Increment(17);
+ EXPECT_EQ(35u, tt.ticks());
+}
+
+TEST(TickTimer, WrapAround) {
+ TickTimer tt;
+ tt.Increment(UINT64_MAX);
+ EXPECT_EQ(UINT64_MAX, tt.ticks());
+ tt.Increment();
+ EXPECT_EQ(0u, tt.ticks());
+}
+
+TEST(TickTimer, Stopwatch) {
+ TickTimer tt;
+ // Increment it a "random" number of steps.
+ tt.Increment(17);
+
+ std::unique_ptr<TickTimer::Stopwatch> sw = tt.GetNewStopwatch();
+ ASSERT_TRUE(sw);
+
+ EXPECT_EQ(0u, sw->ElapsedTicks()); // Starts at zero.
+ EXPECT_EQ(0u, sw->ElapsedMs());
+ tt.Increment();
+ EXPECT_EQ(1u, sw->ElapsedTicks()); // Increases with the TickTimer.
+ EXPECT_EQ(10u, sw->ElapsedMs());
+}
+
+TEST(TickTimer, StopwatchWrapAround) {
+ TickTimer tt;
+ tt.Increment(UINT64_MAX);
+
+ std::unique_ptr<TickTimer::Stopwatch> sw = tt.GetNewStopwatch();
+ ASSERT_TRUE(sw);
+
+ tt.Increment();
+ EXPECT_EQ(0u, tt.ticks());
+ EXPECT_EQ(1u, sw->ElapsedTicks());
+ EXPECT_EQ(10u, sw->ElapsedMs());
+
+ tt.Increment();
+ EXPECT_EQ(1u, tt.ticks());
+ EXPECT_EQ(2u, sw->ElapsedTicks());
+ EXPECT_EQ(20u, sw->ElapsedMs());
+}
+
+TEST(TickTimer, StopwatchMsOverflow) {
+ TickTimer tt;
+ std::unique_ptr<TickTimer::Stopwatch> sw = tt.GetNewStopwatch();
+ ASSERT_TRUE(sw);
+
+ tt.Increment(UINT64_MAX / 10);
+ EXPECT_EQ(UINT64_MAX, sw->ElapsedMs());
+
+ tt.Increment();
+ EXPECT_EQ(UINT64_MAX, sw->ElapsedMs());
+
+ tt.Increment(UINT64_MAX - tt.ticks());
+ EXPECT_EQ(UINT64_MAX, tt.ticks());
+ EXPECT_EQ(UINT64_MAX, sw->ElapsedMs());
+}
+
+TEST(TickTimer, StopwatchWithCustomTicktime) {
+ const int kMsPerTick = 17;
+ TickTimer tt(kMsPerTick);
+ std::unique_ptr<TickTimer::Stopwatch> sw = tt.GetNewStopwatch();
+ ASSERT_TRUE(sw);
+
+ EXPECT_EQ(0u, sw->ElapsedMs());
+ tt.Increment();
+ EXPECT_EQ(static_cast<uint64_t>(kMsPerTick), sw->ElapsedMs());
+}
+
+TEST(TickTimer, Countdown) {
+ TickTimer tt;
+ // Increment it a "random" number of steps.
+ tt.Increment(4711);
+
+ std::unique_ptr<TickTimer::Countdown> cd = tt.GetNewCountdown(17);
+ ASSERT_TRUE(cd);
+
+ EXPECT_FALSE(cd->Finished());
+ tt.Increment();
+ EXPECT_FALSE(cd->Finished());
+
+ tt.Increment(16); // Total increment is now 17.
+ EXPECT_TRUE(cd->Finished());
+
+ // Further increments do not change the state.
+ tt.Increment();
+ EXPECT_TRUE(cd->Finished());
+ tt.Increment(1234);
+ EXPECT_TRUE(cd->Finished());
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc
index 6a91ea487b5..880b1f82ea5 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc
@@ -16,6 +16,7 @@
#include "webrtc/base/safe_conversions.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/cross_correlation.h"
#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
namespace webrtc {
@@ -158,20 +159,15 @@ TimeStretch::ReturnCodes TimeStretch::Process(const int16_t* input,
}
void TimeStretch::AutoCorrelation() {
- // Set scaling factor for cross correlation to protect against overflow.
- int scaling = kLogCorrelationLen - WebRtcSpl_NormW32(
- max_input_value_ * max_input_value_);
- scaling = std::max(0, scaling);
-
// Calculate correlation from lag kMinLag to lag kMaxLag in 4 kHz domain.
int32_t auto_corr[kCorrelationLen];
- WebRtcSpl_CrossCorrelation(auto_corr, &downsampled_input_[kMaxLag],
- &downsampled_input_[kMaxLag - kMinLag],
- kCorrelationLen, kMaxLag - kMinLag, scaling, -1);
+ CrossCorrelationWithAutoShift(
+ &downsampled_input_[kMaxLag], &downsampled_input_[kMaxLag - kMinLag],
+ kCorrelationLen, kMaxLag - kMinLag, -1, auto_corr);
// Normalize correlation to 14 bits and write to |auto_correlation_|.
int32_t max_corr = WebRtcSpl_MaxAbsValueW32(auto_corr, kCorrelationLen);
- scaling = std::max(0, 17 - WebRtcSpl_NormW32(max_corr));
+ int scaling = std::max(0, 17 - WebRtcSpl_NormW32(max_corr));
WebRtcSpl_VectorBitShiftW32ToW16(auto_correlation_, kCorrelationLen,
auto_corr, scaling);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc
index b1cb45d2014..adaf16223b6 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc
@@ -23,9 +23,9 @@ namespace webrtc {
TEST(TimestampScaler, TestNoScaling) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type =
- NetEqDecoder::kDecoderPCMu; // Does not use scaled timestamps.
+ // Use PCMu, because it doesn't use scaled timestamps.
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderPCMu, "", 8000,
+ nullptr);
static const uint8_t kRtpPayloadType = 0;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
@@ -44,9 +44,9 @@ TEST(TimestampScaler, TestNoScaling) {
TEST(TimestampScaler, TestNoScalingLargeStep) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type =
- NetEqDecoder::kDecoderPCMu; // Does not use scaled timestamps.
+ // Use PCMu, because it doesn't use scaled timestamps.
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderPCMu, "", 8000,
+ nullptr);
static const uint8_t kRtpPayloadType = 0;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
@@ -70,8 +70,9 @@ TEST(TimestampScaler, TestNoScalingLargeStep) {
TEST(TimestampScaler, TestG722) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = NetEqDecoder::kDecoderG722; // Uses a factor 2 scaling.
+ // Use G722, which has a factor 2 scaling.
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderG722, "", 16000,
+ nullptr);
static const uint8_t kRtpPayloadType = 17;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
@@ -94,8 +95,9 @@ TEST(TimestampScaler, TestG722) {
TEST(TimestampScaler, TestG722LargeStep) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = NetEqDecoder::kDecoderG722; // Uses a factor 2 scaling.
+ // Use G722, which has a factor 2 scaling.
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderG722, "", 16000,
+ nullptr);
static const uint8_t kRtpPayloadType = 17;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
@@ -122,10 +124,11 @@ TEST(TimestampScaler, TestG722LargeStep) {
TEST(TimestampScaler, TestG722WithCng) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info_g722, info_cng;
- info_g722.codec_type =
- NetEqDecoder::kDecoderG722; // Uses a factor 2 scaling.
- info_cng.codec_type = NetEqDecoder::kDecoderCNGwb;
+ // Use G722, which has a factor 2 scaling.
+ const DecoderDatabase::DecoderInfo info_g722(NetEqDecoder::kDecoderG722, "",
+ 16000, nullptr);
+ const DecoderDatabase::DecoderInfo info_cng(NetEqDecoder::kDecoderCNGwb, "",
+ 16000, nullptr);
static const uint8_t kRtpPayloadTypeG722 = 17;
static const uint8_t kRtpPayloadTypeCng = 13;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadTypeG722))
@@ -164,9 +167,9 @@ TEST(TimestampScaler, TestG722WithCng) {
// as many tests here.
TEST(TimestampScaler, TestG722Packet) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type =
- NetEqDecoder::kDecoderG722; // Does uses a factor 2 scaling.
+ // Use G722, which has a factor 2 scaling.
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderG722, "", 16000,
+ nullptr);
static const uint8_t kRtpPayloadType = 17;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
@@ -193,8 +196,9 @@ TEST(TimestampScaler, TestG722Packet) {
// we are not doing as many tests here.
TEST(TimestampScaler, TestG722PacketList) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = NetEqDecoder::kDecoderG722; // Uses a factor 2 scaling.
+ // Use G722, which has a factor 2 scaling.
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderG722, "", 16000,
+ nullptr);
static const uint8_t kRtpPayloadType = 17;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
@@ -222,8 +226,9 @@ TEST(TimestampScaler, TestG722PacketList) {
TEST(TimestampScaler, TestG722Reset) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = NetEqDecoder::kDecoderG722; // Uses a factor 2 scaling.
+ // Use G722, which has a factor 2 scaling.
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderG722, "", 16000,
+ nullptr);
static const uint8_t kRtpPayloadType = 17;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
@@ -262,8 +267,8 @@ TEST(TimestampScaler, TestG722Reset) {
// timestamp scaler.
TEST(TimestampScaler, TestOpusLargeStep) {
MockDecoderDatabase db;
- DecoderDatabase::DecoderInfo info;
- info.codec_type = NetEqDecoder::kDecoderOpus;
+ const DecoderDatabase::DecoderInfo info(NetEqDecoder::kDecoderOpus, "", 48000,
+ nullptr);
static const uint8_t kRtpPayloadType = 17;
EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType))
.WillRepeatedly(Return(&info));
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc
index 2608d9a03b7..7a51256af2d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_external_decoder_test.cc
@@ -45,7 +45,9 @@ void NetEqExternalDecoderTest::InsertPacket(
void NetEqExternalDecoderTest::GetOutputAudio(AudioFrame* output) {
// Get audio from regular instance.
- EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(output));
+ bool muted;
+ EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(output, &muted));
+ ASSERT_FALSE(muted);
EXPECT_EQ(channels_, output->num_channels_);
EXPECT_EQ(static_cast<size_t>(kOutputLengthMs * sample_rate_hz_ / 1000),
output->samples_per_channel_);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc
index 59402a2029b..d0052c28a8d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.h"
+#include "webrtc/base/checks.h"
#include "webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.h"
#include "webrtc/modules/audio_coding/neteq/include/neteq.h"
#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
@@ -105,7 +106,9 @@ int64_t NetEqPerformanceTest::Run(int runtime_ms,
}
// Get output audio, but don't do anything with it.
- int error = neteq->GetAudio(&out_frame);
+ bool muted;
+ int error = neteq->GetAudio(&out_frame, &muted);
+ RTC_CHECK(!muted);
if (error != NetEq::kOK)
return -1;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc
index 5f874ad8dbe..2983cebe9d4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc
@@ -391,7 +391,9 @@ int NetEqQualityTest::Transmit() {
}
int NetEqQualityTest::DecodeBlock() {
- int ret = neteq_->GetAudio(&out_frame_);
+ bool muted;
+ int ret = neteq_->GetAudio(&out_frame_, &muted);
+ RTC_CHECK(!muted);
if (ret != NetEq::kOK) {
return -1;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc
index fdb66714cfb..1d462b3c9f2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc
@@ -605,7 +605,9 @@ int main(int argc, char* argv[]) {
// Check if it is time to get output audio.
while (time_now_ms >= next_output_time_ms && output_event_available) {
webrtc::AudioFrame out_frame;
- int error = neteq->GetAudio(&out_frame);
+ bool muted;
+ int error = neteq->GetAudio(&out_frame, &muted);
+ RTC_CHECK(!muted);
if (error != NetEq::kOK) {
std::cerr << "GetAudio returned error code " <<
neteq->LastError() << std::endl;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc
index dad72eaecd1..9192839be30 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.cc
@@ -16,51 +16,15 @@
#include <limits>
#include "webrtc/base/checks.h"
+#include "webrtc/call.h"
#include "webrtc/call/rtc_event_log.h"
#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
-// Files generated at build-time by the protobuf compiler.
-#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
-#include "external/webrtc/webrtc/call/rtc_event_log.pb.h"
-#else
-#include "webrtc/call/rtc_event_log.pb.h"
-#endif
namespace webrtc {
namespace test {
-namespace {
-
-const rtclog::RtpPacket* GetRtpPacket(const rtclog::Event& event) {
- if (!event.has_type() || event.type() != rtclog::Event::RTP_EVENT)
- return nullptr;
- if (!event.has_timestamp_us() || !event.has_rtp_packet())
- return nullptr;
- const rtclog::RtpPacket& rtp_packet = event.rtp_packet();
- if (!rtp_packet.has_type() || rtp_packet.type() != rtclog::AUDIO ||
- !rtp_packet.has_incoming() || !rtp_packet.incoming() ||
- !rtp_packet.has_packet_length() || rtp_packet.packet_length() == 0 ||
- !rtp_packet.has_header() || rtp_packet.header().size() == 0 ||
- rtp_packet.packet_length() < rtp_packet.header().size())
- return nullptr;
- return &rtp_packet;
-}
-
-const rtclog::AudioPlayoutEvent* GetAudioPlayoutEvent(
- const rtclog::Event& event) {
- if (!event.has_type() || event.type() != rtclog::Event::AUDIO_PLAYOUT_EVENT)
- return nullptr;
- if (!event.has_timestamp_us() || !event.has_audio_playout_event())
- return nullptr;
- const rtclog::AudioPlayoutEvent& playout_event = event.audio_playout_event();
- if (!playout_event.has_local_ssrc())
- return nullptr;
- return &playout_event;
-}
-
-} // namespace
-
RtcEventLogSource* RtcEventLogSource::Create(const std::string& file_name) {
RtcEventLogSource* source = new RtcEventLogSource();
RTC_CHECK(source->OpenFile(file_name));
@@ -76,42 +40,57 @@ bool RtcEventLogSource::RegisterRtpHeaderExtension(RTPExtensionType type,
}
Packet* RtcEventLogSource::NextPacket() {
- while (rtp_packet_index_ < event_log_->stream_size()) {
- const rtclog::Event& event = event_log_->stream(rtp_packet_index_);
- const rtclog::RtpPacket* rtp_packet = GetRtpPacket(event);
- rtp_packet_index_++;
- if (rtp_packet) {
- uint8_t* packet_header = new uint8_t[rtp_packet->header().size()];
- memcpy(packet_header, rtp_packet->header().data(),
- rtp_packet->header().size());
- Packet* packet = new Packet(packet_header, rtp_packet->header().size(),
- rtp_packet->packet_length(),
- event.timestamp_us() / 1000, *parser_.get());
- if (packet->valid_header()) {
- // Check if the packet should not be filtered out.
- if (!filter_.test(packet->header().payloadType) &&
- !(use_ssrc_filter_ && packet->header().ssrc != ssrc_))
- return packet;
- } else {
- std::cout << "Warning: Packet with index " << (rtp_packet_index_ - 1)
- << " has an invalid header and will be ignored." << std::endl;
+ while (rtp_packet_index_ < parsed_stream_.GetNumberOfEvents()) {
+ if (parsed_stream_.GetEventType(rtp_packet_index_) ==
+ ParsedRtcEventLog::RTP_EVENT) {
+ PacketDirection direction;
+ MediaType media_type;
+ size_t header_length;
+ size_t packet_length;
+ uint64_t timestamp_us = parsed_stream_.GetTimestamp(rtp_packet_index_);
+ parsed_stream_.GetRtpHeader(rtp_packet_index_, &direction, &media_type,
+ nullptr, &header_length, &packet_length);
+ if (direction == kIncomingPacket && media_type == MediaType::AUDIO) {
+ uint8_t* packet_header = new uint8_t[header_length];
+ parsed_stream_.GetRtpHeader(rtp_packet_index_, nullptr, nullptr,
+ packet_header, nullptr, nullptr);
+ Packet* packet = new Packet(packet_header, header_length, packet_length,
+ static_cast<double>(timestamp_us) / 1000,
+ *parser_.get());
+ if (packet->valid_header()) {
+ // Check if the packet should not be filtered out.
+ if (!filter_.test(packet->header().payloadType) &&
+ !(use_ssrc_filter_ && packet->header().ssrc != ssrc_)) {
+ rtp_packet_index_++;
+ return packet;
+ }
+ } else {
+ std::cout << "Warning: Packet with index " << rtp_packet_index_
+ << " has an invalid header and will be ignored."
+ << std::endl;
+ }
+ // The packet has either an invalid header or needs to be filtered out,
+ // so it can be deleted.
+ delete packet;
}
- // The packet has either an invalid header or needs to be filtered out, so
- // it can be deleted.
- delete packet;
}
+ rtp_packet_index_++;
}
return nullptr;
}
int64_t RtcEventLogSource::NextAudioOutputEventMs() {
- while (audio_output_index_ < event_log_->stream_size()) {
- const rtclog::Event& event = event_log_->stream(audio_output_index_);
- const rtclog::AudioPlayoutEvent* playout_event =
- GetAudioPlayoutEvent(event);
+ while (audio_output_index_ < parsed_stream_.GetNumberOfEvents()) {
+ if (parsed_stream_.GetEventType(audio_output_index_) ==
+ ParsedRtcEventLog::AUDIO_PLAYOUT_EVENT) {
+ uint64_t timestamp_us = parsed_stream_.GetTimestamp(audio_output_index_);
+ // We call GetAudioPlayout only to check that the protobuf event is
+ // well-formed.
+ parsed_stream_.GetAudioPlayout(audio_output_index_, nullptr);
+ audio_output_index_++;
+ return timestamp_us / 1000;
+ }
audio_output_index_++;
- if (playout_event)
- return event.timestamp_us() / 1000;
}
return std::numeric_limits<int64_t>::max();
}
@@ -120,8 +99,7 @@ RtcEventLogSource::RtcEventLogSource()
: PacketSource(), parser_(RtpHeaderParser::Create()) {}
bool RtcEventLogSource::OpenFile(const std::string& file_name) {
- event_log_.reset(new rtclog::EventStream());
- return RtcEventLog::ParseRtcEventLog(file_name, event_log_.get());
+ return parsed_stream_.ParseFile(file_name);
}
} // namespace test
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h
index 312338ee087..ad7add154c5 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtc_event_log_source.h
@@ -15,6 +15,7 @@
#include <string>
#include "webrtc/base/constructormagic.h"
+#include "webrtc/call/rtc_event_log_parser.h"
#include "webrtc/modules/audio_coding/neteq/tools/packet_source.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@@ -22,10 +23,6 @@ namespace webrtc {
class RtpHeaderParser;
-namespace rtclog {
-class EventStream;
-} // namespace rtclog
-
namespace test {
class Packet;
@@ -55,10 +52,10 @@ class RtcEventLogSource : public PacketSource {
bool OpenFile(const std::string& file_name);
- int rtp_packet_index_ = 0;
- int audio_output_index_ = 0;
+ size_t rtp_packet_index_ = 0;
+ size_t audio_output_index_ = 0;
- std::unique_ptr<rtclog::EventStream> event_log_;
+ ParsedRtcEventLog parsed_stream_;
std::unique_ptr<RtpHeaderParser> parser_;
RTC_DISALLOW_COPY_AND_ASSIGN(RtcEventLogSource);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/APITest.cc b/chromium/third_party/webrtc/modules/audio_coding/test/APITest.cc
index a2506ba0113..833398acddf 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/APITest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/APITest.cc
@@ -21,13 +21,13 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common.h"
#include "webrtc/common_types.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/audio_coding/acm2/acm_common_defs.h"
#include "webrtc/modules/audio_coding/test/utility.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -323,7 +323,8 @@ bool APITest::APIThreadB(void* obj) {
bool APITest::PullAudioRunA() {
_pullEventA->Wait(100);
AudioFrame audioFrame;
- if (_acmA->PlayoutData10Ms(_outFreqHzA, &audioFrame) < 0) {
+ bool muted;
+ if (_acmA->PlayoutData10Ms(_outFreqHzA, &audioFrame, &muted) < 0) {
bool thereIsDecoder;
{
ReadLockScoped rl(_apiTestRWLock);
@@ -343,7 +344,8 @@ bool APITest::PullAudioRunA() {
bool APITest::PullAudioRunB() {
_pullEventB->Wait(100);
AudioFrame audioFrame;
- if (_acmB->PlayoutData10Ms(_outFreqHzB, &audioFrame) < 0) {
+ bool muted;
+ if (_acmB->PlayoutData10Ms(_outFreqHzB, &audioFrame, &muted) < 0) {
bool thereIsDecoder;
{
ReadLockScoped rl(_apiTestRWLock);
@@ -560,7 +562,7 @@ void APITest::Perform() {
// Keep main thread waiting for sender/receiver
// threads to complete
EventWrapper* completeEvent = EventWrapper::Create();
- uint64_t startTime = TickTime::MillisecondTimestamp();
+ uint64_t startTime = rtc::TimeMillis();
uint64_t currentTime;
// Run test in 2 minutes (120000 ms).
do {
@@ -570,7 +572,7 @@ void APITest::Perform() {
}
//fflush(stderr);
completeEvent->Wait(50);
- currentTime = TickTime::MillisecondTimestamp();
+ currentTime = rtc::TimeMillis();
} while ((currentTime - startTime) < 120000);
//completeEvent->Wait(0xFFFFFFFF);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/Channel.cc b/chromium/third_party/webrtc/modules/audio_coding/test/Channel.cc
index 0507691fb4d..46c398b1b75 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/Channel.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/Channel.cc
@@ -14,7 +14,7 @@
#include <iostream>
#include "webrtc/base/format_macros.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/base/timeutils.h"
namespace webrtc {
@@ -234,7 +234,7 @@ Channel::Channel(int16_t chID)
_lastFrameSizeSample(0),
_packetLoss(0),
_useFECTestWithPacketLoss(false),
- _beginTime(TickTime::MillisecondTimestamp()),
+ _beginTime(rtc::TimeMillis()),
_totalBytes(0),
external_send_timestamp_(-1),
external_sequence_number_(-1),
@@ -286,7 +286,7 @@ void Channel::ResetStats() {
_payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
}
}
- _beginTime = TickTime::MillisecondTimestamp();
+ _beginTime = rtc::TimeMillis();
_totalBytes = 0;
_channelCritSect.Leave();
}
@@ -411,7 +411,7 @@ uint32_t Channel::LastInTimestamp() {
double Channel::BitRate() {
double rate;
- uint64_t currTime = TickTime::MillisecondTimestamp();
+ uint64_t currTime = rtc::TimeMillis();
_channelCritSect.Enter();
rate = ((double) _totalBytes * 8.0) / (double) (currTime - _beginTime);
_channelCritSect.Leave();
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/EncodeDecodeTest.cc b/chromium/third_party/webrtc/modules/audio_coding/test/EncodeDecodeTest.cc
index e0632243bf4..724502354e9 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/EncodeDecodeTest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/EncodeDecodeTest.cc
@@ -208,8 +208,12 @@ bool Receiver::IncomingPacket() {
bool Receiver::PlayoutData() {
AudioFrame audioFrame;
-
- int32_t ok =_acm->PlayoutData10Ms(_frequency, &audioFrame);
+ bool muted;
+ int32_t ok = _acm->PlayoutData10Ms(_frequency, &audioFrame, &muted);
+ if (muted) {
+ ADD_FAILURE();
+ return false;
+ }
EXPECT_EQ(0, ok);
if (ok < 0){
return false;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.cc b/chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.cc
deleted file mode 100644
index c9f80808260..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.cc
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <stdio.h>
-#include <string.h>
-
-#include <math.h>
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/test/SpatialAudio.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-#define NUM_PANN_COEFFS 10
-
-SpatialAudio::SpatialAudio(int testMode)
- : _acmLeft(AudioCodingModule::Create(1)),
- _acmRight(AudioCodingModule::Create(2)),
- _acmReceiver(AudioCodingModule::Create(3)),
- _testMode(testMode) {
-}
-
-SpatialAudio::~SpatialAudio() {
- delete _channel;
- _inFile.Close();
- _outFile.Close();
-}
-
-int16_t SpatialAudio::Setup() {
- _channel = new Channel;
-
- // Register callback for the sender side.
- CHECK_ERROR(_acmLeft->RegisterTransportCallback(_channel));
- CHECK_ERROR(_acmRight->RegisterTransportCallback(_channel));
- // Register the receiver ACM in channel
- _channel->RegisterReceiverACM(_acmReceiver.get());
-
- uint16_t sampFreqHz = 32000;
-
- const std::string file_name = webrtc::test::ResourcePath(
- "audio_coding/testfile32kHz", "pcm");
- _inFile.Open(file_name, sampFreqHz, "rb", false);
-
- std::string output_file = webrtc::test::OutputPath()
- + "out_spatial_autotest.pcm";
- if (_testMode == 1) {
- output_file = webrtc::test::OutputPath() + "testspatial_out.pcm";
- printf("\n");
- printf("Enter the output file [%s]: ", output_file.c_str());
- PCMFile::ChooseFile(&output_file, MAX_FILE_NAME_LENGTH_BYTE, &sampFreqHz);
- } else {
- output_file = webrtc::test::OutputPath() + "testspatial_out.pcm";
- }
- _outFile.Open(output_file, sampFreqHz, "wb", false);
- _outFile.SaveStereo(true);
-
- // Register all available codes as receiving codecs.
- CodecInst codecInst;
- int status;
- uint8_t num_encoders = _acmReceiver->NumberOfCodecs();
- // Register all available codes as receiving codecs once more.
- for (uint8_t n = 0; n < num_encoders; n++) {
- status = _acmReceiver->Codec(n, &codecInst);
- if (status < 0) {
- printf("Error in Codec(), no matching codec found");
- }
- status = _acmReceiver->RegisterReceiveCodec(codecInst);
- if (status < 0) {
- printf("Error in RegisterReceiveCodec() for payload type %d",
- codecInst.pltype);
- }
- }
-
- return 0;
-}
-
-void SpatialAudio::Perform() {
- if (_testMode == 0) {
- printf("Running SpatialAudio Test");
- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceAudioCoding, -1,
- "---------- SpatialAudio ----------");
- }
-
- Setup();
-
- CodecInst codecInst;
- _acmLeft->Codec((uint8_t) 1, &codecInst);
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- EncodeDecode();
-
- int16_t pannCntr = 0;
-
- double leftPanning[NUM_PANN_COEFFS] = { 1.00, 0.95, 0.90, 0.85, 0.80, 0.75,
- 0.70, 0.60, 0.55, 0.50 };
- double rightPanning[NUM_PANN_COEFFS] = { 0.50, 0.55, 0.60, 0.70, 0.75, 0.80,
- 0.85, 0.90, 0.95, 1.00 };
-
- while ((pannCntr + 1) < NUM_PANN_COEFFS) {
- _acmLeft->Codec((uint8_t) 0, &codecInst);
- codecInst.pacsize = 480;
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
-
- EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
- pannCntr++;
-
- // Change codec
- _acmLeft->Codec((uint8_t) 3, &codecInst);
- codecInst.pacsize = 320;
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
-
- EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
- pannCntr++;
- if (_testMode == 0) {
- printf(".");
- }
- }
-
- _acmLeft->Codec((uint8_t) 4, &codecInst);
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- EncodeDecode();
-
- _acmLeft->Codec((uint8_t) 0, &codecInst);
- codecInst.pacsize = 480;
- CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
- CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
- pannCntr = NUM_PANN_COEFFS - 1;
- while (pannCntr >= 0) {
- EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
- pannCntr--;
- if (_testMode == 0) {
- printf(".");
- }
- }
- if (_testMode == 0) {
- printf("Done!\n");
- }
-}
-
-void SpatialAudio::EncodeDecode(const double leftPanning,
- const double rightPanning) {
- AudioFrame audioFrame;
- int32_t outFileSampFreq = _outFile.SamplingFrequency();
-
- const double rightToLeftRatio = rightPanning / leftPanning;
-
- _channel->SetIsStereo(true);
-
- while (!_inFile.EndOfFile()) {
- _inFile.Read10MsData(audioFrame);
- for (size_t n = 0; n < audioFrame.samples_per_channel_; n++) {
- audioFrame.data_[n] = (int16_t) floor(
- audioFrame.data_[n] * leftPanning + 0.5);
- }
- CHECK_ERROR(_acmLeft->Add10MsData(audioFrame));
-
- for (size_t n = 0; n < audioFrame.samples_per_channel_; n++) {
- audioFrame.data_[n] = (int16_t) floor(
- audioFrame.data_[n] * rightToLeftRatio + 0.5);
- }
- CHECK_ERROR(_acmRight->Add10MsData(audioFrame));
-
- CHECK_ERROR(_acmReceiver->PlayoutData10Ms(outFileSampFreq, &audioFrame));
- _outFile.Write10MsData(audioFrame);
- }
- _inFile.Rewind();
-}
-
-void SpatialAudio::EncodeDecode() {
- AudioFrame audioFrame;
- int32_t outFileSampFreq = _outFile.SamplingFrequency();
-
- _channel->SetIsStereo(false);
-
- while (!_inFile.EndOfFile()) {
- _inFile.Read10MsData(audioFrame);
- CHECK_ERROR(_acmLeft->Add10MsData(audioFrame));
-
- CHECK_ERROR(_acmReceiver->PlayoutData10Ms(outFileSampFreq, &audioFrame));
- _outFile.Write10MsData(audioFrame);
- }
- _inFile.Rewind();
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.h b/chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.h
deleted file mode 100644
index 270c370cf46..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/test/SpatialAudio.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_TEST_SPATIALAUDIO_H_
-#define WEBRTC_MODULES_AUDIO_CODING_TEST_SPATIALAUDIO_H_
-
-#include <memory>
-
-#include "webrtc/modules/audio_coding/include/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/test/ACMTest.h"
-#include "webrtc/modules/audio_coding/test/Channel.h"
-#include "webrtc/modules/audio_coding/test/PCMFile.h"
-#include "webrtc/modules/audio_coding/test/utility.h"
-
-#define MAX_FILE_NAME_LENGTH_BYTE 500
-
-namespace webrtc {
-
-class SpatialAudio : public ACMTest {
- public:
- SpatialAudio(int testMode);
- ~SpatialAudio();
-
- void Perform();
- private:
- int16_t Setup();
- void EncodeDecode(double leftPanning, double rightPanning);
- void EncodeDecode();
-
- std::unique_ptr<AudioCodingModule> _acmLeft;
- std::unique_ptr<AudioCodingModule> _acmRight;
- std::unique_ptr<AudioCodingModule> _acmReceiver;
- Channel* _channel;
- PCMFile _inFile;
- PCMFile _outFile;
- int _testMode;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_TEST_SPATIALAUDIO_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/TestAllCodecs.cc b/chromium/third_party/webrtc/modules/audio_coding/test/TestAllCodecs.cc
index bacfd371880..80a0464d385 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/TestAllCodecs.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/TestAllCodecs.cc
@@ -452,7 +452,9 @@ void TestAllCodecs::Run(TestPack* channel) {
}
// Run received side of ACM.
- CHECK_ERROR(acm_b_->PlayoutData10Ms(out_freq_hz, &audio_frame));
+ bool muted;
+ CHECK_ERROR(acm_b_->PlayoutData10Ms(out_freq_hz, &audio_frame, &muted));
+ ASSERT_FALSE(muted);
// Write output speech to file.
outfile_b_.Write10MsData(audio_frame.data_,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/TestRedFec.cc b/chromium/third_party/webrtc/modules/audio_coding/test/TestRedFec.cc
index a1bdc04e531..24cda1122b9 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/TestRedFec.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/TestRedFec.cc
@@ -461,7 +461,9 @@ void TestRedFec::Run() {
while (!_inFileA.EndOfFile()) {
EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0);
EXPECT_GE(_acmA->Add10MsData(audioFrame), 0);
- EXPECT_EQ(0, _acmB->PlayoutData10Ms(outFreqHzB, &audioFrame));
+ bool muted;
+ EXPECT_EQ(0, _acmB->PlayoutData10Ms(outFreqHzB, &audioFrame, &muted));
+ ASSERT_FALSE(muted);
_outFileB.Write10MsData(audioFrame.data_, audioFrame.samples_per_channel_);
}
_inFileA.Rewind();
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/TestStereo.cc b/chromium/third_party/webrtc/modules/audio_coding/test/TestStereo.cc
index 9bf560d3237..85a2ae283b2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/TestStereo.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/TestStereo.cc
@@ -792,7 +792,9 @@ void TestStereo::Run(TestPackStereo* channel, int in_channels, int out_channels,
}
// Run received side of ACM
- EXPECT_EQ(0, acm_b_->PlayoutData10Ms(out_freq_hz_b, &audio_frame));
+ bool muted;
+ EXPECT_EQ(0, acm_b_->PlayoutData10Ms(out_freq_hz_b, &audio_frame, &muted));
+ ASSERT_FALSE(muted);
// Write output speech to file
out_file_.Write10MsData(
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/TestVADDTX.cc b/chromium/third_party/webrtc/modules/audio_coding/test/TestVADDTX.cc
index 229dc2d4745..4f53e47cacc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/TestVADDTX.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/TestVADDTX.cc
@@ -108,7 +108,9 @@ void TestVadDtx::Run(std::string in_filename, int frequency, int channels,
audio_frame.timestamp_ = time_stamp;
time_stamp += frame_size_samples;
EXPECT_GE(acm_send_->Add10MsData(audio_frame), 0);
- acm_receive_->PlayoutData10Ms(kOutputFreqHz, &audio_frame);
+ bool muted;
+ acm_receive_->PlayoutData10Ms(kOutputFreqHz, &audio_frame, &muted);
+ ASSERT_FALSE(muted);
out_file.Write10MsData(audio_frame);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/TwoWayCommunication.cc b/chromium/third_party/webrtc/modules/audio_coding/test/TwoWayCommunication.cc
index 161491b0610..b59ff1fdccc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/TwoWayCommunication.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/TwoWayCommunication.cc
@@ -261,13 +261,18 @@ void TwoWayCommunication::Perform() {
EXPECT_GE(_acmB->Add10MsData(audioFrame), 0);
EXPECT_GE(_acmRefB->Add10MsData(audioFrame), 0);
- EXPECT_EQ(0, _acmA->PlayoutData10Ms(outFreqHzA, &audioFrame));
+ bool muted;
+ EXPECT_EQ(0, _acmA->PlayoutData10Ms(outFreqHzA, &audioFrame, &muted));
+ ASSERT_FALSE(muted);
_outFileA.Write10MsData(audioFrame);
- EXPECT_EQ(0, _acmRefA->PlayoutData10Ms(outFreqHzA, &audioFrame));
+ EXPECT_EQ(0, _acmRefA->PlayoutData10Ms(outFreqHzA, &audioFrame, &muted));
+ ASSERT_FALSE(muted);
_outFileRefA.Write10MsData(audioFrame);
- EXPECT_EQ(0, _acmB->PlayoutData10Ms(outFreqHzB, &audioFrame));
+ EXPECT_EQ(0, _acmB->PlayoutData10Ms(outFreqHzB, &audioFrame, &muted));
+ ASSERT_FALSE(muted);
_outFileB.Write10MsData(audioFrame);
- EXPECT_EQ(0, _acmRefB->PlayoutData10Ms(outFreqHzB, &audioFrame));
+ EXPECT_EQ(0, _acmRefB->PlayoutData10Ms(outFreqHzB, &audioFrame, &muted));
+ ASSERT_FALSE(muted);
_outFileRefB.Write10MsData(audioFrame);
// Update time counters each time a second of data has passed.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/delay_test.cc b/chromium/third_party/webrtc/modules/audio_coding/test/delay_test.cc
index 8fa1fb1a3d0..50702f96b4e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/delay_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/delay_test.cc
@@ -204,7 +204,10 @@ class DelayTest {
in_file_a_.Read10MsData(audio_frame);
ASSERT_GE(acm_a_->Add10MsData(audio_frame), 0);
- ASSERT_EQ(0, acm_b_->PlayoutData10Ms(out_freq_hz_b, &audio_frame));
+ bool muted;
+ ASSERT_EQ(0,
+ acm_b_->PlayoutData10Ms(out_freq_hz_b, &audio_frame, &muted));
+ RTC_DCHECK(!muted);
out_file_b_.Write10MsData(
audio_frame.data_,
audio_frame.samples_per_channel_ * audio_frame.num_channels_);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/iSACTest.cc b/chromium/third_party/webrtc/modules/audio_coding/test/iSACTest.cc
index 9f223fb81fb..f06833c2d90 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/iSACTest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/iSACTest.cc
@@ -26,7 +26,6 @@
#include "webrtc/modules/audio_coding/acm2/acm_common_defs.h"
#include "webrtc/modules/audio_coding/test/utility.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -199,9 +198,12 @@ void ISACTest::Run10ms() {
EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0);
EXPECT_GE(_acmA->Add10MsData(audioFrame), 0);
EXPECT_GE(_acmB->Add10MsData(audioFrame), 0);
- EXPECT_EQ(0, _acmA->PlayoutData10Ms(32000, &audioFrame));
+ bool muted;
+ EXPECT_EQ(0, _acmA->PlayoutData10Ms(32000, &audioFrame, &muted));
+ ASSERT_FALSE(muted);
_outFileA.Write10MsData(audioFrame);
- EXPECT_EQ(0, _acmB->PlayoutData10Ms(32000, &audioFrame));
+ EXPECT_EQ(0, _acmB->PlayoutData10Ms(32000, &audioFrame, &muted));
+ ASSERT_FALSE(muted);
_outFileB.Write10MsData(audioFrame);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/insert_packet_with_timing.cc b/chromium/third_party/webrtc/modules/audio_coding/test/insert_packet_with_timing.cc
index 966f4c636c6..27a8833ac67 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/insert_packet_with_timing.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/insert_packet_with_timing.cc
@@ -141,8 +141,10 @@ class InsertPacketWithTiming {
// Is it time to pull audio?
if (time_to_playout_audio_ms_ == 0) {
time_to_playout_audio_ms_ = kPlayoutPeriodMs;
+ bool muted;
receive_acm_->PlayoutData10Ms(static_cast<int>(FLAGS_output_fs_hz),
- &frame_);
+ &frame_, &muted);
+ ASSERT_FALSE(muted);
fwrite(frame_.data_, sizeof(frame_.data_[0]),
frame_.samples_per_channel_ * frame_.num_channels_, pcm_out_fid_);
*action |= kAudioPlayedOut;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/opus_test.cc b/chromium/third_party/webrtc/modules/audio_coding/test/opus_test.cc
index 104b5e587b2..5d250280f45 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/opus_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/opus_test.cc
@@ -336,7 +336,10 @@ void OpusTest::Run(TestPackStereo* channel, size_t channels, int bitrate,
}
// Run received side of ACM.
- ASSERT_EQ(0, acm_receiver_->PlayoutData10Ms(out_freq_hz_b, &audio_frame));
+ bool muted;
+ ASSERT_EQ(
+ 0, acm_receiver_->PlayoutData10Ms(out_freq_hz_b, &audio_frame, &muted));
+ ASSERT_FALSE(muted);
// Write output speech to file.
out_file_.Write10MsData(
diff --git a/chromium/third_party/webrtc/modules/audio_coding/test/target_delay_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/test/target_delay_unittest.cc
index 99c1c2da1ee..5de5bf262b2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/test/target_delay_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/test/target_delay_unittest.cc
@@ -150,8 +150,10 @@ class TargetDelayTest : public ::testing::Test {
// Pull audio equivalent to the amount of audio in one RTP packet.
void Pull() {
AudioFrame frame;
+ bool muted;
for (int k = 0; k < kNum10msPerFrame; ++k) { // Pull one frame.
- ASSERT_EQ(0, acm_->PlayoutData10Ms(-1, &frame));
+ ASSERT_EQ(0, acm_->PlayoutData10Ms(-1, &frame, &muted));
+ ASSERT_FALSE(muted);
// Had to use ASSERT_TRUE, ASSERT_EQ generated error.
ASSERT_TRUE(kSampleRateHz == frame.sample_rate_hz_);
ASSERT_EQ(1u, frame.num_channels_);
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h b/chromium/third_party/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h
index 5d58f42435e..e1c5aedc887 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/include/audio_conference_mixer_defines.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INCLUDE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INCLUDE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
+#include "webrtc/base/checks.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/typedefs.h"
@@ -25,8 +26,34 @@ public:
// audio every time it's called.
//
// If it returns -1, the frame will not be added to the mix.
+ //
+ // NOTE: This function should not be called. It will remain for a short
+ // time so that subclasses can override it without getting warnings.
+ // TODO(henrik.lundin) Remove this function.
virtual int32_t GetAudioFrame(int32_t id,
- AudioFrame* audioFrame) = 0;
+ AudioFrame* audioFrame) {
+ RTC_CHECK(false);
+ return -1;
+ }
+
+
+ // The implementation of GetAudioFrameWithMuted should update audio_frame
+ // with new audio every time it's called. The return value will be
+ // interpreted as follows.
+ enum class AudioFrameInfo {
+ kNormal, // The samples in audio_frame are valid and should be used.
+ kMuted, // The samples in audio_frame should not be used, but should be
+ // implicitly interpreted as zero. Other fields in audio_frame
+ // may be read and should contain meaningful values.
+ kError // audio_frame will not be used.
+ };
+
+ virtual AudioFrameInfo GetAudioFrameWithMuted(int32_t id,
+ AudioFrame* audio_frame) {
+ return GetAudioFrame(id, audio_frame) == -1 ?
+ AudioFrameInfo::kError :
+ AudioFrameInfo::kNormal;
+ }
// Returns true if the participant was mixed this mix iteration.
bool IsMixed() const;
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
index 1d7602533e4..dce3d0b5451 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
@@ -19,12 +19,15 @@
namespace webrtc {
namespace {
-struct ParticipantFramePair {
+struct ParticipantFrameStruct {
+ ParticipantFrameStruct(MixerParticipant* p, AudioFrame* a, bool m)
+ : participant(p), audioFrame(a), muted(m) {}
MixerParticipant* participant;
AudioFrame* audioFrame;
+ bool muted;
};
-typedef std::list<ParticipantFramePair*> ParticipantFramePairList;
+typedef std::list<ParticipantFrameStruct*> ParticipantFrameStructList;
// Mix |frame| into |mixed_frame|, with saturation protection and upmixing.
// These effects are applied to |frame| itself prior to mixing. Assumes that
@@ -55,7 +58,7 @@ size_t MaxNumChannels(const AudioFrameList* list) {
for (AudioFrameList::const_iterator iter = list->begin();
iter != list->end();
++iter) {
- max_num_channels = std::max(max_num_channels, (*iter)->num_channels_);
+ max_num_channels = std::max(max_num_channels, (*iter).frame->num_channels_);
}
return max_num_channels;
}
@@ -529,8 +532,8 @@ void AudioConferenceMixerImpl::UpdateToMix(
AudioFrameList activeList;
// Struct needed by the passive lists to keep track of which AudioFrame
// belongs to which MixerParticipant.
- ParticipantFramePairList passiveWasNotMixedList;
- ParticipantFramePairList passiveWasMixedList;
+ ParticipantFrameStructList passiveWasNotMixedList;
+ ParticipantFrameStructList passiveWasMixedList;
for (MixerParticipantList::const_iterator participant =
_participantList.begin(); participant != _participantList.end();
++participant) {
@@ -552,12 +555,14 @@ void AudioConferenceMixerImpl::UpdateToMix(
}
audioFrame->sample_rate_hz_ = _outputFrequency;
- if((*participant)->GetAudioFrame(_id, audioFrame) != 0) {
+ auto ret = (*participant)->GetAudioFrameWithMuted(_id, audioFrame);
+ if (ret == MixerParticipant::AudioFrameInfo::kError) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
- "failed to GetAudioFrame() from participant");
+ "failed to GetAudioFrameWithMuted() from participant");
_audioFramePool->PushMemory(audioFrame);
continue;
}
+ const bool muted = (ret == MixerParticipant::AudioFrameInfo::kMuted);
if (_participantList.size() != 1) {
// TODO(wu): Issue 3390, add support for multiple participants case.
audioFrame->ntp_time_ms_ = -1;
@@ -573,7 +578,7 @@ void AudioConferenceMixerImpl::UpdateToMix(
}
if(audioFrame->vad_activity_ == AudioFrame::kVadActive) {
- if(!wasMixed) {
+ if(!wasMixed && !muted) {
RampIn(*audioFrame);
}
@@ -581,13 +586,15 @@ void AudioConferenceMixerImpl::UpdateToMix(
// There are already more active participants than should be
// mixed. Only keep the ones with the highest energy.
AudioFrameList::iterator replaceItem;
- uint32_t lowestEnergy = CalculateEnergy(*audioFrame);
+ uint32_t lowestEnergy =
+ muted ? 0 : CalculateEnergy(*audioFrame);
bool found_replace_item = false;
for (AudioFrameList::iterator iter = activeList.begin();
iter != activeList.end();
++iter) {
- const uint32_t energy = CalculateEnergy(**iter);
+ const uint32_t energy =
+ muted ? 0 : CalculateEnergy(*iter->frame);
if(energy < lowestEnergy) {
replaceItem = iter;
lowestEnergy = energy;
@@ -595,11 +602,12 @@ void AudioConferenceMixerImpl::UpdateToMix(
}
}
if(found_replace_item) {
- AudioFrame* replaceFrame = *replaceItem;
+ RTC_DCHECK(!muted); // Cannot replace with a muted frame.
+ FrameAndMuteInfo replaceFrame = *replaceItem;
bool replaceWasMixed = false;
std::map<int, MixerParticipant*>::const_iterator it =
- mixParticipantList->find(replaceFrame->id_);
+ mixParticipantList->find(replaceFrame.frame->id_);
// When a frame is pushed to |activeList| it is also pushed
// to mixParticipantList with the frame's id. This means
@@ -607,26 +615,31 @@ void AudioConferenceMixerImpl::UpdateToMix(
assert(it != mixParticipantList->end());
replaceWasMixed = it->second->_mixHistory->WasMixed();
- mixParticipantList->erase(replaceFrame->id_);
+ mixParticipantList->erase(replaceFrame.frame->id_);
activeList.erase(replaceItem);
- activeList.push_front(audioFrame);
+ activeList.push_front(FrameAndMuteInfo(audioFrame, muted));
(*mixParticipantList)[audioFrame->id_] = *participant;
assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
if (replaceWasMixed) {
- RampOut(*replaceFrame);
+ if (!replaceFrame.muted) {
+ RampOut(*replaceFrame.frame);
+ }
rampOutList->push_back(replaceFrame);
assert(rampOutList->size() <=
kMaximumAmountOfMixedParticipants);
} else {
- _audioFramePool->PushMemory(replaceFrame);
+ _audioFramePool->PushMemory(replaceFrame.frame);
}
} else {
if(wasMixed) {
- RampOut(*audioFrame);
- rampOutList->push_back(audioFrame);
+ if (!muted) {
+ RampOut(*audioFrame);
+ }
+ rampOutList->push_back(FrameAndMuteInfo(audioFrame,
+ muted));
assert(rampOutList->size() <=
kMaximumAmountOfMixedParticipants);
} else {
@@ -634,23 +647,23 @@ void AudioConferenceMixerImpl::UpdateToMix(
}
}
} else {
- activeList.push_front(audioFrame);
+ activeList.push_front(FrameAndMuteInfo(audioFrame, muted));
(*mixParticipantList)[audioFrame->id_] = *participant;
assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
}
} else {
if(wasMixed) {
- ParticipantFramePair* pair = new ParticipantFramePair;
- pair->audioFrame = audioFrame;
- pair->participant = *participant;
- passiveWasMixedList.push_back(pair);
+ ParticipantFrameStruct* part_struct =
+ new ParticipantFrameStruct(*participant, audioFrame, muted);
+ passiveWasMixedList.push_back(part_struct);
} else if(mustAddToPassiveList) {
- RampIn(*audioFrame);
- ParticipantFramePair* pair = new ParticipantFramePair;
- pair->audioFrame = audioFrame;
- pair->participant = *participant;
- passiveWasNotMixedList.push_back(pair);
+ if (!muted) {
+ RampIn(*audioFrame);
+ }
+ ParticipantFrameStruct* part_struct =
+ new ParticipantFrameStruct(*participant, audioFrame, muted);
+ passiveWasNotMixedList.push_back(part_struct);
} else {
_audioFramePool->PushMemory(audioFrame);
}
@@ -668,11 +681,12 @@ void AudioConferenceMixerImpl::UpdateToMix(
// Always mix a constant number of AudioFrames. If there aren't enough
// active participants mix passive ones. Starting with those that was mixed
// last iteration.
- for (ParticipantFramePairList::const_iterator
+ for (ParticipantFrameStructList::const_iterator
iter = passiveWasMixedList.begin(); iter != passiveWasMixedList.end();
++iter) {
if(mixList->size() < *maxAudioFrameCounter + mixListStartSize) {
- mixList->push_back((*iter)->audioFrame);
+ mixList->push_back(FrameAndMuteInfo((*iter)->audioFrame,
+ (*iter)->muted));
(*mixParticipantList)[(*iter)->audioFrame->id_] =
(*iter)->participant;
assert(mixParticipantList->size() <=
@@ -683,12 +697,13 @@ void AudioConferenceMixerImpl::UpdateToMix(
delete *iter;
}
// And finally the ones that have not been mixed for a while.
- for (ParticipantFramePairList::const_iterator iter =
+ for (ParticipantFrameStructList::const_iterator iter =
passiveWasNotMixedList.begin();
iter != passiveWasNotMixedList.end();
++iter) {
if(mixList->size() < *maxAudioFrameCounter + mixListStartSize) {
- mixList->push_back((*iter)->audioFrame);
+ mixList->push_back(FrameAndMuteInfo((*iter)->audioFrame,
+ (*iter)->muted));
(*mixParticipantList)[(*iter)->audioFrame->id_] =
(*iter)->participant;
assert(mixParticipantList->size() <=
@@ -706,10 +721,10 @@ void AudioConferenceMixerImpl::GetAdditionalAudio(
AudioFrameList* additionalFramesList) const {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"GetAdditionalAudio(additionalFramesList)");
- // The GetAudioFrame() callback may result in the participant being removed
- // from additionalParticipantList_. If that happens it will invalidate any
- // iterators. Create a copy of the participants list such that the list of
- // participants can be traversed safely.
+ // The GetAudioFrameWithMuted() callback may result in the participant being
+ // removed from additionalParticipantList_. If that happens it will
+ // invalidate any iterators. Create a copy of the participants list such
+ // that the list of participants can be traversed safely.
MixerParticipantList additionalParticipantList;
additionalParticipantList.insert(additionalParticipantList.begin(),
_additionalParticipantList.begin(),
@@ -727,9 +742,10 @@ void AudioConferenceMixerImpl::GetAdditionalAudio(
return;
}
audioFrame->sample_rate_hz_ = _outputFrequency;
- if((*participant)->GetAudioFrame(_id, audioFrame) != 0) {
+ auto ret = (*participant)->GetAudioFrameWithMuted(_id, audioFrame);
+ if (ret == MixerParticipant::AudioFrameInfo::kError) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
- "failed to GetAudioFrame() from participant");
+ "failed to GetAudioFrameWithMuted() from participant");
_audioFramePool->PushMemory(audioFrame);
continue;
}
@@ -738,7 +754,8 @@ void AudioConferenceMixerImpl::GetAdditionalAudio(
_audioFramePool->PushMemory(audioFrame);
continue;
}
- additionalFramesList->push_back(audioFrame);
+ additionalFramesList->push_back(FrameAndMuteInfo(
+ audioFrame, ret == MixerParticipant::AudioFrameInfo::kMuted));
}
}
@@ -775,7 +792,7 @@ void AudioConferenceMixerImpl::ClearAudioFrameList(
for (AudioFrameList::iterator iter = audioFrameList->begin();
iter != audioFrameList->end();
++iter) {
- _audioFramePool->PushMemory(*iter);
+ _audioFramePool->PushMemory(iter->frame);
}
audioFrameList->clear();
}
@@ -834,8 +851,9 @@ int32_t AudioConferenceMixerImpl::MixFromList(
uint32_t position = 0;
if (_numMixedParticipants == 1) {
- mixedAudio->timestamp_ = audioFrameList.front()->timestamp_;
- mixedAudio->elapsed_time_ms_ = audioFrameList.front()->elapsed_time_ms_;
+ mixedAudio->timestamp_ = audioFrameList.front().frame->timestamp_;
+ mixedAudio->elapsed_time_ms_ =
+ audioFrameList.front().frame->elapsed_time_ms_;
} else {
// TODO(wu): Issue 3390.
// Audio frame timestamp is only supported in one channel case.
@@ -857,7 +875,9 @@ int32_t AudioConferenceMixerImpl::MixFromList(
assert(false);
position = 0;
}
- MixFrames(mixedAudio, (*iter), use_limiter_);
+ if (!iter->muted) {
+ MixFrames(mixedAudio, iter->frame, use_limiter_);
+ }
position++;
}
@@ -877,7 +897,9 @@ int32_t AudioConferenceMixerImpl::MixAnonomouslyFromList(
for (AudioFrameList::const_iterator iter = audioFrameList.begin();
iter != audioFrameList.end();
++iter) {
- MixFrames(mixedAudio, *iter, use_limiter_);
+ if (!iter->muted) {
+ MixFrames(mixedAudio, iter->frame, use_limiter_);
+ }
}
return 0;
}
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
index 22c6a235351..e726c08f75a 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
@@ -25,7 +25,13 @@ namespace webrtc {
class AudioProcessing;
class CriticalSectionWrapper;
-typedef std::list<AudioFrame*> AudioFrameList;
+struct FrameAndMuteInfo {
+ FrameAndMuteInfo(AudioFrame* f, bool m) : frame(f), muted(m) {}
+ AudioFrame* frame;
+ bool muted;
+};
+
+typedef std::list<FrameAndMuteInfo> AudioFrameList;
typedef std::list<MixerParticipant*> MixerParticipantList;
// Cheshire cat implementation of MixerParticipant's non virtual functions.
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.cc b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.cc
index 19f5bd8848a..30b2933b61c 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.cc
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/audio_conference_mixer/source/time_scheduler.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
@@ -17,7 +18,7 @@ TimeScheduler::TimeScheduler(const int64_t periodicityInMs)
_isStarted(false),
_lastPeriodMark(),
_periodicityInMs(periodicityInMs),
- _periodicityInTicks(TickTime::MillisecondsToTicks(periodicityInMs)),
+ _periodicityInTicks(periodicityInMs * rtc::kNumNanosecsPerMillisec),
_missedPeriods(0)
{
}
@@ -33,7 +34,7 @@ int32_t TimeScheduler::UpdateScheduler()
if(!_isStarted)
{
_isStarted = true;
- _lastPeriodMark = TickTime::Now();
+ _lastPeriodMark = rtc::TimeNanos();
return 0;
}
// Don't perform any calculations until the debt of pending periods have
@@ -45,9 +46,9 @@ int32_t TimeScheduler::UpdateScheduler()
}
// Calculate the time that has past since previous call to this function.
- TickTime tickNow = TickTime::Now();
- TickInterval amassedTicks = tickNow - _lastPeriodMark;
- int64_t amassedMs = amassedTicks.Milliseconds();
+ int64_t tickNow = rtc::TimeNanos();
+ int64_t amassedTicks = tickNow - _lastPeriodMark;
+ int64_t amassedMs = amassedTicks / rtc::kNumNanosecsPerMillisec;
// Calculate the number of periods the time that has passed correspond to.
int64_t periodsToClaim = amassedMs / _periodicityInMs;
@@ -89,10 +90,10 @@ int32_t TimeScheduler::TimeToNextUpdate(
// Calculate the time (in ms) that has past since last call to
// UpdateScheduler()
- TickTime tickNow = TickTime::Now();
- TickInterval ticksSinceLastUpdate = tickNow - _lastPeriodMark;
+ int64_t tickNow = rtc::TimeNanos();
+ int64_t ticksSinceLastUpdate = tickNow - _lastPeriodMark;
const int64_t millisecondsSinceLastUpdate =
- ticksSinceLastUpdate.Milliseconds();
+ ticksSinceLastUpdate / rtc::kNumNanosecsPerMillisec;
updateTimeInMS = _periodicityInMs - millisecondsSinceLastUpdate;
updateTimeInMS = (updateTimeInMS < 0) ? 0 : updateTimeInMS;
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.h b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.h
index 09d0caa66ae..d1897fa1001 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.h
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/time_scheduler.h
@@ -15,8 +15,6 @@
#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_TIME_SCHEDULER_H_
#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_TIME_SCHEDULER_H_
-#include "webrtc/system_wrappers/include/tick_util.h"
-
namespace webrtc {
class CriticalSectionWrapper;
class TimeScheduler
@@ -36,7 +34,7 @@ private:
CriticalSectionWrapper* _crit;
bool _isStarted;
- TickTime _lastPeriodMark;
+ int64_t _lastPeriodMark; // In ns
int64_t _periodicityInMs;
int64_t _periodicityInTicks;
diff --git a/chromium/third_party/webrtc/modules/audio_decoder_unittests_apk.isolate b/chromium/third_party/webrtc/modules/audio_decoder_unittests_apk.isolate
new file mode 100644
index 00000000000..427559dcb05
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_decoder_unittests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'audio_decoder_unittests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_audio_decoder_unittests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/audio_decoder_unittests_apk/',
+ '<(PRODUCT_DIR)/bin/run_audio_decoder_unittests',
+ 'audio_decoder_unittests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/modules/audio_device/BUILD.gn b/chromium/third_party/webrtc/modules/audio_device/BUILD.gn
index 198b67d5200..d743a6a58f7 100644
--- a/chromium/third_party/webrtc/modules/audio_device/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_device/BUILD.gn
@@ -138,7 +138,7 @@ source_set("audio_device") {
]
}
if (is_ios) {
- deps += [ "../../base:rtc_base_objc" ]
+ deps += [ "../../sdk:rtc_sdk_common_objc" ]
sources += [
"ios/audio_device_ios.h",
"ios/audio_device_ios.mm",
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/audio_device_unittest.cc b/chromium/third_party/webrtc/modules/audio_device/android/audio_device_unittest.cc
index 7655c820ada..48944f57726 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/audio_device_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/audio_device_unittest.cc
@@ -562,7 +562,7 @@ class AudioDeviceTest : public ::testing::Test {
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDevice(
AudioDeviceModule::AudioLayer audio_layer) {
rtc::scoped_refptr<AudioDeviceModule> module(
- AudioDeviceModuleImpl::Create(0, audio_layer));
+ AudioDeviceModule::Create(0, audio_layer));
return module;
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/audio_manager.cc b/chromium/third_party/webrtc/modules/audio_device/android/audio_manager.cc
index 9174a5b7ab1..01e5d5fe4f2 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/audio_manager.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/audio_manager.cc
@@ -66,7 +66,7 @@ bool AudioManager::JavaAudioManager::IsDeviceBlacklistedForOpenSLESUsage() {
// AudioManager implementation
AudioManager::AudioManager()
- : j_environment_(rtc::ScopedToUnique(JVM::GetInstance()->environment())),
+ : j_environment_(JVM::GetInstance()->environment()),
audio_layer_(AudioDeviceModule::kPlatformDefaultAudio),
initialized_(false),
hardware_aec_(false),
@@ -80,14 +80,14 @@ AudioManager::AudioManager()
{"nativeCacheAudioParameters",
"(IIZZZZIIJ)V",
reinterpret_cast<void*>(&webrtc::AudioManager::CacheAudioParameters)}};
- j_native_registration_ = rtc::ScopedToUnique(j_environment_->RegisterNatives(
- "org/webrtc/voiceengine/WebRtcAudioManager",
- native_methods, arraysize(native_methods)));
+ j_native_registration_ = j_environment_->RegisterNatives(
+ "org/webrtc/voiceengine/WebRtcAudioManager", native_methods,
+ arraysize(native_methods));
j_audio_manager_.reset(new JavaAudioManager(
j_native_registration_.get(),
- rtc::ScopedToUnique(j_native_registration_->NewObject(
+ j_native_registration_->NewObject(
"<init>", "(Landroid/content/Context;J)V",
- JVM::GetInstance()->context(), PointerTojlong(this)))));
+ JVM::GetInstance()->context(), PointerTojlong(this))));
}
AudioManager::~AudioManager() {
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/build_info.cc b/chromium/third_party/webrtc/modules/audio_device/android/build_info.cc
index c6cecc96c5b..455c12f7fd0 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/build_info.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/build_info.cc
@@ -15,10 +15,9 @@
namespace webrtc {
BuildInfo::BuildInfo()
- : j_environment_(rtc::ScopedToUnique(JVM::GetInstance()->environment())),
- j_build_info_(JVM::GetInstance()->GetClass(
- "org/webrtc/voiceengine/BuildInfo")) {
-}
+ : j_environment_(JVM::GetInstance()->environment()),
+ j_build_info_(
+ JVM::GetInstance()->GetClass("org/webrtc/voiceengine/BuildInfo")) {}
std::string BuildInfo::GetStringFromJava(const char* name) {
jmethodID id = j_build_info_.GetStaticMethodId(name, "()Ljava/lang/String;");
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/opensles_player.h b/chromium/third_party/webrtc/modules/audio_device/android/opensles_player.h
index 084546dbf7b..4058ff99948 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/opensles_player.h
+++ b/chromium/third_party/webrtc/modules/audio_device/android/opensles_player.h
@@ -132,7 +132,7 @@ class OpenSLESPlayer {
const AudioParameters audio_parameters_;
// Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
- // AudioDeviceModuleImpl class and called by AudioDeviceModuleImpl::Create().
+ // AudioDeviceModuleImpl class and called by AudioDeviceModule::Create().
AudioDeviceBuffer* audio_device_buffer_;
bool initialized_;
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi b/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi
index abefcdb4507..a7c57896585 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi
@@ -167,7 +167,10 @@
}],
['OS=="ios"', {
'dependencies': [
- '<(webrtc_root)/base/base.gyp:rtc_base_objc',
+ '<(webrtc_root)/sdk/sdk.gyp:rtc_sdk_common_objc',
+ ],
+ 'export_dependent_settings': [
+ '<(webrtc_root)/sdk/sdk.gyp:rtc_sdk_common_objc',
],
'sources': [
'ios/audio_device_ios.h',
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc
index d881bcd40c9..7abc94d5f77 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc
@@ -9,10 +9,11 @@
*/
#include "webrtc/base/refcount.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/base/trace_event.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_device/audio_device_config.h"
#include "webrtc/modules/audio_device/audio_device_impl.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include <assert.h>
#include <string.h>
@@ -75,12 +76,12 @@ namespace webrtc {
// AudioDeviceModule::Create()
// ----------------------------------------------------------------------------
-rtc::scoped_refptr<AudioDeviceModule> AudioDeviceModuleImpl::Create(
+rtc::scoped_refptr<AudioDeviceModule> AudioDeviceModule::Create(
const int32_t id,
- const AudioLayer audioLayer) {
+ const AudioLayer audio_layer) {
// Create the generic ref counted (platform independent) implementation.
rtc::scoped_refptr<AudioDeviceModuleImpl> audioDevice(
- new rtc::RefCountedObject<AudioDeviceModuleImpl>(id, audioLayer));
+ new rtc::RefCountedObject<AudioDeviceModuleImpl>(id, audio_layer));
// Ensure that the current platform is supported.
if (audioDevice->CheckPlatform() == -1)
@@ -122,7 +123,7 @@ AudioDeviceModuleImpl::AudioDeviceModuleImpl(const int32_t id, const AudioLayer
_ptrAudioDevice(NULL),
_id(id),
_platformAudioLayer(audioLayer),
- _lastProcessTime(TickTime::MillisecondTimestamp()),
+ _lastProcessTime(rtc::TimeMillis()),
_platformType(kPlatformNotSupported),
_initialized(false),
_lastError(kAdmErrNone)
@@ -406,7 +407,7 @@ AudioDeviceModuleImpl::~AudioDeviceModuleImpl()
int64_t AudioDeviceModuleImpl::TimeUntilNextProcess()
{
- int64_t now = TickTime::MillisecondTimestamp();
+ int64_t now = rtc::TimeMillis();
int64_t deltaProcess = kAdmMaxIdleTimeProcess - (now - _lastProcessTime);
return deltaProcess;
}
@@ -421,7 +422,7 @@ int64_t AudioDeviceModuleImpl::TimeUntilNextProcess()
void AudioDeviceModuleImpl::Process()
{
- _lastProcessTime = TickTime::MillisecondTimestamp();
+ _lastProcessTime = rtc::TimeMillis();
// kPlayoutWarning
if (_ptrAudioDevice->PlayoutWarning())
@@ -1454,6 +1455,7 @@ int32_t AudioDeviceModuleImpl::InitPlayout()
int32_t AudioDeviceModuleImpl::InitRecording()
{
+ TRACE_EVENT0("webrtc", "AudioDeviceModuleImpl::InitRecording");
CHECK_INITIALIZED();
_audioDeviceBuffer.InitRecording();
return (_ptrAudioDevice->InitRecording());
@@ -1515,6 +1517,7 @@ bool AudioDeviceModuleImpl::Playing() const
int32_t AudioDeviceModuleImpl::StartRecording()
{
+ TRACE_EVENT0("webrtc", "AudioDeviceModuleImpl::StartRecording");
CHECK_INITIALIZED();
return (_ptrAudioDevice->StartRecording());
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h
index a112e3e3bf6..044ec4ed6b5 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h
@@ -16,7 +16,6 @@
#include <memory>
#include "webrtc/base/checks.h"
-#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/audio_device/audio_device_buffer.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
@@ -48,11 +47,6 @@ class AudioDeviceModuleImpl : public AudioDeviceModule {
int64_t TimeUntilNextProcess() override;
void Process() override;
- // Factory methods (resource allocation/deallocation)
- static rtc::scoped_refptr<AudioDeviceModule> Create(
- const int32_t id,
- const AudioLayer audioLayer = kPlatformDefaultAudio);
-
// Retrieve the currently utilized audio layer
int32_t ActiveAudioLayer(AudioLayer* audioLayer) const override;
diff --git a/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc
index 2a6ac1ffe9c..7c6d16f129d 100644
--- a/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/audio_device/dummy/file_audio_device_factory.h"
+#include <cstdlib>
#include <cstring>
#include "webrtc/modules/audio_device/dummy/file_audio_device.h"
@@ -26,7 +27,7 @@ FileAudioDevice* FileAudioDeviceFactory::CreateFileAudioDevice(
if (!_isConfigured) {
printf("Was compiled with WEBRTC_DUMMY_AUDIO_PLAY_STATIC_FILE "
"but did not set input/output files to use. Bailing out.\n");
- exit(1);
+ std::exit(1);
}
return new FileAudioDevice(id, _inputAudioFilename, _outputAudioFilename);
}
@@ -45,7 +46,7 @@ void FileAudioDeviceFactory::SetFilenamesToUse(
// Sanity: must be compiled with the right define to run this.
printf("Trying to use dummy file devices, but is not compiled "
"with WEBRTC_DUMMY_FILE_DEVICES. Bailing out.\n");
- exit(1);
+ std::exit(1);
#endif
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h b/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h
index d8df05cec89..8457a6b7718 100644
--- a/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h
+++ b/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h
@@ -11,6 +11,7 @@
#ifndef MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_H_
#define MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_H_
+#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/audio_device/include/audio_device_defines.h"
#include "webrtc/modules/include/module.h"
@@ -51,6 +52,11 @@ class AudioDeviceModule : public RefCountedModule {
};
public:
+ // Create an ADM.
+ static rtc::scoped_refptr<AudioDeviceModule> Create(
+ const int32_t id,
+ const AudioLayer audio_layer);
+
// Retrieve the currently utilized audio layer
virtual int32_t ActiveAudioLayer(AudioLayer* audioLayer) const = 0;
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h
index bfa6372203e..9616a285c0a 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h
@@ -13,8 +13,7 @@
#include <memory>
-#include "webrtc/base/asyncinvoker.h"
-#include "webrtc/base/objc/RTCMacros.h"
+#include "WebRTC/RTCMacros.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
@@ -42,7 +41,8 @@ class FineAudioBuffer;
// same thread.
class AudioDeviceIOS : public AudioDeviceGeneric,
public AudioSessionObserver,
- public VoiceProcessingAudioUnitObserver {
+ public VoiceProcessingAudioUnitObserver,
+ public rtc::MessageHandler {
public:
AudioDeviceIOS();
~AudioDeviceIOS();
@@ -162,7 +162,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
void OnInterruptionBegin() override;
void OnInterruptionEnd() override;
void OnValidRouteChange() override;
- void OnConfiguredForWebRTC() override;
+ void OnCanPlayOrRecordChange(bool can_play_or_record) override;
// VoiceProcessingAudioUnitObserver methods.
OSStatus OnDeliverRecordedData(AudioUnitRenderActionFlags* flags,
@@ -176,12 +176,16 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
UInt32 num_frames,
AudioBufferList* io_data) override;
+ // Handles messages from posts.
+ void OnMessage(rtc::Message *msg) override;
+
private:
// Called by the relevant AudioSessionObserver methods on |thread_|.
void HandleInterruptionBegin();
void HandleInterruptionEnd();
void HandleValidRouteChange();
- void HandleConfiguredForWebRTC();
+ void HandleCanPlayOrRecordChange(bool can_play_or_record);
+ void HandleSampleRateChange(float sample_rate);
// Uses current |playout_parameters_| and |record_parameters_| to inform the
// audio device buffer (ADB) about our internal audio parameters.
@@ -197,9 +201,13 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
// Creates the audio unit.
bool CreateAudioUnit();
- // Restarts active audio streams using a new sample rate. Required when e.g.
- // a BT headset is enabled or disabled.
- bool RestartAudioUnit(float sample_rate);
+ // Updates the audio unit state based on current state.
+ void UpdateAudioUnit(bool can_play_or_record);
+
+ // Configures the audio session for WebRTC.
+ void ConfigureAudioSession();
+ // Unconfigures the audio session.
+ void UnconfigureAudioSession();
// Activates our audio session, creates and initializes the voice-processing
// audio unit and verifies that we got the preferred native audio parameters.
@@ -213,11 +221,9 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
rtc::ThreadChecker thread_checker_;
// Thread that this object is created on.
rtc::Thread* thread_;
- // Invoker used to execute methods on thread_.
- std::unique_ptr<rtc::AsyncInvoker> async_invoker_;
// Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
- // AudioDeviceModuleImpl class and called by AudioDeviceModuleImpl::Create().
+ // AudioDeviceModuleImpl class and called by AudioDeviceModule::Create().
// The AudioDeviceBuffer is a member of the AudioDeviceModuleImpl instance
// and therefore outlives this object.
AudioDeviceBuffer* audio_device_buffer_;
@@ -284,6 +290,9 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
// Audio interruption observer instance.
RTCAudioSessionDelegateAdapter* audio_session_observer_;
+
+ // Set to true if we've activated the audio session.
+ bool has_configured_session_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.mm b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.mm
index f6c339fed00..8f6fb4d9b63 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.mm
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.mm
@@ -27,7 +27,7 @@
#include "webrtc/modules/audio_device/fine_audio_buffer.h"
#include "webrtc/modules/utility/include/helpers_ios.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h"
@@ -61,6 +61,13 @@ namespace webrtc {
const UInt16 kFixedPlayoutDelayEstimate = 30;
const UInt16 kFixedRecordDelayEstimate = 30;
+enum AudioDeviceMessageType : uint32_t {
+ kMessageTypeInterruptionBegin,
+ kMessageTypeInterruptionEnd,
+ kMessageTypeValidRouteChange,
+ kMessageTypeCanPlayOrRecordChange,
+};
+
using ios::CheckAndLogError;
#if !defined(NDEBUG)
@@ -85,15 +92,15 @@ static void LogDeviceInfo() {
#endif // !defined(NDEBUG)
AudioDeviceIOS::AudioDeviceIOS()
- : async_invoker_(new rtc::AsyncInvoker()),
- audio_device_buffer_(nullptr),
+ : audio_device_buffer_(nullptr),
audio_unit_(nullptr),
recording_(0),
playing_(0),
initialized_(false),
rec_is_initialized_(false),
play_is_initialized_(false),
- is_interrupted_(false) {
+ is_interrupted_(false),
+ has_configured_session_(false) {
LOGI() << "ctor" << ios::GetCurrentThreadDescription();
thread_ = rtc::Thread::Current();
audio_session_observer_ =
@@ -191,6 +198,7 @@ int32_t AudioDeviceIOS::StartPlayout() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(play_is_initialized_);
RTC_DCHECK(!playing_);
+ RTC_DCHECK(audio_unit_);
if (fine_audio_buffer_) {
fine_audio_buffer_->ResetPlayout();
}
@@ -209,7 +217,11 @@ int32_t AudioDeviceIOS::StartPlayout() {
int32_t AudioDeviceIOS::StopPlayout() {
LOGI() << "StopPlayout";
RTC_DCHECK(thread_checker_.CalledOnValidThread());
- if (!play_is_initialized_ || !playing_) {
+ if (!play_is_initialized_) {
+ return 0;
+ }
+ if (!playing_) {
+ play_is_initialized_ = false;
return 0;
}
if (!recording_) {
@@ -225,6 +237,7 @@ int32_t AudioDeviceIOS::StartRecording() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(rec_is_initialized_);
RTC_DCHECK(!recording_);
+ RTC_DCHECK(audio_unit_);
if (fine_audio_buffer_) {
fine_audio_buffer_->ResetRecord();
}
@@ -243,7 +256,11 @@ int32_t AudioDeviceIOS::StartRecording() {
int32_t AudioDeviceIOS::StopRecording() {
LOGI() << "StopRecording";
RTC_DCHECK(thread_checker_.CalledOnValidThread());
- if (!rec_is_initialized_ || !recording_) {
+ if (!rec_is_initialized_) {
+ return 0;
+ }
+ if (!recording_) {
+ rec_is_initialized_ = false;
return 0;
}
if (!playing_) {
@@ -318,51 +335,24 @@ int AudioDeviceIOS::GetRecordAudioParameters(AudioParameters* params) const {
}
void AudioDeviceIOS::OnInterruptionBegin() {
- RTC_DCHECK(async_invoker_);
RTC_DCHECK(thread_);
- if (thread_->IsCurrent()) {
- HandleInterruptionBegin();
- return;
- }
- async_invoker_->AsyncInvoke<void>(
- thread_,
- rtc::Bind(&webrtc::AudioDeviceIOS::HandleInterruptionBegin, this));
+ thread_->Post(this, kMessageTypeInterruptionBegin);
}
void AudioDeviceIOS::OnInterruptionEnd() {
- RTC_DCHECK(async_invoker_);
RTC_DCHECK(thread_);
- if (thread_->IsCurrent()) {
- HandleInterruptionEnd();
- return;
- }
- async_invoker_->AsyncInvoke<void>(
- thread_,
- rtc::Bind(&webrtc::AudioDeviceIOS::HandleInterruptionEnd, this));
+ thread_->Post(this, kMessageTypeInterruptionEnd);
}
void AudioDeviceIOS::OnValidRouteChange() {
- RTC_DCHECK(async_invoker_);
RTC_DCHECK(thread_);
- if (thread_->IsCurrent()) {
- HandleValidRouteChange();
- return;
- }
- async_invoker_->AsyncInvoke<void>(
- thread_,
- rtc::Bind(&webrtc::AudioDeviceIOS::HandleValidRouteChange, this));
+ thread_->Post(this, kMessageTypeValidRouteChange);
}
-void AudioDeviceIOS::OnConfiguredForWebRTC() {
- RTC_DCHECK(async_invoker_);
+void AudioDeviceIOS::OnCanPlayOrRecordChange(bool can_play_or_record) {
RTC_DCHECK(thread_);
- if (thread_->IsCurrent()) {
- HandleValidRouteChange();
- return;
- }
- async_invoker_->AsyncInvoke<void>(
- thread_,
- rtc::Bind(&webrtc::AudioDeviceIOS::HandleConfiguredForWebRTC, this));
+ thread_->Post(this, kMessageTypeCanPlayOrRecordChange,
+ new rtc::TypedMessageData<bool>(can_play_or_record));
}
OSStatus AudioDeviceIOS::OnDeliverRecordedData(
@@ -385,6 +375,9 @@ OSStatus AudioDeviceIOS::OnDeliverRecordedData(
RTCLogWarning(@"Expected %u frames but got %u",
static_cast<unsigned int>(frames_per_buffer),
static_cast<unsigned int>(num_frames));
+
+ RTCAudioSession *session = [RTCAudioSession sharedInstance];
+ RTCLogWarning(@"Session:\n %@", session);
return result;
}
@@ -447,12 +440,36 @@ OSStatus AudioDeviceIOS::OnGetPlayoutData(AudioUnitRenderActionFlags* flags,
return noErr;
}
+void AudioDeviceIOS::OnMessage(rtc::Message *msg) {
+ switch (msg->message_id) {
+ case kMessageTypeInterruptionBegin:
+ HandleInterruptionBegin();
+ break;
+ case kMessageTypeInterruptionEnd:
+ HandleInterruptionEnd();
+ break;
+ case kMessageTypeValidRouteChange:
+ HandleValidRouteChange();
+ break;
+ case kMessageTypeCanPlayOrRecordChange: {
+ rtc::TypedMessageData<bool>* data =
+ static_cast<rtc::TypedMessageData<bool>*>(msg->pdata);
+ HandleCanPlayOrRecordChange(data->data());
+ delete data;
+ break;
+ }
+ }
+}
+
void AudioDeviceIOS::HandleInterruptionBegin() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
- RTCLog(@"Stopping the audio unit due to interruption begin.");
- if (!audio_unit_->Stop()) {
- RTCLogError(@"Failed to stop the audio unit.");
+ if (audio_unit_ &&
+ audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
+ RTCLog(@"Stopping the audio unit due to interruption begin.");
+ if (!audio_unit_->Stop()) {
+ RTCLogError(@"Failed to stop the audio unit for interruption begin.");
+ }
}
is_interrupted_ = true;
}
@@ -460,66 +477,95 @@ void AudioDeviceIOS::HandleInterruptionBegin() {
void AudioDeviceIOS::HandleInterruptionEnd() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
- RTCLog(@"Starting the audio unit due to interruption end.");
- if (!audio_unit_->Start()) {
- RTCLogError(@"Failed to start the audio unit.");
- }
is_interrupted_ = false;
+ RTCLog(@"Interruption ended. Updating audio unit state.");
+ UpdateAudioUnit([RTCAudioSession sharedInstance].canPlayOrRecord);
}
void AudioDeviceIOS::HandleValidRouteChange() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTCAudioSession* session = [RTCAudioSession sharedInstance];
+ HandleSampleRateChange(session.sampleRate);
+}
+
+void AudioDeviceIOS::HandleCanPlayOrRecordChange(bool can_play_or_record) {
+ RTCLog(@"Handling CanPlayOrRecord change to: %d", can_play_or_record);
+ UpdateAudioUnit(can_play_or_record);
+}
+
+void AudioDeviceIOS::HandleSampleRateChange(float sample_rate) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTCLog(@"Handling sample rate change to %f.", sample_rate);
+
// Don't do anything if we're interrupted.
if (is_interrupted_) {
+ RTCLog(@"Ignoring sample rate change to %f due to interruption.",
+ sample_rate);
return;
}
- // Only restart audio for a valid route change if the session sample rate
- // has changed.
- RTCAudioSession* session = [RTCAudioSession sharedInstance];
- const double current_sample_rate = playout_parameters_.sample_rate();
- const double session_sample_rate = session.sampleRate;
- if (current_sample_rate != session_sample_rate) {
- RTCLog(@"Route changed caused sample rate to change from %f to %f. "
- "Restarting audio unit.", current_sample_rate, session_sample_rate);
- if (!RestartAudioUnit(session_sample_rate)) {
- RTCLogError(@"Audio restart failed.");
- }
+ // If we don't have an audio unit yet, or the audio unit is uninitialized,
+ // there is no work to do.
+ if (!audio_unit_ ||
+ audio_unit_->GetState() < VoiceProcessingAudioUnit::kInitialized) {
+ return;
}
-}
-void AudioDeviceIOS::HandleConfiguredForWebRTC() {
- RTC_DCHECK(thread_checker_.CalledOnValidThread());
-
- // If we're not initialized we don't need to do anything. Audio unit will
- // be initialized on initialization.
- if (!rec_is_initialized_ && !play_is_initialized_)
+ // The audio unit is already initialized or started.
+ // Check to see if the sample rate or buffer size has changed.
+ RTCAudioSession* session = [RTCAudioSession sharedInstance];
+ const double session_sample_rate = session.sampleRate;
+ const NSTimeInterval session_buffer_duration = session.IOBufferDuration;
+ const size_t session_frames_per_buffer =
+ static_cast<size_t>(session_sample_rate * session_buffer_duration + .5);
+ const double current_sample_rate = playout_parameters_.sample_rate();
+ const size_t current_frames_per_buffer =
+ playout_parameters_.frames_per_buffer();
+ RTCLog(@"Handling playout sample rate change to: %f\n"
+ " Session sample rate: %f frames_per_buffer: %lu\n"
+ " ADM sample rate: %f frames_per_buffer: %lu",
+ sample_rate,
+ session_sample_rate, (unsigned long)session_frames_per_buffer,
+ current_sample_rate, (unsigned long)current_frames_per_buffer);;
+
+ // Sample rate and buffer size are the same, no work to do.
+ if (abs(current_sample_rate - session_sample_rate) <= DBL_EPSILON &&
+ current_frames_per_buffer == session_frames_per_buffer) {
return;
+ }
- // If we're initialized, we must have an audio unit.
- RTC_DCHECK(audio_unit_);
+ // We need to adjust our format and buffer sizes.
+ // The stream format is about to be changed and it requires that we first
+ // stop and uninitialize the audio unit to deallocate its resources.
+ RTCLog(@"Stopping and uninitializing audio unit to adjust buffers.");
+ bool restart_audio_unit = false;
+ if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
+ audio_unit_->Stop();
+ restart_audio_unit = true;
+ }
+ if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) {
+ audio_unit_->Uninitialize();
+ }
- // Use configured audio session's settings to set up audio device buffer.
- // TODO(tkchin): Use RTCAudioSessionConfiguration to pick up settings and
- // pass it along.
+ // Allocate new buffers given the new stream format.
SetupAudioBuffersForActiveAudioSession();
- // Initialize the audio unit. This will affect any existing audio playback.
- if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) {
- RTCLogError(@"Failed to initialize audio unit after configuration.");
+ // Initialize the audio unit again with the new sample rate.
+ RTC_DCHECK_EQ(playout_parameters_.sample_rate(), session_sample_rate);
+ if (!audio_unit_->Initialize(session_sample_rate)) {
+ RTCLogError(@"Failed to initialize the audio unit with sample rate: %f",
+ session_sample_rate);
return;
}
- // If we haven't started playing or recording there's nothing more to do.
- if (!playing_ && !recording_)
- return;
-
- // We are in a play or record state, start the audio unit.
- if (!audio_unit_->Start()) {
- RTCLogError(@"Failed to start audio unit after configuration.");
+ // Restart the audio unit if it was already running.
+ if (restart_audio_unit && !audio_unit_->Start()) {
+ RTCLogError(@"Failed to start audio unit with sample rate: %f",
+ session_sample_rate);
return;
}
+ RTCLog(@"Successfully handled sample rate change.");
}
void AudioDeviceIOS::UpdateAudioDeviceBuffer() {
@@ -597,6 +643,7 @@ void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
// at each input callback when calling AudioUnitRender().
const int data_byte_size = record_parameters_.GetBytesPerBuffer();
record_audio_buffer_.reset(new SInt8[data_byte_size]);
+ memset(record_audio_buffer_.get(), 0, data_byte_size);
audio_record_buffer_list_.mNumberBuffers = 1;
AudioBuffer* audio_buffer = &audio_record_buffer_list_.mBuffers[0];
audio_buffer->mNumberChannels = record_parameters_.channels();
@@ -616,46 +663,117 @@ bool AudioDeviceIOS::CreateAudioUnit() {
return true;
}
-bool AudioDeviceIOS::RestartAudioUnit(float sample_rate) {
- RTCLog(@"Restarting audio unit with new sample rate: %f", sample_rate);
+void AudioDeviceIOS::UpdateAudioUnit(bool can_play_or_record) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTCLog(@"Updating audio unit state. CanPlayOrRecord=%d IsInterrupted=%d",
+ can_play_or_record, is_interrupted_);
- // Stop the active audio unit.
- if (!audio_unit_->Stop()) {
- RTCLogError(@"Failed to stop the audio unit.");
- return false;
+ if (is_interrupted_) {
+ RTCLog(@"Ignoring audio unit update due to interruption.");
+ return;
}
- // The stream format is about to be changed and it requires that we first
- // uninitialize it to deallocate its resources.
- if (!audio_unit_->Uninitialize()) {
- RTCLogError(@"Failed to uninitialize the audio unit.");
- return false;
+ // If we're not initialized we don't need to do anything. Audio unit will
+ // be initialized on initialization.
+ if (!rec_is_initialized_ && !play_is_initialized_)
+ return;
+
+ // If we're initialized, we must have an audio unit.
+ RTC_DCHECK(audio_unit_);
+
+ bool should_initialize_audio_unit = false;
+ bool should_uninitialize_audio_unit = false;
+ bool should_start_audio_unit = false;
+ bool should_stop_audio_unit = false;
+
+ switch (audio_unit_->GetState()) {
+ case VoiceProcessingAudioUnit::kInitRequired:
+ RTC_NOTREACHED();
+ break;
+ case VoiceProcessingAudioUnit::kUninitialized:
+ should_initialize_audio_unit = can_play_or_record;
+ should_start_audio_unit = should_initialize_audio_unit &&
+ (playing_ || recording_);
+ break;
+ case VoiceProcessingAudioUnit::kInitialized:
+ should_start_audio_unit =
+ can_play_or_record && (playing_ || recording_);
+ should_uninitialize_audio_unit = !can_play_or_record;
+ break;
+ case VoiceProcessingAudioUnit::kStarted:
+ RTC_DCHECK(playing_ || recording_);
+ should_stop_audio_unit = !can_play_or_record;
+ should_uninitialize_audio_unit = should_stop_audio_unit;
+ break;
+ }
+
+ if (should_initialize_audio_unit) {
+ RTCLog(@"Initializing audio unit for UpdateAudioUnit");
+ ConfigureAudioSession();
+ SetupAudioBuffersForActiveAudioSession();
+ if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) {
+ RTCLogError(@"Failed to initialize audio unit.");
+ return;
+ }
}
- // Allocate new buffers given the new stream format.
- SetupAudioBuffersForActiveAudioSession();
+ if (should_start_audio_unit) {
+ RTCLog(@"Starting audio unit for UpdateAudioUnit");
+ if (!audio_unit_->Start()) {
+ RTCLogError(@"Failed to start audio unit.");
+ return;
+ }
+ }
- // Initialize the audio unit again with the new sample rate.
- RTC_DCHECK_EQ(playout_parameters_.sample_rate(), sample_rate);
- if (!audio_unit_->Initialize(sample_rate)) {
- RTCLogError(@"Failed to initialize the audio unit with sample rate: %f",
- sample_rate);
- return false;
+ if (should_stop_audio_unit) {
+ RTCLog(@"Stopping audio unit for UpdateAudioUnit");
+ if (!audio_unit_->Stop()) {
+ RTCLogError(@"Failed to stop audio unit.");
+ return;
+ }
}
- // Restart the audio unit.
- if (!audio_unit_->Start()) {
- RTCLogError(@"Failed to start audio unit.");
- return false;
+ if (should_uninitialize_audio_unit) {
+ RTCLog(@"Uninitializing audio unit for UpdateAudioUnit");
+ audio_unit_->Uninitialize();
+ UnconfigureAudioSession();
}
- RTCLog(@"Successfully restarted audio unit.");
+}
- return true;
+void AudioDeviceIOS::ConfigureAudioSession() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTCLog(@"Configuring audio session.");
+ if (has_configured_session_) {
+ RTCLogWarning(@"Audio session already configured.");
+ return;
+ }
+ RTCAudioSession* session = [RTCAudioSession sharedInstance];
+ [session lockForConfiguration];
+ [session configureWebRTCSession:nil];
+ [session unlockForConfiguration];
+ has_configured_session_ = true;
+ RTCLog(@"Configured audio session.");
+}
+
+void AudioDeviceIOS::UnconfigureAudioSession() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTCLog(@"Unconfiguring audio session.");
+ if (!has_configured_session_) {
+ RTCLogWarning(@"Audio session already unconfigured.");
+ return;
+ }
+ RTCAudioSession* session = [RTCAudioSession sharedInstance];
+ [session lockForConfiguration];
+ [session unconfigureWebRTCSession:nil];
+ [session unlockForConfiguration];
+ has_configured_session_ = false;
+ RTCLog(@"Unconfigured audio session.");
}
bool AudioDeviceIOS::InitPlayOrRecord() {
LOGI() << "InitPlayOrRecord";
+ // There should be no audio unit at this point.
if (!CreateAudioUnit()) {
return false;
}
@@ -674,14 +792,11 @@ bool AudioDeviceIOS::InitPlayOrRecord() {
return false;
}
- // If we are already configured properly, we can initialize the audio unit.
- if (session.isConfiguredForWebRTC) {
- [session unlockForConfiguration];
+ // If we are ready to play or record, initialize the audio unit.
+ if (session.canPlayOrRecord) {
+ ConfigureAudioSession();
SetupAudioBuffersForActiveAudioSession();
- // Audio session has been marked ready for WebRTC so we can initialize the
- // audio unit now.
audio_unit_->Initialize(playout_parameters_.sample_rate());
- return true;
}
// Release the lock.
@@ -694,9 +809,7 @@ void AudioDeviceIOS::ShutdownPlayOrRecord() {
LOGI() << "ShutdownPlayOrRecord";
// Close and delete the voice-processing I/O unit.
- if (audio_unit_) {
- audio_unit_.reset();
- }
+ audio_unit_.reset();
// Remove audio session notification observers.
RTCAudioSession* session = [RTCAudioSession sharedInstance];
@@ -705,6 +818,7 @@ void AudioDeviceIOS::ShutdownPlayOrRecord() {
// All I/O should be stopped or paused prior to deactivating the audio
// session, hence we deactivate as last action.
[session lockForConfiguration];
+ UnconfigureAudioSession();
[session endWebRTCSession:nil];
[session unlockForConfiguration];
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc
index 4dfb073fa9f..ec10119a113 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_unittest_ios.cc
@@ -537,7 +537,7 @@ class AudioDeviceTest : public ::testing::Test {
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDevice(
AudioDeviceModule::AudioLayer audio_layer) {
rtc::scoped_refptr<AudioDeviceModule> module(
- AudioDeviceModuleImpl::Create(0, audio_layer));
+ AudioDeviceModule::Create(0, audio_layer));
return module;
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/audio_session_observer.h b/chromium/third_party/webrtc/modules/audio_device/ios/audio_session_observer.h
index 6c4a9cd9e27..def8c2322b0 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/audio_session_observer.h
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/audio_session_observer.h
@@ -28,8 +28,8 @@ class AudioSessionObserver {
// Called when audio route changes.
virtual void OnValidRouteChange() = 0;
- // Called when audio session has been configured for WebRTC.
- virtual void OnConfiguredForWebRTC() = 0;
+ // Called when the ability to play or record changes.
+ virtual void OnCanPlayOrRecordChange(bool can_play_or_record) = 0;
protected:
virtual ~AudioSessionObserver() {}
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Configuration.mm b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Configuration.mm
index 06ddddd9bce..5a7600a5d39 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Configuration.mm
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Configuration.mm
@@ -10,18 +10,34 @@
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h"
@implementation RTCAudioSession (Configuration)
-- (BOOL)isConfiguredForWebRTC {
- return self.savedConfiguration != nil;
+- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
+ error:(NSError **)outError {
+ return [self setConfiguration:configuration
+ active:NO
+ shouldSetActive:NO
+ error:outError];
+}
+
+- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
+ active:(BOOL)active
+ error:(NSError **)outError {
+ return [self setConfiguration:configuration
+ active:active
+ shouldSetActive:YES
+ error:outError];
}
+#pragma mark - Private
+
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
active:(BOOL)active
+ shouldSetActive:(BOOL)shouldSetActive
error:(NSError **)outError {
NSParameterAssert(configuration);
if (outError) {
@@ -61,8 +77,22 @@
}
}
- // self.sampleRate is accurate only if the audio session is active.
- if (!self.isActive || self.sampleRate != configuration.sampleRate) {
+ // Sometimes category options don't stick after setting mode.
+ if (self.categoryOptions != configuration.categoryOptions) {
+ NSError *categoryError = nil;
+ if (![self setCategory:configuration.category
+ withOptions:configuration.categoryOptions
+ error:&categoryError]) {
+ RTCLogError(@"Failed to set category options: %@",
+ categoryError.localizedDescription);
+ error = categoryError;
+ } else {
+ RTCLog(@"Set category options to: %ld",
+ (long)configuration.categoryOptions);
+ }
+ }
+
+ if (self.preferredSampleRate != configuration.sampleRate) {
NSError *sampleRateError = nil;
if (![self setPreferredSampleRate:configuration.sampleRate
error:&sampleRateError]) {
@@ -75,9 +105,7 @@
}
}
- // self.IOBufferDuration is accurate only if the audio session is active.
- if (!self.isActive ||
- self.IOBufferDuration != configuration.ioBufferDuration) {
+ if (self.preferredIOBufferDuration != configuration.ioBufferDuration) {
NSError *bufferDurationError = nil;
if (![self setPreferredIOBufferDuration:configuration.ioBufferDuration
error:&bufferDurationError]) {
@@ -90,11 +118,13 @@
}
}
- NSError *activeError = nil;
- if (![self setActive:active error:&activeError]) {
- RTCLogError(@"Failed to setActive to %d: %@",
- active, activeError.localizedDescription);
- error = activeError;
+ if (shouldSetActive) {
+ NSError *activeError = nil;
+ if (![self setActive:active error:&activeError]) {
+ RTCLogError(@"Failed to setActive to %d: %@",
+ active, activeError.localizedDescription);
+ error = activeError;
+ }
}
if (self.isActive &&
@@ -138,84 +168,4 @@
return error == nil;
}
-- (BOOL)configureWebRTCSession:(NSError **)outError {
- if (outError) {
- *outError = nil;
- }
- if (![self checkLock:outError]) {
- return NO;
- }
- RTCLog(@"Configuring audio session for WebRTC.");
-
- if (self.isConfiguredForWebRTC) {
- RTCLogError(@"Already configured.");
- if (outError) {
- *outError =
- [self configurationErrorWithDescription:@"Already configured."];
- }
- return NO;
- }
-
- // Configure the AVAudioSession and activate it.
- // Provide an error even if there isn't one so we can log it.
- NSError *error = nil;
- RTCAudioSessionConfiguration *currentConfig =
- [RTCAudioSessionConfiguration currentConfiguration];
- RTCAudioSessionConfiguration *webRTCConfig =
- [RTCAudioSessionConfiguration webRTCConfiguration];
- self.savedConfiguration = currentConfig;
- if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
- RTCLogError(@"Failed to set WebRTC audio configuration: %@",
- error.localizedDescription);
- [self unconfigureWebRTCSession:nil];
- if (outError) {
- *outError = error;
- }
- return NO;
- }
-
- // Ensure that the device currently supports audio input.
- // TODO(tkchin): Figure out if this is really necessary.
- if (!self.inputAvailable) {
- RTCLogError(@"No audio input path is available!");
- [self unconfigureWebRTCSession:nil];
- if (outError) {
- *outError = [self configurationErrorWithDescription:@"No input path."];
- }
- return NO;
- }
-
- // Give delegates a chance to process the event. In particular, the audio
- // devices listening to this event will initialize their audio units.
- [self notifyDidConfigure];
-
- return YES;
-}
-
-- (BOOL)unconfigureWebRTCSession:(NSError **)outError {
- if (outError) {
- *outError = nil;
- }
- if (![self checkLock:outError]) {
- return NO;
- }
- RTCLog(@"Unconfiguring audio session for WebRTC.");
-
- if (!self.isConfiguredForWebRTC) {
- RTCLogError(@"Already unconfigured.");
- if (outError) {
- *outError =
- [self configurationErrorWithDescription:@"Already unconfigured."];
- }
- return NO;
- }
-
- [self setConfiguration:self.savedConfiguration active:NO error:outError];
- self.savedConfiguration = nil;
-
- [self notifyDidUnconfigure];
-
- return YES;
-}
-
@end
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h
index c6738e7a033..cb506c345a9 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h
@@ -28,11 +28,8 @@ NS_ASSUME_NONNULL_BEGIN
*/
@property(nonatomic, readonly) int webRTCSessionCount;
-/** The configuration of the audio session before configureWebRTCSession
- * was first called.
- */
-@property(nonatomic, strong, nullable)
- RTCAudioSessionConfiguration *savedConfiguration;
+/** Convenience BOOL that checks useManualAudio and isAudioEnebled. */
+@property(readonly) BOOL canPlayOrRecord;
- (BOOL)checkLock:(NSError **)outError;
@@ -55,6 +52,22 @@ NS_ASSUME_NONNULL_BEGIN
*/
- (BOOL)endWebRTCSession:(NSError **)outError;
+/** Configure the audio session for WebRTC. This call will fail if the session
+ * is already configured. On other failures, we will attempt to restore the
+ * previously used audio session configuration.
+ * |lockForConfiguration| must be called first.
+ * Successful calls to configureWebRTCSession must be matched by calls to
+ * |unconfigureWebRTCSession|.
+ */
+- (BOOL)configureWebRTCSession:(NSError **)outError;
+
+/** Unconfigures the session for WebRTC. This will attempt to restore the
+ * audio session to the settings used before |configureWebRTCSession| was
+ * called.
+ * |lockForConfiguration| must be called first.
+ */
+- (BOOL)unconfigureWebRTCSession:(NSError **)outError;
+
/** Returns a configuration error with the given description. */
- (NSError *)configurationErrorWithDescription:(NSString *)description;
@@ -69,10 +82,9 @@ NS_ASSUME_NONNULL_BEGIN
previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
- (void)notifyMediaServicesWereLost;
- (void)notifyMediaServicesWereReset;
-- (void)notifyShouldConfigure;
-- (void)notifyShouldUnconfigure;
-- (void)notifyDidConfigure;
-- (void)notifyDidUnconfigure;
+- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
+- (void)notifyDidStartPlayOrRecord;
+- (void)notifyDidStopPlayOrRecord;
@end
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.h b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.h
index ab8dbc8859e..274cc2bc978 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.h
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.h
@@ -58,29 +58,18 @@ extern NSInteger const kRTCAudioSessionErrorConfiguration;
// TODO(tkchin): Maybe handle SilenceSecondaryAudioHintNotification.
-/** Called on a WebRTC thread when WebRTC needs to take over audio. Applications
- * should call -[RTCAudioSession configureWebRTCSession] to allow WebRTC to
- * play and record audio. Will only occur if shouldDelayAudioConfiguration is
- * set to YES.
- */
-- (void)audioSessionShouldConfigure:(RTCAudioSession *)session;
-
-/** Called on a WebRTC thread when WebRTC no longer requires audio. Applications
- * should call -[RTCAudioSession unconfigureWebRTCSession] to restore their
- * audio session settings. Will only occur if shouldDelayAudioConfiguration is
- * set to YES.
- */
-- (void)audioSessionShouldUnconfigure:(RTCAudioSession *)session;
+- (void)audioSession:(RTCAudioSession *)session
+ didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
-/** Called on a WebRTC thread when WebRTC has configured the audio session for
- * WebRTC audio.
+/** Called on a WebRTC thread when the audio device is notified to begin
+ * playback or recording.
*/
-- (void)audioSessionDidConfigure:(RTCAudioSession *)session;
+- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session;
-/** Called on a WebRTC thread when WebRTC has unconfigured the audio session for
- * WebRTC audio.
+/** Called on a WebRTC thread when the audio device is notified to stop
+ * playback or recording.
*/
-- (void)audioSessionDidUnconfigure:(RTCAudioSession *)session;
+- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session;
@end
@@ -108,11 +97,24 @@ extern NSInteger const kRTCAudioSessionErrorConfiguration;
/** If YES, WebRTC will not initialize the audio unit automatically when an
* audio track is ready for playout or recording. Instead, applications should
- * listen to the delegate method |audioSessionShouldConfigure| and configure
- * the session manually. This should be set before making WebRTC media calls
- * and should not be changed while a call is active.
+ * call setIsAudioEnabled. If NO, WebRTC will initialize the audio unit
+ * as soon as an audio track is ready for playout or recording.
*/
-@property(nonatomic, assign) BOOL shouldDelayAudioConfiguration;
+@property(nonatomic, assign) BOOL useManualAudio;
+
+/** This property is only effective if useManualAudio is YES.
+ * Represents permission for WebRTC to initialize the VoIP audio unit.
+ * When set to NO, if the VoIP audio unit used by WebRTC is active, it will be
+ * stopped and uninitialized. This will stop incoming and outgoing audio.
+ * When set to YES, WebRTC will initialize and start the audio unit when it is
+ * needed (e.g. due to establishing an audio connection).
+ * This property was introduced to work around an issue where if an AVPlayer is
+ * playing audio while the VoIP audio unit is initialized, its audio would be
+ * either cut off completely or played at a reduced volume. By preventing
+ * the audio unit from being initialized until after the audio has completed,
+ * we are able to prevent the abrupt cutoff.
+ */
+@property(nonatomic, assign) BOOL isAudioEnabled;
// Proxy properties.
@property(readonly) NSString *category;
@@ -134,12 +136,14 @@ extern NSInteger const kRTCAudioSessionErrorConfiguration;
@property(readonly, nullable)
AVAudioSessionDataSourceDescription *outputDataSource;
@property(readonly) double sampleRate;
+@property(readonly) double preferredSampleRate;
@property(readonly) NSInteger inputNumberOfChannels;
@property(readonly) NSInteger outputNumberOfChannels;
@property(readonly) float outputVolume;
@property(readonly) NSTimeInterval inputLatency;
@property(readonly) NSTimeInterval outputLatency;
@property(readonly) NSTimeInterval IOBufferDuration;
+@property(readonly) NSTimeInterval preferredIOBufferDuration;
/** Default constructor. */
+ (instancetype)sharedInstance;
@@ -196,36 +200,20 @@ extern NSInteger const kRTCAudioSessionErrorConfiguration;
@interface RTCAudioSession (Configuration)
-/** Whether or not |configureWebRTCSession| has been called without a balanced
- * call to |unconfigureWebRTCSession|. This is not an indication of whether the
- * audio session has the right settings.
- */
-@property(readonly) BOOL isConfiguredForWebRTC;
-
/** Applies the configuration to the current session. Attempts to set all
* properties even if previous ones fail. Only the last error will be
- * returned. Also calls setActive with |active|.
+ * returned.
* |lockForConfiguration| must be called first.
*/
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
- active:(BOOL)active
error:(NSError **)outError;
-/** Configure the audio session for WebRTC. This call will fail if the session
- * is already configured. On other failures, we will attempt to restore the
- * previously used audio session configuration.
- * |lockForConfiguration| must be called first.
- * Successful calls to configureWebRTCSession must be matched by calls to
- * |unconfigureWebRTCSession|.
- */
-- (BOOL)configureWebRTCSession:(NSError **)outError;
-
-/** Unconfigures the session for WebRTC. This will attempt to restore the
- * audio session to the settings used before |configureWebRTCSession| was
- * called.
+/** Convenience method that calls both setConfiguration and setActive.
* |lockForConfiguration| must be called first.
*/
-- (BOOL)unconfigureWebRTCSession:(NSError **)outError;
+- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
+ active:(BOOL)active
+ error:(NSError **)outError;
@end
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm
index c6e3677b846..7ef5110f476 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSession.mm
@@ -15,8 +15,9 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/audio_device/ios/audio_device_ios.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h"
+#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h"
NSString * const kRTCAudioSessionErrorDomain = @"org.webrtc.RTCAudioSession";
NSInteger const kRTCAudioSessionErrorLockRequired = -1;
@@ -32,12 +33,13 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
volatile int _lockRecursionCount;
volatile int _webRTCSessionCount;
BOOL _isActive;
- BOOL _shouldDelayAudioConfiguration;
+ BOOL _useManualAudio;
+ BOOL _isAudioEnabled;
+ BOOL _canPlayOrRecord;
}
@synthesize session = _session;
@synthesize delegates = _delegates;
-@synthesize savedConfiguration = _savedConfiguration;
+ (instancetype)sharedInstance {
static dispatch_once_t onceToken;
@@ -81,6 +83,9 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
- (NSString *)description {
NSString *format =
@"RTCAudioSession: {\n"
+ " category: %@\n"
+ " categoryOptions: %ld\n"
+ " mode: %@\n"
" isActive: %d\n"
" sampleRate: %.2f\n"
" IOBufferDuration: %f\n"
@@ -90,6 +95,7 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
" inputLatency: %f\n"
"}";
NSString *description = [NSString stringWithFormat:format,
+ self.category, (long)self.categoryOptions, self.mode,
self.isActive, self.sampleRate, self.IOBufferDuration,
self.outputNumberOfChannels, self.inputNumberOfChannels,
self.outputLatency, self.inputLatency];
@@ -112,20 +118,35 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
return _lockRecursionCount > 0;
}
-- (void)setShouldDelayAudioConfiguration:(BOOL)shouldDelayAudioConfiguration {
+- (void)setUseManualAudio:(BOOL)useManualAudio {
@synchronized(self) {
- // No one should be changing this while an audio device is active.
- RTC_DCHECK(!self.isConfiguredForWebRTC);
- if (_shouldDelayAudioConfiguration == shouldDelayAudioConfiguration) {
+ if (_useManualAudio == useManualAudio) {
return;
}
- _shouldDelayAudioConfiguration = shouldDelayAudioConfiguration;
+ _useManualAudio = useManualAudio;
}
+ [self updateCanPlayOrRecord];
}
-- (BOOL)shouldDelayAudioConfiguration {
+- (BOOL)useManualAudio {
@synchronized(self) {
- return _shouldDelayAudioConfiguration;
+ return _useManualAudio;
+ }
+}
+
+- (void)setIsAudioEnabled:(BOOL)isAudioEnabled {
+ @synchronized(self) {
+ if (_isAudioEnabled == isAudioEnabled) {
+ return;
+ }
+ _isAudioEnabled = isAudioEnabled;
+ }
+ [self updateCanPlayOrRecord];
+}
+
+- (BOOL)isAudioEnabled {
+ @synchronized(self) {
+ return _isAudioEnabled;
}
}
@@ -147,7 +168,8 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
@synchronized(self) {
_delegates.erase(std::remove(_delegates.begin(),
_delegates.end(),
- delegate));
+ delegate),
+ _delegates.end());
[self removeZeroedDelegates];
}
}
@@ -231,6 +253,10 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
return self.session.sampleRate;
}
+- (double)preferredSampleRate {
+ return self.session.preferredSampleRate;
+}
+
- (NSInteger)inputNumberOfChannels {
return self.session.inputNumberOfChannels;
}
@@ -255,6 +281,10 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
return self.session.IOBufferDuration;
}
+- (NSTimeInterval)preferredIOBufferDuration {
+ return self.session.preferredIOBufferDuration;
+}
+
// TODO(tkchin): Simplify the amount of locking happening here. Likely that we
// can just do atomic increments / decrements.
- (BOOL)setActive:(BOOL)active
@@ -496,21 +526,6 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
}
}
-- (void)setSavedConfiguration:(RTCAudioSessionConfiguration *)configuration {
- @synchronized(self) {
- if (_savedConfiguration == configuration) {
- return;
- }
- _savedConfiguration = configuration;
- }
-}
-
-- (RTCAudioSessionConfiguration *)savedConfiguration {
- @synchronized(self) {
- return _savedConfiguration;
- }
-}
-
// TODO(tkchin): check for duplicates.
- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate {
@synchronized(self) {
@@ -520,11 +535,11 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
- (void)removeZeroedDelegates {
@synchronized(self) {
- for (auto it = _delegates.begin(); it != _delegates.end(); ++it) {
- if (!*it) {
- _delegates.erase(it);
- }
- }
+ _delegates.erase(
+ std::remove_if(_delegates.begin(),
+ _delegates.end(),
+ [](id delegate) -> bool { return delegate == nil; }),
+ _delegates.end());
}
}
@@ -546,6 +561,10 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
return _webRTCSessionCount;
}
+- (BOOL)canPlayOrRecord {
+ return !self.useManualAudio || self.isAudioEnabled;
+}
+
- (BOOL)checkLock:(NSError **)outError {
// Check ivar instead of trying to acquire lock so that we won't accidentally
// acquire lock if it hasn't already been called.
@@ -565,79 +584,70 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
if (![self checkLock:outError]) {
return NO;
}
- NSInteger sessionCount = rtc::AtomicOps::Increment(&_webRTCSessionCount);
- if (sessionCount > 1) {
- // Should already be configured.
- RTC_DCHECK(self.isConfiguredForWebRTC);
- return YES;
- }
+ rtc::AtomicOps::Increment(&_webRTCSessionCount);
+ [self notifyDidStartPlayOrRecord];
+ return YES;
+}
- // Only perform configuration steps once. Application might have already
- // configured the session.
- if (self.isConfiguredForWebRTC) {
- // Nothing more to do, already configured.
- return YES;
+- (BOOL)endWebRTCSession:(NSError **)outError {
+ if (outError) {
+ *outError = nil;
}
+ if (![self checkLock:outError]) {
+ return NO;
+ }
+ rtc::AtomicOps::Decrement(&_webRTCSessionCount);
+ [self notifyDidStopPlayOrRecord];
+ return YES;
+}
- // If application has prevented automatic configuration, return here and wait
- // for application to call configureWebRTCSession.
- if (self.shouldDelayAudioConfiguration) {
- [self notifyShouldConfigure];
- return YES;
+- (BOOL)configureWebRTCSession:(NSError **)outError {
+ if (outError) {
+ *outError = nil;
+ }
+ if (![self checkLock:outError]) {
+ return NO;
}
+ RTCLog(@"Configuring audio session for WebRTC.");
- // Configure audio session.
+ // Configure the AVAudioSession and activate it.
+ // Provide an error even if there isn't one so we can log it.
NSError *error = nil;
- if (![self configureWebRTCSession:&error]) {
- RTCLogError(@"Error configuring audio session: %@",
+ RTCAudioSessionConfiguration *webRTCConfig =
+ [RTCAudioSessionConfiguration webRTCConfiguration];
+ if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
+ RTCLogError(@"Failed to set WebRTC audio configuration: %@",
error.localizedDescription);
+ [self unconfigureWebRTCSession:nil];
if (outError) {
*outError = error;
}
return NO;
}
+ // Ensure that the device currently supports audio input.
+ // TODO(tkchin): Figure out if this is really necessary.
+ if (!self.inputAvailable) {
+ RTCLogError(@"No audio input path is available!");
+ [self unconfigureWebRTCSession:nil];
+ if (outError) {
+ *outError = [self configurationErrorWithDescription:@"No input path."];
+ }
+ return NO;
+ }
+
return YES;
}
-- (BOOL)endWebRTCSession:(NSError **)outError {
+- (BOOL)unconfigureWebRTCSession:(NSError **)outError {
if (outError) {
*outError = nil;
}
if (![self checkLock:outError]) {
return NO;
}
- int sessionCount = rtc::AtomicOps::Decrement(&_webRTCSessionCount);
- RTC_DCHECK_GE(sessionCount, 0);
- if (sessionCount != 0) {
- // Should still be configured.
- RTC_DCHECK(self.isConfiguredForWebRTC);
- return YES;
- }
-
- // Only unconfigure if application has not done it.
- if (!self.isConfiguredForWebRTC) {
- // Nothing more to do, already unconfigured.
- return YES;
- }
-
- // If application has prevented automatic configuration, return here and wait
- // for application to call unconfigureWebRTCSession.
- if (self.shouldDelayAudioConfiguration) {
- [self notifyShouldUnconfigure];
- return YES;
- }
-
- // Unconfigure audio session.
- NSError *error = nil;
- if (![self unconfigureWebRTCSession:&error]) {
- RTCLogError(@"Error unconfiguring audio session: %@",
- error.localizedDescription);
- if (outError) {
- *outError = error;
- }
- return NO;
- }
+ RTCLog(@"Unconfiguring audio session for WebRTC.");
+ [self setActive:NO error:outError];
return YES;
}
@@ -666,6 +676,22 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
}
}
+- (void)updateCanPlayOrRecord {
+ BOOL canPlayOrRecord = NO;
+ BOOL shouldNotify = NO;
+ @synchronized(self) {
+ canPlayOrRecord = !self.useManualAudio || self.isAudioEnabled;
+ if (_canPlayOrRecord == canPlayOrRecord) {
+ return;
+ }
+ _canPlayOrRecord = canPlayOrRecord;
+ shouldNotify = YES;
+ }
+ if (shouldNotify) {
+ [self notifyDidChangeCanPlayOrRecord:canPlayOrRecord];
+ }
+}
+
- (void)notifyDidBeginInterruption {
for (auto delegate : self.delegates) {
SEL sel = @selector(audioSessionDidBeginInterruption:);
@@ -716,38 +742,29 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
}
}
-- (void)notifyShouldConfigure {
- for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionShouldConfigure:);
- if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionShouldConfigure:self];
- }
- }
-}
-
-- (void)notifyShouldUnconfigure {
+- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionShouldUnconfigure:);
+ SEL sel = @selector(audioSession:didChangeCanPlayOrRecord:);
if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionShouldUnconfigure:self];
+ [delegate audioSession:self didChangeCanPlayOrRecord:canPlayOrRecord];
}
}
}
-- (void)notifyDidConfigure {
+- (void)notifyDidStartPlayOrRecord {
for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionDidConfigure:);
+ SEL sel = @selector(audioSessionDidStartPlayOrRecord:);
if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionDidConfigure:self];
+ [delegate audioSessionDidStartPlayOrRecord:self];
}
}
}
-- (void)notifyDidUnconfigure {
+- (void)notifyDidStopPlayOrRecord {
for (auto delegate : self.delegates) {
- SEL sel = @selector(audioSessionDidUnconfigure:);
+ SEL sel = @selector(audioSessionDidStopPlayOrRecord:);
if ([delegate respondsToSelector:sel]) {
- [delegate audioSessionDidUnconfigure:self];
+ [delegate audioSessionDidStopPlayOrRecord:self];
}
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h
index 4273d4d8634..7832a82d4f3 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h
@@ -37,6 +37,8 @@ extern const double kRTCAudioSessionLowComplexityIOBufferDuration;
+ (instancetype)currentConfiguration;
/** Returns the configuration that WebRTC needs. */
+ (instancetype)webRTCConfiguration;
+/** Provide a way to override the default configuration. */
++ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration;
@end
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.m b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.m
index 086725172bd..d2d04835499 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.m
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.m
@@ -10,6 +10,8 @@
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h"
+#import "WebRTC/RTCDispatcher.h"
+
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h"
// Try to use mono to save resources. Also avoids channel format conversion
@@ -49,6 +51,8 @@ const double kRTCAudioSessionHighPerformanceIOBufferDuration = 0.01;
// TODO(henrika): monitor this size and determine if it should be modified.
const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06;
+static RTCAudioSessionConfiguration *gWebRTCConfiguration = nil;
+
@implementation RTCAudioSessionConfiguration
@synthesize category = _category;
@@ -96,6 +100,10 @@ const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06;
return self;
}
++ (void)initialize {
+ gWebRTCConfiguration = [[self alloc] init];
+}
+
+ (instancetype)currentConfiguration {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTCAudioSessionConfiguration *config =
@@ -111,7 +119,15 @@ const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06;
}
+ (instancetype)webRTCConfiguration {
- return [[self alloc] init];
+ @synchronized(self) {
+ return (RTCAudioSessionConfiguration *)gWebRTCConfiguration;
+ }
+}
+
++ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration {
+ @synchronized(self) {
+ gWebRTCConfiguration = configuration;
+ }
}
@end
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.mm b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.mm
index 21e8c3e84b1..b554e51ece0 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.mm
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.mm
@@ -12,7 +12,7 @@
#include "webrtc/modules/audio_device/ios/audio_session_observer.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
@implementation RTCAudioSessionDelegateAdapter {
webrtc::AudioSessionObserver *_observer;
@@ -70,14 +70,15 @@
- (void)audioSessionMediaServicesWereReset:(RTCAudioSession *)session {
}
-- (void)audioSessionShouldConfigure:(RTCAudioSession *)session {
+- (void)audioSession:(RTCAudioSession *)session
+ didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
+ _observer->OnCanPlayOrRecordChange(canPlayOrRecord);
}
-- (void)audioSessionShouldUnconfigure:(RTCAudioSession *)session {
+- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session {
}
-- (void)audioSessionDidConfigure:(RTCAudioSession *)session {
- _observer->OnConfiguredForWebRTC();
+- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session {
}
@end
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionTest.mm b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionTest.mm
index 603e450c758..7cbd2a982a9 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionTest.mm
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/objc/RTCAudioSessionTest.mm
@@ -46,6 +46,28 @@
@end
+// A delegate that adds itself to the audio session on init and removes itself
+// in its dealloc.
+@interface RTCTestRemoveOnDeallocDelegate : RTCAudioSessionTestDelegate
+@end
+
+@implementation RTCTestRemoveOnDeallocDelegate
+
+- (instancetype)init {
+ if (self = [super init]) {
+ RTCAudioSession *session = [RTCAudioSession sharedInstance];
+ [session addDelegate:self];
+ }
+ return self;
+}
+
+- (void)dealloc {
+ RTCAudioSession *session = [RTCAudioSession sharedInstance];
+ [session removeDelegate:self];
+}
+
+@end
+
@interface RTCAudioSessionTest : NSObject
@@ -142,6 +164,18 @@
EXPECT_TRUE(session.delegates[0]);
}
+// Tests that we don't crash when removing delegates in dealloc.
+// Added as a regression test.
+- (void)testRemoveDelegateOnDealloc {
+ @autoreleasepool {
+ RTCTestRemoveOnDeallocDelegate *delegate =
+ [[RTCTestRemoveOnDeallocDelegate alloc] init];
+ EXPECT_TRUE(delegate);
+ }
+ RTCAudioSession *session = [RTCAudioSession sharedInstance];
+ EXPECT_EQ(0u, session.delegates.size());
+}
+
@end
namespace webrtc {
@@ -176,4 +210,9 @@ TEST_F(AudioSessionTest, ZeroingWeakDelegate) {
[test testZeroingWeakDelegate];
}
+TEST_F(AudioSessionTest, RemoveDelegateOnDealloc) {
+ RTCAudioSessionTest *test = [[RTCAudioSessionTest alloc] init];
+ [test testRemoveDelegateOnDealloc];
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/voice_processing_audio_unit.mm b/chromium/third_party/webrtc/modules/audio_device/ios/voice_processing_audio_unit.mm
index db756a4972c..db7f42edcb9 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/voice_processing_audio_unit.mm
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/voice_processing_audio_unit.mm
@@ -12,7 +12,7 @@
#include "webrtc/base/checks.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h"
#if !defined(NDEBUG)
@@ -175,7 +175,7 @@ VoiceProcessingAudioUnit::State VoiceProcessingAudioUnit::GetState() const {
bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) {
RTC_DCHECK_GE(state_, kUninitialized);
- RTCLog(@"Initializing audio unit.");
+ RTCLog(@"Initializing audio unit with sample rate: %f", sample_rate);
OSStatus result = noErr;
AudioStreamBasicDescription format = GetFormat(sample_rate);
@@ -228,7 +228,9 @@ bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) {
[NSThread sleepForTimeInterval:0.1f];
result = AudioUnitInitialize(vpio_unit_);
}
- RTCLog(@"Voice Processing I/O unit is now initialized.");
+ if (result == noErr) {
+ RTCLog(@"Voice Processing I/O unit is now initialized.");
+ }
state_ = kInitialized;
return true;
}
@@ -241,6 +243,8 @@ bool VoiceProcessingAudioUnit::Start() {
if (result != noErr) {
RTCLogError(@"Failed to start audio unit. Error=%ld", (long)result);
return false;
+ } else {
+ RTCLog(@"Started audio unit");
}
state_ = kStarted;
return true;
@@ -254,7 +258,10 @@ bool VoiceProcessingAudioUnit::Stop() {
if (result != noErr) {
RTCLogError(@"Failed to stop audio unit. Error=%ld", (long)result);
return false;
+ } else {
+ RTCLog(@"Stopped audio unit");
}
+
state_ = kInitialized;
return true;
}
@@ -267,7 +274,11 @@ bool VoiceProcessingAudioUnit::Uninitialize() {
if (result != noErr) {
RTCLogError(@"Failed to uninitialize audio unit. Error=%ld", (long)result);
return false;
+ } else {
+ RTCLog(@"Uninitialized audio unit.");
}
+
+ state_ = kUninitialized;
return true;
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/test/audio_device_test_api.cc b/chromium/third_party/webrtc/modules/audio_device/test/audio_device_test_api.cc
index f37d89cd9ca..dad42a0c0b3 100644
--- a/chromium/third_party/webrtc/modules/audio_device/test/audio_device_test_api.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/test/audio_device_test_api.cc
@@ -142,8 +142,7 @@ class AudioDeviceAPITest: public testing::Test {
virtual ~AudioDeviceAPITest() {}
static void SetUpTestCase() {
- process_thread_ =
- rtc::ScopedToUnique(ProcessThread::Create("ProcessThread"));
+ process_thread_ = ProcessThread::Create("ProcessThread");
process_thread_->Start();
// Windows:
@@ -154,75 +153,75 @@ class AudioDeviceAPITest: public testing::Test {
const int32_t kId = 444;
#if defined(_WIN32)
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kLinuxAlsaAudio)) == NULL);
#if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
TEST_LOG("WEBRTC_WINDOWS_CORE_AUDIO_BUILD is defined!\n\n");
// create default implementation (=Core Audio) instance
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
EXPECT_EQ(0, audio_device_.release()->Release());
// create non-default (=Wave Audio) instance
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsWaveAudio)) != NULL);
EXPECT_EQ(0, audio_device_.release()->Release());
// explicitly specify usage of Core Audio (same as default)
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsCoreAudio)) != NULL);
#else
TEST_LOG("WEBRTC_WINDOWS_CORE_AUDIO_BUILD is *not* defined!\n");
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
// create default implementation (=Wave Audio) instance
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
EXPECT_EQ(0, audio_device_.release()->Release());
// explicitly specify usage of Wave Audio (same as default)
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsWaveAudio)) != NULL);
#endif
#endif
#if defined(ANDROID)
// Fails tests
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsWaveAudio)) == NULL);
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kLinuxAlsaAudio)) == NULL);
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kLinuxPulseAudio)) == NULL);
// Create default implementation instance
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
#elif defined(WEBRTC_LINUX)
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsWaveAudio)) == NULL);
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
// create default implementation instance
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
EXPECT_EQ(0, audio_device_->Terminate());
EXPECT_EQ(0, audio_device_.release()->Release());
// explicitly specify usage of Pulse Audio (same as default)
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kLinuxPulseAudio)) != NULL);
#endif
#if defined(WEBRTC_MAC)
// Fails tests
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsWaveAudio)) == NULL);
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kLinuxAlsaAudio)) == NULL);
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kLinuxPulseAudio)) == NULL);
// Create default implementation instance
- EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((audio_device_ = AudioDeviceModule::Create(
kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_device/test/func_test_manager.cc b/chromium/third_party/webrtc/modules/audio_device/test/func_test_manager.cc
index bb7686c6c18..f16f296011b 100644
--- a/chromium/third_party/webrtc/modules/audio_device/test/func_test_manager.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/test/func_test_manager.cc
@@ -594,16 +594,15 @@ FuncTestManager::~FuncTestManager()
int32_t FuncTestManager::Init()
{
- EXPECT_TRUE((_processThread = rtc::ScopedToUnique(
- ProcessThread::Create("ProcessThread"))) != NULL);
- if (_processThread == NULL)
- {
- return -1;
+ EXPECT_TRUE((_processThread = ProcessThread::Create("ProcessThread")) !=
+ NULL);
+ if (_processThread == NULL) {
+ return -1;
}
_processThread->Start();
// create the Audio Device module
- EXPECT_TRUE((_audioDevice = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((_audioDevice = AudioDeviceModule::Create(
555, ADM_AUDIO_LAYER)) != NULL);
if (_audioDevice == NULL)
{
@@ -832,8 +831,8 @@ int32_t FuncTestManager::TestAudioLayerSelection()
// ==================================================
// Next, try to make fresh start with new audio layer
- EXPECT_TRUE((_processThread = rtc::ScopedToUnique(
- ProcessThread::Create("ProcessThread"))) != NULL);
+ EXPECT_TRUE((_processThread = ProcessThread::Create("ProcessThread")) !=
+ NULL);
if (_processThread == NULL)
{
return -1;
@@ -843,12 +842,12 @@ int32_t FuncTestManager::TestAudioLayerSelection()
// create the Audio Device module based on selected audio layer
if (tryWinWave)
{
- _audioDevice = AudioDeviceModuleImpl::Create(
+ _audioDevice = AudioDeviceModule::Create(
555,
AudioDeviceModule::kWindowsWaveAudio);
} else if (tryWinCore)
{
- _audioDevice = AudioDeviceModuleImpl::Create(
+ _audioDevice = AudioDeviceModule::Create(
555,
AudioDeviceModule::kWindowsCoreAudio);
}
@@ -857,7 +856,7 @@ int32_t FuncTestManager::TestAudioLayerSelection()
{
TEST_LOG("\nERROR: Switch of audio layer failed!\n");
// restore default audio layer instead
- EXPECT_TRUE((_audioDevice = AudioDeviceModuleImpl::Create(
+ EXPECT_TRUE((_audioDevice = AudioDeviceModule::Create(
555, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc
index 8079051184f..c1497089926 100644
--- a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/audio_device/audio_device_config.h"
#include "webrtc/modules/audio_device/win/audio_device_wave_win.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
#include <windows.h>
@@ -206,7 +206,7 @@ int32_t AudioDeviceWindowsWave::Init()
return 0;
}
- const uint32_t nowTime(TickTime::MillisecondTimestamp());
+ const uint32_t nowTime(rtc::TimeMillis());
_recordedBytes = 0;
_prevRecByteCheckTime = nowTime;
@@ -3038,7 +3038,7 @@ bool AudioDeviceWindowsWave::ThreadProcess()
return true;
}
- time = TickTime::MillisecondTimestamp();
+ time = rtc::TimeMillis();
if (_startPlay)
{
diff --git a/chromium/third_party/webrtc/modules/audio_processing/BUILD.gn b/chromium/third_party/webrtc/modules/audio_processing/BUILD.gn
index 22c904d52f2..a9650f74296 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/audio_processing/BUILD.gn
@@ -24,17 +24,16 @@ source_set("audio_processing") {
sources = [
"aec/aec_core.cc",
"aec/aec_core.h",
- "aec/aec_core_internal.h",
- "aec/aec_rdft.c",
+ "aec/aec_core_optimized_methods.h",
+ "aec/aec_rdft.cc",
"aec/aec_rdft.h",
"aec/aec_resampler.cc",
"aec/aec_resampler.h",
"aec/echo_cancellation.cc",
"aec/echo_cancellation.h",
- "aec/echo_cancellation_internal.h",
- "aecm/aecm_core.c",
+ "aecm/aecm_core.cc",
"aecm/aecm_core.h",
- "aecm/echo_control_mobile.c",
+ "aecm/echo_control_mobile.cc",
"aecm/echo_control_mobile.h",
"agc/agc.cc",
"agc/agc.h",
@@ -81,9 +80,8 @@ source_set("audio_processing") {
"intelligibility/intelligibility_utils.h",
"level_estimator_impl.cc",
"level_estimator_impl.h",
- "logging/aec_logging.h",
- "logging/aec_logging_file_handling.cc",
- "logging/aec_logging_file_handling.h",
+ "logging/apm_data_dumper.cc",
+ "logging/apm_data_dumper.h",
"noise_suppression_impl.cc",
"noise_suppression_impl.h",
"render_queue_item_verifier.h",
@@ -110,10 +108,10 @@ source_set("audio_processing") {
"typing_detection.h",
"utility/block_mean_calculator.cc",
"utility/block_mean_calculator.h",
- "utility/delay_estimator.c",
+ "utility/delay_estimator.cc",
"utility/delay_estimator.h",
"utility/delay_estimator_internal.h",
- "utility/delay_estimator_wrapper.c",
+ "utility/delay_estimator_wrapper.cc",
"utility/delay_estimator_wrapper.h",
"vad/common.h",
"vad/gmm.cc",
@@ -149,7 +147,9 @@ source_set("audio_processing") {
]
if (aec_debug_dump) {
- defines += [ "WEBRTC_AEC_DEBUG_DUMP" ]
+ defines += [ "WEBRTC_AEC_DEBUG_DUMP=1" ]
+ } else {
+ defines += [ "WEBRTC_AEC_DEBUG_DUMP=0" ]
}
if (aec_untrusted_delay_for_testing) {
@@ -196,15 +196,15 @@ source_set("audio_processing") {
}
if (current_cpu == "mipsel") {
- sources += [ "aecm/aecm_core_mips.c" ]
+ sources += [ "aecm/aecm_core_mips.cc" ]
if (mips_float_abi == "hard") {
sources += [
"aec/aec_core_mips.cc",
- "aec/aec_rdft_mips.c",
+ "aec/aec_rdft_mips.cc",
]
}
} else {
- sources += [ "aecm/aecm_core_c.c" ]
+ sources += [ "aecm/aecm_core_c.cc" ]
}
if (is_win) {
@@ -241,7 +241,7 @@ if (current_cpu == "x86" || current_cpu == "x64") {
source_set("audio_processing_sse2") {
sources = [
"aec/aec_core_sse2.cc",
- "aec/aec_rdft_sse2.c",
+ "aec/aec_rdft_sse2.cc",
]
if (is_posix) {
@@ -250,6 +250,12 @@ if (current_cpu == "x86" || current_cpu == "x64") {
configs += [ "../..:common_config" ]
public_configs = [ "../..:common_inherited_config" ]
+
+ if (aec_debug_dump) {
+ defines = [ "WEBRTC_AEC_DEBUG_DUMP=1" ]
+ } else {
+ defines = [ "WEBRTC_AEC_DEBUG_DUMP=0" ]
+ }
}
}
@@ -257,8 +263,8 @@ if (rtc_build_with_neon) {
source_set("audio_processing_neon") {
sources = [
"aec/aec_core_neon.cc",
- "aec/aec_rdft_neon.c",
- "aecm/aecm_core_neon.c",
+ "aec/aec_rdft_neon.cc",
+ "aecm/aecm_core_neon.cc",
"ns/nsx_core_neon.c",
]
@@ -285,5 +291,11 @@ if (rtc_build_with_neon) {
deps = [
"../../common_audio",
]
+
+ if (aec_debug_dump) {
+ defines = [ "WEBRTC_AEC_DEBUG_DUMP=1" ]
+ } else {
+ defines = [ "WEBRTC_AEC_DEBUG_DUMP=0" ]
+ }
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.cc b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.cc
index 1d1e67636bf..4c109d7d9ce 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.cc
@@ -14,10 +14,6 @@
#include "webrtc/modules/audio_processing/aec/aec_core.h"
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-#include <stdio.h>
-#endif
-
#include <algorithm>
#include <assert.h>
#include <math.h>
@@ -29,20 +25,45 @@
extern "C" {
#include "webrtc/common_audio/ring_buffer.h"
}
+#include "webrtc/base/checks.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_processing/aec/aec_common.h"
-#include "webrtc/modules/audio_processing/aec/aec_core_internal.h"
-extern "C" {
+#include "webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h"
#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
-}
-#include "webrtc/modules/audio_processing/logging/aec_logging.h"
-extern "C" {
+#include "webrtc/modules/audio_processing/logging/apm_data_dumper.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h"
-}
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
+#include "webrtc/system_wrappers/include/metrics.h"
#include "webrtc/typedefs.h"
namespace webrtc {
+namespace {
+enum class DelaySource {
+ kSystemDelay, // The delay values come from the OS.
+ kDelayAgnostic, // The delay values come from the DA-AEC.
+};
+
+constexpr int kMinDelayLogValue = -200;
+constexpr int kMaxDelayLogValue = 200;
+constexpr int kNumDelayLogBuckets = 100;
+
+void MaybeLogDelayAdjustment(int moved_ms, DelaySource source) {
+ if (moved_ms == 0)
+ return;
+ switch (source) {
+ case DelaySource::kSystemDelay:
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AecDelayAdjustmentMsSystemValue",
+ moved_ms, kMinDelayLogValue, kMaxDelayLogValue,
+ kNumDelayLogBuckets);
+ return;
+ case DelaySource::kDelayAgnostic:
+ RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AecDelayAdjustmentMsAgnosticValue",
+ moved_ms, kMinDelayLogValue, kMaxDelayLogValue,
+ kNumDelayLogBuckets);
+ return;
+ }
+}
+} // namespace
// Buffer size (samples)
static const size_t kBufSizePartitions = 250; // 1 second of audio in 16 kHz.
@@ -55,8 +76,8 @@ static const int kDelayMetricsAggregationWindow = 1250; // 5 seconds at 16 kHz.
// Divergence metric is based on audio level, which gets updated every
// |kCountLen + 1| * 10 milliseconds. Divergence metric takes the statistics of
// |kDivergentFilterFractionAggregationWindowSize| samples. Current value
-// corresponds to 0.5 seconds at 16 kHz.
-static const int kDivergentFilterFractionAggregationWindowSize = 25;
+// corresponds to 1 second at 16 kHz.
+static const int kDivergentFilterFractionAggregationWindowSize = 50;
// Quantities to control H band scaling for SWB input
static const float cnScaleHband = 0.4f; // scale for comfort noise in H band.
@@ -136,16 +157,13 @@ const float WebRtcAec_kNormalSmoothingCoefficients[2][2] = {{0.9f, 0.1f},
// Number of partitions forming the NLP's "preferred" bands.
enum { kPrefBandSize = 24 };
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-extern int webrtc_aec_instance_count;
-#endif
-
WebRtcAecFilterFar WebRtcAec_FilterFar;
WebRtcAecScaleErrorSignal WebRtcAec_ScaleErrorSignal;
WebRtcAecFilterAdaptation WebRtcAec_FilterAdaptation;
-WebRtcAecOverdriveAndSuppress WebRtcAec_OverdriveAndSuppress;
-WebRtcAecComfortNoise WebRtcAec_ComfortNoise;
-WebRtcAecSubBandCoherence WebRtcAec_SubbandCoherence;
+WebRtcAecOverdrive WebRtcAec_Overdrive;
+WebRtcAecSuppress WebRtcAec_Suppress;
+WebRtcAecComputeCoherence WebRtcAec_ComputeCoherence;
+WebRtcAecUpdateCoherenceSpectra WebRtcAec_UpdateCoherenceSpectra;
WebRtcAecStoreAsComplex WebRtcAec_StoreAsComplex;
WebRtcAecPartitionDelay WebRtcAec_PartitionDelay;
WebRtcAecWindowData WebRtcAec_WindowData;
@@ -210,7 +228,10 @@ void DivergentFilterFraction::Clear() {
}
// TODO(minyue): Moving some initialization from WebRtcAec_CreateAec() to ctor.
-AecCore::AecCore() = default;
+AecCore::AecCore(int instance_index)
+ : data_dumper(new ApmDataDumper(instance_index)) {}
+
+AecCore::~AecCore() {}
static int CmpFloat(const void* a, const void* b) {
const float* da = (const float*)a;
@@ -316,19 +337,21 @@ static void FilterAdaptation(
}
}
-static void OverdriveAndSuppress(AecCore* aec,
- float hNl[PART_LEN1],
- const float hNlFb,
- float efw[2][PART_LEN1]) {
- int i;
- for (i = 0; i < PART_LEN1; i++) {
+static void Overdrive(float overdrive_scaling,
+ const float hNlFb,
+ float hNl[PART_LEN1]) {
+ for (int i = 0; i < PART_LEN1; ++i) {
// Weight subbands
if (hNl[i] > hNlFb) {
hNl[i] = WebRtcAec_weightCurve[i] * hNlFb +
(1 - WebRtcAec_weightCurve[i]) * hNl[i];
}
- hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]);
+ hNl[i] = powf(hNl[i], overdrive_scaling * WebRtcAec_overDriveCurve[i]);
+ }
+}
+static void Suppress(const float hNl[PART_LEN1], float efw[2][PART_LEN1]) {
+ for (int i = 0; i < PART_LEN1; ++i) {
// Suppress error signal
efw[0][i] *= hNl[i];
efw[1][i] *= hNl[i];
@@ -339,7 +362,9 @@ static void OverdriveAndSuppress(AecCore* aec,
}
}
-static int PartitionDelay(const AecCore* aec) {
+static int PartitionDelay(int num_partitions,
+ float h_fft_buf[2]
+ [kExtendedNumPartitions * PART_LEN1]) {
// Measures the energy in each filter partition and returns the partition with
// highest energy.
// TODO(bjornv): Spread computational cost by computing one partition per
@@ -348,13 +373,13 @@ static int PartitionDelay(const AecCore* aec) {
int i;
int delay = 0;
- for (i = 0; i < aec->num_partitions; i++) {
+ for (i = 0; i < num_partitions; i++) {
int j;
int pos = i * PART_LEN1;
float wfEn = 0;
for (j = 0; j < PART_LEN1; j++) {
- wfEn += aec->wfBuf[0][pos + j] * aec->wfBuf[0][pos + j] +
- aec->wfBuf[1][pos + j] * aec->wfBuf[1][pos + j];
+ wfEn += h_fft_buf[0][pos + j] * h_fft_buf[0][pos + j] +
+ h_fft_buf[1][pos + j] * h_fft_buf[1][pos + j];
}
if (wfEn > wfEnMax) {
@@ -365,10 +390,46 @@ static int PartitionDelay(const AecCore* aec) {
return delay;
}
+// Update metric with 10 * log10(numerator / denominator).
+static void UpdateLogRatioMetric(Stats* metric, float numerator,
+ float denominator) {
+ RTC_DCHECK(metric);
+ RTC_CHECK(numerator >= 0);
+ RTC_CHECK(denominator >= 0);
+
+ const float log_numerator = log10(numerator + 1e-10f);
+ const float log_denominator = log10(denominator + 1e-10f);
+ metric->instant = 10.0f * (log_numerator - log_denominator);
+
+ // Max.
+ if (metric->instant > metric->max)
+ metric->max = metric->instant;
+
+ // Min.
+ if (metric->instant < metric->min)
+ metric->min = metric->instant;
+
+ // Average.
+ metric->counter++;
+ // This is to protect overflow, which should almost never happen.
+ RTC_CHECK_NE(0u, metric->counter);
+ metric->sum += metric->instant;
+ metric->average = metric->sum / metric->counter;
+
+ // Upper mean.
+ if (metric->instant > metric->average) {
+ metric->hicounter++;
+ // This is to protect overflow, which should almost never happen.
+ RTC_CHECK_NE(0u, metric->hicounter);
+ metric->hisum += metric->instant;
+ metric->himean = metric->hisum / metric->hicounter;
+ }
+}
+
// Threshold to protect against the ill-effects of a zero far-end.
const float WebRtcAec_kMinFarendPSD = 15;
-// Updates the following smoothed Power Spectral Densities (PSD):
+// Updates the following smoothed Power Spectral Densities (PSD):
// - sd : near-end
// - se : residual echo
// - sx : far-end
@@ -377,53 +438,60 @@ const float WebRtcAec_kMinFarendPSD = 15;
//
// In addition to updating the PSDs, also the filter diverge state is
// determined.
-static void SmoothedPSD(AecCore* aec,
- float efw[2][PART_LEN1],
- float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1],
- int* extreme_filter_divergence) {
+static void UpdateCoherenceSpectra(int mult,
+ bool extended_filter_enabled,
+ float efw[2][PART_LEN1],
+ float dfw[2][PART_LEN1],
+ float xfw[2][PART_LEN1],
+ CoherenceState* coherence_state,
+ short* filter_divergence_state,
+ int* extreme_filter_divergence) {
// Power estimate smoothing coefficients.
const float* ptrGCoh =
- aec->extended_filter_enabled
- ? WebRtcAec_kExtendedSmoothingCoefficients[aec->mult - 1]
- : WebRtcAec_kNormalSmoothingCoefficients[aec->mult - 1];
+ extended_filter_enabled
+ ? WebRtcAec_kExtendedSmoothingCoefficients[mult - 1]
+ : WebRtcAec_kNormalSmoothingCoefficients[mult - 1];
int i;
float sdSum = 0, seSum = 0;
for (i = 0; i < PART_LEN1; i++) {
- aec->sd[i] = ptrGCoh[0] * aec->sd[i] +
- ptrGCoh[1] * (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]);
- aec->se[i] = ptrGCoh[0] * aec->se[i] +
- ptrGCoh[1] * (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]);
+ coherence_state->sd[i] =
+ ptrGCoh[0] * coherence_state->sd[i] +
+ ptrGCoh[1] * (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]);
+ coherence_state->se[i] =
+ ptrGCoh[0] * coherence_state->se[i] +
+ ptrGCoh[1] * (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]);
// We threshold here to protect against the ill-effects of a zero farend.
// The threshold is not arbitrarily chosen, but balances protection and
// adverse interaction with the algorithm's tuning.
// TODO(bjornv): investigate further why this is so sensitive.
- aec->sx[i] = ptrGCoh[0] * aec->sx[i] +
- ptrGCoh[1] * WEBRTC_SPL_MAX(
- xfw[0][i] * xfw[0][i] + xfw[1][i] * xfw[1][i],
- WebRtcAec_kMinFarendPSD);
-
- aec->sde[i][0] =
- ptrGCoh[0] * aec->sde[i][0] +
+ coherence_state->sx[i] =
+ ptrGCoh[0] * coherence_state->sx[i] +
+ ptrGCoh[1] *
+ WEBRTC_SPL_MAX(xfw[0][i] * xfw[0][i] + xfw[1][i] * xfw[1][i],
+ WebRtcAec_kMinFarendPSD);
+
+ coherence_state->sde[i][0] =
+ ptrGCoh[0] * coherence_state->sde[i][0] +
ptrGCoh[1] * (dfw[0][i] * efw[0][i] + dfw[1][i] * efw[1][i]);
- aec->sde[i][1] =
- ptrGCoh[0] * aec->sde[i][1] +
+ coherence_state->sde[i][1] =
+ ptrGCoh[0] * coherence_state->sde[i][1] +
ptrGCoh[1] * (dfw[0][i] * efw[1][i] - dfw[1][i] * efw[0][i]);
- aec->sxd[i][0] =
- ptrGCoh[0] * aec->sxd[i][0] +
+ coherence_state->sxd[i][0] =
+ ptrGCoh[0] * coherence_state->sxd[i][0] +
ptrGCoh[1] * (dfw[0][i] * xfw[0][i] + dfw[1][i] * xfw[1][i]);
- aec->sxd[i][1] =
- ptrGCoh[0] * aec->sxd[i][1] +
+ coherence_state->sxd[i][1] =
+ ptrGCoh[0] * coherence_state->sxd[i][1] +
ptrGCoh[1] * (dfw[0][i] * xfw[1][i] - dfw[1][i] * xfw[0][i]);
- sdSum += aec->sd[i];
- seSum += aec->se[i];
+ sdSum += coherence_state->sd[i];
+ seSum += coherence_state->se[i];
}
// Divergent filter safeguard update.
- aec->divergeState = (aec->divergeState ? 1.05f : 1.0f) * seSum > sdSum;
+ *filter_divergence_state =
+ (*filter_divergence_state ? 1.05f : 1.0f) * seSum > sdSum;
// Signal extreme filter divergence if the error is significantly larger
// than the nearend (13 dB).
@@ -454,26 +522,17 @@ __inline static void StoreAsComplex(const float* data,
data_complex[1][PART_LEN] = 0;
}
-static void SubbandCoherence(AecCore* aec,
- float efw[2][PART_LEN1],
- float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1],
- float* fft,
+static void ComputeCoherence(const CoherenceState* coherence_state,
float* cohde,
- float* cohxd,
- int* extreme_filter_divergence) {
- int i;
-
- SmoothedPSD(aec, efw, dfw, xfw, extreme_filter_divergence);
-
+ float* cohxd) {
// Subband coherence
- for (i = 0; i < PART_LEN1; i++) {
- cohde[i] =
- (aec->sde[i][0] * aec->sde[i][0] + aec->sde[i][1] * aec->sde[i][1]) /
- (aec->sd[i] * aec->se[i] + 1e-10f);
- cohxd[i] =
- (aec->sxd[i][0] * aec->sxd[i][0] + aec->sxd[i][1] * aec->sxd[i][1]) /
- (aec->sx[i] * aec->sd[i] + 1e-10f);
+ for (int i = 0; i < PART_LEN1; i++) {
+ cohde[i] = (coherence_state->sde[i][0] * coherence_state->sde[i][0] +
+ coherence_state->sde[i][1] * coherence_state->sde[i][1]) /
+ (coherence_state->sd[i] * coherence_state->se[i] + 1e-10f);
+ cohxd[i] = (coherence_state->sxd[i][0] * coherence_state->sxd[i][0] +
+ coherence_state->sxd[i][1] * coherence_state->sxd[i][1]) /
+ (coherence_state->sx[i] * coherence_state->sd[i] + 1e-10f);
}
}
@@ -487,94 +546,67 @@ static void GetHighbandGain(const float* lambda, float* nlpGainHband) {
*nlpGainHband /= static_cast<float>(PART_LEN1 - 1 - freqAvgIc);
}
-static void ComfortNoise(AecCore* aec,
- float efw[2][PART_LEN1],
- float comfortNoiseHband[2][PART_LEN1],
- const float* noisePow,
- const float* lambda) {
- int i, num;
- float rand[PART_LEN];
- float noise, noiseAvg, tmp, tmpAvg;
+static void GenerateComplexNoise(uint32_t* seed, float noise[2][PART_LEN1]) {
+ const float kPi2 = 6.28318530717959f;
int16_t randW16[PART_LEN];
- float u[2][PART_LEN1];
+ WebRtcSpl_RandUArray(randW16, PART_LEN, seed);
- const float pi2 = 6.28318530717959f;
-
- // Generate a uniform random array on [0 1]
- WebRtcSpl_RandUArray(randW16, PART_LEN, &aec->seed);
- for (i = 0; i < PART_LEN; i++) {
- rand[i] = static_cast<float>(randW16[i]) / 32768;
+ noise[0][0] = 0;
+ noise[1][0] = 0;
+ for (size_t i = 1; i < PART_LEN1; i++) {
+ float tmp = kPi2 * randW16[i - 1] / 32768.f;
+ noise[0][i] = cosf(tmp);
+ noise[1][i] = -sinf(tmp);
}
+ noise[1][PART_LEN] = 0;
+}
- // Reject LF noise
- u[0][0] = 0;
- u[1][0] = 0;
- for (i = 1; i < PART_LEN1; i++) {
- tmp = pi2 * rand[i - 1];
-
- noise = sqrtf(noisePow[i]);
- u[0][i] = noise * cosf(tmp);
- u[1][i] = -noise * sinf(tmp);
- }
- u[1][PART_LEN] = 0;
-
- for (i = 0; i < PART_LEN1; i++) {
- // This is the proper weighting to match the background noise power
- tmp = sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0));
- // tmp = 1 - lambda[i];
- efw[0][i] += tmp * u[0][i];
- efw[1][i] += tmp * u[1][i];
- }
-
- // For H band comfort noise
- // TODO(peah): don't compute noise and "tmp" twice. Use the previous results.
- noiseAvg = 0.0;
- tmpAvg = 0.0;
- num = 0;
- if (aec->num_bands > 1) {
- // average noise scale
- // average over second half of freq spectrum (i.e., 4->8khz)
- // TODO(peah): we shouldn't need num. We know how many elements we're
- // summing.
- for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) {
- num++;
- noiseAvg += sqrtf(noisePow[i]);
- }
- noiseAvg /= static_cast<float>(num);
-
- // average nlp scale
- // average over second half of freq spectrum (i.e., 4->8khz)
- // TODO(peah): we shouldn't need num. We know how many elements
- // we're summing.
- num = 0;
- for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) {
- num++;
- tmpAvg += sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0));
- }
- tmpAvg /= static_cast<float>(num);
-
- // Use average noise for H band
- // TODO(peah): we should probably have a new random vector here.
- // Reject LF noise
- u[0][0] = 0;
- u[1][0] = 0;
- for (i = 1; i < PART_LEN1; i++) {
- tmp = pi2 * rand[i - 1];
-
- // Use average noise for H band
- u[0][i] = noiseAvg * static_cast<float>(cos(tmp));
- u[1][i] = -noiseAvg * static_cast<float>(sin(tmp));
+static void ComfortNoise(bool generate_high_frequency_noise,
+ uint32_t* seed,
+ float e_fft[2][PART_LEN1],
+ float high_frequency_comfort_noise[2][PART_LEN1],
+ const float* noise_spectrum,
+ const float* suppressor_gain) {
+ float complex_noise[2][PART_LEN1];
+
+ GenerateComplexNoise(seed, complex_noise);
+
+ // Shape, scale and add comfort noise.
+ for (int i = 1; i < PART_LEN1; ++i) {
+ float noise_scaling =
+ sqrtf(WEBRTC_SPL_MAX(1 - suppressor_gain[i] * suppressor_gain[i], 0)) *
+ sqrtf(noise_spectrum[i]);
+ e_fft[0][i] += noise_scaling * complex_noise[0][i];
+ e_fft[1][i] += noise_scaling * complex_noise[1][i];
+ }
+
+ // Form comfort noise for higher frequencies.
+ if (generate_high_frequency_noise) {
+ // Compute average noise power and nlp gain over the second half of freq
+ // spectrum (i.e., 4->8khz).
+ int start_avg_band = PART_LEN1 / 2;
+ float upper_bands_noise_power = 0.f;
+ float upper_bands_suppressor_gain = 0.f;
+ for (int i = start_avg_band; i < PART_LEN1; ++i) {
+ upper_bands_noise_power += sqrtf(noise_spectrum[i]);
+ upper_bands_suppressor_gain +=
+ sqrtf(WEBRTC_SPL_MAX(1 - suppressor_gain[i] * suppressor_gain[i], 0));
}
- u[1][PART_LEN] = 0;
-
- for (i = 0; i < PART_LEN1; i++) {
- // Use average NLP weight for H band
- comfortNoiseHband[0][i] = tmpAvg * u[0][i];
- comfortNoiseHband[1][i] = tmpAvg * u[1][i];
+ upper_bands_noise_power /= (PART_LEN1 - start_avg_band);
+ upper_bands_suppressor_gain /= (PART_LEN1 - start_avg_band);
+
+ // Shape, scale and add comfort noise.
+ float noise_scaling = upper_bands_suppressor_gain * upper_bands_noise_power;
+ high_frequency_comfort_noise[0][0] = 0;
+ high_frequency_comfort_noise[1][0] = 0;
+ for (int i = 1; i < PART_LEN1; ++i) {
+ high_frequency_comfort_noise[0][i] = noise_scaling * complex_noise[0][i];
+ high_frequency_comfort_noise[1][i] = noise_scaling * complex_noise[1][i];
}
+ high_frequency_comfort_noise[1][PART_LEN] = 0;
} else {
- memset(comfortNoiseHband, 0,
- 2 * PART_LEN1 * sizeof(comfortNoiseHband[0][0]));
+ memset(high_frequency_comfort_noise, 0,
+ 2 * PART_LEN1 * sizeof(high_frequency_comfort_noise[0][0]));
}
}
@@ -638,16 +670,12 @@ static void UpdateLevel(PowerLevel* level, float power) {
}
static void UpdateMetrics(AecCore* aec) {
- float dtmp;
-
const float actThresholdNoisy = 8.0f;
const float actThresholdClean = 40.0f;
- const float safety = 0.99995f;
const float noisyPower = 300000.0f;
float actThreshold;
- float echo, suppressedEcho;
if (aec->echoState) { // Check if echo is likely present
aec->stateCounter++;
@@ -674,95 +702,22 @@ static void UpdateMetrics(AecCore* aec) {
(aec->farlevel.framelevel.EndOfBlock()) &&
(far_average_level > (actThreshold * aec->farlevel.minlevel))) {
+ // ERL: error return loss.
const float near_average_level =
aec->nearlevel.averagelevel.GetLatestMean();
+ UpdateLogRatioMetric(&aec->erl, far_average_level, near_average_level);
- // Subtract noise power
- echo = near_average_level - safety * aec->nearlevel.minlevel;
-
- // ERL
- dtmp = 10 * static_cast<float>(log10(far_average_level /
- near_average_level + 1e-10f));
-
- aec->erl.instant = dtmp;
- if (dtmp > aec->erl.max) {
- aec->erl.max = dtmp;
- }
-
- if (dtmp < aec->erl.min) {
- aec->erl.min = dtmp;
- }
-
- aec->erl.counter++;
- aec->erl.sum += dtmp;
- aec->erl.average = aec->erl.sum / aec->erl.counter;
-
- // Upper mean
- if (dtmp > aec->erl.average) {
- aec->erl.hicounter++;
- aec->erl.hisum += dtmp;
- aec->erl.himean = aec->erl.hisum / aec->erl.hicounter;
- }
-
- // A_NLP
+ // A_NLP: error return loss enhanced before the nonlinear suppression.
const float linout_average_level =
aec->linoutlevel.averagelevel.GetLatestMean();
- dtmp = 10 * static_cast<float>(log10(near_average_level /
- linout_average_level + 1e-10f));
-
- // subtract noise power
- suppressedEcho =
- linout_average_level - safety * aec->linoutlevel.minlevel;
-
- aec->aNlp.instant =
- 10 * static_cast<float>(log10(echo / suppressedEcho + 1e-10f));
-
- if (dtmp > aec->aNlp.max) {
- aec->aNlp.max = dtmp;
- }
+ UpdateLogRatioMetric(&aec->aNlp, near_average_level,
+ linout_average_level);
- if (dtmp < aec->aNlp.min) {
- aec->aNlp.min = dtmp;
- }
-
- aec->aNlp.counter++;
- aec->aNlp.sum += dtmp;
- aec->aNlp.average = aec->aNlp.sum / aec->aNlp.counter;
-
- // Upper mean
- if (dtmp > aec->aNlp.average) {
- aec->aNlp.hicounter++;
- aec->aNlp.hisum += dtmp;
- aec->aNlp.himean = aec->aNlp.hisum / aec->aNlp.hicounter;
- }
-
- // ERLE
+ // ERLE: error return loss enhanced.
const float nlpout_average_level =
aec->nlpoutlevel.averagelevel.GetLatestMean();
- // subtract noise power
- suppressedEcho =
- nlpout_average_level - safety * aec->nlpoutlevel.minlevel;
- dtmp = 10 * static_cast<float>(log10(echo / suppressedEcho + 1e-10f));
-
- aec->erle.instant = dtmp;
- if (dtmp > aec->erle.max) {
- aec->erle.max = dtmp;
- }
-
- if (dtmp < aec->erle.min) {
- aec->erle.min = dtmp;
- }
-
- aec->erle.counter++;
- aec->erle.sum += dtmp;
- aec->erle.average = aec->erle.sum / aec->erle.counter;
-
- // Upper mean
- if (dtmp > aec->erle.average) {
- aec->erle.hicounter++;
- aec->erle.hisum += dtmp;
- aec->erle.himean = aec->erle.hisum / aec->erle.hicounter;
- }
+ UpdateLogRatioMetric(&aec->erle, near_average_level,
+ nlpout_average_level);
}
aec->stateCounter = 0;
@@ -963,9 +918,9 @@ static void RegressorPower(int num_partitions,
}
}
-static void EchoSubtraction(AecCore* aec,
- int num_partitions,
+static void EchoSubtraction(int num_partitions,
int extended_filter_enabled,
+ int* extreme_filter_divergence,
float filter_step_size,
float error_threshold,
float* x_fft,
@@ -1001,9 +956,10 @@ static void EchoSubtraction(AecCore* aec,
// Conditionally reset the echo subtraction filter if the filter has diverged
// significantly.
- if (!aec->extended_filter_enabled && aec->extreme_filter_divergence) {
- memset(aec->wfBuf, 0, sizeof(aec->wfBuf));
- aec->extreme_filter_divergence = 0;
+ if (!extended_filter_enabled && *extreme_filter_divergence) {
+ memset(h_fft_buf, 0,
+ 2 * kExtendedNumPartitions * PART_LEN1 * sizeof(h_fft_buf[0][0]));
+ *extreme_filter_divergence = 0;
}
// Produce echo estimate s_fft.
@@ -1024,9 +980,6 @@ static void EchoSubtraction(AecCore* aec,
memcpy(e_extended + PART_LEN, e, sizeof(float) * PART_LEN);
Fft(e_extended, e_fft);
- RTC_AEC_DEBUG_RAW_WRITE(aec->e_fft_file, &e_fft[0][0],
- sizeof(e_fft[0][0]) * PART_LEN1 * 2);
-
// Scale error signal inversely with far power.
WebRtcAec_ScaleErrorSignal(filter_step_size, error_threshold, x_pow, e_fft);
WebRtcAec_FilterAdaptation(num_partitions, *x_fft_buf_block_pos, x_fft_buf,
@@ -1034,92 +987,30 @@ static void EchoSubtraction(AecCore* aec,
memcpy(echo_subtractor_output, e, sizeof(float) * PART_LEN);
}
-static void EchoSuppression(AecCore* aec,
- float farend[PART_LEN2],
- float* echo_subtractor_output,
- float* output,
- float* const* outputH) {
- float efw[2][PART_LEN1];
- float xfw[2][PART_LEN1];
- float dfw[2][PART_LEN1];
- float comfortNoiseHband[2][PART_LEN1];
- float fft[PART_LEN2];
- float nlpGainHband;
- int i;
- size_t j;
-
- // Coherence and non-linear filter
- float cohde[PART_LEN1], cohxd[PART_LEN1];
+static void FormSuppressionGain(AecCore* aec,
+ float cohde[PART_LEN1],
+ float cohxd[PART_LEN1],
+ float hNl[PART_LEN1]) {
float hNlDeAvg, hNlXdAvg;
- float hNl[PART_LEN1];
float hNlPref[kPrefBandSize];
float hNlFb = 0, hNlFbLow = 0;
- const float prefBandQuant = 0.75f, prefBandQuantLow = 0.5f;
const int prefBandSize = kPrefBandSize / aec->mult;
+ const float prefBandQuant = 0.75f, prefBandQuantLow = 0.5f;
const int minPrefBand = 4 / aec->mult;
// Power estimate smoothing coefficients.
const float* min_overdrive = aec->extended_filter_enabled
? kExtendedMinOverDrive
: kNormalMinOverDrive;
- // Filter energy
- const int delayEstInterval = 10 * aec->mult;
-
- float* xfw_ptr = NULL;
-
- // Update eBuf with echo subtractor output.
- memcpy(aec->eBuf + PART_LEN, echo_subtractor_output,
- sizeof(float) * PART_LEN);
-
- // Analysis filter banks for the echo suppressor.
- // Windowed near-end ffts.
- WindowData(fft, aec->dBuf);
- aec_rdft_forward_128(fft);
- StoreAsComplex(fft, dfw);
-
- // Windowed echo suppressor output ffts.
- WindowData(fft, aec->eBuf);
- aec_rdft_forward_128(fft);
- StoreAsComplex(fft, efw);
-
- // NLP
-
- // Convert far-end partition to the frequency domain with windowing.
- WindowData(fft, farend);
- Fft(fft, xfw);
- xfw_ptr = &xfw[0][0];
-
- // Buffer far.
- memcpy(aec->xfwBuf, xfw_ptr, sizeof(float) * 2 * PART_LEN1);
-
- aec->delayEstCtr++;
- if (aec->delayEstCtr == delayEstInterval) {
- aec->delayEstCtr = 0;
- aec->delayIdx = WebRtcAec_PartitionDelay(aec);
- }
-
- // Use delayed far.
- memcpy(xfw, aec->xfwBuf + aec->delayIdx * PART_LEN1,
- sizeof(xfw[0][0]) * 2 * PART_LEN1);
-
- WebRtcAec_SubbandCoherence(aec, efw, dfw, xfw, fft, cohde, cohxd,
- &aec->extreme_filter_divergence);
-
- // Select the microphone signal as output if the filter is deemed to have
- // diverged.
- if (aec->divergeState) {
- memcpy(efw, dfw, sizeof(efw[0][0]) * 2 * PART_LEN1);
- }
-
hNlXdAvg = 0;
- for (i = minPrefBand; i < prefBandSize + minPrefBand; i++) {
+ for (int i = minPrefBand; i < prefBandSize + minPrefBand; ++i) {
hNlXdAvg += cohxd[i];
}
hNlXdAvg /= prefBandSize;
hNlXdAvg = 1 - hNlXdAvg;
hNlDeAvg = 0;
- for (i = minPrefBand; i < prefBandSize + minPrefBand; i++) {
+ for (int i = minPrefBand; i < prefBandSize + minPrefBand; ++i) {
hNlDeAvg += cohde[i];
}
hNlDeAvg /= prefBandSize;
@@ -1139,11 +1030,11 @@ static void EchoSuppression(AecCore* aec,
aec->overDrive = min_overdrive[aec->nlp_mode];
if (aec->stNearState == 1) {
- memcpy(hNl, cohde, sizeof(hNl));
+ memcpy(hNl, cohde, sizeof(hNl[0]) * PART_LEN1);
hNlFb = hNlDeAvg;
hNlFbLow = hNlDeAvg;
} else {
- for (i = 0; i < PART_LEN1; i++) {
+ for (int i = 0; i < PART_LEN1; ++i) {
hNl[i] = 1 - cohxd[i];
}
hNlFb = hNlXdAvg;
@@ -1152,12 +1043,12 @@ static void EchoSuppression(AecCore* aec,
} else {
if (aec->stNearState == 1) {
aec->echoState = 0;
- memcpy(hNl, cohde, sizeof(hNl));
+ memcpy(hNl, cohde, sizeof(hNl[0]) * PART_LEN1);
hNlFb = hNlDeAvg;
hNlFbLow = hNlDeAvg;
} else {
aec->echoState = 1;
- for (i = 0; i < PART_LEN1; i++) {
+ for (int i = 0; i < PART_LEN1; ++i) {
hNl[i] = WEBRTC_SPL_MIN(cohde[i], 1 - cohxd[i]);
}
@@ -1197,16 +1088,96 @@ static void EchoSuppression(AecCore* aec,
}
// Smooth the overdrive.
- if (aec->overDrive < aec->overDriveSm) {
- aec->overDriveSm = 0.99f * aec->overDriveSm + 0.01f * aec->overDrive;
+ if (aec->overDrive < aec->overdrive_scaling) {
+ aec->overdrive_scaling =
+ 0.99f * aec->overdrive_scaling + 0.01f * aec->overDrive;
} else {
- aec->overDriveSm = 0.9f * aec->overDriveSm + 0.1f * aec->overDrive;
+ aec->overdrive_scaling =
+ 0.9f * aec->overdrive_scaling + 0.1f * aec->overDrive;
+ }
+
+ // Apply the overdrive.
+ WebRtcAec_Overdrive(aec->overdrive_scaling, hNlFb, hNl);
+}
+
+static void EchoSuppression(AecCore* aec,
+ float farend[PART_LEN2],
+ float* echo_subtractor_output,
+ float* output,
+ float* const* outputH) {
+ float efw[2][PART_LEN1];
+ float xfw[2][PART_LEN1];
+ float dfw[2][PART_LEN1];
+ float comfortNoiseHband[2][PART_LEN1];
+ float fft[PART_LEN2];
+ float nlpGainHband;
+ int i;
+ size_t j;
+
+ // Coherence and non-linear filter
+ float cohde[PART_LEN1], cohxd[PART_LEN1];
+ float hNl[PART_LEN1];
+
+ // Filter energy
+ const int delayEstInterval = 10 * aec->mult;
+
+ float* xfw_ptr = NULL;
+
+ // Update eBuf with echo subtractor output.
+ memcpy(aec->eBuf + PART_LEN, echo_subtractor_output,
+ sizeof(float) * PART_LEN);
+
+ // Analysis filter banks for the echo suppressor.
+ // Windowed near-end ffts.
+ WindowData(fft, aec->dBuf);
+ aec_rdft_forward_128(fft);
+ StoreAsComplex(fft, dfw);
+
+ // Windowed echo suppressor output ffts.
+ WindowData(fft, aec->eBuf);
+ aec_rdft_forward_128(fft);
+ StoreAsComplex(fft, efw);
+
+ // NLP
+
+ // Convert far-end partition to the frequency domain with windowing.
+ WindowData(fft, farend);
+ Fft(fft, xfw);
+ xfw_ptr = &xfw[0][0];
+
+ // Buffer far.
+ memcpy(aec->xfwBuf, xfw_ptr, sizeof(float) * 2 * PART_LEN1);
+
+ aec->delayEstCtr++;
+ if (aec->delayEstCtr == delayEstInterval) {
+ aec->delayEstCtr = 0;
+ aec->delayIdx = WebRtcAec_PartitionDelay(aec->num_partitions, aec->wfBuf);
+ }
+
+ // Use delayed far.
+ memcpy(xfw, aec->xfwBuf + aec->delayIdx * PART_LEN1,
+ sizeof(xfw[0][0]) * 2 * PART_LEN1);
+
+ WebRtcAec_UpdateCoherenceSpectra(aec->mult, aec->extended_filter_enabled == 1,
+ efw, dfw, xfw, &aec->coherence_state,
+ &aec->divergeState,
+ &aec->extreme_filter_divergence);
+
+ WebRtcAec_ComputeCoherence(&aec->coherence_state, cohde, cohxd);
+
+ // Select the microphone signal as output if the filter is deemed to have
+ // diverged.
+ if (aec->divergeState) {
+ memcpy(efw, dfw, sizeof(efw[0][0]) * 2 * PART_LEN1);
}
- WebRtcAec_OverdriveAndSuppress(aec, hNl, hNlFb, efw);
+ FormSuppressionGain(aec, cohde, cohxd, hNl);
+
+ WebRtcAec_Suppress(hNl, efw);
// Add comfort noise.
- WebRtcAec_ComfortNoise(aec, efw, comfortNoiseHband, aec->noisePow, hNl);
+ ComfortNoise(aec->num_bands > 1, &aec->seed, efw, comfortNoiseHband,
+ aec->noisePow, hNl);
// Inverse error fft.
ScaledInverseFft(efw, fft, 2.0f, 1);
@@ -1315,15 +1286,10 @@ static void ProcessBlock(AecCore* aec) {
WebRtc_ReadBuffer(aec->far_time_buf, reinterpret_cast<void**>(&farend_ptr),
farend, 1);
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- {
- // TODO(minyue): |farend_ptr| starts from buffered samples. This will be
- // modified when |aec->far_time_buf| is revised.
- RTC_AEC_DEBUG_WAV_WRITE(aec->farFile, &farend_ptr[PART_LEN], PART_LEN);
-
- RTC_AEC_DEBUG_WAV_WRITE(aec->nearFile, nearend_ptr, PART_LEN);
- }
-#endif
+ aec->data_dumper->DumpWav("aec_far", PART_LEN, &farend_ptr[PART_LEN],
+ std::min(aec->sampFreq, 16000), 1);
+ aec->data_dumper->DumpWav("aec_near", PART_LEN, nearend_ptr,
+ std::min(aec->sampFreq, 16000), 1);
if (aec->metricsMode == 1) {
// Update power levels
@@ -1417,12 +1383,14 @@ static void ProcessBlock(AecCore* aec) {
}
// Perform echo subtraction.
- EchoSubtraction(aec, aec->num_partitions, aec->extended_filter_enabled,
- aec->filter_step_size, aec->error_threshold, &x_fft[0][0],
- &aec->xfBufBlockPos, aec->xfBuf, nearend_ptr, aec->xPow,
- aec->wfBuf, echo_subtractor_output);
+ EchoSubtraction(aec->num_partitions, aec->extended_filter_enabled,
+ &aec->extreme_filter_divergence, aec->filter_step_size,
+ aec->error_threshold, &x_fft[0][0], &aec->xfBufBlockPos,
+ aec->xfBuf, nearend_ptr, aec->xPow, aec->wfBuf,
+ echo_subtractor_output);
- RTC_AEC_DEBUG_WAV_WRITE(aec->outLinearFile, echo_subtractor_output, PART_LEN);
+ aec->data_dumper->DumpWav("aec_out_linear", PART_LEN, echo_subtractor_output,
+ std::min(aec->sampFreq, 16000), 1);
if (aec->metricsMode == 1) {
UpdateLevel(&aec->linoutlevel,
@@ -1444,12 +1412,14 @@ static void ProcessBlock(AecCore* aec) {
WebRtc_WriteBuffer(aec->outFrBufH[i], outputH[i], PART_LEN);
}
- RTC_AEC_DEBUG_WAV_WRITE(aec->outFile, output, PART_LEN);
+ aec->data_dumper->DumpWav("aec_out", PART_LEN, output,
+ std::min(aec->sampFreq, 16000), 1);
}
-AecCore* WebRtcAec_CreateAec() {
+AecCore* WebRtcAec_CreateAec(int instance_count) {
int i;
- AecCore* aec = new AecCore;
+ AecCore* aec = new AecCore(instance_count);
+
if (!aec) {
return NULL;
}
@@ -1493,12 +1463,6 @@ AecCore* WebRtcAec_CreateAec() {
return NULL;
}
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- aec->instance_index = webrtc_aec_instance_count;
-
- aec->farFile = aec->nearFile = aec->outFile = aec->outLinearFile = NULL;
- aec->debug_dump_count = 0;
-#endif
aec->delay_estimator_farend =
WebRtc_CreateDelayEstimatorFarend(PART_LEN1, kHistorySizeBlocks);
if (aec->delay_estimator_farend == NULL) {
@@ -1530,9 +1494,10 @@ AecCore* WebRtcAec_CreateAec() {
WebRtcAec_FilterFar = FilterFar;
WebRtcAec_ScaleErrorSignal = ScaleErrorSignal;
WebRtcAec_FilterAdaptation = FilterAdaptation;
- WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppress;
- WebRtcAec_ComfortNoise = ComfortNoise;
- WebRtcAec_SubbandCoherence = SubbandCoherence;
+ WebRtcAec_Overdrive = Overdrive;
+ WebRtcAec_Suppress = Suppress;
+ WebRtcAec_ComputeCoherence = ComputeCoherence;
+ WebRtcAec_UpdateCoherenceSpectra = UpdateCoherenceSpectra;
WebRtcAec_StoreAsComplex = StoreAsComplex;
WebRtcAec_PartitionDelay = PartitionDelay;
WebRtcAec_WindowData = WindowData;
@@ -1549,10 +1514,6 @@ AecCore* WebRtcAec_CreateAec() {
#if defined(WEBRTC_HAS_NEON)
WebRtcAec_InitAec_neon();
-#elif defined(WEBRTC_DETECT_NEON)
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- WebRtcAec_InitAec_neon();
- }
#endif
aec_rdft_init();
@@ -1576,12 +1537,6 @@ void WebRtcAec_FreeAec(AecCore* aec) {
WebRtc_FreeBuffer(aec->far_time_buf);
- RTC_AEC_DEBUG_WAV_CLOSE(aec->farFile);
- RTC_AEC_DEBUG_WAV_CLOSE(aec->nearFile);
- RTC_AEC_DEBUG_WAV_CLOSE(aec->outFile);
- RTC_AEC_DEBUG_WAV_CLOSE(aec->outLinearFile);
- RTC_AEC_DEBUG_RAW_CLOSE(aec->e_fft_file);
-
WebRtc_FreeDelayEstimator(aec->delay_estimator);
WebRtc_FreeDelayEstimatorFarend(aec->delay_estimator_farend);
@@ -1626,6 +1581,7 @@ static void SetErrorThreshold(AecCore* aec) {
int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
int i;
+ aec->data_dumper->InitiateNewSetOfRecordings();
aec->sampFreq = sampFreq;
@@ -1648,27 +1604,6 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
// Initialize far-end buffers.
WebRtc_InitBuffer(aec->far_time_buf);
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- {
- int process_rate = sampFreq > 16000 ? 16000 : sampFreq;
- RTC_AEC_DEBUG_WAV_REOPEN("aec_far", aec->instance_index,
- aec->debug_dump_count, process_rate,
- &aec->farFile);
- RTC_AEC_DEBUG_WAV_REOPEN("aec_near", aec->instance_index,
- aec->debug_dump_count, process_rate,
- &aec->nearFile);
- RTC_AEC_DEBUG_WAV_REOPEN("aec_out", aec->instance_index,
- aec->debug_dump_count, process_rate,
- &aec->outFile);
- RTC_AEC_DEBUG_WAV_REOPEN("aec_out_linear", aec->instance_index,
- aec->debug_dump_count, process_rate,
- &aec->outLinearFile);
- }
-
- RTC_AEC_DEBUG_RAW_OPEN("aec_e_fft", aec->debug_dump_count, &aec->e_fft_file);
-
- ++aec->debug_dump_count;
-#endif
aec->system_delay = 0;
if (WebRtc_InitDelayEstimatorFarend(aec->delay_estimator_farend) != 0) {
@@ -1749,18 +1684,18 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
// doesn't change the output at all and yields 0.4% overall speedup.
memset(aec->xfBuf, 0, sizeof(complex_t) * kExtendedNumPartitions * PART_LEN1);
memset(aec->wfBuf, 0, sizeof(complex_t) * kExtendedNumPartitions * PART_LEN1);
- memset(aec->sde, 0, sizeof(complex_t) * PART_LEN1);
- memset(aec->sxd, 0, sizeof(complex_t) * PART_LEN1);
+ memset(aec->coherence_state.sde, 0, sizeof(complex_t) * PART_LEN1);
+ memset(aec->coherence_state.sxd, 0, sizeof(complex_t) * PART_LEN1);
memset(aec->xfwBuf, 0,
sizeof(complex_t) * kExtendedNumPartitions * PART_LEN1);
- memset(aec->se, 0, sizeof(float) * PART_LEN1);
+ memset(aec->coherence_state.se, 0, sizeof(float) * PART_LEN1);
// To prevent numerical instability in the first block.
for (i = 0; i < PART_LEN1; i++) {
- aec->sd[i] = 1;
+ aec->coherence_state.sd[i] = 1;
}
for (i = 0; i < PART_LEN1; i++) {
- aec->sx[i] = 1;
+ aec->coherence_state.sx[i] = 1;
}
memset(aec->hNs, 0, sizeof(aec->hNs));
@@ -1772,7 +1707,7 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
aec->hNlNewMin = 0;
aec->hNlMinCtr = 0;
aec->overDrive = 2;
- aec->overDriveSm = 2;
+ aec->overdrive_scaling = 2;
aec->delayIdx = 0;
aec->stNearState = 0;
aec->echoState = 0;
@@ -1878,11 +1813,15 @@ void WebRtcAec_ProcessFrames(AecCore* aec,
// rounding, like -16.
int move_elements = (aec->knownDelay - knownDelay - 32) / PART_LEN;
int moved_elements = WebRtc_MoveReadPtr(aec->far_time_buf, move_elements);
+ MaybeLogDelayAdjustment(moved_elements * (aec->sampFreq == 8000 ? 8 : 4),
+ DelaySource::kSystemDelay);
aec->knownDelay -= moved_elements * PART_LEN;
} else {
// 2 b) Apply signal based delay correction.
int move_elements = SignalBasedDelayCorrection(aec);
int moved_elements = WebRtc_MoveReadPtr(aec->far_time_buf, move_elements);
+ MaybeLogDelayAdjustment(moved_elements * (aec->sampFreq == 8000 ? 8 : 4),
+ DelaySource::kDelayAgnostic);
int far_near_buffer_diff =
WebRtc_available_read(aec->far_time_buf) -
WebRtc_available_read(aec->nearFrBuf) / PART_LEN;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h
index bd5b283eca0..1ab20201860 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h
@@ -17,6 +17,15 @@
#include <stddef.h>
+#include <memory>
+
+extern "C" {
+#include "webrtc/common_audio/ring_buffer.h"
+}
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/common_audio/wav_file.h"
+#include "webrtc/modules/audio_processing/aec/aec_common.h"
+#include "webrtc/modules/audio_processing/utility/block_mean_calculator.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -27,6 +36,8 @@ namespace webrtc {
#define PART_LEN2 (PART_LEN * 2) // Length of partition * 2
#define NUM_HIGH_BANDS_MAX 2 // Max number of high bands
+class ApmDataDumper;
+
typedef float complex_t[2];
// For performance reasons, some arrays of complex numbers are replaced by twice
// as long arrays of float, all the real parts followed by all the imaginary
@@ -47,20 +58,188 @@ typedef struct Stats {
float sum;
float hisum;
float himean;
- int counter;
- int hicounter;
+ size_t counter;
+ size_t hicounter;
} Stats;
-typedef struct AecCore AecCore;
+// Number of partitions for the extended filter mode. The first one is an enum
+// to be used in array declarations, as it represents the maximum filter length.
+enum { kExtendedNumPartitions = 32 };
+static const int kNormalNumPartitions = 12;
+
+// Delay estimator constants, used for logging and delay compensation if
+// if reported delays are disabled.
+enum { kLookaheadBlocks = 15 };
+enum {
+ // 500 ms for 16 kHz which is equivalent with the limit of reported delays.
+ kHistorySizeBlocks = 125
+};
+
+typedef struct PowerLevel {
+ PowerLevel();
+
+ BlockMeanCalculator framelevel;
+ BlockMeanCalculator averagelevel;
+ float minlevel;
+} PowerLevel;
+
+class DivergentFilterFraction {
+ public:
+ DivergentFilterFraction();
+
+ // Reset.
+ void Reset();
+
+ void AddObservation(const PowerLevel& nearlevel,
+ const PowerLevel& linoutlevel,
+ const PowerLevel& nlpoutlevel);
+
+ // Return the latest fraction.
+ float GetLatestFraction() const;
+
+ private:
+ // Clear all values added.
+ void Clear();
+
+ size_t count_;
+ size_t occurrence_;
+ float fraction_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(DivergentFilterFraction);
+};
+
+typedef struct CoherenceState {
+ complex_t sde[PART_LEN1]; // cross-psd of nearend and error
+ complex_t sxd[PART_LEN1]; // cross-psd of farend and nearend
+ float sx[PART_LEN1], sd[PART_LEN1], se[PART_LEN1]; // far, near, error psd
+} CoherenceState;
+
+struct AecCore {
+ explicit AecCore(int instance_index);
+ ~AecCore();
+
+ std::unique_ptr<ApmDataDumper> data_dumper;
+
+ CoherenceState coherence_state;
+
+ int farBufWritePos, farBufReadPos;
+
+ int knownDelay;
+ int inSamples, outSamples;
+ int delayEstCtr;
+
+ RingBuffer* nearFrBuf;
+ RingBuffer* outFrBuf;
+
+ RingBuffer* nearFrBufH[NUM_HIGH_BANDS_MAX];
+ RingBuffer* outFrBufH[NUM_HIGH_BANDS_MAX];
-AecCore* WebRtcAec_CreateAec(); // Returns NULL on error.
+ float dBuf[PART_LEN2]; // nearend
+ float eBuf[PART_LEN2]; // error
+
+ float dBufH[NUM_HIGH_BANDS_MAX][PART_LEN2]; // nearend
+
+ float xPow[PART_LEN1];
+ float dPow[PART_LEN1];
+ float dMinPow[PART_LEN1];
+ float dInitMinPow[PART_LEN1];
+ float* noisePow;
+
+ float xfBuf[2][kExtendedNumPartitions * PART_LEN1]; // farend fft buffer
+ float wfBuf[2][kExtendedNumPartitions * PART_LEN1]; // filter fft
+ // Farend windowed fft buffer.
+ complex_t xfwBuf[kExtendedNumPartitions * PART_LEN1];
+
+ float hNs[PART_LEN1];
+ float hNlFbMin, hNlFbLocalMin;
+ float hNlXdAvgMin;
+ int hNlNewMin, hNlMinCtr;
+ float overDrive;
+ float overdrive_scaling;
+ int nlp_mode;
+ float outBuf[PART_LEN];
+ int delayIdx;
+
+ short stNearState, echoState;
+ short divergeState;
+
+ int xfBufBlockPos;
+
+ RingBuffer* far_time_buf;
+
+ int system_delay; // Current system delay buffered in AEC.
+
+ int mult; // sampling frequency multiple
+ int sampFreq = 16000;
+ size_t num_bands;
+ uint32_t seed;
+
+ float filter_step_size; // stepsize
+ float error_threshold; // error threshold
+
+ int noiseEstCtr;
+
+ PowerLevel farlevel;
+ PowerLevel nearlevel;
+ PowerLevel linoutlevel;
+ PowerLevel nlpoutlevel;
+
+ int metricsMode;
+ int stateCounter;
+ Stats erl;
+ Stats erle;
+ Stats aNlp;
+ Stats rerl;
+ DivergentFilterFraction divergent_filter_fraction;
+
+ // Quantities to control H band scaling for SWB input
+ int freq_avg_ic; // initial bin for averaging nlp gain
+ int flag_Hband_cn; // for comfort noise
+ float cn_scale_Hband; // scale for comfort noise in H band
+
+ int delay_metrics_delivered;
+ int delay_histogram[kHistorySizeBlocks];
+ int num_delay_values;
+ int delay_median;
+ int delay_std;
+ float fraction_poor_delays;
+ int delay_logging_enabled;
+ void* delay_estimator_farend;
+ void* delay_estimator;
+ // Variables associated with delay correction through signal based delay
+ // estimation feedback.
+ int signal_delay_correction;
+ int previous_delay;
+ int delay_correction_count;
+ int shift_offset;
+ float delay_quality_threshold;
+ int frame_count;
+
+ // 0 = delay agnostic mode (signal based delay correction) disabled.
+ // Otherwise enabled.
+ int delay_agnostic_enabled;
+ // 1 = extended filter mode enabled, 0 = disabled.
+ int extended_filter_enabled;
+ // 1 = next generation aec mode enabled, 0 = disabled.
+ int aec3_enabled;
+ bool refined_adaptive_filter_enabled;
+
+ // Runtime selection of number of filter partitions.
+ int num_partitions;
+
+ // Flag that extreme filter divergence has been detected by the Echo
+ // Suppressor.
+ int extreme_filter_divergence;
+};
+
+AecCore* WebRtcAec_CreateAec(int instance_count); // Returns NULL on error.
void WebRtcAec_FreeAec(AecCore* aec);
int WebRtcAec_InitAec(AecCore* aec, int sampFreq);
void WebRtcAec_InitAec_SSE2(void);
#if defined(MIPS_FPU_LE)
void WebRtcAec_InitAec_mips(void);
#endif
-#if defined(WEBRTC_DETECT_NEON) || defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
void WebRtcAec_InitAec_neon(void);
#endif
@@ -97,9 +276,6 @@ void WebRtcAec_GetEchoStats(AecCore* self,
Stats* erle,
Stats* a_nlp,
float* divergent_filter_fraction);
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-void* WebRtcAec_far_time_buf(AecCore* self);
-#endif
// Sets local configuration modes.
void WebRtcAec_SetConfigCore(AecCore* self,
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h
index 1f7b6b541fa..d4fad9e5e63 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h
@@ -11,12 +11,16 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_CORE_INTERNAL_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_CORE_INTERNAL_H_
+#include <memory>
+
extern "C" {
#include "webrtc/common_audio/ring_buffer.h"
}
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_audio/wav_file.h"
#include "webrtc/modules/audio_processing/aec/aec_common.h"
#include "webrtc/modules/audio_processing/aec/aec_core.h"
+#include "webrtc/modules/audio_processing/logging/apm_data_dumper.h"
#include "webrtc/modules/audio_processing/utility/block_mean_calculator.h"
#include "webrtc/typedefs.h"
@@ -68,8 +72,19 @@ class DivergentFilterFraction {
RTC_DISALLOW_COPY_AND_ASSIGN(DivergentFilterFraction);
};
+typedef struct CoherenceState {
+ complex_t sde[PART_LEN1]; // cross-psd of nearend and error
+ complex_t sxd[PART_LEN1]; // cross-psd of farend and nearend
+ float sx[PART_LEN1], sd[PART_LEN1], se[PART_LEN1]; // far, near, error psd
+} CoherenceState;
+
struct AecCore {
- AecCore();
+ explicit AecCore(int instance_index);
+ ~AecCore();
+
+ std::unique_ptr<ApmDataDumper> data_dumper;
+
+ CoherenceState coherence_state;
int farBufWritePos, farBufReadPos;
@@ -96,17 +111,15 @@ struct AecCore {
float xfBuf[2][kExtendedNumPartitions * PART_LEN1]; // farend fft buffer
float wfBuf[2][kExtendedNumPartitions * PART_LEN1]; // filter fft
- complex_t sde[PART_LEN1]; // cross-psd of nearend and error
- complex_t sxd[PART_LEN1]; // cross-psd of farend and nearend
// Farend windowed fft buffer.
complex_t xfwBuf[kExtendedNumPartitions * PART_LEN1];
- float sx[PART_LEN1], sd[PART_LEN1], se[PART_LEN1]; // far, near, error psd
float hNs[PART_LEN1];
float hNlFbMin, hNlFbLocalMin;
float hNlXdAvgMin;
int hNlNewMin, hNlMinCtr;
- float overDrive, overDriveSm;
+ float overDrive;
+ float overdrive_scaling;
int nlp_mode;
float outBuf[PART_LEN];
int delayIdx;
@@ -181,22 +194,6 @@ struct AecCore {
// Flag that extreme filter divergence has been detected by the Echo
// Suppressor.
int extreme_filter_divergence;
-
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- // Sequence number of this AEC instance, so that different instances can
- // choose different dump file names.
- int instance_index;
-
- // Number of times we've restarted dumping; used to pick new dump file names
- // each time.
- int debug_dump_count;
-
- rtc_WavWriter* farFile;
- rtc_WavWriter* nearFile;
- rtc_WavWriter* outFile;
- rtc_WavWriter* outLinearFile;
- FILE* e_fft_file;
-#endif
};
typedef void (*WebRtcAecFilterFar)(
@@ -218,30 +215,34 @@ typedef void (*WebRtcAecFilterAdaptation)(
float e_fft[2][PART_LEN1],
float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]);
extern WebRtcAecFilterAdaptation WebRtcAec_FilterAdaptation;
-typedef void (*WebRtcAecOverdriveAndSuppress)(AecCore* aec,
- float hNl[PART_LEN1],
- const float hNlFb,
- float efw[2][PART_LEN1]);
-extern WebRtcAecOverdriveAndSuppress WebRtcAec_OverdriveAndSuppress;
-
-typedef void (*WebRtcAecComfortNoise)(AecCore* aec,
- float efw[2][PART_LEN1],
- float comfortNoiseHband[2][PART_LEN1],
- const float* noisePow,
- const float* lambda);
-extern WebRtcAecComfortNoise WebRtcAec_ComfortNoise;
-
-typedef void (*WebRtcAecSubBandCoherence)(AecCore* aec,
- float efw[2][PART_LEN1],
- float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1],
- float* fft,
- float* cohde,
- float* cohxd,
- int* extreme_filter_divergence);
-extern WebRtcAecSubBandCoherence WebRtcAec_SubbandCoherence;
-typedef int (*WebRtcAecPartitionDelay)(const AecCore* aec);
+typedef void (*WebRtcAecOverdrive)(float overdrive_scaling,
+ const float hNlFb,
+ float hNl[PART_LEN1]);
+extern WebRtcAecOverdrive WebRtcAec_Overdrive;
+
+typedef void (*WebRtcAecSuppress)(const float hNl[PART_LEN1],
+ float efw[2][PART_LEN1]);
+extern WebRtcAecSuppress WebRtcAec_Suppress;
+
+typedef void (*WebRtcAecComputeCoherence)(const CoherenceState* coherence_state,
+ float* cohde,
+ float* cohxd);
+extern WebRtcAecComputeCoherence WebRtcAec_ComputeCoherence;
+
+typedef void (*WebRtcAecUpdateCoherenceSpectra)(int mult,
+ bool extended_filter_enabled,
+ float efw[2][PART_LEN1],
+ float dfw[2][PART_LEN1],
+ float xfw[2][PART_LEN1],
+ CoherenceState* coherence_state,
+ short* filter_divergence_state,
+ int* extreme_filter_divergence);
+extern WebRtcAecUpdateCoherenceSpectra WebRtcAec_UpdateCoherenceSpectra;
+
+typedef int (*WebRtcAecPartitionDelay)(
+ int num_partitions,
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]);
extern WebRtcAecPartitionDelay WebRtcAec_PartitionDelay;
typedef void (*WebRtcAecStoreAsComplex)(const float* data,
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.cc b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.cc
index 5c6d8ebb73a..a9b5cd4e60f 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.cc
@@ -19,314 +19,14 @@
extern "C" {
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
}
-#include "webrtc/modules/audio_processing/aec/aec_core_internal.h"
-extern "C" {
+#include "webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h"
#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
-}
namespace webrtc {
extern const float WebRtcAec_weightCurve[65];
extern const float WebRtcAec_overDriveCurve[65];
-void WebRtcAec_ComfortNoise_mips(AecCore* aec,
- float efw[2][PART_LEN1],
- float comfortNoiseHband[2][PART_LEN1],
- const float* noisePow,
- const float* lambda) {
- int i, num;
- float rand[PART_LEN];
- float noise, noiseAvg, tmp, tmpAvg;
- int16_t randW16[PART_LEN];
- complex_t u[PART_LEN1];
-
- const float pi2 = 6.28318530717959f;
- const float pi2t = pi2 / 32768;
-
- // Generate a uniform random array on [0 1]
- WebRtcSpl_RandUArray(randW16, PART_LEN, &aec->seed);
-
- int16_t* randWptr = randW16;
- float randTemp, randTemp2, randTemp3, randTemp4;
- int32_t tmp1s, tmp2s, tmp3s, tmp4s;
-
- for (i = 0; i < PART_LEN; i += 4) {
- __asm __volatile(
- ".set push \n\t"
- ".set noreorder \n\t"
- "lh %[tmp1s], 0(%[randWptr]) \n\t"
- "lh %[tmp2s], 2(%[randWptr]) \n\t"
- "lh %[tmp3s], 4(%[randWptr]) \n\t"
- "lh %[tmp4s], 6(%[randWptr]) \n\t"
- "mtc1 %[tmp1s], %[randTemp] \n\t"
- "mtc1 %[tmp2s], %[randTemp2] \n\t"
- "mtc1 %[tmp3s], %[randTemp3] \n\t"
- "mtc1 %[tmp4s], %[randTemp4] \n\t"
- "cvt.s.w %[randTemp], %[randTemp] \n\t"
- "cvt.s.w %[randTemp2], %[randTemp2] \n\t"
- "cvt.s.w %[randTemp3], %[randTemp3] \n\t"
- "cvt.s.w %[randTemp4], %[randTemp4] \n\t"
- "addiu %[randWptr], %[randWptr], 8 \n\t"
- "mul.s %[randTemp], %[randTemp], %[pi2t] \n\t"
- "mul.s %[randTemp2], %[randTemp2], %[pi2t] \n\t"
- "mul.s %[randTemp3], %[randTemp3], %[pi2t] \n\t"
- "mul.s %[randTemp4], %[randTemp4], %[pi2t] \n\t"
- ".set pop \n\t"
- : [randWptr] "+r" (randWptr), [randTemp] "=&f" (randTemp),
- [randTemp2] "=&f" (randTemp2), [randTemp3] "=&f" (randTemp3),
- [randTemp4] "=&f" (randTemp4), [tmp1s] "=&r" (tmp1s),
- [tmp2s] "=&r" (tmp2s), [tmp3s] "=&r" (tmp3s),
- [tmp4s] "=&r" (tmp4s)
- : [pi2t] "f" (pi2t)
- : "memory");
-
- u[i + 1][0] = cosf(randTemp);
- u[i + 1][1] = sinf(randTemp);
- u[i + 2][0] = cosf(randTemp2);
- u[i + 2][1] = sinf(randTemp2);
- u[i + 3][0] = cosf(randTemp3);
- u[i + 3][1] = sinf(randTemp3);
- u[i + 4][0] = cosf(randTemp4);
- u[i + 4][1] = sinf(randTemp4);
- }
-
- // Reject LF noise
- float* u_ptr = &u[1][0];
- float noise2, noise3, noise4;
- float tmp1f, tmp2f, tmp3f, tmp4f, tmp5f, tmp6f, tmp7f, tmp8f;
-
- u[0][0] = 0;
- u[0][1] = 0;
- for (i = 1; i < PART_LEN1; i += 4) {
- __asm __volatile(
- ".set push \n\t"
- ".set noreorder \n\t"
- "lwc1 %[noise], 4(%[noisePow]) \n\t"
- "lwc1 %[noise2], 8(%[noisePow]) \n\t"
- "lwc1 %[noise3], 12(%[noisePow]) \n\t"
- "lwc1 %[noise4], 16(%[noisePow]) \n\t"
- "sqrt.s %[noise], %[noise] \n\t"
- "sqrt.s %[noise2], %[noise2] \n\t"
- "sqrt.s %[noise3], %[noise3] \n\t"
- "sqrt.s %[noise4], %[noise4] \n\t"
- "lwc1 %[tmp1f], 0(%[u_ptr]) \n\t"
- "lwc1 %[tmp2f], 4(%[u_ptr]) \n\t"
- "lwc1 %[tmp3f], 8(%[u_ptr]) \n\t"
- "lwc1 %[tmp4f], 12(%[u_ptr]) \n\t"
- "lwc1 %[tmp5f], 16(%[u_ptr]) \n\t"
- "lwc1 %[tmp6f], 20(%[u_ptr]) \n\t"
- "lwc1 %[tmp7f], 24(%[u_ptr]) \n\t"
- "lwc1 %[tmp8f], 28(%[u_ptr]) \n\t"
- "addiu %[noisePow], %[noisePow], 16 \n\t"
- "mul.s %[tmp1f], %[tmp1f], %[noise] \n\t"
- "mul.s %[tmp2f], %[tmp2f], %[noise] \n\t"
- "mul.s %[tmp3f], %[tmp3f], %[noise2] \n\t"
- "mul.s %[tmp4f], %[tmp4f], %[noise2] \n\t"
- "mul.s %[tmp5f], %[tmp5f], %[noise3] \n\t"
- "mul.s %[tmp6f], %[tmp6f], %[noise3] \n\t"
- "swc1 %[tmp1f], 0(%[u_ptr]) \n\t"
- "swc1 %[tmp3f], 8(%[u_ptr]) \n\t"
- "mul.s %[tmp8f], %[tmp8f], %[noise4] \n\t"
- "mul.s %[tmp7f], %[tmp7f], %[noise4] \n\t"
- "neg.s %[tmp2f] \n\t"
- "neg.s %[tmp4f] \n\t"
- "neg.s %[tmp6f] \n\t"
- "neg.s %[tmp8f] \n\t"
- "swc1 %[tmp5f], 16(%[u_ptr]) \n\t"
- "swc1 %[tmp7f], 24(%[u_ptr]) \n\t"
- "swc1 %[tmp2f], 4(%[u_ptr]) \n\t"
- "swc1 %[tmp4f], 12(%[u_ptr]) \n\t"
- "swc1 %[tmp6f], 20(%[u_ptr]) \n\t"
- "swc1 %[tmp8f], 28(%[u_ptr]) \n\t"
- "addiu %[u_ptr], %[u_ptr], 32 \n\t"
- ".set pop \n\t"
- : [u_ptr] "+r" (u_ptr), [noisePow] "+r" (noisePow),
- [noise] "=&f" (noise), [noise2] "=&f" (noise2),
- [noise3] "=&f" (noise3), [noise4] "=&f" (noise4),
- [tmp1f] "=&f" (tmp1f), [tmp2f] "=&f" (tmp2f),
- [tmp3f] "=&f" (tmp3f), [tmp4f] "=&f" (tmp4f),
- [tmp5f] "=&f" (tmp5f), [tmp6f] "=&f" (tmp6f),
- [tmp7f] "=&f" (tmp7f), [tmp8f] "=&f" (tmp8f)
- :
- : "memory");
- }
- u[PART_LEN][1] = 0;
- noisePow -= PART_LEN;
-
- u_ptr = &u[0][0];
- float* u_ptr_end = &u[PART_LEN][0];
- float* efw_ptr_0 = &efw[0][0];
- float* efw_ptr_1 = &efw[1][0];
- float tmp9f, tmp10f;
- const float tmp1c = 1.0;
-
- __asm __volatile(
- ".set push \n\t"
- ".set noreorder \n\t"
- "1: \n\t"
- "lwc1 %[tmp1f], 0(%[lambda]) \n\t"
- "lwc1 %[tmp6f], 4(%[lambda]) \n\t"
- "addiu %[lambda], %[lambda], 8 \n\t"
- "c.lt.s %[tmp1f], %[tmp1c] \n\t"
- "bc1f 4f \n\t"
- " nop \n\t"
- "c.lt.s %[tmp6f], %[tmp1c] \n\t"
- "bc1f 3f \n\t"
- " nop \n\t"
- "2: \n\t"
- "mul.s %[tmp1f], %[tmp1f], %[tmp1f] \n\t"
- "mul.s %[tmp6f], %[tmp6f], %[tmp6f] \n\t"
- "sub.s %[tmp1f], %[tmp1c], %[tmp1f] \n\t"
- "sub.s %[tmp6f], %[tmp1c], %[tmp6f] \n\t"
- "sqrt.s %[tmp1f], %[tmp1f] \n\t"
- "sqrt.s %[tmp6f], %[tmp6f] \n\t"
- "lwc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
- "lwc1 %[tmp3f], 0(%[u_ptr]) \n\t"
- "lwc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
- "lwc1 %[tmp8f], 8(%[u_ptr]) \n\t"
- "lwc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
- "lwc1 %[tmp5f], 4(%[u_ptr]) \n\t"
- "lwc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
- "lwc1 %[tmp10f], 12(%[u_ptr]) \n\t"
-#if !defined(MIPS32_R2_LE)
- "mul.s %[tmp3f], %[tmp1f], %[tmp3f] \n\t"
- "add.s %[tmp2f], %[tmp2f], %[tmp3f] \n\t"
- "mul.s %[tmp3f], %[tmp1f], %[tmp5f] \n\t"
- "add.s %[tmp4f], %[tmp4f], %[tmp3f] \n\t"
- "mul.s %[tmp3f], %[tmp6f], %[tmp8f] \n\t"
- "add.s %[tmp7f], %[tmp7f], %[tmp3f] \n\t"
- "mul.s %[tmp3f], %[tmp6f], %[tmp10f] \n\t"
- "add.s %[tmp9f], %[tmp9f], %[tmp3f] \n\t"
-#else // #if !defined(MIPS32_R2_LE)
- "madd.s %[tmp2f], %[tmp2f], %[tmp1f], %[tmp3f] \n\t"
- "madd.s %[tmp4f], %[tmp4f], %[tmp1f], %[tmp5f] \n\t"
- "madd.s %[tmp7f], %[tmp7f], %[tmp6f], %[tmp8f] \n\t"
- "madd.s %[tmp9f], %[tmp9f], %[tmp6f], %[tmp10f] \n\t"
-#endif // #if !defined(MIPS32_R2_LE)
- "swc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
- "swc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
- "swc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
- "b 5f \n\t"
- " swc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
- "3: \n\t"
- "mul.s %[tmp1f], %[tmp1f], %[tmp1f] \n\t"
- "sub.s %[tmp1f], %[tmp1c], %[tmp1f] \n\t"
- "sqrt.s %[tmp1f], %[tmp1f] \n\t"
- "lwc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
- "lwc1 %[tmp3f], 0(%[u_ptr]) \n\t"
- "lwc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
- "lwc1 %[tmp5f], 4(%[u_ptr]) \n\t"
-#if !defined(MIPS32_R2_LE)
- "mul.s %[tmp3f], %[tmp1f], %[tmp3f] \n\t"
- "add.s %[tmp2f], %[tmp2f], %[tmp3f] \n\t"
- "mul.s %[tmp3f], %[tmp1f], %[tmp5f] \n\t"
- "add.s %[tmp4f], %[tmp4f], %[tmp3f] \n\t"
-#else // #if !defined(MIPS32_R2_LE)
- "madd.s %[tmp2f], %[tmp2f], %[tmp1f], %[tmp3f] \n\t"
- "madd.s %[tmp4f], %[tmp4f], %[tmp1f], %[tmp5f] \n\t"
-#endif // #if !defined(MIPS32_R2_LE)
- "swc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
- "b 5f \n\t"
- " swc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
- "4: \n\t"
- "c.lt.s %[tmp6f], %[tmp1c] \n\t"
- "bc1f 5f \n\t"
- " nop \n\t"
- "mul.s %[tmp6f], %[tmp6f], %[tmp6f] \n\t"
- "sub.s %[tmp6f], %[tmp1c], %[tmp6f] \n\t"
- "sqrt.s %[tmp6f], %[tmp6f] \n\t"
- "lwc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
- "lwc1 %[tmp8f], 8(%[u_ptr]) \n\t"
- "lwc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
- "lwc1 %[tmp10f], 12(%[u_ptr]) \n\t"
-#if !defined(MIPS32_R2_LE)
- "mul.s %[tmp3f], %[tmp6f], %[tmp8f] \n\t"
- "add.s %[tmp7f], %[tmp7f], %[tmp3f] \n\t"
- "mul.s %[tmp3f], %[tmp6f], %[tmp10f] \n\t"
- "add.s %[tmp9f], %[tmp9f], %[tmp3f] \n\t"
-#else // #if !defined(MIPS32_R2_LE)
- "madd.s %[tmp7f], %[tmp7f], %[tmp6f], %[tmp8f] \n\t"
- "madd.s %[tmp9f], %[tmp9f], %[tmp6f], %[tmp10f] \n\t"
-#endif // #if !defined(MIPS32_R2_LE)
- "swc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
- "swc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
- "5: \n\t"
- "addiu %[u_ptr], %[u_ptr], 16 \n\t"
- "addiu %[efw_ptr_0], %[efw_ptr_0], 8 \n\t"
- "bne %[u_ptr], %[u_ptr_end], 1b \n\t"
- " addiu %[efw_ptr_1], %[efw_ptr_1], 8 \n\t"
- ".set pop \n\t"
- : [lambda] "+r" (lambda), [u_ptr] "+r" (u_ptr),
- [efw_ptr_0] "+r" (efw_ptr_0), [efw_ptr_1] "+r" (efw_ptr_1),
- [tmp1f] "=&f" (tmp1f), [tmp2f] "=&f" (tmp2f), [tmp3f] "=&f" (tmp3f),
- [tmp4f] "=&f" (tmp4f), [tmp5f] "=&f" (tmp5f),
- [tmp6f] "=&f" (tmp6f), [tmp7f] "=&f" (tmp7f), [tmp8f] "=&f" (tmp8f),
- [tmp9f] "=&f" (tmp9f), [tmp10f] "=&f" (tmp10f)
- : [tmp1c] "f" (tmp1c), [u_ptr_end] "r" (u_ptr_end)
- : "memory");
-
- lambda -= PART_LEN;
- tmp = sqrtf(WEBRTC_SPL_MAX(1 - lambda[PART_LEN] * lambda[PART_LEN], 0));
- // tmp = 1 - lambda[i];
- efw[0][PART_LEN] += tmp * u[PART_LEN][0];
- efw[1][PART_LEN] += tmp * u[PART_LEN][1];
-
- // For H band comfort noise
- // TODO(peah): don't compute noise and "tmp" twice. Use the previous results.
- noiseAvg = 0.0;
- tmpAvg = 0.0;
- num = 0;
- if (aec->num_bands > 1) {
- for (i = 0; i < PART_LEN; i++) {
- rand[i] = (static_cast<float>(randW16[i])) / 32768;
- }
-
- // average noise scale
- // average over second half of freq spectrum (i.e., 4->8khz)
- // TODO(peah): we shouldn't need num. We know how many elements we're
- // summing.
- for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) {
- num++;
- noiseAvg += sqrtf(noisePow[i]);
- }
- noiseAvg /= static_cast<float>(num);
-
- // average nlp scale
- // average over second half of freq spectrum (i.e., 4->8khz)
- // TODO(peah): we shouldn't need num. We know how many elements we're
- // summing.
- num = 0;
- for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) {
- num++;
- tmpAvg += sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0));
- }
- tmpAvg /= static_cast<float>(num);
-
- // Use average noise for H band
- // TODO(peah): we should probably have a new random vector here.
- // Reject LF noise
- u[0][0] = 0;
- u[0][1] = 0;
- for (i = 1; i < PART_LEN1; i++) {
- tmp = pi2 * rand[i - 1];
-
- // Use average noise for H band
- u[i][0] = noiseAvg * static_cast<float>(cos(tmp));
- u[i][1] = -noiseAvg * static_cast<float>(sin(tmp));
- }
- u[PART_LEN][1] = 0;
-
- for (i = 0; i < PART_LEN1; i++) {
- // Use average NLP weight for H band
- comfortNoiseHband[0][i] = tmpAvg * u[i][0];
- comfortNoiseHband[1][i] = tmpAvg * u[i][1];
- }
- } else {
- memset(comfortNoiseHband, 0,
- 2 * PART_LEN1 * sizeof(comfortNoiseHband[0][0]));
- }
-}
-
void WebRtcAec_FilterFar_mips(
int num_partitions,
int x_fft_buf_block_pos,
@@ -644,24 +344,18 @@ void WebRtcAec_FilterAdaptation_mips(
}
}
-void WebRtcAec_OverdriveAndSuppress_mips(AecCore* aec,
- float hNl[PART_LEN1],
- const float hNlFb,
- float efw[2][PART_LEN1]) {
- int i;
+void WebRtcAec_Overdrive_mips(float overdrive_scaling,
+ float hNlFb,
+ float hNl[PART_LEN1]) {
const float one = 1.0;
float* p_hNl;
- float* p_efw0;
- float* p_efw1;
const float* p_WebRtcAec_wC;
float temp1, temp2, temp3, temp4;
p_hNl = &hNl[0];
- p_efw0 = &efw[0][0];
- p_efw1 = &efw[1][0];
p_WebRtcAec_wC = &WebRtcAec_weightCurve[0];
- for (i = 0; i < PART_LEN1; i++) {
+ for (int i = 0; i < PART_LEN1; ++i) {
// Weight subbands
__asm __volatile(
".set push \n\t"
@@ -687,8 +381,22 @@ void WebRtcAec_OverdriveAndSuppress_mips(AecCore* aec,
: [hNlFb] "f" (hNlFb), [one] "f" (one), [p_hNl] "r" (p_hNl)
: "memory");
- hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]);
+ hNl[i] = powf(hNl[i], overdrive_scaling * WebRtcAec_overDriveCurve[i]);
+ }
+}
+
+void WebRtcAec_Suppress_mips(const float hNl[PART_LEN1],
+ float efw[2][PART_LEN1]) {
+ const float* p_hNl;
+ float* p_efw0;
+ float* p_efw1;
+ float temp1, temp2, temp3, temp4;
+
+ p_hNl = &hNl[0];
+ p_efw0 = &efw[0][0];
+ p_efw1 = &efw[1][0];
+ for (int i = 0; i < PART_LEN1; ++i) {
__asm __volatile(
"lwc1 %[temp1], 0(%[p_hNl]) \n\t"
"lwc1 %[temp3], 0(%[p_efw1]) \n\t"
@@ -775,7 +483,7 @@ void WebRtcAec_InitAec_mips(void) {
WebRtcAec_FilterFar = WebRtcAec_FilterFar_mips;
WebRtcAec_FilterAdaptation = WebRtcAec_FilterAdaptation_mips;
WebRtcAec_ScaleErrorSignal = WebRtcAec_ScaleErrorSignal_mips;
- WebRtcAec_ComfortNoise = WebRtcAec_ComfortNoise_mips;
- WebRtcAec_OverdriveAndSuppress = WebRtcAec_OverdriveAndSuppress_mips;
+ WebRtcAec_Overdrive = WebRtcAec_Overdrive_mips;
+ WebRtcAec_Suppress = WebRtcAec_Suppress_mips;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.cc b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.cc
index c08ee426e61..bc503ba3db0 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.cc
@@ -22,10 +22,8 @@ extern "C" {
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
}
#include "webrtc/modules/audio_processing/aec/aec_common.h"
-#include "webrtc/modules/audio_processing/aec/aec_core_internal.h"
-extern "C" {
+#include "webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h"
#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
-}
namespace webrtc {
@@ -376,15 +374,13 @@ static float32x4_t vpowq_f32(float32x4_t a, float32x4_t b) {
return a_exp_b;
}
-static void OverdriveAndSuppressNEON(AecCore* aec,
- float hNl[PART_LEN1],
- const float hNlFb,
- float efw[2][PART_LEN1]) {
+static void OverdriveNEON(float overdrive_scaling,
+ float hNlFb,
+ float hNl[PART_LEN1]) {
int i;
const float32x4_t vec_hNlFb = vmovq_n_f32(hNlFb);
const float32x4_t vec_one = vdupq_n_f32(1.0f);
- const float32x4_t vec_minus_one = vdupq_n_f32(-1.0f);
- const float32x4_t vec_overDriveSm = vmovq_n_f32(aec->overDriveSm);
+ const float32x4_t vec_overdrive_scaling = vmovq_n_f32(overdrive_scaling);
// vectorized code (four at once)
for (i = 0; i + 3 < PART_LEN1; i += 4) {
@@ -406,28 +402,12 @@ static void OverdriveAndSuppressNEON(AecCore* aec,
vec_hNl = vreinterpretq_f32_u32(vorrq_u32(vec_if0, vec_if1));
- {
- const float32x4_t vec_overDriveCurve =
- vld1q_f32(&WebRtcAec_overDriveCurve[i]);
- const float32x4_t vec_overDriveSm_overDriveCurve =
- vmulq_f32(vec_overDriveSm, vec_overDriveCurve);
- vec_hNl = vpowq_f32(vec_hNl, vec_overDriveSm_overDriveCurve);
- vst1q_f32(&hNl[i], vec_hNl);
- }
-
- // Suppress error signal
- {
- float32x4_t vec_efw_re = vld1q_f32(&efw[0][i]);
- float32x4_t vec_efw_im = vld1q_f32(&efw[1][i]);
- vec_efw_re = vmulq_f32(vec_efw_re, vec_hNl);
- vec_efw_im = vmulq_f32(vec_efw_im, vec_hNl);
-
- // Ooura fft returns incorrect sign on imaginary component. It matters
- // here because we are making an additive change with comfort noise.
- vec_efw_im = vmulq_f32(vec_efw_im, vec_minus_one);
- vst1q_f32(&efw[0][i], vec_efw_re);
- vst1q_f32(&efw[1][i], vec_efw_im);
- }
+ const float32x4_t vec_overDriveCurve =
+ vld1q_f32(&WebRtcAec_overDriveCurve[i]);
+ const float32x4_t vec_overDriveSm_overDriveCurve =
+ vmulq_f32(vec_overdrive_scaling, vec_overDriveCurve);
+ vec_hNl = vpowq_f32(vec_hNl, vec_overDriveSm_overDriveCurve);
+ vst1q_f32(&hNl[i], vec_hNl);
}
// scalar code for the remaining items.
@@ -438,9 +418,30 @@ static void OverdriveAndSuppressNEON(AecCore* aec,
(1 - WebRtcAec_weightCurve[i]) * hNl[i];
}
- hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]);
+ hNl[i] = powf(hNl[i], overdrive_scaling * WebRtcAec_overDriveCurve[i]);
+ }
+}
- // Suppress error signal
+static void SuppressNEON(const float hNl[PART_LEN1], float efw[2][PART_LEN1]) {
+ int i;
+ const float32x4_t vec_minus_one = vdupq_n_f32(-1.0f);
+ // vectorized code (four at once)
+ for (i = 0; i + 3 < PART_LEN1; i += 4) {
+ float32x4_t vec_hNl = vld1q_f32(&hNl[i]);
+ float32x4_t vec_efw_re = vld1q_f32(&efw[0][i]);
+ float32x4_t vec_efw_im = vld1q_f32(&efw[1][i]);
+ vec_efw_re = vmulq_f32(vec_efw_re, vec_hNl);
+ vec_efw_im = vmulq_f32(vec_efw_im, vec_hNl);
+
+ // Ooura fft returns incorrect sign on imaginary component. It matters
+ // here because we are making an additive change with comfort noise.
+ vec_efw_im = vmulq_f32(vec_efw_im, vec_minus_one);
+ vst1q_f32(&efw[0][i], vec_efw_re);
+ vst1q_f32(&efw[1][i], vec_efw_im);
+ }
+
+ // scalar code for the remaining items.
+ for (; i < PART_LEN1; i++) {
efw[0][i] *= hNl[i];
efw[1][i] *= hNl[i];
@@ -450,7 +451,9 @@ static void OverdriveAndSuppressNEON(AecCore* aec,
}
}
-static int PartitionDelayNEON(const AecCore* aec) {
+static int PartitionDelayNEON(
+ int num_partitions,
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]) {
// Measures the energy in each filter partition and returns the partition with
// highest energy.
// TODO(bjornv): Spread computational cost by computing one partition per
@@ -459,15 +462,15 @@ static int PartitionDelayNEON(const AecCore* aec) {
int i;
int delay = 0;
- for (i = 0; i < aec->num_partitions; i++) {
+ for (i = 0; i < num_partitions; i++) {
int j;
int pos = i * PART_LEN1;
float wfEn = 0;
float32x4_t vec_wfEn = vdupq_n_f32(0.0f);
// vectorized code (four at once)
for (j = 0; j + 3 < PART_LEN1; j += 4) {
- const float32x4_t vec_wfBuf0 = vld1q_f32(&aec->wfBuf[0][pos + j]);
- const float32x4_t vec_wfBuf1 = vld1q_f32(&aec->wfBuf[1][pos + j]);
+ const float32x4_t vec_wfBuf0 = vld1q_f32(&h_fft_buf[0][pos + j]);
+ const float32x4_t vec_wfBuf1 = vld1q_f32(&h_fft_buf[1][pos + j]);
vec_wfEn = vmlaq_f32(vec_wfEn, vec_wfBuf0, vec_wfBuf0);
vec_wfEn = vmlaq_f32(vec_wfEn, vec_wfBuf1, vec_wfBuf1);
}
@@ -483,8 +486,8 @@ static int PartitionDelayNEON(const AecCore* aec) {
// scalar code for the remaining items.
for (; j < PART_LEN1; j++) {
- wfEn += aec->wfBuf[0][pos + j] * aec->wfBuf[0][pos + j] +
- aec->wfBuf[1][pos + j] * aec->wfBuf[1][pos + j];
+ wfEn += h_fft_buf[0][pos + j] * h_fft_buf[0][pos + j] +
+ h_fft_buf[1][pos + j] * h_fft_buf[1][pos + j];
}
if (wfEn > wfEnMax) {
@@ -504,16 +507,19 @@ static int PartitionDelayNEON(const AecCore* aec) {
//
// In addition to updating the PSDs, also the filter diverge state is determined
// upon actions are taken.
-static void SmoothedPSD(AecCore* aec,
- float efw[2][PART_LEN1],
- float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1],
- int* extreme_filter_divergence) {
+static void UpdateCoherenceSpectraNEON(int mult,
+ bool extended_filter_enabled,
+ float efw[2][PART_LEN1],
+ float dfw[2][PART_LEN1],
+ float xfw[2][PART_LEN1],
+ CoherenceState* coherence_state,
+ short* filter_divergence_state,
+ int* extreme_filter_divergence) {
// Power estimate smoothing coefficients.
const float* ptrGCoh =
- aec->extended_filter_enabled
- ? WebRtcAec_kExtendedSmoothingCoefficients[aec->mult - 1]
- : WebRtcAec_kNormalSmoothingCoefficients[aec->mult - 1];
+ extended_filter_enabled
+ ? WebRtcAec_kExtendedSmoothingCoefficients[mult - 1]
+ : WebRtcAec_kNormalSmoothingCoefficients[mult - 1];
int i;
float sdSum = 0, seSum = 0;
const float32x4_t vec_15 = vdupq_n_f32(WebRtcAec_kMinFarendPSD);
@@ -527,9 +533,12 @@ static void SmoothedPSD(AecCore* aec,
const float32x4_t vec_efw1 = vld1q_f32(&efw[1][i]);
const float32x4_t vec_xfw0 = vld1q_f32(&xfw[0][i]);
const float32x4_t vec_xfw1 = vld1q_f32(&xfw[1][i]);
- float32x4_t vec_sd = vmulq_n_f32(vld1q_f32(&aec->sd[i]), ptrGCoh[0]);
- float32x4_t vec_se = vmulq_n_f32(vld1q_f32(&aec->se[i]), ptrGCoh[0]);
- float32x4_t vec_sx = vmulq_n_f32(vld1q_f32(&aec->sx[i]), ptrGCoh[0]);
+ float32x4_t vec_sd =
+ vmulq_n_f32(vld1q_f32(&coherence_state->sd[i]), ptrGCoh[0]);
+ float32x4_t vec_se =
+ vmulq_n_f32(vld1q_f32(&coherence_state->se[i]), ptrGCoh[0]);
+ float32x4_t vec_sx =
+ vmulq_n_f32(vld1q_f32(&coherence_state->sx[i]), ptrGCoh[0]);
float32x4_t vec_dfw_sumsq = vmulq_f32(vec_dfw0, vec_dfw0);
float32x4_t vec_efw_sumsq = vmulq_f32(vec_efw0, vec_efw0);
float32x4_t vec_xfw_sumsq = vmulq_f32(vec_xfw0, vec_xfw0);
@@ -542,12 +551,12 @@ static void SmoothedPSD(AecCore* aec,
vec_se = vmlaq_n_f32(vec_se, vec_efw_sumsq, ptrGCoh[1]);
vec_sx = vmlaq_n_f32(vec_sx, vec_xfw_sumsq, ptrGCoh[1]);
- vst1q_f32(&aec->sd[i], vec_sd);
- vst1q_f32(&aec->se[i], vec_se);
- vst1q_f32(&aec->sx[i], vec_sx);
+ vst1q_f32(&coherence_state->sd[i], vec_sd);
+ vst1q_f32(&coherence_state->se[i], vec_se);
+ vst1q_f32(&coherence_state->sx[i], vec_sx);
{
- float32x4x2_t vec_sde = vld2q_f32(&aec->sde[i][0]);
+ float32x4x2_t vec_sde = vld2q_f32(&coherence_state->sde[i][0]);
float32x4_t vec_dfwefw0011 = vmulq_f32(vec_dfw0, vec_efw0);
float32x4_t vec_dfwefw0110 = vmulq_f32(vec_dfw0, vec_efw1);
vec_sde.val[0] = vmulq_n_f32(vec_sde.val[0], ptrGCoh[0]);
@@ -556,11 +565,11 @@ static void SmoothedPSD(AecCore* aec,
vec_dfwefw0110 = vmlsq_f32(vec_dfwefw0110, vec_dfw1, vec_efw0);
vec_sde.val[0] = vmlaq_n_f32(vec_sde.val[0], vec_dfwefw0011, ptrGCoh[1]);
vec_sde.val[1] = vmlaq_n_f32(vec_sde.val[1], vec_dfwefw0110, ptrGCoh[1]);
- vst2q_f32(&aec->sde[i][0], vec_sde);
+ vst2q_f32(&coherence_state->sde[i][0], vec_sde);
}
{
- float32x4x2_t vec_sxd = vld2q_f32(&aec->sxd[i][0]);
+ float32x4x2_t vec_sxd = vld2q_f32(&coherence_state->sxd[i][0]);
float32x4_t vec_dfwxfw0011 = vmulq_f32(vec_dfw0, vec_xfw0);
float32x4_t vec_dfwxfw0110 = vmulq_f32(vec_dfw0, vec_xfw1);
vec_sxd.val[0] = vmulq_n_f32(vec_sxd.val[0], ptrGCoh[0]);
@@ -569,7 +578,7 @@ static void SmoothedPSD(AecCore* aec,
vec_dfwxfw0110 = vmlsq_f32(vec_dfwxfw0110, vec_dfw1, vec_xfw0);
vec_sxd.val[0] = vmlaq_n_f32(vec_sxd.val[0], vec_dfwxfw0011, ptrGCoh[1]);
vec_sxd.val[1] = vmlaq_n_f32(vec_sxd.val[1], vec_dfwxfw0110, ptrGCoh[1]);
- vst2q_f32(&aec->sxd[i][0], vec_sxd);
+ vst2q_f32(&coherence_state->sxd[i][0], vec_sxd);
}
vec_sdSum = vaddq_f32(vec_sdSum, vec_sd);
@@ -593,39 +602,43 @@ static void SmoothedPSD(AecCore* aec,
// scalar code for the remaining items.
for (; i < PART_LEN1; i++) {
- aec->sd[i] = ptrGCoh[0] * aec->sd[i] +
- ptrGCoh[1] * (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]);
- aec->se[i] = ptrGCoh[0] * aec->se[i] +
- ptrGCoh[1] * (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]);
+ coherence_state->sd[i] =
+ ptrGCoh[0] * coherence_state->sd[i] +
+ ptrGCoh[1] * (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]);
+ coherence_state->se[i] =
+ ptrGCoh[0] * coherence_state->se[i] +
+ ptrGCoh[1] * (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]);
// We threshold here to protect against the ill-effects of a zero farend.
// The threshold is not arbitrarily chosen, but balances protection and
// adverse interaction with the algorithm's tuning.
// TODO(bjornv): investigate further why this is so sensitive.
- aec->sx[i] = ptrGCoh[0] * aec->sx[i] +
- ptrGCoh[1] * WEBRTC_SPL_MAX(
- xfw[0][i] * xfw[0][i] + xfw[1][i] * xfw[1][i],
- WebRtcAec_kMinFarendPSD);
-
- aec->sde[i][0] =
- ptrGCoh[0] * aec->sde[i][0] +
+ coherence_state->sx[i] =
+ ptrGCoh[0] * coherence_state->sx[i] +
+ ptrGCoh[1] *
+ WEBRTC_SPL_MAX(xfw[0][i] * xfw[0][i] + xfw[1][i] * xfw[1][i],
+ WebRtcAec_kMinFarendPSD);
+
+ coherence_state->sde[i][0] =
+ ptrGCoh[0] * coherence_state->sde[i][0] +
ptrGCoh[1] * (dfw[0][i] * efw[0][i] + dfw[1][i] * efw[1][i]);
- aec->sde[i][1] =
- ptrGCoh[0] * aec->sde[i][1] +
+ coherence_state->sde[i][1] =
+ ptrGCoh[0] * coherence_state->sde[i][1] +
ptrGCoh[1] * (dfw[0][i] * efw[1][i] - dfw[1][i] * efw[0][i]);
- aec->sxd[i][0] =
- ptrGCoh[0] * aec->sxd[i][0] +
+ coherence_state->sxd[i][0] =
+ ptrGCoh[0] * coherence_state->sxd[i][0] +
ptrGCoh[1] * (dfw[0][i] * xfw[0][i] + dfw[1][i] * xfw[1][i]);
- aec->sxd[i][1] =
- ptrGCoh[0] * aec->sxd[i][1] +
+ coherence_state->sxd[i][1] =
+ ptrGCoh[0] * coherence_state->sxd[i][1] +
ptrGCoh[1] * (dfw[0][i] * xfw[1][i] - dfw[1][i] * xfw[0][i]);
- sdSum += aec->sd[i];
- seSum += aec->se[i];
+ sdSum += coherence_state->sd[i];
+ seSum += coherence_state->se[i];
}
// Divergent filter safeguard update.
- aec->divergeState = (aec->divergeState ? 1.05f : 1.0f) * seSum > sdSum;
+ *filter_divergence_state =
+ (*filter_divergence_state ? 1.05f : 1.0f) * seSum > sdSum;
// Signal extreme filter divergence if the error is significantly larger
// than the nearend (13 dB).
@@ -669,30 +682,23 @@ static void StoreAsComplexNEON(const float* data,
data_complex[0][PART_LEN] = data[1];
}
-static void SubbandCoherenceNEON(AecCore* aec,
- float efw[2][PART_LEN1],
- float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1],
- float* fft,
+static void ComputeCoherenceNEON(const CoherenceState* coherence_state,
float* cohde,
- float* cohxd,
- int* extreme_filter_divergence) {
+ float* cohxd) {
int i;
- SmoothedPSD(aec, efw, dfw, xfw, extreme_filter_divergence);
-
{
const float32x4_t vec_1eminus10 = vdupq_n_f32(1e-10f);
// Subband coherence
for (i = 0; i + 3 < PART_LEN1; i += 4) {
- const float32x4_t vec_sd = vld1q_f32(&aec->sd[i]);
- const float32x4_t vec_se = vld1q_f32(&aec->se[i]);
- const float32x4_t vec_sx = vld1q_f32(&aec->sx[i]);
+ const float32x4_t vec_sd = vld1q_f32(&coherence_state->sd[i]);
+ const float32x4_t vec_se = vld1q_f32(&coherence_state->se[i]);
+ const float32x4_t vec_sx = vld1q_f32(&coherence_state->sx[i]);
const float32x4_t vec_sdse = vmlaq_f32(vec_1eminus10, vec_sd, vec_se);
const float32x4_t vec_sdsx = vmlaq_f32(vec_1eminus10, vec_sd, vec_sx);
- float32x4x2_t vec_sde = vld2q_f32(&aec->sde[i][0]);
- float32x4x2_t vec_sxd = vld2q_f32(&aec->sxd[i][0]);
+ float32x4x2_t vec_sde = vld2q_f32(&coherence_state->sde[i][0]);
+ float32x4x2_t vec_sxd = vld2q_f32(&coherence_state->sxd[i][0]);
float32x4_t vec_cohde = vmulq_f32(vec_sde.val[0], vec_sde.val[0]);
float32x4_t vec_cohxd = vmulq_f32(vec_sxd.val[0], vec_sxd.val[0]);
vec_cohde = vmlaq_f32(vec_cohde, vec_sde.val[1], vec_sde.val[1]);
@@ -706,12 +712,12 @@ static void SubbandCoherenceNEON(AecCore* aec,
}
// scalar code for the remaining items.
for (; i < PART_LEN1; i++) {
- cohde[i] =
- (aec->sde[i][0] * aec->sde[i][0] + aec->sde[i][1] * aec->sde[i][1]) /
- (aec->sd[i] * aec->se[i] + 1e-10f);
- cohxd[i] =
- (aec->sxd[i][0] * aec->sxd[i][0] + aec->sxd[i][1] * aec->sxd[i][1]) /
- (aec->sx[i] * aec->sd[i] + 1e-10f);
+ cohde[i] = (coherence_state->sde[i][0] * coherence_state->sde[i][0] +
+ coherence_state->sde[i][1] * coherence_state->sde[i][1]) /
+ (coherence_state->sd[i] * coherence_state->se[i] + 1e-10f);
+ cohxd[i] = (coherence_state->sxd[i][0] * coherence_state->sxd[i][0] +
+ coherence_state->sxd[i][1] * coherence_state->sxd[i][1]) /
+ (coherence_state->sx[i] * coherence_state->sd[i] + 1e-10f);
}
}
@@ -719,8 +725,10 @@ void WebRtcAec_InitAec_neon(void) {
WebRtcAec_FilterFar = FilterFarNEON;
WebRtcAec_ScaleErrorSignal = ScaleErrorSignalNEON;
WebRtcAec_FilterAdaptation = FilterAdaptationNEON;
- WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppressNEON;
- WebRtcAec_SubbandCoherence = SubbandCoherenceNEON;
+ WebRtcAec_Overdrive = OverdriveNEON;
+ WebRtcAec_Suppress = SuppressNEON;
+ WebRtcAec_ComputeCoherence = ComputeCoherenceNEON;
+ WebRtcAec_UpdateCoherenceSpectra = UpdateCoherenceSpectraNEON;
WebRtcAec_StoreAsComplex = StoreAsComplexNEON;
WebRtcAec_PartitionDelay = PartitionDelayNEON;
WebRtcAec_WindowData = WindowDataNEON;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h
new file mode 100644
index 00000000000..d1fb6e892a7
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_CORE_OPTIMIZED_METHODS_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_CORE_OPTIMIZED_METHODS_H_
+
+#include <memory>
+
+#include "webrtc/modules/audio_processing/aec/aec_core.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+typedef void (*WebRtcAecFilterFar)(
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float y_fft[2][PART_LEN1]);
+extern WebRtcAecFilterFar WebRtcAec_FilterFar;
+typedef void (*WebRtcAecScaleErrorSignal)(float mu,
+ float error_threshold,
+ float x_pow[PART_LEN1],
+ float ef[2][PART_LEN1]);
+extern WebRtcAecScaleErrorSignal WebRtcAec_ScaleErrorSignal;
+typedef void (*WebRtcAecFilterAdaptation)(
+ int num_partitions,
+ int x_fft_buf_block_pos,
+ float x_fft_buf[2][kExtendedNumPartitions * PART_LEN1],
+ float e_fft[2][PART_LEN1],
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]);
+extern WebRtcAecFilterAdaptation WebRtcAec_FilterAdaptation;
+
+typedef void (*WebRtcAecOverdrive)(float overdrive_scaling,
+ const float hNlFb,
+ float hNl[PART_LEN1]);
+extern WebRtcAecOverdrive WebRtcAec_Overdrive;
+
+typedef void (*WebRtcAecSuppress)(const float hNl[PART_LEN1],
+ float efw[2][PART_LEN1]);
+extern WebRtcAecSuppress WebRtcAec_Suppress;
+
+typedef void (*WebRtcAecComputeCoherence)(const CoherenceState* coherence_state,
+ float* cohde,
+ float* cohxd);
+extern WebRtcAecComputeCoherence WebRtcAec_ComputeCoherence;
+
+typedef void (*WebRtcAecUpdateCoherenceSpectra)(int mult,
+ bool extended_filter_enabled,
+ float efw[2][PART_LEN1],
+ float dfw[2][PART_LEN1],
+ float xfw[2][PART_LEN1],
+ CoherenceState* coherence_state,
+ short* filter_divergence_state,
+ int* extreme_filter_divergence);
+extern WebRtcAecUpdateCoherenceSpectra WebRtcAec_UpdateCoherenceSpectra;
+
+typedef int (*WebRtcAecPartitionDelay)(
+ int num_partitions,
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]);
+extern WebRtcAecPartitionDelay WebRtcAec_PartitionDelay;
+
+typedef void (*WebRtcAecStoreAsComplex)(const float* data,
+ float data_complex[2][PART_LEN1]);
+extern WebRtcAecStoreAsComplex WebRtcAec_StoreAsComplex;
+
+typedef void (*WebRtcAecWindowData)(float* x_windowed, const float* x);
+extern WebRtcAecWindowData WebRtcAec_WindowData;
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_CORE_OPTIMIZED_METHODS_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_sse2.cc b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_sse2.cc
index c1a6e3de75b..47ba12f419e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_sse2.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_sse2.cc
@@ -20,10 +20,8 @@ extern "C" {
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
}
#include "webrtc/modules/audio_processing/aec/aec_common.h"
-#include "webrtc/modules/audio_processing/aec/aec_core_internal.h"
-extern "C" {
+#include "webrtc/modules/audio_processing/aec/aec_core_optimized_methods.h"
#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
-}
namespace webrtc {
@@ -377,15 +375,13 @@ static __m128 mm_pow_ps(__m128 a, __m128 b) {
return a_exp_b;
}
-static void OverdriveAndSuppressSSE2(AecCore* aec,
- float hNl[PART_LEN1],
- const float hNlFb,
- float efw[2][PART_LEN1]) {
+static void OverdriveSSE2(float overdrive_scaling,
+ float hNlFb,
+ float hNl[PART_LEN1]) {
int i;
const __m128 vec_hNlFb = _mm_set1_ps(hNlFb);
const __m128 vec_one = _mm_set1_ps(1.0f);
- const __m128 vec_minus_one = _mm_set1_ps(-1.0f);
- const __m128 vec_overDriveSm = _mm_set1_ps(aec->overDriveSm);
+ const __m128 vec_overdrive_scaling = _mm_set1_ps(overdrive_scaling);
// vectorized code (four at once)
for (i = 0; i + 3 < PART_LEN1; i += 4) {
// Weight subbands
@@ -401,28 +397,12 @@ static void OverdriveAndSuppressSSE2(AecCore* aec,
bigger, _mm_add_ps(vec_weightCurve_hNlFb, vec_one_weightCurve_hNl));
vec_hNl = _mm_or_ps(vec_if0, vec_if1);
- {
- const __m128 vec_overDriveCurve =
- _mm_loadu_ps(&WebRtcAec_overDriveCurve[i]);
- const __m128 vec_overDriveSm_overDriveCurve =
- _mm_mul_ps(vec_overDriveSm, vec_overDriveCurve);
- vec_hNl = mm_pow_ps(vec_hNl, vec_overDriveSm_overDriveCurve);
- _mm_storeu_ps(&hNl[i], vec_hNl);
- }
-
- // Suppress error signal
- {
- __m128 vec_efw_re = _mm_loadu_ps(&efw[0][i]);
- __m128 vec_efw_im = _mm_loadu_ps(&efw[1][i]);
- vec_efw_re = _mm_mul_ps(vec_efw_re, vec_hNl);
- vec_efw_im = _mm_mul_ps(vec_efw_im, vec_hNl);
-
- // Ooura fft returns incorrect sign on imaginary component. It matters
- // here because we are making an additive change with comfort noise.
- vec_efw_im = _mm_mul_ps(vec_efw_im, vec_minus_one);
- _mm_storeu_ps(&efw[0][i], vec_efw_re);
- _mm_storeu_ps(&efw[1][i], vec_efw_im);
- }
+ const __m128 vec_overDriveCurve =
+ _mm_loadu_ps(&WebRtcAec_overDriveCurve[i]);
+ const __m128 vec_overDriveSm_overDriveCurve =
+ _mm_mul_ps(vec_overdrive_scaling, vec_overDriveCurve);
+ vec_hNl = mm_pow_ps(vec_hNl, vec_overDriveSm_overDriveCurve);
+ _mm_storeu_ps(&hNl[i], vec_hNl);
}
// scalar code for the remaining items.
for (; i < PART_LEN1; i++) {
@@ -431,8 +411,30 @@ static void OverdriveAndSuppressSSE2(AecCore* aec,
hNl[i] = WebRtcAec_weightCurve[i] * hNlFb +
(1 - WebRtcAec_weightCurve[i]) * hNl[i];
}
- hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]);
+ hNl[i] = powf(hNl[i], overdrive_scaling * WebRtcAec_overDriveCurve[i]);
+ }
+}
+
+static void SuppressSSE2(const float hNl[PART_LEN1], float efw[2][PART_LEN1]) {
+ int i;
+ const __m128 vec_minus_one = _mm_set1_ps(-1.0f);
+ // vectorized code (four at once)
+ for (i = 0; i + 3 < PART_LEN1; i += 4) {
+ // Suppress error signal
+ __m128 vec_hNl = _mm_loadu_ps(&hNl[i]);
+ __m128 vec_efw_re = _mm_loadu_ps(&efw[0][i]);
+ __m128 vec_efw_im = _mm_loadu_ps(&efw[1][i]);
+ vec_efw_re = _mm_mul_ps(vec_efw_re, vec_hNl);
+ vec_efw_im = _mm_mul_ps(vec_efw_im, vec_hNl);
+ // Ooura fft returns incorrect sign on imaginary component. It matters
+ // here because we are making an additive change with comfort noise.
+ vec_efw_im = _mm_mul_ps(vec_efw_im, vec_minus_one);
+ _mm_storeu_ps(&efw[0][i], vec_efw_re);
+ _mm_storeu_ps(&efw[1][i], vec_efw_im);
+ }
+ // scalar code for the remaining items.
+ for (; i < PART_LEN1; i++) {
// Suppress error signal
efw[0][i] *= hNl[i];
efw[1][i] *= hNl[i];
@@ -451,7 +453,9 @@ __inline static void _mm_add_ps_4x1(__m128 sum, float* dst) {
_mm_store_ss(dst, sum);
}
-static int PartitionDelaySSE2(const AecCore* aec) {
+static int PartitionDelaySSE2(
+ int num_partitions,
+ float h_fft_buf[2][kExtendedNumPartitions * PART_LEN1]) {
// Measures the energy in each filter partition and returns the partition with
// highest energy.
// TODO(bjornv): Spread computational cost by computing one partition per
@@ -460,15 +464,15 @@ static int PartitionDelaySSE2(const AecCore* aec) {
int i;
int delay = 0;
- for (i = 0; i < aec->num_partitions; i++) {
+ for (i = 0; i < num_partitions; i++) {
int j;
int pos = i * PART_LEN1;
float wfEn = 0;
__m128 vec_wfEn = _mm_set1_ps(0.0f);
// vectorized code (four at once)
for (j = 0; j + 3 < PART_LEN1; j += 4) {
- const __m128 vec_wfBuf0 = _mm_loadu_ps(&aec->wfBuf[0][pos + j]);
- const __m128 vec_wfBuf1 = _mm_loadu_ps(&aec->wfBuf[1][pos + j]);
+ const __m128 vec_wfBuf0 = _mm_loadu_ps(&h_fft_buf[0][pos + j]);
+ const __m128 vec_wfBuf1 = _mm_loadu_ps(&h_fft_buf[1][pos + j]);
vec_wfEn = _mm_add_ps(vec_wfEn, _mm_mul_ps(vec_wfBuf0, vec_wfBuf0));
vec_wfEn = _mm_add_ps(vec_wfEn, _mm_mul_ps(vec_wfBuf1, vec_wfBuf1));
}
@@ -476,8 +480,8 @@ static int PartitionDelaySSE2(const AecCore* aec) {
// scalar code for the remaining items.
for (; j < PART_LEN1; j++) {
- wfEn += aec->wfBuf[0][pos + j] * aec->wfBuf[0][pos + j] +
- aec->wfBuf[1][pos + j] * aec->wfBuf[1][pos + j];
+ wfEn += h_fft_buf[0][pos + j] * h_fft_buf[0][pos + j] +
+ h_fft_buf[1][pos + j] * h_fft_buf[1][pos + j];
}
if (wfEn > wfEnMax) {
@@ -497,16 +501,19 @@ static int PartitionDelaySSE2(const AecCore* aec) {
//
// In addition to updating the PSDs, also the filter diverge state is determined
// upon actions are taken.
-static void SmoothedPSD(AecCore* aec,
- float efw[2][PART_LEN1],
- float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1],
- int* extreme_filter_divergence) {
+static void UpdateCoherenceSpectraSSE2(int mult,
+ bool extended_filter_enabled,
+ float efw[2][PART_LEN1],
+ float dfw[2][PART_LEN1],
+ float xfw[2][PART_LEN1],
+ CoherenceState* coherence_state,
+ short* filter_divergence_state,
+ int* extreme_filter_divergence) {
// Power estimate smoothing coefficients.
const float* ptrGCoh =
- aec->extended_filter_enabled
- ? WebRtcAec_kExtendedSmoothingCoefficients[aec->mult - 1]
- : WebRtcAec_kNormalSmoothingCoefficients[aec->mult - 1];
+ extended_filter_enabled
+ ? WebRtcAec_kExtendedSmoothingCoefficients[mult - 1]
+ : WebRtcAec_kNormalSmoothingCoefficients[mult - 1];
int i;
float sdSum = 0, seSum = 0;
const __m128 vec_15 = _mm_set1_ps(WebRtcAec_kMinFarendPSD);
@@ -522,9 +529,12 @@ static void SmoothedPSD(AecCore* aec,
const __m128 vec_efw1 = _mm_loadu_ps(&efw[1][i]);
const __m128 vec_xfw0 = _mm_loadu_ps(&xfw[0][i]);
const __m128 vec_xfw1 = _mm_loadu_ps(&xfw[1][i]);
- __m128 vec_sd = _mm_mul_ps(_mm_loadu_ps(&aec->sd[i]), vec_GCoh0);
- __m128 vec_se = _mm_mul_ps(_mm_loadu_ps(&aec->se[i]), vec_GCoh0);
- __m128 vec_sx = _mm_mul_ps(_mm_loadu_ps(&aec->sx[i]), vec_GCoh0);
+ __m128 vec_sd =
+ _mm_mul_ps(_mm_loadu_ps(&coherence_state->sd[i]), vec_GCoh0);
+ __m128 vec_se =
+ _mm_mul_ps(_mm_loadu_ps(&coherence_state->se[i]), vec_GCoh0);
+ __m128 vec_sx =
+ _mm_mul_ps(_mm_loadu_ps(&coherence_state->sx[i]), vec_GCoh0);
__m128 vec_dfw_sumsq = _mm_mul_ps(vec_dfw0, vec_dfw0);
__m128 vec_efw_sumsq = _mm_mul_ps(vec_efw0, vec_efw0);
__m128 vec_xfw_sumsq = _mm_mul_ps(vec_xfw0, vec_xfw0);
@@ -535,13 +545,13 @@ static void SmoothedPSD(AecCore* aec,
vec_sd = _mm_add_ps(vec_sd, _mm_mul_ps(vec_dfw_sumsq, vec_GCoh1));
vec_se = _mm_add_ps(vec_se, _mm_mul_ps(vec_efw_sumsq, vec_GCoh1));
vec_sx = _mm_add_ps(vec_sx, _mm_mul_ps(vec_xfw_sumsq, vec_GCoh1));
- _mm_storeu_ps(&aec->sd[i], vec_sd);
- _mm_storeu_ps(&aec->se[i], vec_se);
- _mm_storeu_ps(&aec->sx[i], vec_sx);
+ _mm_storeu_ps(&coherence_state->sd[i], vec_sd);
+ _mm_storeu_ps(&coherence_state->se[i], vec_se);
+ _mm_storeu_ps(&coherence_state->sx[i], vec_sx);
{
- const __m128 vec_3210 = _mm_loadu_ps(&aec->sde[i][0]);
- const __m128 vec_7654 = _mm_loadu_ps(&aec->sde[i + 2][0]);
+ const __m128 vec_3210 = _mm_loadu_ps(&coherence_state->sde[i][0]);
+ const __m128 vec_7654 = _mm_loadu_ps(&coherence_state->sde[i + 2][0]);
__m128 vec_a =
_mm_shuffle_ps(vec_3210, vec_7654, _MM_SHUFFLE(2, 0, 2, 0));
__m128 vec_b =
@@ -556,13 +566,14 @@ static void SmoothedPSD(AecCore* aec,
_mm_sub_ps(vec_dfwefw0110, _mm_mul_ps(vec_dfw1, vec_efw0));
vec_a = _mm_add_ps(vec_a, _mm_mul_ps(vec_dfwefw0011, vec_GCoh1));
vec_b = _mm_add_ps(vec_b, _mm_mul_ps(vec_dfwefw0110, vec_GCoh1));
- _mm_storeu_ps(&aec->sde[i][0], _mm_unpacklo_ps(vec_a, vec_b));
- _mm_storeu_ps(&aec->sde[i + 2][0], _mm_unpackhi_ps(vec_a, vec_b));
+ _mm_storeu_ps(&coherence_state->sde[i][0], _mm_unpacklo_ps(vec_a, vec_b));
+ _mm_storeu_ps(&coherence_state->sde[i + 2][0],
+ _mm_unpackhi_ps(vec_a, vec_b));
}
{
- const __m128 vec_3210 = _mm_loadu_ps(&aec->sxd[i][0]);
- const __m128 vec_7654 = _mm_loadu_ps(&aec->sxd[i + 2][0]);
+ const __m128 vec_3210 = _mm_loadu_ps(&coherence_state->sxd[i][0]);
+ const __m128 vec_7654 = _mm_loadu_ps(&coherence_state->sxd[i + 2][0]);
__m128 vec_a =
_mm_shuffle_ps(vec_3210, vec_7654, _MM_SHUFFLE(2, 0, 2, 0));
__m128 vec_b =
@@ -577,8 +588,9 @@ static void SmoothedPSD(AecCore* aec,
_mm_sub_ps(vec_dfwxfw0110, _mm_mul_ps(vec_dfw1, vec_xfw0));
vec_a = _mm_add_ps(vec_a, _mm_mul_ps(vec_dfwxfw0011, vec_GCoh1));
vec_b = _mm_add_ps(vec_b, _mm_mul_ps(vec_dfwxfw0110, vec_GCoh1));
- _mm_storeu_ps(&aec->sxd[i][0], _mm_unpacklo_ps(vec_a, vec_b));
- _mm_storeu_ps(&aec->sxd[i + 2][0], _mm_unpackhi_ps(vec_a, vec_b));
+ _mm_storeu_ps(&coherence_state->sxd[i][0], _mm_unpacklo_ps(vec_a, vec_b));
+ _mm_storeu_ps(&coherence_state->sxd[i + 2][0],
+ _mm_unpackhi_ps(vec_a, vec_b));
}
vec_sdSum = _mm_add_ps(vec_sdSum, vec_sd);
@@ -589,39 +601,43 @@ static void SmoothedPSD(AecCore* aec,
_mm_add_ps_4x1(vec_seSum, &seSum);
for (; i < PART_LEN1; i++) {
- aec->sd[i] = ptrGCoh[0] * aec->sd[i] +
- ptrGCoh[1] * (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]);
- aec->se[i] = ptrGCoh[0] * aec->se[i] +
- ptrGCoh[1] * (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]);
+ coherence_state->sd[i] =
+ ptrGCoh[0] * coherence_state->sd[i] +
+ ptrGCoh[1] * (dfw[0][i] * dfw[0][i] + dfw[1][i] * dfw[1][i]);
+ coherence_state->se[i] =
+ ptrGCoh[0] * coherence_state->se[i] +
+ ptrGCoh[1] * (efw[0][i] * efw[0][i] + efw[1][i] * efw[1][i]);
// We threshold here to protect against the ill-effects of a zero farend.
// The threshold is not arbitrarily chosen, but balances protection and
// adverse interaction with the algorithm's tuning.
// TODO(bjornv): investigate further why this is so sensitive.
- aec->sx[i] = ptrGCoh[0] * aec->sx[i] +
- ptrGCoh[1] * WEBRTC_SPL_MAX(
- xfw[0][i] * xfw[0][i] + xfw[1][i] * xfw[1][i],
- WebRtcAec_kMinFarendPSD);
-
- aec->sde[i][0] =
- ptrGCoh[0] * aec->sde[i][0] +
+ coherence_state->sx[i] =
+ ptrGCoh[0] * coherence_state->sx[i] +
+ ptrGCoh[1] *
+ WEBRTC_SPL_MAX(xfw[0][i] * xfw[0][i] + xfw[1][i] * xfw[1][i],
+ WebRtcAec_kMinFarendPSD);
+
+ coherence_state->sde[i][0] =
+ ptrGCoh[0] * coherence_state->sde[i][0] +
ptrGCoh[1] * (dfw[0][i] * efw[0][i] + dfw[1][i] * efw[1][i]);
- aec->sde[i][1] =
- ptrGCoh[0] * aec->sde[i][1] +
+ coherence_state->sde[i][1] =
+ ptrGCoh[0] * coherence_state->sde[i][1] +
ptrGCoh[1] * (dfw[0][i] * efw[1][i] - dfw[1][i] * efw[0][i]);
- aec->sxd[i][0] =
- ptrGCoh[0] * aec->sxd[i][0] +
+ coherence_state->sxd[i][0] =
+ ptrGCoh[0] * coherence_state->sxd[i][0] +
ptrGCoh[1] * (dfw[0][i] * xfw[0][i] + dfw[1][i] * xfw[1][i]);
- aec->sxd[i][1] =
- ptrGCoh[0] * aec->sxd[i][1] +
+ coherence_state->sxd[i][1] =
+ ptrGCoh[0] * coherence_state->sxd[i][1] +
ptrGCoh[1] * (dfw[0][i] * xfw[1][i] - dfw[1][i] * xfw[0][i]);
- sdSum += aec->sd[i];
- seSum += aec->se[i];
+ sdSum += coherence_state->sd[i];
+ seSum += coherence_state->se[i];
}
// Divergent filter safeguard update.
- aec->divergeState = (aec->divergeState ? 1.05f : 1.0f) * seSum > sdSum;
+ *filter_divergence_state =
+ (*filter_divergence_state ? 1.05f : 1.0f) * seSum > sdSum;
// Signal extreme filter divergence if the error is significantly larger
// than the nearend (13 dB).
@@ -668,34 +684,27 @@ static void StoreAsComplexSSE2(const float* data,
data_complex[0][PART_LEN] = data[1];
}
-static void SubbandCoherenceSSE2(AecCore* aec,
- float efw[2][PART_LEN1],
- float dfw[2][PART_LEN1],
- float xfw[2][PART_LEN1],
- float* fft,
+static void ComputeCoherenceSSE2(const CoherenceState* coherence_state,
float* cohde,
- float* cohxd,
- int* extreme_filter_divergence) {
+ float* cohxd) {
int i;
- SmoothedPSD(aec, efw, dfw, xfw, extreme_filter_divergence);
-
{
const __m128 vec_1eminus10 = _mm_set1_ps(1e-10f);
// Subband coherence
for (i = 0; i + 3 < PART_LEN1; i += 4) {
- const __m128 vec_sd = _mm_loadu_ps(&aec->sd[i]);
- const __m128 vec_se = _mm_loadu_ps(&aec->se[i]);
- const __m128 vec_sx = _mm_loadu_ps(&aec->sx[i]);
+ const __m128 vec_sd = _mm_loadu_ps(&coherence_state->sd[i]);
+ const __m128 vec_se = _mm_loadu_ps(&coherence_state->se[i]);
+ const __m128 vec_sx = _mm_loadu_ps(&coherence_state->sx[i]);
const __m128 vec_sdse =
_mm_add_ps(vec_1eminus10, _mm_mul_ps(vec_sd, vec_se));
const __m128 vec_sdsx =
_mm_add_ps(vec_1eminus10, _mm_mul_ps(vec_sd, vec_sx));
- const __m128 vec_sde_3210 = _mm_loadu_ps(&aec->sde[i][0]);
- const __m128 vec_sde_7654 = _mm_loadu_ps(&aec->sde[i + 2][0]);
- const __m128 vec_sxd_3210 = _mm_loadu_ps(&aec->sxd[i][0]);
- const __m128 vec_sxd_7654 = _mm_loadu_ps(&aec->sxd[i + 2][0]);
+ const __m128 vec_sde_3210 = _mm_loadu_ps(&coherence_state->sde[i][0]);
+ const __m128 vec_sde_7654 = _mm_loadu_ps(&coherence_state->sde[i + 2][0]);
+ const __m128 vec_sxd_3210 = _mm_loadu_ps(&coherence_state->sxd[i][0]);
+ const __m128 vec_sxd_7654 = _mm_loadu_ps(&coherence_state->sxd[i + 2][0]);
const __m128 vec_sde_0 =
_mm_shuffle_ps(vec_sde_3210, vec_sde_7654, _MM_SHUFFLE(2, 0, 2, 0));
const __m128 vec_sde_1 =
@@ -716,12 +725,12 @@ static void SubbandCoherenceSSE2(AecCore* aec,
// scalar code for the remaining items.
for (; i < PART_LEN1; i++) {
- cohde[i] =
- (aec->sde[i][0] * aec->sde[i][0] + aec->sde[i][1] * aec->sde[i][1]) /
- (aec->sd[i] * aec->se[i] + 1e-10f);
- cohxd[i] =
- (aec->sxd[i][0] * aec->sxd[i][0] + aec->sxd[i][1] * aec->sxd[i][1]) /
- (aec->sx[i] * aec->sd[i] + 1e-10f);
+ cohde[i] = (coherence_state->sde[i][0] * coherence_state->sde[i][0] +
+ coherence_state->sde[i][1] * coherence_state->sde[i][1]) /
+ (coherence_state->sd[i] * coherence_state->se[i] + 1e-10f);
+ cohxd[i] = (coherence_state->sxd[i][0] * coherence_state->sxd[i][0] +
+ coherence_state->sxd[i][1] * coherence_state->sxd[i][1]) /
+ (coherence_state->sx[i] * coherence_state->sd[i] + 1e-10f);
}
}
}
@@ -730,8 +739,10 @@ void WebRtcAec_InitAec_SSE2(void) {
WebRtcAec_FilterFar = FilterFarSSE2;
WebRtcAec_ScaleErrorSignal = ScaleErrorSignalSSE2;
WebRtcAec_FilterAdaptation = FilterAdaptationSSE2;
- WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppressSSE2;
- WebRtcAec_SubbandCoherence = SubbandCoherenceSSE2;
+ WebRtcAec_Overdrive = OverdriveSSE2;
+ WebRtcAec_Suppress = SuppressSSE2;
+ WebRtcAec_ComputeCoherence = ComputeCoherenceSSE2;
+ WebRtcAec_UpdateCoherenceSpectra = UpdateCoherenceSpectraSSE2;
WebRtcAec_StoreAsComplex = StoreAsComplexSSE2;
WebRtcAec_PartitionDelay = PartitionDelaySSE2;
WebRtcAec_WindowData = WindowDataSSE2;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.cc
index 03efc103ea6..690fe9f34f9 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.cc
@@ -581,9 +581,5 @@ void aec_rdft_init(void) {
#endif
#if defined(WEBRTC_HAS_NEON)
aec_rdft_init_neon();
-#elif defined(WEBRTC_DETECT_NEON)
- if ((WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) != 0) {
- aec_rdft_init_neon();
- }
#endif
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h
index 18eb7a5c3f3..d83eb27d83f 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h
@@ -54,7 +54,7 @@ void aec_rdft_inverse_128(float* a);
#if defined(MIPS_FPU_LE)
void aec_rdft_init_mips(void);
#endif
-#if defined(WEBRTC_DETECT_NEON) || defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
void aec_rdft_init_neon(void);
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.cc
index 7e64e657167..7e64e657167 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.cc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_neon.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_neon.cc
index 43b6a68cd72..43b6a68cd72 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_neon.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_neon.cc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_sse2.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_sse2.cc
index b4e453ff53e..b4e453ff53e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_sse2.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_sse2.cc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.cc b/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.cc
index f963a4e1d30..716da38ab88 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.cc
@@ -14,9 +14,6 @@
#include "webrtc/modules/audio_processing/aec/echo_cancellation.h"
#include <math.h>
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-#include <stdio.h>
-#endif
#include <stdlib.h>
#include <string.h>
@@ -26,7 +23,7 @@ extern "C" {
}
#include "webrtc/modules/audio_processing/aec/aec_core.h"
#include "webrtc/modules/audio_processing/aec/aec_resampler.h"
-#include "webrtc/modules/audio_processing/aec/echo_cancellation_internal.h"
+#include "webrtc/modules/audio_processing/logging/apm_data_dumper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -99,9 +96,7 @@ static const int kMaxBufSizeStart = 62; // In partitions
static const int sampMsNb = 8; // samples per ms in nb
static const int initCheck = 42;
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-int webrtc_aec_instance_count = 0;
-#endif
+int Aec::instance_count = 0;
// Estimates delay to set the position of the far-end buffer read pointer
// (controlled by knownDelay)
@@ -123,13 +118,14 @@ static void ProcessExtended(Aec* self,
int32_t skew);
void* WebRtcAec_Create() {
- Aec* aecpc = reinterpret_cast<Aec*>(malloc(sizeof(Aec)));
+ Aec* aecpc = new Aec();
if (!aecpc) {
return NULL;
}
+ aecpc->data_dumper.reset(new ApmDataDumper(aecpc->instance_count));
- aecpc->aec = WebRtcAec_CreateAec();
+ aecpc->aec = WebRtcAec_CreateAec(aecpc->instance_count);
if (!aecpc->aec) {
WebRtcAec_Free(aecpc);
return NULL;
@@ -151,22 +147,7 @@ void* WebRtcAec_Create() {
aecpc->initFlag = 0;
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- {
- char filename[64];
- snprintf(filename, sizeof(filename), "aec_buf%d.dat",
- webrtc_aec_instance_count);
- aecpc->bufFile = fopen(filename, "wb");
- snprintf(filename, sizeof(filename), "aec_skew%d.dat",
- webrtc_aec_instance_count);
- aecpc->skewFile = fopen(filename, "wb");
- snprintf(filename, sizeof(filename), "aec_delay%d.dat",
- webrtc_aec_instance_count);
- aecpc->delayFile = fopen(filename, "wb");
- webrtc_aec_instance_count++;
- }
-#endif
-
+ aecpc->instance_count++;
return aecpc;
}
@@ -179,19 +160,14 @@ void WebRtcAec_Free(void* aecInst) {
WebRtc_FreeBuffer(aecpc->far_pre_buf);
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- fclose(aecpc->bufFile);
- fclose(aecpc->skewFile);
- fclose(aecpc->delayFile);
-#endif
-
WebRtcAec_FreeAec(aecpc->aec);
WebRtcAec_FreeResampler(aecpc->resampler);
- free(aecpc);
+ delete aecpc;
}
int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq) {
Aec* aecpc = reinterpret_cast<Aec*>(aecInst);
+ aecpc->data_dumper->InitiateNewSetOfRecordings();
AecConfig aecConfig;
if (sampFreq != 8000 && sampFreq != 16000 && sampFreq != 32000 &&
@@ -376,15 +352,9 @@ int32_t WebRtcAec_Process(void* aecInst,
msInSndCardBuf, skew);
}
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- {
- int16_t far_buf_size_ms = (int16_t)(WebRtcAec_system_delay(aecpc->aec) /
- (sampMsNb * aecpc->rate_factor));
- (void)fwrite(&far_buf_size_ms, 2, 1, aecpc->bufFile);
- (void)fwrite(&aecpc->knownDelay, sizeof(aecpc->knownDelay), 1,
- aecpc->delayFile);
- }
-#endif
+ int far_buf_size_samples = WebRtcAec_system_delay(aecpc->aec);
+ aecpc->data_dumper->DumpRaw("aec_system_delay", 1, &far_buf_size_samples);
+ aecpc->data_dumper->DumpRaw("aec_known_delay", 1, &aecpc->knownDelay);
return retVal;
}
@@ -603,9 +573,7 @@ static int ProcessNormal(Aec* aecpc,
aecpc->skew = maxSkewEst;
}
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- (void)fwrite(&aecpc->skew, sizeof(aecpc->skew), 1, aecpc->skewFile);
-#endif
+ aecpc->data_dumper->DumpRaw("aec_skew", 1, &aecpc->skew);
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.h b/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.h
index f4b1f20ab75..8e5e52c32bf 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.h
@@ -11,8 +11,14 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_H_
+#include <memory>
+
#include <stddef.h>
+extern "C" {
+#include "webrtc/common_audio/ring_buffer.h"
+}
+#include "webrtc/modules/audio_processing/aec/aec_core.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -56,6 +62,54 @@ typedef struct {
struct AecCore;
+class ApmDataDumper;
+
+typedef struct Aec {
+ std::unique_ptr<ApmDataDumper> data_dumper;
+
+ int delayCtr;
+ int sampFreq;
+ int splitSampFreq;
+ int scSampFreq;
+ float sampFactor; // scSampRate / sampFreq
+ short skewMode;
+ int bufSizeStart;
+ int knownDelay;
+ int rate_factor;
+
+ short initFlag; // indicates if AEC has been initialized
+
+ // Variables used for averaging far end buffer size
+ short counter;
+ int sum;
+ short firstVal;
+ short checkBufSizeCtr;
+
+ // Variables used for delay shifts
+ short msInSndCardBuf;
+ short filtDelay; // Filtered delay estimate.
+ int timeForDelayChange;
+ int startup_phase;
+ int checkBuffSize;
+ short lastDelayDiff;
+
+ // Structures
+ void* resampler;
+
+ int skewFrCtr;
+ int resample; // if the skew is small enough we don't resample
+ int highSkewCtr;
+ float skew;
+
+ RingBuffer* far_pre_buf; // Time domain far-end pre-buffer.
+
+ int farend_started;
+
+ // Aec instance counter.
+ static int instance_count;
+ AecCore* aec;
+} Aec;
+
/*
* Allocates the memory needed by the AEC. The memory needs to be initialized
* separately using the WebRtcAec_Init() function. Returns a pointer to the
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h b/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h
deleted file mode 100644
index b4a6fd8390e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation_internal.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_INTERNAL_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_INTERNAL_H_
-
-extern "C" {
-#include "webrtc/common_audio/ring_buffer.h"
-}
-#include "webrtc/modules/audio_processing/aec/aec_core.h"
-
-namespace webrtc {
-
-typedef struct {
- int delayCtr;
- int sampFreq;
- int splitSampFreq;
- int scSampFreq;
- float sampFactor; // scSampRate / sampFreq
- short skewMode;
- int bufSizeStart;
- int knownDelay;
- int rate_factor;
-
- short initFlag; // indicates if AEC has been initialized
-
- // Variables used for averaging far end buffer size
- short counter;
- int sum;
- short firstVal;
- short checkBufSizeCtr;
-
- // Variables used for delay shifts
- short msInSndCardBuf;
- short filtDelay; // Filtered delay estimate.
- int timeForDelayChange;
- int startup_phase;
- int checkBuffSize;
- short lastDelayDiff;
-
-#ifdef WEBRTC_AEC_DEBUG_DUMP
- FILE* bufFile;
- FILE* delayFile;
- FILE* skewFile;
-#endif
-
- // Structures
- void* resampler;
-
- int skewFrCtr;
- int resample; // if the skew is small enough we don't resample
- int highSkewCtr;
- float skew;
-
- RingBuffer* far_pre_buf; // Time domain far-end pre-buffer.
-
- int farend_started;
-
- AecCore* aec;
-} Aec;
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_INTERNAL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
index be145898b59..51a4df2b782 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
@@ -10,7 +10,6 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/audio_processing/aec/aec_core.h"
-#include "webrtc/modules/audio_processing/aec/echo_cancellation_internal.h"
#include "webrtc/modules/audio_processing/aec/echo_cancellation.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.cc
index 6bf1cf7f3ef..a17220dbd76 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.cc
@@ -14,12 +14,16 @@
#include <stddef.h>
#include <stdlib.h>
+extern "C" {
#include "webrtc/common_audio/ring_buffer.h"
#include "webrtc/common_audio/signal_processing/include/real_fft.h"
+}
#include "webrtc/modules/audio_processing/aecm/echo_control_mobile.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h"
-#include "webrtc/system_wrappers/include/compile_assert_c.h"
+extern "C" {
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
+}
+
#include "webrtc/typedefs.h"
#ifdef AEC_DEBUG
@@ -208,7 +212,7 @@ StoreAdaptiveChannel WebRtcAecm_StoreAdaptiveChannel;
ResetAdaptiveChannel WebRtcAecm_ResetAdaptiveChannel;
AecmCore* WebRtcAecm_CreateCore() {
- AecmCore* aecm = malloc(sizeof(AecmCore));
+ AecmCore* aecm = static_cast<AecmCore*>(malloc(sizeof(AecmCore)));
aecm->farFrameBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN,
sizeof(int16_t));
@@ -361,7 +365,7 @@ static void ResetAdaptiveChannelC(AecmCore* aecm) {
}
// Initialize function pointers for ARM Neon platform.
-#if (defined WEBRTC_DETECT_NEON || defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
static void WebRtcAecm_InitNeon(void)
{
WebRtcAecm_StoreAdaptiveChannel = WebRtcAecm_StoreAdaptiveChannelNeon;
@@ -501,20 +505,14 @@ int WebRtcAecm_InitCore(AecmCore* const aecm, int samplingFreq) {
// Assert a preprocessor definition at compile-time. It's an assumption
// used in assembly code, so check the assembly files before any change.
- COMPILE_ASSERT(PART_LEN % 16 == 0);
+ static_assert(PART_LEN % 16 == 0, "PART_LEN is not a multiple of 16");
// Initialize function pointers.
WebRtcAecm_CalcLinearEnergies = CalcLinearEnergiesC;
WebRtcAecm_StoreAdaptiveChannel = StoreAdaptiveChannelC;
WebRtcAecm_ResetAdaptiveChannel = ResetAdaptiveChannelC;
-#ifdef WEBRTC_DETECT_NEON
- uint64_t features = WebRtc_GetCPUFeaturesARM();
- if ((features & kCPUFeatureNEON) != 0)
- {
- WebRtcAecm_InitNeon();
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
WebRtcAecm_InitNeon();
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.h b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.h
index b52bb62d2de..33d80889aa4 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.h
@@ -13,8 +13,10 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AECM_AECM_CORE_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_AECM_AECM_CORE_H_
+extern "C" {
#include "webrtc/common_audio/ring_buffer.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+}
#include "webrtc/modules/audio_processing/aecm/aecm_defines.h"
#include "webrtc/typedefs.h"
@@ -400,7 +402,7 @@ extern ResetAdaptiveChannel WebRtcAecm_ResetAdaptiveChannel;
// For the above function pointers, functions for generic platforms are declared
// and defined as static in file aecm_core.c, while those for ARM Neon platforms
// are declared below and defined in file aecm_core_neon.c.
-#if defined(WEBRTC_DETECT_NEON) || defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore* aecm,
const uint16_t* far_spectrum,
int32_t* echo_est,
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.cc
index 3a8fafa4ece..57f859f550d 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.cc
@@ -14,19 +14,18 @@
#include <stddef.h>
#include <stdlib.h>
+extern "C" {
#include "webrtc/common_audio/ring_buffer.h"
#include "webrtc/common_audio/signal_processing/include/real_fft.h"
+}
#include "webrtc/modules/audio_processing/aecm/echo_control_mobile.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h"
-#include "webrtc/system_wrappers/include/compile_assert_c.h"
+extern "C" {
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
+}
#include "webrtc/typedefs.h"
// Square root of Hanning window in Q14.
-#if defined(WEBRTC_DETECT_NEON) || defined(WEBRTC_HAS_NEON)
-// Table is defined in an ARM assembly file.
-extern const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END;
-#else
static const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END = {
0, 399, 798, 1196, 1594, 1990, 2386, 2780, 3172,
3562, 3951, 4337, 4720, 5101, 5478, 5853, 6224,
@@ -37,7 +36,6 @@ static const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END = {
15231, 15373, 15506, 15631, 15746, 15851, 15947, 16034,
16111, 16179, 16237, 16286, 16325, 16354, 16373, 16384
};
-#endif
#ifdef AECM_WITH_ABS_APPROX
//Q15 alpha = 0.99439986968132 const Factor for magnitude approximation
@@ -768,4 +766,3 @@ static void ComfortNoise(AecmCore* aecm,
out[i].imag = WebRtcSpl_AddSatW16(out[i].imag, uImag[i]);
}
}
-
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_mips.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_mips.cc
index 3ca9982ebfa..e625a46ec5f 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_mips.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_mips.cc
@@ -1563,4 +1563,3 @@ static void ComfortNoise(AecmCore* aecm,
sgn = ((int)tt) >> 31;
out[PART_LEN].imag = sgn == (int16_t)(tt >> 15) ? (int16_t)tt : (16384 ^ sgn);
}
-
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_neon.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_neon.cc
index 1751fcf7ada..81c7667d981 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_neon.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_neon.cc
@@ -18,19 +18,6 @@
// TODO(kma): Re-write the corresponding assembly file, the offset
// generating script and makefile, to replace these C functions.
-// Square root of Hanning window in Q14.
-const ALIGN8_BEG int16_t WebRtcAecm_kSqrtHanning[] ALIGN8_END = {
- 0,
- 399, 798, 1196, 1594, 1990, 2386, 2780, 3172,
- 3562, 3951, 4337, 4720, 5101, 5478, 5853, 6224,
- 6591, 6954, 7313, 7668, 8019, 8364, 8705, 9040,
- 9370, 9695, 10013, 10326, 10633, 10933, 11227, 11514,
- 11795, 12068, 12335, 12594, 12845, 13089, 13325, 13553,
- 13773, 13985, 14189, 14384, 14571, 14749, 14918, 15079,
- 15231, 15373, 15506, 15631, 15746, 15851, 15947, 16034,
- 16111, 16179, 16237, 16286, 16325, 16354, 16373, 16384
-};
-
static inline void AddLanes(uint32_t* ptr, uint32x4_t v) {
#if defined(WEBRTC_ARCH_ARM64)
*(ptr) = vaddvq_u32(v);
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.cc
index 91e6f0e80ce..a81466e678e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.cc
@@ -15,8 +15,10 @@
#endif
#include <stdlib.h>
+extern "C" {
#include "webrtc/common_audio/ring_buffer.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+}
#include "webrtc/modules/audio_processing/aecm/aecm_core.h"
#define BUF_SIZE_FRAMES 50 // buffer size (frames)
@@ -79,7 +81,7 @@ static int WebRtcAecm_EstBufDelay(AecMobile* aecmInst, short msInSndCardBuf);
static int WebRtcAecm_DelayComp(AecMobile* aecmInst);
void* WebRtcAecm_Create() {
- AecMobile* aecm = malloc(sizeof(AecMobile));
+ AecMobile* aecm = static_cast<AecMobile*>(malloc(sizeof(AecMobile)));
WebRtcSpl_Init();
@@ -114,7 +116,7 @@ void* WebRtcAecm_Create() {
}
void WebRtcAecm_Free(void* aecmInst) {
- AecMobile* aecm = aecmInst;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
if (aecm == NULL) {
return;
@@ -138,7 +140,7 @@ void WebRtcAecm_Free(void* aecmInst) {
int32_t WebRtcAecm_Init(void *aecmInst, int32_t sampFreq)
{
- AecMobile* aecm = aecmInst;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
AecmConfig aecConfig;
if (aecm == NULL)
@@ -196,7 +198,7 @@ int32_t WebRtcAecm_Init(void *aecmInst, int32_t sampFreq)
// farend signal.
int32_t WebRtcAecm_GetBufferFarendError(void *aecmInst, const int16_t *farend,
size_t nrOfSamples) {
- AecMobile* aecm = aecmInst;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
if (aecm == NULL)
return -1;
@@ -216,7 +218,7 @@ int32_t WebRtcAecm_GetBufferFarendError(void *aecmInst, const int16_t *farend,
int32_t WebRtcAecm_BufferFarend(void *aecmInst, const int16_t *farend,
size_t nrOfSamples) {
- AecMobile* aecm = aecmInst;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
const int32_t err =
WebRtcAecm_GetBufferFarendError(aecmInst, farend, nrOfSamples);
@@ -239,7 +241,7 @@ int32_t WebRtcAecm_Process(void *aecmInst, const int16_t *nearendNoisy,
const int16_t *nearendClean, int16_t *out,
size_t nrOfSamples, int16_t msInSndCardBuf)
{
- AecMobile* aecm = aecmInst;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
int32_t retVal = 0;
size_t i;
short nmbrOfFilledBuffers;
@@ -435,7 +437,7 @@ int32_t WebRtcAecm_Process(void *aecmInst, const int16_t *nearendNoisy,
int32_t WebRtcAecm_set_config(void *aecmInst, AecmConfig config)
{
- AecMobile* aecm = aecmInst;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
if (aecm == NULL)
{
@@ -516,8 +518,8 @@ int32_t WebRtcAecm_InitEchoPath(void* aecmInst,
const void* echo_path,
size_t size_bytes)
{
- AecMobile* aecm = aecmInst;
- const int16_t* echo_path_ptr = echo_path;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
+ const int16_t* echo_path_ptr = static_cast<const int16_t*>(echo_path);
if (aecmInst == NULL) {
return -1;
@@ -544,8 +546,8 @@ int32_t WebRtcAecm_GetEchoPath(void* aecmInst,
void* echo_path,
size_t size_bytes)
{
- AecMobile* aecm = aecmInst;
- int16_t* echo_path_ptr = echo_path;
+ AecMobile* aecm = static_cast<AecMobile*>(aecmInst);
+ int16_t* echo_path_ptr = static_cast<int16_t*>(echo_path);
if (aecmInst == NULL) {
return -1;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi b/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi
index 264f3e5befb..e6eab36a01e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi
@@ -9,6 +9,8 @@
{
'variables': {
'shared_generated_dir': '<(SHARED_INTERMEDIATE_DIR)/audio_processing/asm_offsets',
+ # Outputs some low-level debug files.
+ 'aec_debug_dump%': 0,
},
'targets': [
{
@@ -16,7 +18,6 @@
'type': 'static_library',
'variables': {
# Outputs some low-level debug files.
- 'aec_debug_dump%': 0,
'agc_debug_dump%': 0,
# Disables the usual mode where we trust the reported system delay
@@ -34,17 +35,16 @@
'sources': [
'aec/aec_core.cc',
'aec/aec_core.h',
- 'aec/aec_core_internal.h',
- 'aec/aec_rdft.c',
+ 'aec/aec_core_optimized_methods.h',
+ 'aec/aec_rdft.cc',
'aec/aec_rdft.h',
'aec/aec_resampler.cc',
'aec/aec_resampler.h',
'aec/echo_cancellation.cc',
- 'aec/echo_cancellation_internal.h',
'aec/echo_cancellation.h',
- 'aecm/aecm_core.c',
+ 'aecm/aecm_core.cc',
'aecm/aecm_core.h',
- 'aecm/echo_control_mobile.c',
+ 'aecm/echo_control_mobile.cc',
'aecm/echo_control_mobile.h',
'agc/agc.cc',
'agc/agc.h',
@@ -91,9 +91,8 @@
'intelligibility/intelligibility_utils.h',
'level_estimator_impl.cc',
'level_estimator_impl.h',
- 'logging/aec_logging.h',
- 'logging/aec_logging_file_handling.cc',
- 'logging/aec_logging_file_handling.h',
+ 'logging/apm_data_dumper.cc',
+ 'logging/apm_data_dumper.h',
'noise_suppression_impl.cc',
'noise_suppression_impl.h',
'render_queue_item_verifier.h',
@@ -120,10 +119,10 @@
'typing_detection.h',
'utility/block_mean_calculator.cc',
'utility/block_mean_calculator.h',
- 'utility/delay_estimator.c',
+ 'utility/delay_estimator.cc',
'utility/delay_estimator.h',
'utility/delay_estimator_internal.h',
- 'utility/delay_estimator_wrapper.c',
+ 'utility/delay_estimator_wrapper.cc',
'utility/delay_estimator_wrapper.h',
'vad/common.h',
'vad/gmm.cc',
@@ -150,7 +149,9 @@
],
'conditions': [
['aec_debug_dump==1', {
- 'defines': ['WEBRTC_AEC_DEBUG_DUMP',],
+ 'defines': ['WEBRTC_AEC_DEBUG_DUMP=1',],
+ }, {
+ 'defines': ['WEBRTC_AEC_DEBUG_DUMP=0',],
}],
['aec_untrusted_delay_for_testing==1', {
'defines': ['WEBRTC_UNTRUSTED_DELAY',],
@@ -201,19 +202,19 @@
}],
['target_arch=="mipsel" and mips_arch_variant!="r6"', {
'sources': [
- 'aecm/aecm_core_mips.c',
+ 'aecm/aecm_core_mips.cc',
],
'conditions': [
['mips_float_abi=="hard"', {
'sources': [
'aec/aec_core_mips.cc',
- 'aec/aec_rdft_mips.c',
+ 'aec/aec_rdft_mips.cc',
],
}],
],
}, {
'sources': [
- 'aecm/aecm_core_c.c',
+ 'aecm/aecm_core_c.cc',
],
}],
],
@@ -246,9 +247,14 @@
'type': 'static_library',
'sources': [
'aec/aec_core_sse2.cc',
- 'aec/aec_rdft_sse2.c',
+ 'aec/aec_rdft_sse2.cc',
],
'conditions': [
+ ['aec_debug_dump==1', {
+ 'defines': ['WEBRTC_AEC_DEBUG_DUMP=1',],
+ }, {
+ 'defines': ['WEBRTC_AEC_DEBUG_DUMP=0',],
+ }],
['os_posix==1', {
'cflags': [ '-msse2', ],
'xcode_settings': {
@@ -269,10 +275,18 @@
],
'sources': [
'aec/aec_core_neon.cc',
- 'aec/aec_rdft_neon.c',
- 'aecm/aecm_core_neon.c',
+ 'aec/aec_rdft_neon.cc',
+ 'aecm/aecm_core_neon.cc',
'ns/nsx_core_neon.c',
],
+ 'conditions': [
+ ['aec_debug_dump==1', {
+ 'defines': ['WEBRTC_AEC_DEBUG_DUMP=1',],
+ }],
+ ['aec_debug_dump==0', {
+ 'defines': ['WEBRTC_AEC_DEBUG_DUMP=0',],
+ }],
+ ],
}],
}],
],
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc
index 2b6f1c4ac19..e75b3280346 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc
@@ -94,7 +94,7 @@ bool is_multi_band(int sample_rate_hz) {
sample_rate_hz == AudioProcessing::kSampleRate48kHz;
}
-int ClosestNativeRate(int min_proc_rate) {
+int ClosestHigherNativeRate(int min_proc_rate) {
for (int rate : AudioProcessing::kNativeSampleRatesHz) {
if (rate >= min_proc_rate) {
return rate;
@@ -163,12 +163,10 @@ AudioProcessingImpl::AudioProcessingImpl(const Config& config,
private_submodules_(new ApmPrivateSubmodules(beamformer)),
constants_(config.Get<ExperimentalAgc>().startup_min_volume,
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
- false,
+ false),
#else
- config.Get<ExperimentalAgc>().enabled,
+ config.Get<ExperimentalAgc>().enabled),
#endif
- config.Get<Intelligibility>().enabled),
-
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
capture_(false,
#else
@@ -176,7 +174,8 @@ AudioProcessingImpl::AudioProcessingImpl(const Config& config,
#endif
config.Get<Beamforming>().array_geometry,
config.Get<Beamforming>().target_direction),
- capture_nonlocked_(config.Get<Beamforming>().enabled)
+ capture_nonlocked_(config.Get<Beamforming>().enabled,
+ config.Get<Intelligibility>().enabled)
{
{
rtc::CritScope cs_render(&crit_render_);
@@ -362,22 +361,24 @@ int AudioProcessingImpl::InitializeLocked(const ProcessingConfig& config) {
formats_.api_format = config;
- capture_nonlocked_.fwd_proc_format = StreamConfig(ClosestNativeRate(std::min(
- formats_.api_format.input_stream().sample_rate_hz(),
- formats_.api_format.output_stream().sample_rate_hz())));
+ capture_nonlocked_.fwd_proc_format = StreamConfig(ClosestHigherNativeRate(
+ std::min(formats_.api_format.input_stream().sample_rate_hz(),
+ formats_.api_format.output_stream().sample_rate_hz())));
- // We normally process the reverse stream at 16 kHz. Unless...
- int rev_proc_rate = kSampleRate16kHz;
+ int rev_proc_rate = ClosestHigherNativeRate(std::min(
+ formats_.api_format.reverse_input_stream().sample_rate_hz(),
+ formats_.api_format.reverse_output_stream().sample_rate_hz()));
+ // TODO(aluebs): Remove this restriction once we figure out why the 3-band
+ // splitting filter degrades the AEC performance.
+ if (rev_proc_rate > kSampleRate32kHz) {
+ rev_proc_rate = is_rev_processed() ? kSampleRate32kHz : kSampleRate16kHz;
+ }
+ // If the forward sample rate is 8 kHz, the reverse stream is also processed
+ // at this rate.
if (capture_nonlocked_.fwd_proc_format.sample_rate_hz() == kSampleRate8kHz) {
- // ...the forward stream is at 8 kHz.
rev_proc_rate = kSampleRate8kHz;
} else {
- if (formats_.api_format.reverse_input_stream().sample_rate_hz() ==
- kSampleRate32kHz) {
- // ...or the input is at 32 kHz, in which case we use the splitting
- // filter rather than the resampler.
- rev_proc_rate = kSampleRate32kHz;
- }
+ rev_proc_rate = std::max(rev_proc_rate, static_cast<int>(kSampleRate16kHz));
}
// Always downmix the reverse stream to mono for analysis. This has been
@@ -409,6 +410,13 @@ void AudioProcessingImpl::SetExtraOptions(const Config& config) {
InitializeTransient();
}
+ if(capture_nonlocked_.intelligibility_enabled !=
+ config.Get<Intelligibility>().enabled) {
+ capture_nonlocked_.intelligibility_enabled =
+ config.Get<Intelligibility>().enabled;
+ InitializeIntelligibility();
+ }
+
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
if (capture_nonlocked_.beamformer_enabled !=
config.Get<Beamforming>().enabled) {
@@ -702,10 +710,13 @@ int AudioProcessingImpl::ProcessStreamLocked() {
ca->CopyLowPassToReference();
}
public_submodules_->noise_suppression->ProcessCaptureAudio(ca);
- if (constants_.intelligibility_enabled) {
+ if (capture_nonlocked_.intelligibility_enabled) {
RTC_DCHECK(public_submodules_->noise_suppression->is_enabled());
+ int gain_db = public_submodules_->gain_control->is_enabled() ?
+ public_submodules_->gain_control->compression_gain_db() :
+ 0;
public_submodules_->intelligibility_enhancer->SetCaptureNoiseEstimate(
- public_submodules_->noise_suppression->NoiseEstimate());
+ public_submodules_->noise_suppression->NoiseEstimate(), gain_db);
}
// Ensure that the stream delay was set before the call to the
@@ -898,7 +909,7 @@ int AudioProcessingImpl::ProcessReverseStreamLocked() {
ra->SplitIntoFrequencyBands();
}
- if (constants_.intelligibility_enabled) {
+ if (capture_nonlocked_.intelligibility_enabled) {
public_submodules_->intelligibility_enhancer->ProcessRenderAudio(
ra->split_channels_f(kBand0To8kHz), capture_nonlocked_.split_rate,
ra->num_channels());
@@ -1146,16 +1157,16 @@ bool AudioProcessingImpl::fwd_analysis_needed() const {
}
bool AudioProcessingImpl::is_rev_processed() const {
- return constants_.intelligibility_enabled;
+ return capture_nonlocked_.intelligibility_enabled;
}
bool AudioProcessingImpl::rev_synthesis_needed() const {
return (is_rev_processed() &&
- formats_.rev_proc_format.sample_rate_hz() == kSampleRate32kHz);
+ is_multi_band(formats_.rev_proc_format.sample_rate_hz()));
}
bool AudioProcessingImpl::rev_analysis_needed() const {
- return formats_.rev_proc_format.sample_rate_hz() == kSampleRate32kHz &&
+ return is_multi_band(formats_.rev_proc_format.sample_rate_hz()) &&
(is_rev_processed() ||
public_submodules_->echo_cancellation
->is_enabled_render_side_query() ||
@@ -1211,7 +1222,7 @@ void AudioProcessingImpl::InitializeBeamformer() {
}
void AudioProcessingImpl::InitializeIntelligibility() {
- if (constants_.intelligibility_enabled) {
+ if (capture_nonlocked_.intelligibility_enabled) {
public_submodules_->intelligibility_enhancer.reset(
new IntelligibilityEnhancer(capture_nonlocked_.split_rate,
render_.render_audio->num_channels(),
@@ -1392,8 +1403,10 @@ int AudioProcessingImpl::WriteInitMessage() {
formats_.api_format.reverse_input_stream().sample_rate_hz());
msg->set_output_sample_rate(
formats_.api_format.output_stream().sample_rate_hz());
- // TODO(ekmeyerson): Add reverse output fields to
- // debug_dump_.capture.event_msg.
+ msg->set_reverse_output_sample_rate(
+ formats_.api_format.reverse_output_stream().sample_rate_hz());
+ msg->set_num_reverse_output_channels(
+ formats_.api_format.reverse_output_stream().num_channels());
RETURN_ON_ERR(WriteMessageToDebugFile(debug_dump_.debug_file.get(),
&debug_dump_.num_bytes_left_for_log_,
@@ -1436,6 +1449,8 @@ int AudioProcessingImpl::WriteConfigMessage(bool forced) {
config.set_transient_suppression_enabled(
capture_.transient_suppressor_enabled);
+ config.set_intelligibility_enhancer_enabled(
+ capture_nonlocked_.intelligibility_enabled);
std::string experiments_description =
public_submodules_->echo_cancellation->GetExperimentsDescription();
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h
index 7323b85c78e..04ddabd1c7f 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h
@@ -276,16 +276,12 @@ class AudioProcessingImpl : public AudioProcessing {
// APM constants.
const struct ApmConstants {
- ApmConstants(int agc_startup_min_volume,
- bool use_experimental_agc,
- bool intelligibility_enabled)
+ ApmConstants(int agc_startup_min_volume, bool use_experimental_agc)
: // Format of processing streams at input/output call sites.
agc_startup_min_volume(agc_startup_min_volume),
- use_experimental_agc(use_experimental_agc),
- intelligibility_enabled(intelligibility_enabled) {}
+ use_experimental_agc(use_experimental_agc) {}
int agc_startup_min_volume;
bool use_experimental_agc;
- bool intelligibility_enabled;
} constants_;
struct ApmCaptureState {
@@ -325,11 +321,13 @@ class AudioProcessingImpl : public AudioProcessing {
} capture_ GUARDED_BY(crit_capture_);
struct ApmCaptureNonLockedState {
- ApmCaptureNonLockedState(bool beamformer_enabled)
+ ApmCaptureNonLockedState(bool beamformer_enabled,
+ bool intelligibility_enabled)
: fwd_proc_format(kSampleRate16kHz),
split_rate(kSampleRate16kHz),
stream_delay_ms(0),
- beamformer_enabled(beamformer_enabled) {}
+ beamformer_enabled(beamformer_enabled),
+ intelligibility_enabled(intelligibility_enabled) {}
// Only the rate and samples fields of fwd_proc_format_ are used because the
// forward processing number of channels is mutable and is tracked by the
// capture_audio_.
@@ -337,6 +335,7 @@ class AudioProcessingImpl : public AudioProcessing {
int split_rate;
int stream_delay_ms;
bool beamformer_enabled;
+ bool intelligibility_enabled;
} capture_nonlocked_;
struct ApmRenderState {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_unittest.cc
index 948c5efd93f..ded75c86520 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/audio_processing_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_unittest.cc
@@ -54,7 +54,12 @@ bool write_ref_data = false;
const google::protobuf::int32 kChannels[] = {1, 2};
const int kSampleRates[] = {8000, 16000, 32000, 48000};
+#if defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
+// Android doesn't support 48kHz.
+const int kProcessSampleRates[] = {8000, 16000, 32000};
+#elif defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
const int kProcessSampleRates[] = {8000, 16000, 32000, 48000};
+#endif
enum StreamDirection { kForward = 0, kReverse };
@@ -2692,7 +2697,7 @@ INSTANTIATE_TEST_CASE_P(
std::tr1::make_tuple(16000, 32000, 32000, 32000, 25, 0),
std::tr1::make_tuple(16000, 32000, 16000, 32000, 25, 20),
std::tr1::make_tuple(16000, 16000, 48000, 16000, 40, 20),
- std::tr1::make_tuple(16000, 16000, 32000, 16000, 50, 20),
+ std::tr1::make_tuple(16000, 16000, 32000, 16000, 40, 20),
std::tr1::make_tuple(16000, 16000, 16000, 16000, 0, 0)));
#elif defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
@@ -2748,7 +2753,7 @@ INSTANTIATE_TEST_CASE_P(
std::tr1::make_tuple(16000, 32000, 32000, 32000, 25, 0),
std::tr1::make_tuple(16000, 32000, 16000, 32000, 25, 20),
std::tr1::make_tuple(16000, 16000, 48000, 16000, 35, 20),
- std::tr1::make_tuple(16000, 16000, 32000, 16000, 40, 20),
+ std::tr1::make_tuple(16000, 16000, 32000, 16000, 35, 20),
std::tr1::make_tuple(16000, 16000, 16000, 16000, 0, 0)));
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_processing/debug.proto b/chromium/third_party/webrtc/modules/audio_processing/debug.proto
index 1c025fbc72c..44177735e8c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/debug.proto
+++ b/chromium/third_party/webrtc/modules/audio_processing/debug.proto
@@ -12,6 +12,8 @@ message Init {
optional int32 num_reverse_channels = 5;
optional int32 reverse_sample_rate = 6;
optional int32 output_sample_rate = 7;
+ optional int32 reverse_output_sample_rate = 8;
+ optional int32 num_reverse_output_channels = 9;
}
// May contain interleaved or deinterleaved data, but don't store both formats.
@@ -44,7 +46,7 @@ message Stream {
// Contains the configurations of various APM component. A Config message is
// added when any of the fields are changed.
message Config {
- // Next field number 18.
+ // Next field number 19.
// Acoustic echo canceler.
optional bool aec_enabled = 1;
optional bool aec_delay_agnostic_enabled = 2;
@@ -70,6 +72,8 @@ message Config {
// Semicolon-separated string containing experimental feature
// descriptions.
optional string experiments_description = 17;
+ // Intelligibility Enhancer
+ optional bool intelligibility_enhancer_enabled = 18;
}
message Event {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
index 97f69974dba..d7b27e99f3c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
@@ -12,6 +12,7 @@
#include <string.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_processing/aecm/echo_control_mobile.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/system_wrappers/include/logging.h"
diff --git a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc
index 3a6fcb116d5..2461f72ad35 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/audio_processing/gain_control_impl.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/optional.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/modules/audio_processing/agc/legacy/gain_control.h"
@@ -274,6 +275,11 @@ int GainControlImpl::ProcessCaptureAudio(AudioBuffer* audio,
return AudioProcessing::kNoError;
}
+int GainControlImpl::compression_gain_db() const {
+ rtc::CritScope cs(crit_capture_);
+ return compression_gain_db_;
+}
+
// TODO(ajm): ensure this is called under kAdaptiveAnalog.
int GainControlImpl::set_stream_analog_level(int level) {
rtc::CritScope cs(crit_capture_);
@@ -413,11 +419,6 @@ int GainControlImpl::set_compression_gain_db(int gain) {
return Configure();
}
-int GainControlImpl::compression_gain_db() const {
- rtc::CritScope cs(crit_capture_);
- return compression_gain_db_;
-}
-
int GainControlImpl::enable_limiter(bool enable) {
{
rtc::CritScope cs(crit_capture_);
diff --git a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h
index 9498ac60b54..2459ce3b4b7 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h
@@ -51,6 +51,8 @@ class GainControlImpl : public GainControl {
// Reads render side data that has been queued on the render call.
void ReadQueuedRenderData();
+ int compression_gain_db() const override;
+
private:
class GainController;
@@ -61,7 +63,6 @@ class GainControlImpl : public GainControl {
int set_target_level_dbfs(int level) override;
int target_level_dbfs() const override;
int set_compression_gain_db(int gain) override;
- int compression_gain_db() const override;
int enable_limiter(bool enable) override;
int set_analog_level_limits(int minimum, int maximum) override;
int analog_level_minimum() const override;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h b/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h
index 0c93a984b24..2f8e48f82da 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h
@@ -160,9 +160,7 @@ struct Beamforming {
const SphericalPointf target_direction;
};
-// Use to enable intelligibility enhancer in audio processing. Must be provided
-// though the constructor. It will have no impact if used with
-// AudioProcessing::SetExtraOptions().
+// Use to enable intelligibility enhancer in audio processing.
//
// Note: If enabled and the reverse stream has more than one output channel,
// the reverse stream will become an upmixed mono signal.
@@ -685,7 +683,7 @@ class EchoCancellation {
// (Pre non-linear processing suppression) A_NLP = 10log_10(P_echo / P_a)
AudioProcessing::Statistic a_nlp;
- // Fraction of time that the AEC linear filter is divergent, in a 0.5-second
+ // Fraction of time that the AEC linear filter is divergent, in a 1-second
// non-overlapped aggregation window.
float divergent_filter_fraction;
};
diff --git a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc
index de36b7a8bc7..ae7f9119213 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.cc
@@ -38,6 +38,8 @@ const float kDecayRate = 0.994f; // Power estimation decay rate.
const float kMaxRelativeGainChange = 0.006f;
const float kRho = 0.0004f; // Default production and interpretation SNR.
const float kPowerNormalizationFactor = 1.f / (1 << 30);
+const float kMaxActiveSNR = 128.f; // 21dB
+const float kMinInactiveSNR = 32.f; // 15dB
// Returns dot product of vectors |a| and |b| with size |length|.
float DotProduct(const float* a, const float* b, size_t length) {
@@ -84,6 +86,8 @@ IntelligibilityEnhancer::IntelligibilityEnhancer(int sample_rate_hz,
audio_s16_(chunk_length_),
chunks_since_voice_(kSpeechOffsetDelay),
is_speech_(false),
+ snr_(kMaxActiveSNR),
+ is_active_(false),
noise_estimation_buffer_(num_noise_bins),
noise_estimation_queue_(kMaxNumNoiseEstimatesToBuffer,
std::vector<float>(num_noise_bins),
@@ -105,8 +109,12 @@ IntelligibilityEnhancer::IntelligibilityEnhancer(int sample_rate_hz,
}
void IntelligibilityEnhancer::SetCaptureNoiseEstimate(
- std::vector<float> noise) {
+ std::vector<float> noise, int gain_db) {
RTC_DCHECK_EQ(noise.size(), num_noise_bins_);
+ const float gain = std::pow(10.f, gain_db / 20.f);
+ for (auto& bin : noise) {
+ bin *= gain;
+ }
// Disregarding return value since buffer overflow is acceptable, because it
// is not critical to get each noise estimate.
if (noise_estimation_queue_.Insert(&noise)) {
@@ -135,29 +143,55 @@ void IntelligibilityEnhancer::ProcessAudioBlock(
if (is_speech_) {
clear_power_estimator_.Step(in_block[0]);
}
- const std::vector<float>& clear_power = clear_power_estimator_.power();
- const std::vector<float>& noise_power = noise_power_estimator_.power();
- MapToErbBands(clear_power.data(), render_filter_bank_,
- filtered_clear_pow_.data());
- MapToErbBands(noise_power.data(), capture_filter_bank_,
- filtered_noise_pow_.data());
- SolveForGainsGivenLambda(kLambdaTop, start_freq_, gains_eq_.data());
- const float power_target = std::accumulate(
- filtered_clear_pow_.data(), filtered_clear_pow_.data() + bank_size_, 0.f);
- const float power_top =
- DotProduct(gains_eq_.data(), filtered_clear_pow_.data(), bank_size_);
- SolveForGainsGivenLambda(kLambdaBot, start_freq_, gains_eq_.data());
- const float power_bot =
- DotProduct(gains_eq_.data(), filtered_clear_pow_.data(), bank_size_);
- if (power_target >= power_bot && power_target <= power_top) {
- SolveForLambda(power_target);
- UpdateErbGains();
- } // Else experiencing power underflow, so do nothing.
+ SnrBasedEffectActivation();
+ if (is_active_) {
+ MapToErbBands(clear_power_estimator_.power().data(), render_filter_bank_,
+ filtered_clear_pow_.data());
+ MapToErbBands(noise_power_estimator_.power().data(), capture_filter_bank_,
+ filtered_noise_pow_.data());
+ SolveForGainsGivenLambda(kLambdaTop, start_freq_, gains_eq_.data());
+ const float power_target = std::accumulate(
+ filtered_clear_pow_.data(),
+ filtered_clear_pow_.data() + bank_size_,
+ 0.f);
+ const float power_top =
+ DotProduct(gains_eq_.data(), filtered_clear_pow_.data(), bank_size_);
+ SolveForGainsGivenLambda(kLambdaBot, start_freq_, gains_eq_.data());
+ const float power_bot =
+ DotProduct(gains_eq_.data(), filtered_clear_pow_.data(), bank_size_);
+ if (power_target >= power_bot && power_target <= power_top) {
+ SolveForLambda(power_target);
+ UpdateErbGains();
+ } // Else experiencing power underflow, so do nothing.
+ }
for (size_t i = 0; i < in_channels; ++i) {
gain_applier_.Apply(in_block[i], out_block[i]);
}
}
+void IntelligibilityEnhancer::SnrBasedEffectActivation() {
+ const float* clear_psd = clear_power_estimator_.power().data();
+ const float* noise_psd = noise_power_estimator_.power().data();
+ const float clear_power =
+ std::accumulate(clear_psd, clear_psd + freqs_, 0.f);
+ const float noise_power =
+ std::accumulate(noise_psd, noise_psd + freqs_, 0.f);
+ snr_ = kDecayRate * snr_ + (1.f - kDecayRate) * clear_power /
+ (noise_power + std::numeric_limits<float>::epsilon());
+ if (is_active_) {
+ if (snr_ > kMaxActiveSNR) {
+ is_active_ = false;
+ // Set the target gains to unity.
+ float* gains = gain_applier_.target();
+ for (size_t i = 0; i < freqs_; ++i) {
+ gains[i] = 1.f;
+ }
+ }
+ } else {
+ is_active_ = snr_ < kMinInactiveSNR;
+ }
+}
+
void IntelligibilityEnhancer::SolveForLambda(float power_target) {
const float kConvergeThresh = 0.001f; // TODO(ekmeyerson): Find best values
const int kMaxIters = 100; // for these, based on experiments.
diff --git a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h
index 14132129349..63ae80e2c44 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer.h
@@ -36,7 +36,7 @@ class IntelligibilityEnhancer : public LappedTransform::Callback {
size_t num_noise_bins);
// Sets the capture noise magnitude spectrum estimate.
- void SetCaptureNoiseEstimate(std::vector<float> noise);
+ void SetCaptureNoiseEstimate(std::vector<float> noise, int gain_db);
// Reads chunk of speech in time domain and updates with modified signal.
void ProcessRenderAudio(float* const* audio,
@@ -56,6 +56,12 @@ class IntelligibilityEnhancer : public LappedTransform::Callback {
private:
FRIEND_TEST_ALL_PREFIXES(IntelligibilityEnhancerTest, TestErbCreation);
FRIEND_TEST_ALL_PREFIXES(IntelligibilityEnhancerTest, TestSolveForGains);
+ FRIEND_TEST_ALL_PREFIXES(IntelligibilityEnhancerTest,
+ TestNoiseGainHasExpectedResult);
+
+ // Updates the SNR estimation and enables or disables this component using a
+ // hysteresis.
+ void SnrBasedEffectActivation();
// Bisection search for optimal |lambda|.
void SolveForLambda(float power_target);
@@ -103,6 +109,8 @@ class IntelligibilityEnhancer : public LappedTransform::Callback {
std::vector<int16_t> audio_s16_;
size_t chunks_since_voice_;
bool is_speech_;
+ float snr_;
+ bool is_active_;
std::vector<float> noise_estimation_buffer_;
SwapQueue<std::vector<float>, RenderQueueItemVerifier<float>>
diff --git a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer_unittest.cc
index 080e228cb8e..30035ab16ed 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_enhancer_unittest.cc
@@ -237,7 +237,7 @@ void ProcessOneFrame(int sample_rate_hz,
noise_suppressor->ProcessCaptureAudio(capture_audio_buffer);
intelligibility_enhancer->SetCaptureNoiseEstimate(
- noise_suppressor->NoiseEstimate());
+ noise_suppressor->NoiseEstimate(), 0);
if (sample_rate_hz > AudioProcessing::kSampleRate16kHz) {
render_audio_buffer->MergeFrequencyBands();
@@ -311,12 +311,17 @@ void RunBitexactnessTest(int sample_rate_hz,
output_reference, render_output, kElementErrorBound));
}
+float float_rand() {
+ return std::rand() * 2.f / RAND_MAX - 1;
+}
+
} // namespace
class IntelligibilityEnhancerTest : public ::testing::Test {
protected:
IntelligibilityEnhancerTest()
: clear_data_(kSamples), noise_data_(kSamples), orig_data_(kSamples) {
+ std::srand(1);
enh_.reset(
new IntelligibilityEnhancer(kSampleRate, kNumChannels, kNumNoiseBins));
}
@@ -352,8 +357,6 @@ TEST_F(IntelligibilityEnhancerTest, TestRenderUpdate) {
std::fill(orig_data_.begin(), orig_data_.end(), 0.f);
std::fill(clear_data_.begin(), clear_data_.end(), 0.f);
EXPECT_FALSE(CheckUpdate());
- std::srand(1);
- auto float_rand = []() { return std::rand() * 2.f / RAND_MAX - 1; };
std::generate(noise_data_.begin(), noise_data_.end(), float_rand);
EXPECT_FALSE(CheckUpdate());
std::generate(clear_data_.begin(), clear_data_.end(), float_rand);
@@ -403,6 +406,29 @@ TEST_F(IntelligibilityEnhancerTest, TestSolveForGains) {
}
}
+TEST_F(IntelligibilityEnhancerTest, TestNoiseGainHasExpectedResult) {
+ const int kGainDB = 6;
+ const float kGainFactor = std::pow(10.f, kGainDB / 20.f);
+ const float kTolerance = 0.003f;
+ std::vector<float> noise(kNumNoiseBins);
+ std::vector<float> noise_psd(kNumNoiseBins);
+ std::generate(noise.begin(), noise.end(), float_rand);
+ for (size_t i = 0; i < kNumNoiseBins; ++i) {
+ noise_psd[i] = kGainFactor * kGainFactor * noise[i] * noise[i];
+ }
+ float* clear_cursor = clear_data_.data();
+ for (size_t i = 0; i < kNumFramesToProcess; ++i) {
+ enh_->SetCaptureNoiseEstimate(noise, kGainDB);
+ enh_->ProcessRenderAudio(&clear_cursor, kSampleRate, kNumChannels);
+ }
+ const std::vector<float>& estimated_psd =
+ enh_->noise_power_estimator_.power();
+ for (size_t i = 0; i < kNumNoiseBins; ++i) {
+ EXPECT_LT(std::abs(estimated_psd[i] - noise_psd[i]) / noise_psd[i],
+ kTolerance);
+ }
+}
+
TEST(IntelligibilityEnhancerBitExactnessTest, DISABLED_Mono8kHz) {
const float kOutputReference[] = {-0.001892f, -0.003296f, -0.001953f};
diff --git a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_utils.cc b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_utils.cc
index 3a9433b4768..3675f66cafe 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_utils.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/intelligibility_utils.cc
@@ -22,16 +22,15 @@ namespace intelligibility {
namespace {
+const float kMinFactor = 0.01f;
+const float kMaxFactor = 1000.f;
+
// Return |current| changed towards |target|, with the relative change being at
// most |limit|.
float UpdateFactor(float target, float current, float limit) {
float gain = target / (current + std::numeric_limits<float>::epsilon());
- if (gain < 1.f - limit) {
- gain = 1.f - limit;
- } else if (gain > 1.f + limit) {
- gain = 1.f + limit;
- }
- return current * gain + std::numeric_limits<float>::epsilon();
+ gain = std::min(std::max(gain, 1.f - limit), 1.f + limit);
+ return std::min(std::max(current * gain, kMinFactor), kMaxFactor);;
}
} // namespace
diff --git a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc
index 64ccfd96efc..abd10d85165 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/intelligibility/test/intelligibility_proc.cc
@@ -64,7 +64,7 @@ void void_main(int argc, char* argv[]) {
capture_audio.CopyFrom(noise_buf.channels(), stream_config);
ns.AnalyzeCaptureAudio(&capture_audio);
ns.ProcessCaptureAudio(&capture_audio);
- enh.SetCaptureNoiseEstimate(ns.NoiseEstimate());
+ enh.SetCaptureNoiseEstimate(ns.NoiseEstimate(), 0);
enh.ProcessRenderAudio(in_buf.channels(), in_file.sample_rate(),
in_file.num_channels());
Interleave(in_buf.channels(), in_buf.num_frames(), in_buf.num_channels(),
diff --git a/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging.h b/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging.h
deleted file mode 100644
index b062913be28..00000000000
--- a/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging.h
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_LOGGING_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_LOGGING_
-
-#include <stdio.h>
-
-#include "webrtc/modules/audio_processing/logging/aec_logging_file_handling.h"
-
-// To enable AEC logging, invoke GYP with -Daec_debug_dump=1.
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-// Dumps a wav data to file.
-#define RTC_AEC_DEBUG_WAV_WRITE(file, data, num_samples) \
- do { \
- rtc_WavWriteSamples(file, data, num_samples); \
- } while (0)
-
-// (Re)opens a wav file for writing using the specified sample rate.
-#define RTC_AEC_DEBUG_WAV_REOPEN(name, instance_index, process_rate, \
- sample_rate, wav_file) \
- do { \
- WebRtcAec_ReopenWav(name, instance_index, process_rate, sample_rate, \
- wav_file); \
- } while (0)
-
-// Closes a wav file.
-#define RTC_AEC_DEBUG_WAV_CLOSE(wav_file) \
- do { \
- rtc_WavClose(wav_file); \
- } while (0)
-
-// Dumps a raw data to file.
-#define RTC_AEC_DEBUG_RAW_WRITE(file, data, data_size) \
- do { \
- (void) fwrite(data, data_size, 1, file); \
- } while (0)
-
-// Dumps a raw scalar int32 to file.
-#define RTC_AEC_DEBUG_RAW_WRITE_SCALAR_INT32(file, data) \
- do { \
- int32_t value_to_store = data; \
- (void) fwrite(&value_to_store, sizeof(value_to_store), 1, file); \
- } while (0)
-
-// Dumps a raw scalar double to file.
-#define RTC_AEC_DEBUG_RAW_WRITE_SCALAR_DOUBLE(file, data) \
- do { \
- double value_to_store = data; \
- (void) fwrite(&value_to_store, sizeof(value_to_store), 1, file); \
- } while (0)
-
-// Opens a raw data file for writing using the specified sample rate.
-#define RTC_AEC_DEBUG_RAW_OPEN(name, instance_counter, file) \
- do { \
- WebRtcAec_RawFileOpen(name, instance_counter, file); \
- } while (0)
-
-// Closes a raw data file.
-#define RTC_AEC_DEBUG_RAW_CLOSE(file) \
- do { \
- fclose(file); \
- } while (0)
-
-#else // RTC_AEC_DEBUG_DUMP
-#define RTC_AEC_DEBUG_WAV_WRITE(file, data, num_samples) \
- do { \
- } while (0)
-
-#define RTC_AEC_DEBUG_WAV_REOPEN(wav_file, name, instance_index, process_rate, \
- sample_rate) \
- do { \
- } while (0)
-
-#define RTC_AEC_DEBUG_WAV_CLOSE(wav_file) \
- do { \
- } while (0)
-
-#define RTC_AEC_DEBUG_RAW_WRITE(file, data, data_size) \
- do { \
- } while (0)
-
-#define RTC_AEC_DEBUG_RAW_WRITE_SCALAR_INT32(file, data) \
- do { \
- } while (0)
-
-#define RTC_AEC_DEBUG_RAW_WRITE_SCALAR_DOUBLE(file, data) \
- do { \
- } while (0)
-
-#define RTC_AEC_DEBUG_RAW_OPEN(file, name, instance_counter) \
- do { \
- } while (0)
-
-#define RTC_AEC_DEBUG_RAW_CLOSE(file) \
- do { \
- } while (0)
-
-#endif // WEBRTC_AEC_DEBUG_DUMP
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_LOGGING_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.cc b/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.cc
deleted file mode 100644
index 3a434714e1e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.cc
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_processing/logging/aec_logging_file_handling.h"
-
-#include <stdint.h>
-#include <stdio.h>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/stringutils.h"
-#include "webrtc/common_audio/wav_file.h"
-#include "webrtc/typedefs.h"
-
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-void WebRtcAec_ReopenWav(const char* name,
- int instance_index,
- int process_rate,
- int sample_rate,
- rtc_WavWriter** wav_file) {
- if (*wav_file) {
- if (rtc_WavSampleRate(*wav_file) == sample_rate)
- return;
- rtc_WavClose(*wav_file);
- }
- char filename[64];
- int written = rtc::sprintfn(filename, sizeof(filename), "%s%d-%d.wav", name,
- instance_index, process_rate);
-
- // Ensure there was no buffer output error.
- RTC_DCHECK_GE(written, 0);
- // Ensure that the buffer size was sufficient.
- RTC_DCHECK_LT(static_cast<size_t>(written), sizeof(filename));
-
- *wav_file = rtc_WavOpen(filename, sample_rate, 1);
-}
-
-void WebRtcAec_RawFileOpen(const char* name, int instance_index, FILE** file) {
- char filename[64];
- int written = rtc::sprintfn(filename, sizeof(filename), "%s_%d.dat", name,
- instance_index);
-
- // Ensure there was no buffer output error.
- RTC_DCHECK_GE(written, 0);
- // Ensure that the buffer size was sufficient.
- RTC_DCHECK_LT(static_cast<size_t>(written), sizeof(filename));
-
- *file = fopen(filename, "wb");
-}
-
-#endif // WEBRTC_AEC_DEBUG_DUMP
diff --git a/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.h b/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.h
deleted file mode 100644
index 5ec83948726..00000000000
--- a/chromium/third_party/webrtc/modules/audio_processing/logging/aec_logging_file_handling.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_LOGGING_FILE_HANDLING_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_LOGGING_FILE_HANDLING_
-
-#include <stdio.h>
-
-#include "webrtc/common_audio/wav_file.h"
-#include "webrtc/typedefs.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-// Opens a new Wav file for writing. If it was already open with a different
-// sample frequency, it closes it first.
-void WebRtcAec_ReopenWav(const char* name,
- int instance_index,
- int process_rate,
- int sample_rate,
- rtc_WavWriter** wav_file);
-
-// Opens dumpfile with instance-specific filename.
-void WebRtcAec_RawFileOpen(const char* name, int instance_index, FILE** file);
-
-#endif // WEBRTC_AEC_DEBUG_DUMP
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_LOGGING_FILE_HANDLING_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.cc b/chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.cc
new file mode 100644
index 00000000000..491196e0972
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.cc
@@ -0,0 +1,65 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/logging/apm_data_dumper.h"
+
+#include <sstream>
+
+#include "webrtc/base/stringutils.h"
+
+// Check to verify that the define is properly set.
+#if !defined(WEBRTC_AEC_DEBUG_DUMP) || \
+ (WEBRTC_AEC_DEBUG_DUMP != 0 && WEBRTC_AEC_DEBUG_DUMP != 1)
+#error "Set WEBRTC_AEC_DEBUG_DUMP to either 0 or 1"
+#endif
+
+namespace webrtc {
+
+namespace {
+
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+std::string FormFileName(const char* name,
+ int instance_index,
+ int reinit_index,
+ const std::string& suffix) {
+ std::stringstream ss;
+ ss << name << "_" << instance_index << "-" << reinit_index << suffix;
+ return ss.str();
+}
+#endif
+
+} // namespace
+
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+FILE* ApmDataDumper::GetRawFile(const char* name) {
+ std::string filename =
+ FormFileName(name, instance_index_, recording_set_index_, ".dat");
+ auto& f = raw_files_[filename];
+ if (!f) {
+ f.reset(fopen(filename.c_str(), "wb"));
+ }
+ return f.get();
+}
+
+WavWriter* ApmDataDumper::GetWavFile(const char* name,
+ int sample_rate_hz,
+ int num_channels) {
+ std::string filename =
+ FormFileName(name, instance_index_, recording_set_index_, ".wav");
+ auto& f = wav_files_[filename];
+ if (!f) {
+ f.reset(new WavWriter(filename.c_str(), sample_rate_hz, num_channels));
+ }
+ return f.get();
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.h b/chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.h
new file mode 100644
index 00000000000..93232b7c9dc
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/logging/apm_data_dumper.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_LOGGING_APM_DATA_DUMPER_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_LOGGING_APM_DATA_DUMPER_H_
+
+#include <stdio.h>
+
+#include <memory>
+#include <string>
+#include <unordered_map>
+
+#include "webrtc/base/array_view.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/common_audio/wav_file.h"
+
+// Check to verify that the define is properly set.
+#if !defined(WEBRTC_AEC_DEBUG_DUMP) || \
+ (WEBRTC_AEC_DEBUG_DUMP != 0 && WEBRTC_AEC_DEBUG_DUMP != 1)
+#error "Set WEBRTC_AEC_DEBUG_DUMP to either 0 or 1"
+#endif
+
+namespace webrtc {
+
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+// Functor used to use as a custom deleter in the map of file pointers to raw
+// files.
+struct RawFileCloseFunctor {
+ void operator()(FILE* f) const { fclose(f); }
+};
+#endif
+
+// Class that handles dumping of variables into files.
+class ApmDataDumper {
+ public:
+// Constructor that takes an instance index that may
+// be used to distinguish data dumped from different
+// instances of the code.
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ explicit ApmDataDumper(int instance_index)
+ : instance_index_(instance_index) {}
+#else
+ explicit ApmDataDumper(int instance_index) {}
+#endif
+
+ // Reinitializes the data dumping such that new versions
+ // of all files being dumped to are created.
+ void InitiateNewSetOfRecordings() {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ ++recording_set_index_;
+#endif
+ }
+
+ // Methods for performing dumping of data of various types into
+ // various formats.
+ void DumpRaw(const char* name, int v_length, const float* v) {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ FILE* file = GetRawFile(name);
+ fwrite(v, sizeof(v[0]), v_length, file);
+#endif
+ }
+
+ void DumpRaw(const char* name, rtc::ArrayView<const float> v) {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ DumpRaw(name, v.size(), v.data());
+#endif
+ }
+
+ void DumpRaw(const char* name, int v_length, const int16_t* v) {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ FILE* file = GetRawFile(name);
+ fwrite(v, sizeof(v[0]), v_length, file);
+#endif
+ }
+
+ void DumpRaw(const char* name, rtc::ArrayView<const int16_t> v) {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ DumpRaw(name, v.size(), v.data());
+#endif
+ }
+
+ void DumpRaw(const char* name, int v_length, const int32_t* v) {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ FILE* file = GetRawFile(name);
+ fwrite(v, sizeof(v[0]), v_length, file);
+#endif
+ }
+
+ void DumpRaw(const char* name, rtc::ArrayView<const int32_t> v) {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ DumpRaw(name, v.size(), v.data());
+#endif
+ }
+
+ void DumpWav(const char* name,
+ int v_length,
+ const float* v,
+ int sample_rate_hz,
+ int num_channels) {
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ WavWriter* file = GetWavFile(name, sample_rate_hz, num_channels);
+ file->WriteSamples(v, v_length);
+#endif
+ }
+
+ private:
+#if WEBRTC_AEC_DEBUG_DUMP == 1
+ const int instance_index_;
+ int recording_set_index_ = 0;
+ std::unordered_map<std::string, std::unique_ptr<FILE, RawFileCloseFunctor>>
+ raw_files_;
+ std::unordered_map<std::string, std::unique_ptr<WavWriter>> wav_files_;
+
+ FILE* GetRawFile(const char* name);
+ WavWriter* GetWavFile(const char* name, int sample_rate_hz, int num_channels);
+#endif
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(ApmDataDumper);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_LOGGING_APM_DATA_DUMPER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc
index 4344c56fcc6..e1c9fdcd019 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/audio_processing/noise_suppression_impl.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
#if defined(WEBRTC_NS_FLOAT)
#include "webrtc/modules/audio_processing/ns/noise_suppression.h"
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c
index 25f16d26abc..94b6449776f 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c
@@ -19,7 +19,7 @@
#include "webrtc/modules/audio_processing/ns/nsx_core.h"
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
-#if (defined WEBRTC_DETECT_NEON || defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
/* Tables are defined in ARM assembly files. */
extern const int16_t WebRtcNsx_kLogTable[9];
extern const int16_t WebRtcNsx_kCounterDiv[201];
@@ -65,7 +65,7 @@ static const int16_t WebRtcNsx_kLogTableFrac[256] = {
237, 238, 238, 239, 240, 241, 241, 242, 243, 244, 244, 245, 246, 247, 247,
248, 249, 249, 250, 251, 252, 252, 253, 254, 255, 255
};
-#endif // WEBRTC_DETECT_NEON || WEBRTC_HAS_NEON
+#endif // WEBRTC_HAS_NEON
// Skip first frequency bins during estimation. (0 <= value < 64)
static const size_t kStartBand = 5;
@@ -557,7 +557,7 @@ AnalysisUpdate WebRtcNsx_AnalysisUpdate;
Denormalize WebRtcNsx_Denormalize;
NormalizeRealBuffer WebRtcNsx_NormalizeRealBuffer;
-#if (defined WEBRTC_DETECT_NEON || defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
// Initialize function pointers for ARM Neon platform.
static void WebRtcNsx_InitNeon(void) {
WebRtcNsx_NoiseEstimation = WebRtcNsx_NoiseEstimationNeon;
@@ -762,12 +762,7 @@ int32_t WebRtcNsx_InitCore(NoiseSuppressionFixedC* inst, uint32_t fs) {
WebRtcNsx_Denormalize = DenormalizeC;
WebRtcNsx_NormalizeRealBuffer = NormalizeRealBufferC;
-#ifdef WEBRTC_DETECT_NEON
- uint64_t features = WebRtc_GetCPUFeaturesARM();
- if ((features & kCPUFeatureNEON) != 0) {
- WebRtcNsx_InitNeon();
- }
-#elif defined(WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
WebRtcNsx_InitNeon();
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h
index f463dbbe1a2..d1754f31eae 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h
@@ -215,7 +215,7 @@ void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
uint32_t* priorLocSnr,
uint32_t* postLocSnr);
-#if (defined WEBRTC_DETECT_NEON || defined WEBRTC_HAS_NEON)
+#if defined(WEBRTC_HAS_NEON)
// For the above function pointers, functions for generic platforms are declared
// and defined as static in file nsx_core.c, while those for ARM Neon platforms
// are declared below and defined in file nsx_core_neon.c.
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c
index da7aa3d5dbe..213320d38c8 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c
@@ -96,8 +96,8 @@ void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
}
tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14
// compute indicator function: sigmoid map
- tableIndex = (int16_t)(tmp32no1 >> 14);
- if ((tableIndex < 16) && (tableIndex >= 0)) {
+ if (tmp32no1 < (16 << 14) && tmp32no1 >= 0) {
+ tableIndex = (int16_t)(tmp32no1 >> 14);
tmp16no2 = kIndicatorTable[tableIndex];
tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
frac = (int16_t)(tmp32no1 & 0x00003fff); // Q14
@@ -128,8 +128,8 @@ void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
// FLOAT code
// indicator1 = 0.5 * (tanh(sgnMap * widthPrior *
// (threshPrior1 - tmpFloat1)) + 1.0);
- tableIndex = (int16_t)(tmpU32no1 >> 14);
- if (tableIndex < 16) {
+ if (tmpU32no1 < (16 << 14)) {
+ tableIndex = (int16_t)(tmpU32no1 >> 14);
tmp16no2 = kIndicatorTable[tableIndex];
tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
@@ -175,8 +175,8 @@ void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
/* FLOAT code
indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 1.0);
*/
- tableIndex = (int16_t)(tmpU32no1 >> 14);
- if (tableIndex < 16) {
+ if (tmpU32no1 < (16 << 14)) {
+ tableIndex = (int16_t)(tmpU32no1 >> 14);
tmp16no2 = kIndicatorTable[tableIndex];
tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c
index 7688d82d78d..3922308c7c8 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c
@@ -131,8 +131,8 @@ void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
}
tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14
// compute indicator function: sigmoid map
- tableIndex = (int16_t)(tmp32no1 >> 14);
- if ((tableIndex < 16) && (tableIndex >= 0)) {
+ if (tmp32no1 < (16 << 14) && tmp32no1 >= 0) {
+ tableIndex = (int16_t)(tmp32no1 >> 14);
tmp16no2 = kIndicatorTable[tableIndex];
tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
frac = (int16_t)(tmp32no1 & 0x00003fff); // Q14
@@ -163,8 +163,8 @@ void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
// FLOAT code
// indicator1 = 0.5 * (tanh(sgnMap * widthPrior *
// (threshPrior1 - tmpFloat1)) + 1.0);
- tableIndex = (int16_t)(tmpU32no1 >> 14);
- if (tableIndex < 16) {
+ if (tmpU32no1 < (16 << 14)) {
+ tableIndex = (int16_t)(tmpU32no1 >> 14);
tmp16no2 = kIndicatorTable[tableIndex];
tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
@@ -210,8 +210,8 @@ void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
/* FLOAT code
indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 1.0);
*/
- tableIndex = (int16_t)(tmpU32no1 >> 14);
- if (tableIndex < 16) {
+ if (tmpU32no1 < (16 << 14)) {
+ tableIndex = (int16_t)(tmpU32no1 >> 14);
tmp16no2 = kIndicatorTable[tableIndex];
tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/audio_file_processor.h b/chromium/third_party/webrtc/modules/audio_processing/test/audio_file_processor.h
index f7cde598210..76d5e0edb8f 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/audio_file_processor.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/audio_file_processor.h
@@ -16,11 +16,11 @@
#include <memory>
#include <vector>
+#include "webrtc/base/timeutils.h"
#include "webrtc/common_audio/channel_buffer.h"
#include "webrtc/common_audio/wav_file.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/audio_processing/test/test_utils.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
#include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h"
@@ -33,9 +33,9 @@ namespace webrtc {
// Holds a few statistics about a series of TickIntervals.
struct TickIntervalStats {
TickIntervalStats() : min(std::numeric_limits<int64_t>::max()) {}
- TickInterval sum;
- TickInterval max;
- TickInterval min;
+ int64_t sum;
+ int64_t max;
+ int64_t min;
};
// Interface for processing an input file with an AudioProcessing instance and
@@ -60,10 +60,10 @@ class AudioFileProcessor {
class ScopedTimer {
public:
explicit ScopedTimer(TickIntervalStats* proc_time)
- : proc_time_(proc_time), start_time_(TickTime::Now()) {}
+ : proc_time_(proc_time), start_time_(rtc::TimeNanos()) {}
~ScopedTimer() {
- TickInterval interval = TickTime::Now() - start_time_;
+ int64_t interval = rtc::TimeNanos() - start_time_;
proc_time_->sum += interval;
proc_time_->max = std::max(proc_time_->max, interval);
proc_time_->min = std::min(proc_time_->min, interval);
@@ -71,7 +71,7 @@ class AudioFileProcessor {
private:
TickIntervalStats* const proc_time_;
- TickTime start_time_;
+ int64_t start_time_;
};
TickIntervalStats* mutable_proc_time() { return &proc_time_; }
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float.cc b/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float.cc
index 41e45bfdc69..33790d837f4 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/audioproc_float.cc
@@ -25,7 +25,6 @@
#include "webrtc/modules/audio_processing/test/audio_file_processor.h"
#include "webrtc/modules/audio_processing/test/protobuf_utils.h"
#include "webrtc/modules/audio_processing/test/test_utils.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/trace_to_stderr.h"
namespace {
@@ -167,13 +166,14 @@ int main(int argc, char* argv[]) {
if (FLAGS_perf) {
const auto& proc_time = processor->proc_time();
- int64_t exec_time_us = proc_time.sum.Microseconds();
+ int64_t exec_time_us = proc_time.sum / rtc::kNumNanosecsPerMicrosec;
printf(
"\nExecution time: %.3f s, File time: %.2f s\n"
"Time per chunk (mean, max, min):\n%.0f us, %.0f us, %.0f us\n",
exec_time_us * 1e-6, num_chunks * 1.f / kChunksPerSecond,
- exec_time_us * 1.f / num_chunks, 1.f * proc_time.max.Microseconds(),
- 1.f * proc_time.min.Microseconds());
+ exec_time_us * 1.f / num_chunks,
+ 1.f * proc_time.max / rtc::kNumNanosecsPerMicrosec,
+ 1.f * proc_time.min / rtc::kNumNanosecsPerMicrosec);
}
return 0;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_replayer.cc b/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_replayer.cc
index fc127e610ed..fa76747c2be 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_replayer.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_replayer.cc
@@ -187,6 +187,10 @@ void DebugDumpReplayer::MaybeRecreateApm(const audioproc::Config& msg) {
config.Set<ExtendedFilter>(
new ExtendedFilter(msg.aec_extended_filter_enabled()));
+ RTC_CHECK(msg.has_intelligibility_enhancer_enabled());
+ config.Set<Intelligibility>(
+ new Intelligibility(msg.intelligibility_enhancer_enabled()));
+
// We only create APM once, since changes on these fields should not
// happen in current implementation.
if (!apm_.get()) {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_test.cc b/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_test.cc
index 3acb69444d9..64d659ea500 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/debug_dump_test.cc
@@ -10,6 +10,7 @@
#include <stddef.h> // size_t
+#include <memory>
#include <string>
#include <vector>
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/process_test.cc b/chromium/third_party/webrtc/modules/audio_processing/test/process_test.cc
index 185bc142d50..527e0a1e3e5 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/process_test.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/process_test.cc
@@ -19,13 +19,13 @@
#include <memory>
#include "webrtc/base/format_macros.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/audio_processing/test/protobuf_utils.h"
#include "webrtc/modules/audio_processing/test/test_utils.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/perf_test.h"
#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
@@ -562,7 +562,7 @@ void void_main(int argc, char* argv[]) {
int reverse_count = 0;
int primary_count = 0;
int near_read_bytes = 0;
- TickInterval acc_ticks;
+ int64_t acc_nanos = 0;
AudioFrame far_frame;
AudioFrame near_frame;
@@ -573,8 +573,8 @@ void void_main(int argc, char* argv[]) {
int8_t stream_has_voice = 0;
float ns_speech_prob = 0.0f;
- TickTime t0 = TickTime::Now();
- TickTime t1 = t0;
+ int64_t t0 = rtc::TimeNanos();
+ int64_t t1 = t0;
int64_t max_time_us = 0;
int64_t max_time_reverse_us = 0;
int64_t min_time_us = 1e6;
@@ -676,7 +676,7 @@ void void_main(int argc, char* argv[]) {
}
if (perf_testing) {
- t0 = TickTime::Now();
+ t0 = rtc::TimeNanos();
}
if (msg.has_data()) {
@@ -692,14 +692,15 @@ void void_main(int argc, char* argv[]) {
}
if (perf_testing) {
- t1 = TickTime::Now();
- TickInterval tick_diff = t1 - t0;
- acc_ticks += tick_diff;
- if (tick_diff.Microseconds() > max_time_reverse_us) {
- max_time_reverse_us = tick_diff.Microseconds();
+ t1 = rtc::TimeNanos();
+ int64_t diff_nanos = t1 - t0;
+ acc_nanos += diff_nanos;
+ int64_t diff_us = diff_nanos / rtc::kNumNanosecsPerMicrosec;
+ if (diff_us > max_time_reverse_us) {
+ max_time_reverse_us = diff_us;
}
- if (tick_diff.Microseconds() < min_time_reverse_us) {
- min_time_reverse_us = tick_diff.Microseconds();
+ if (diff_us < min_time_reverse_us) {
+ min_time_reverse_us = diff_us;
}
}
@@ -737,7 +738,7 @@ void void_main(int argc, char* argv[]) {
}
if (perf_testing) {
- t0 = TickTime::Now();
+ t0 = rtc::TimeNanos();
}
ASSERT_EQ(apm->kNoError,
@@ -795,14 +796,15 @@ void void_main(int argc, char* argv[]) {
}
if (perf_testing) {
- t1 = TickTime::Now();
- TickInterval tick_diff = t1 - t0;
- acc_ticks += tick_diff;
- if (tick_diff.Microseconds() > max_time_us) {
- max_time_us = tick_diff.Microseconds();
+ t1 = rtc::TimeNanos();
+ int64_t diff_nanos = t1 - t0;
+ acc_nanos += diff_nanos;
+ int64_t diff_us = diff_nanos / rtc::kNumNanosecsPerMicrosec;
+ if (diff_us > max_time_us) {
+ max_time_us = diff_us;
}
- if (tick_diff.Microseconds() < min_time_us) {
- min_time_us = tick_diff.Microseconds();
+ if (diff_us < min_time_us) {
+ min_time_us = diff_us;
}
}
@@ -925,21 +927,22 @@ void void_main(int argc, char* argv[]) {
}
if (perf_testing) {
- t0 = TickTime::Now();
+ t0 = rtc::TimeNanos();
}
ASSERT_EQ(apm->kNoError,
apm->ProcessReverseStream(&far_frame));
if (perf_testing) {
- t1 = TickTime::Now();
- TickInterval tick_diff = t1 - t0;
- acc_ticks += tick_diff;
- if (tick_diff.Microseconds() > max_time_reverse_us) {
- max_time_reverse_us = tick_diff.Microseconds();
+ t1 = rtc::TimeNanos();
+ int64_t diff_nanos = t1 - t0;
+ acc_nanos += diff_nanos;
+ int64_t diff_us = diff_nanos / rtc::kNumNanosecsPerMicrosec;
+ if (diff_us > max_time_reverse_us) {
+ max_time_reverse_us = diff_us;
}
- if (tick_diff.Microseconds() < min_time_reverse_us) {
- min_time_reverse_us = tick_diff.Microseconds();
+ if (diff_us < min_time_reverse_us) {
+ min_time_reverse_us = diff_us;
}
}
@@ -982,7 +985,7 @@ void void_main(int argc, char* argv[]) {
}
if (perf_testing) {
- t0 = TickTime::Now();
+ t0 = rtc::TimeNanos();
}
const int capture_level_in = capture_level;
@@ -1030,14 +1033,15 @@ void void_main(int argc, char* argv[]) {
}
if (perf_testing) {
- t1 = TickTime::Now();
- TickInterval tick_diff = t1 - t0;
- acc_ticks += tick_diff;
- if (tick_diff.Microseconds() > max_time_us) {
- max_time_us = tick_diff.Microseconds();
+ t1 = rtc::TimeNanos();
+ int64_t diff_nanos = t1 - t0;
+ acc_nanos += diff_nanos;
+ int64_t diff_us = diff_nanos / rtc::kNumNanosecsPerMicrosec;
+ if (diff_us > max_time_us) {
+ max_time_us = diff_us;
}
- if (tick_diff.Microseconds() < min_time_us) {
- min_time_us = tick_diff.Microseconds();
+ if (diff_us < min_time_us) {
+ min_time_us = diff_us;
}
}
@@ -1130,7 +1134,7 @@ void void_main(int argc, char* argv[]) {
if (perf_testing) {
if (primary_count > 0) {
- int64_t exec_time = acc_ticks.Milliseconds();
+ int64_t exec_time = acc_nanos / rtc::kNumNanosecsPerMillisec;
printf("\nTotal time: %.3f s, file time: %.2f s\n",
exec_time * 0.001, primary_count * 0.01);
printf("Time per frame: %.3f ms (average), %.3f ms (max),"
diff --git a/chromium/third_party/webrtc/modules/audio_processing/test/unpack.cc b/chromium/third_party/webrtc/modules/audio_processing/test/unpack.cc
index fbb8e85fee7..f5c0700b3f8 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/test/unpack.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/test/unpack.cc
@@ -252,6 +252,7 @@ int do_main(int argc, char* argv[]) {
PRINT_CONFIG(ns_enabled);
PRINT_CONFIG(ns_level);
PRINT_CONFIG(transient_suppression_enabled);
+ PRINT_CONFIG(intelligibility_enhancer_enabled);
if (msg.has_experiments_description()) {
fprintf(settings_file, " experiments_description: %s\n",
msg.experiments_description().c_str());
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.cc
index 02df75a1010..56bdde890c9 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.cc
@@ -13,6 +13,7 @@
#include <assert.h>
#include <stdlib.h>
#include <string.h>
+#include <algorithm>
// Number of right shifts for scaling is linearly depending on number of bits in
// the far-end binary spectrum.
@@ -276,7 +277,8 @@ BinaryDelayEstimatorFarend* WebRtc_CreateBinaryDelayEstimatorFarend(
if (history_size > 1) {
// Sanity conditions fulfilled.
- self = malloc(sizeof(BinaryDelayEstimatorFarend));
+ self = static_cast<BinaryDelayEstimatorFarend*>(
+ malloc(sizeof(BinaryDelayEstimatorFarend)));
}
if (self == NULL) {
return NULL;
@@ -296,11 +298,12 @@ int WebRtc_AllocateFarendBufferMemory(BinaryDelayEstimatorFarend* self,
int history_size) {
assert(self != NULL);
// (Re-)Allocate memory for history buffers.
- self->binary_far_history =
+ self->binary_far_history = static_cast<uint32_t*>(
realloc(self->binary_far_history,
- history_size * sizeof(*self->binary_far_history));
- self->far_bit_counts = realloc(self->far_bit_counts,
- history_size * sizeof(*self->far_bit_counts));
+ history_size * sizeof(*self->binary_far_history)));
+ self->far_bit_counts = static_cast<int*>(
+ realloc(self->far_bit_counts,
+ history_size * sizeof(*self->far_bit_counts)));
if ((self->binary_far_history == NULL) || (self->far_bit_counts == NULL)) {
history_size = 0;
}
@@ -404,7 +407,8 @@ BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
if ((farend != NULL) && (max_lookahead >= 0)) {
// Sanity conditions fulfilled.
- self = malloc(sizeof(BinaryDelayEstimator));
+ self = static_cast<BinaryDelayEstimator*>(
+ malloc(sizeof(BinaryDelayEstimator)));
}
if (self == NULL) {
return NULL;
@@ -422,8 +426,8 @@ BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
self->mean_bit_counts = NULL;
self->bit_counts = NULL;
self->histogram = NULL;
- self->binary_near_history =
- malloc((max_lookahead + 1) * sizeof(*self->binary_near_history));
+ self->binary_near_history = static_cast<uint32_t*>(
+ malloc((max_lookahead + 1) * sizeof(*self->binary_near_history)));
if (self->binary_near_history == NULL ||
WebRtc_AllocateHistoryBufferMemory(self, farend->history_size) == 0) {
WebRtc_FreeBinaryDelayEstimator(self);
@@ -444,13 +448,13 @@ int WebRtc_AllocateHistoryBufferMemory(BinaryDelayEstimator* self,
// The extra array element in |mean_bit_counts| and |histogram| is a dummy
// element only used while |last_delay| == -2, i.e., before we have a valid
// estimate.
- self->mean_bit_counts =
+ self->mean_bit_counts = static_cast<int32_t*>(
realloc(self->mean_bit_counts,
- (history_size + 1) * sizeof(*self->mean_bit_counts));
- self->bit_counts =
- realloc(self->bit_counts, history_size * sizeof(*self->bit_counts));
- self->histogram =
- realloc(self->histogram, (history_size + 1) * sizeof(*self->histogram));
+ (history_size + 1) * sizeof(*self->mean_bit_counts)));
+ self->bit_counts = static_cast<int32_t*>(
+ realloc(self->bit_counts, history_size * sizeof(*self->bit_counts)));
+ self->histogram = static_cast<float*>(
+ realloc(self->histogram, (history_size + 1) * sizeof(*self->histogram)));
if ((self->mean_bit_counts == NULL) ||
(self->bit_counts == NULL) ||
@@ -616,13 +620,10 @@ int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self,
(value_best_candidate < self->last_delay_probability)));
// Check for nonstationary farend signal.
- int non_stationary_farend = 0;
- for (i = 0; i < self->history_size; ++i) {
- if (self->farend->far_bit_counts[i] > 0) {
- non_stationary_farend = 1;
- break;
- }
- }
+ const bool non_stationary_farend =
+ std::any_of(self->farend->far_bit_counts,
+ self->farend->far_bit_counts + self->history_size,
+ [](int a) { return a > 0; });
if (non_stationary_farend) {
// Only update the validation statistics when the farend is nonstationary
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc
index 4ebe0e61289..3e46763a6a2 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc
@@ -10,11 +10,9 @@
#include "testing/gtest/include/gtest/gtest.h"
-extern "C" {
#include "webrtc/modules/audio_processing/utility/delay_estimator.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_internal.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h"
-}
#include "webrtc/typedefs.h"
namespace {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.cc
index b5448bc5bde..75c7abea776 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.cc
@@ -16,7 +16,6 @@
#include "webrtc/modules/audio_processing/utility/delay_estimator.h"
#include "webrtc/modules/audio_processing/utility/delay_estimator_internal.h"
-#include "webrtc/system_wrappers/include/compile_assert_c.h"
// Only bit |kBandFirst| through bit |kBandLast| are processed and
// |kBandFirst| - |kBandLast| must be < 32.
@@ -144,10 +143,11 @@ void* WebRtc_CreateDelayEstimatorFarend(int spectrum_size, int history_size) {
// Check if the sub band used in the delay estimation is small enough to fit
// the binary spectra in a uint32_t.
- COMPILE_ASSERT(kBandLast - kBandFirst < 32);
+ static_assert(kBandLast - kBandFirst < 32, "");
if (spectrum_size >= kBandLast) {
- self = malloc(sizeof(DelayEstimatorFarend));
+ self = static_cast<DelayEstimatorFarend*>(
+ malloc(sizeof(DelayEstimatorFarend)));
}
if (self != NULL) {
@@ -158,7 +158,8 @@ void* WebRtc_CreateDelayEstimatorFarend(int spectrum_size, int history_size) {
memory_fail |= (self->binary_farend == NULL);
// Allocate memory for spectrum buffers.
- self->mean_far_spectrum = malloc(spectrum_size * sizeof(SpectrumType));
+ self->mean_far_spectrum =
+ static_cast<SpectrumType*>(malloc(spectrum_size * sizeof(SpectrumType)));
memory_fail |= (self->mean_far_spectrum == NULL);
self->spectrum_size = spectrum_size;
@@ -275,7 +276,7 @@ void* WebRtc_CreateDelayEstimator(void* farend_handle, int max_lookahead) {
DelayEstimatorFarend* farend = (DelayEstimatorFarend*) farend_handle;
if (farend_handle != NULL) {
- self = malloc(sizeof(DelayEstimator));
+ self = static_cast<DelayEstimator*>(malloc(sizeof(DelayEstimator)));
}
if (self != NULL) {
@@ -287,8 +288,8 @@ void* WebRtc_CreateDelayEstimator(void* farend_handle, int max_lookahead) {
memory_fail |= (self->binary_handle == NULL);
// Allocate memory for spectrum buffers.
- self->mean_near_spectrum = malloc(farend->spectrum_size *
- sizeof(SpectrumType));
+ self->mean_near_spectrum = static_cast<SpectrumType*>(
+ malloc(farend->spectrum_size * sizeof(SpectrumType)));
memory_fail |= (self->mean_near_spectrum == NULL);
self->spectrum_size = farend->spectrum_size;
@@ -328,7 +329,7 @@ int WebRtc_SoftResetDelayEstimator(void* handle, int delay_shift) {
}
int WebRtc_set_history_size(void* handle, int history_size) {
- DelayEstimator* self = handle;
+ DelayEstimator* self = static_cast<DelayEstimator*>(handle);
if ((self == NULL) || (history_size <= 1)) {
return -1;
@@ -337,7 +338,7 @@ int WebRtc_set_history_size(void* handle, int history_size) {
}
int WebRtc_history_size(const void* handle) {
- const DelayEstimator* self = handle;
+ const DelayEstimator* self = static_cast<const DelayEstimator*>(handle);
if (self == NULL) {
return -1;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc
index 674a5197a87..5a0d37c2747 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/audio_processing/voice_detection_impl.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_audio/vad/include/webrtc_vad.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
index bf608789d5d..09652d84194 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
@@ -83,6 +83,10 @@ BitrateController* BitrateController::CreateBitrateController(
return new BitrateControllerImpl(clock, observer);
}
+BitrateController* BitrateController::CreateBitrateController(Clock* clock) {
+ return new BitrateControllerImpl(clock, nullptr);
+}
+
BitrateControllerImpl::BitrateControllerImpl(Clock* clock,
BitrateObserver* observer)
: clock_(clock),
@@ -94,8 +98,8 @@ BitrateControllerImpl::BitrateControllerImpl(Clock* clock,
last_fraction_loss_(0),
last_rtt_ms_(0),
last_reserved_bitrate_bps_(0) {
- // This calls the observer_, which means that the observer provided by the
- // user must be ready to accept a bitrate update when it constructs the
+ // This calls the observer_ if set, which means that the observer provided by
+ // the user must be ready to accept a bitrate update when it constructs the
// controller. We do this to avoid having to keep synchronized initial values
// in both the controller and the allocator.
MaybeTriggerOnNetworkChanged();
@@ -122,6 +126,18 @@ void BitrateControllerImpl::SetMinMaxBitrate(int min_bitrate_bps,
MaybeTriggerOnNetworkChanged();
}
+void BitrateControllerImpl::SetBitrates(int start_bitrate_bps,
+ int min_bitrate_bps,
+ int max_bitrate_bps) {
+ {
+ rtc::CritScope cs(&critsect_);
+ bandwidth_estimation_.SetBitrates(start_bitrate_bps,
+ min_bitrate_bps,
+ max_bitrate_bps);
+ }
+ MaybeTriggerOnNetworkChanged();
+}
+
void BitrateControllerImpl::SetReservedBitrate(uint32_t reserved_bitrate_bps) {
{
rtc::CritScope cs(&critsect_);
@@ -187,11 +203,15 @@ void BitrateControllerImpl::OnReceivedRtcpReceiverReport(
}
void BitrateControllerImpl::MaybeTriggerOnNetworkChanged() {
- uint32_t bitrate;
+ if (!observer_)
+ return;
+
+ uint32_t bitrate_bps;
uint8_t fraction_loss;
int64_t rtt;
- if (GetNetworkParameters(&bitrate, &fraction_loss, &rtt))
- observer_->OnNetworkChanged(bitrate, fraction_loss, rtt);
+
+ if (GetNetworkParameters(&bitrate_bps, &fraction_loss, &rtt))
+ observer_->OnNetworkChanged(bitrate_bps, fraction_loss, rtt);
}
bool BitrateControllerImpl::GetNetworkParameters(uint32_t* bitrate,
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
index 6f776d758b4..5a61379ce01 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
@@ -20,6 +20,7 @@
#include <list>
#include <utility>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h"
@@ -27,6 +28,8 @@ namespace webrtc {
class BitrateControllerImpl : public BitrateController {
public:
+ // TODO(perkj): BitrateObserver has been deprecated and is not used in WebRTC.
+ // |observer| is left for project that is not yet updated.
BitrateControllerImpl(Clock* clock, BitrateObserver* observer);
virtual ~BitrateControllerImpl() {}
@@ -34,15 +37,26 @@ class BitrateControllerImpl : public BitrateController {
RtcpBandwidthObserver* CreateRtcpBandwidthObserver() override;
+ // Deprecated
void SetStartBitrate(int start_bitrate_bps) override;
+ // Deprecated
void SetMinMaxBitrate(int min_bitrate_bps, int max_bitrate_bps) override;
+ void SetBitrates(int start_bitrate_bps,
+ int min_bitrate_bps,
+ int max_bitrate_bps) override;
+
void UpdateDelayBasedEstimate(uint32_t bitrate_bps) override;
void SetReservedBitrate(uint32_t reserved_bitrate_bps) override;
void SetEventLog(RtcEventLog* event_log) override;
+ // Returns true if the parameters have changed since the last call.
+ bool GetNetworkParameters(uint32_t* bitrate,
+ uint8_t* fraction_loss,
+ int64_t* rtt) override;
+
int64_t TimeUntilNextProcess() override;
void Process() override;
@@ -57,20 +71,16 @@ class BitrateControllerImpl : public BitrateController {
int number_of_packets,
int64_t now_ms);
+ // Deprecated
void MaybeTriggerOnNetworkChanged();
- // Returns true if the parameters have changed since the last call.
- bool GetNetworkParameters(uint32_t* bitrate,
- uint8_t* fraction_loss,
- int64_t* rtt);
-
void OnNetworkChanged(uint32_t bitrate,
uint8_t fraction_loss, // 0 - 255.
int64_t rtt) EXCLUSIVE_LOCKS_REQUIRED(critsect_);
// Used by process thread.
- Clock* clock_;
- BitrateObserver* observer_;
+ Clock* const clock_;
+ BitrateObserver* const observer_;
int64_t last_bitrate_update_ms_;
rtc::CriticalSection critsect_;
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
index 3f467ef8a46..4f92a3884b5 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
@@ -14,11 +14,16 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
+#include "webrtc/modules/pacing/mock/mock_paced_sender.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-using webrtc::RtcpBandwidthObserver;
-using webrtc::BitrateObserver;
+using ::testing::Exactly;
+using ::testing::Return;
+
using webrtc::BitrateController;
+using webrtc::BitrateObserver;
+using webrtc::PacedSender;
+using webrtc::RtcpBandwidthObserver;
uint8_t WeightedLoss(int num_packets1, uint8_t fraction_loss1,
int num_packets2, uint8_t fraction_loss2) {
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h b/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h
index a9c247acf17..a61cf6a7a74 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h
@@ -18,6 +18,7 @@
#include <map>
#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/pacing/paced_sender.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
namespace webrtc {
@@ -26,6 +27,8 @@ class CriticalSectionWrapper;
class RtcEventLog;
struct PacketInfo;
+// Deprecated
+// TODO(perkj): Remove BitrateObserver when no implementations use it.
class BitrateObserver {
// Observer class for bitrate changes announced due to change in bandwidth
// estimate or due to bitrate allocation changes. Fraction loss and rtt is
@@ -46,16 +49,26 @@ class BitrateController : public Module {
// estimation and divide the available bitrate between all its registered
// BitrateObservers.
public:
- static const int kDefaultStartBitrateKbps = 300;
+ static const int kDefaultStartBitratebps = 300000;
+ // Deprecated:
+ // TODO(perkj): BitrateObserver has been deprecated and is not used in WebRTC.
+ // Remove this method once other other projects does not use it.
static BitrateController* CreateBitrateController(Clock* clock,
BitrateObserver* observer);
+ static BitrateController* CreateBitrateController(Clock* clock);
+
virtual ~BitrateController() {}
virtual RtcpBandwidthObserver* CreateRtcpBandwidthObserver() = 0;
+ // Deprecated
virtual void SetStartBitrate(int start_bitrate_bps) = 0;
+ // Deprecated
virtual void SetMinMaxBitrate(int min_bitrate_bps, int max_bitrate_bps) = 0;
+ virtual void SetBitrates(int start_bitrate_bps,
+ int min_bitrate_bps,
+ int max_bitrate_bps) = 0;
virtual void UpdateDelayBasedEstimate(uint32_t bitrate_bps) = 0;
@@ -66,6 +79,10 @@ class BitrateController : public Module {
virtual bool AvailableBandwidth(uint32_t* bandwidth) const = 0;
virtual void SetReservedBitrate(uint32_t reserved_bitrate_bps) = 0;
+
+ virtual bool GetNetworkParameters(uint32_t* bitrate,
+ uint8_t* fraction_loss,
+ int64_t* rtt) = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_CONTROLLER_H_
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h b/chromium/third_party/webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h
index 45b596a8688..da6169e748e 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/include/mock/mock_bitrate_controller.h
@@ -31,10 +31,16 @@ class MockBitrateController : public BitrateController {
MOCK_METHOD1(SetStartBitrate, void(int start_bitrate_bps));
MOCK_METHOD2(SetMinMaxBitrate,
void(int min_bitrate_bps, int max_bitrate_bps));
+ MOCK_METHOD3(SetBitrates,
+ void(int start_bitrate_bps,
+ int min_bitrate_bps,
+ int max_bitrate_bps));
MOCK_METHOD1(UpdateDelayBasedEstimate, void(uint32_t bitrate_bps));
MOCK_METHOD1(SetEventLog, void(RtcEventLog* event_log));
MOCK_CONST_METHOD1(AvailableBandwidth, bool(uint32_t* bandwidth));
MOCK_METHOD1(SetReservedBitrate, void(uint32_t reserved_bitrate_bps));
+ MOCK_METHOD3(GetNetworkParameters,
+ bool(uint32_t* bitrate, uint8_t* fraction_loss, int64_t* rtt));
MOCK_METHOD0(Process, void());
MOCK_METHOD0(TimeUntilNextProcess, int64_t());
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
index 785267d8c97..a1b78a257c6 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
@@ -67,6 +67,14 @@ SendSideBandwidthEstimation::SendSideBandwidthEstimation()
SendSideBandwidthEstimation::~SendSideBandwidthEstimation() {}
+void SendSideBandwidthEstimation::SetBitrates(int send_bitrate,
+ int min_bitrate,
+ int max_bitrate) {
+ if (send_bitrate > 0)
+ SetSendBitrate(send_bitrate);
+ SetMinMaxBitrate(min_bitrate, max_bitrate);
+}
+
void SendSideBandwidthEstimation::SetSendBitrate(int bitrate) {
RTC_DCHECK_GT(bitrate, 0);
bitrate_ = bitrate;
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
index 15894f93954..402d22a6bf4 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
@@ -46,6 +46,9 @@ class SendSideBandwidthEstimation {
int number_of_packets,
int64_t now_ms);
+ void SetBitrates(int send_bitrate,
+ int min_bitrate,
+ int max_bitrate);
void SetSendBitrate(int bitrate);
void SetMinMaxBitrate(int min_bitrate, int max_bitrate);
int GetMinBitrate() const;
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/congestion_controller.cc b/chromium/third_party/webrtc/modules/congestion_controller/congestion_controller.cc
index 11ce46adf3a..9f95fc3a1f9 100644
--- a/chromium/third_party/webrtc/modules/congestion_controller/congestion_controller.cc
+++ b/chromium/third_party/webrtc/modules/congestion_controller/congestion_controller.cc
@@ -15,11 +15,11 @@
#include <vector>
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/socket.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
-#include "webrtc/modules/pacing/paced_sender.h"
#include "webrtc/modules/remote_bitrate_estimator/include/send_time_history.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h"
@@ -47,11 +47,10 @@ class WrappingBitrateEstimator : public RemoteBitrateEstimator {
void IncomingPacket(int64_t arrival_time_ms,
size_t payload_size,
- const RTPHeader& header,
- bool was_paced) override {
+ const RTPHeader& header) override {
CriticalSectionScoped cs(crit_sect_.get());
PickEstimatorFromHeader(header);
- rbe_->IncomingPacket(arrival_time_ms, payload_size, header, was_paced);
+ rbe_->IncomingPacket(arrival_time_ms, payload_size, header);
}
void Process() override {
@@ -80,7 +79,7 @@ class WrappingBitrateEstimator : public RemoteBitrateEstimator {
return rbe_->LatestEstimate(ssrcs, bitrate_bps);
}
- void SetMinBitrate(int min_bitrate_bps) {
+ void SetMinBitrate(int min_bitrate_bps) override {
CriticalSectionScoped cs(crit_sect_.get());
rbe_->SetMinBitrate(min_bitrate_bps);
min_bitrate_bps_ = min_bitrate_bps;
@@ -115,7 +114,7 @@ class WrappingBitrateEstimator : public RemoteBitrateEstimator {
// Instantiate RBE for Time Offset or Absolute Send Time extensions.
void PickEstimator() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_.get()) {
if (using_absolute_send_time_) {
- rbe_.reset(new RemoteBitrateEstimatorAbsSendTime(observer_, clock_));
+ rbe_.reset(new RemoteBitrateEstimatorAbsSendTime(observer_));
} else {
rbe_.reset(new RemoteBitrateEstimatorSingleStream(observer_, clock_));
}
@@ -140,31 +139,78 @@ CongestionController::CongestionController(
BitrateObserver* bitrate_observer,
RemoteBitrateObserver* remote_bitrate_observer)
: clock_(clock),
- pacer_(new PacedSender(clock_,
- &packet_router_,
- BitrateController::kDefaultStartBitrateKbps,
- PacedSender::kDefaultPaceMultiplier *
- BitrateController::kDefaultStartBitrateKbps,
- 0)),
+ observer_(nullptr),
+ packet_router_(new PacketRouter()),
+ pacer_(new PacedSender(clock_, packet_router_.get())),
remote_bitrate_estimator_(
new WrappingBitrateEstimator(remote_bitrate_observer, clock_)),
- // Constructed last as this object calls the provided callback on
- // construction.
bitrate_controller_(
BitrateController::CreateBitrateController(clock_, bitrate_observer)),
- remote_estimator_proxy_(clock_, &packet_router_),
+ remote_estimator_proxy_(clock_, packet_router_.get()),
+ transport_feedback_adapter_(bitrate_controller_.get(), clock_),
+ min_bitrate_bps_(RemoteBitrateEstimator::kDefaultMinBitrateBps),
+ last_reported_bitrate_bps_(0),
+ last_reported_fraction_loss_(0),
+ last_reported_rtt_(0),
+ network_state_(kNetworkUp) {
+ Init();
+}
+
+CongestionController::CongestionController(
+ Clock* clock,
+ Observer* observer,
+ RemoteBitrateObserver* remote_bitrate_observer)
+ : clock_(clock),
+ observer_(observer),
+ packet_router_(new PacketRouter()),
+ pacer_(new PacedSender(clock_, packet_router_.get())),
+ remote_bitrate_estimator_(
+ new WrappingBitrateEstimator(remote_bitrate_observer, clock_)),
+ bitrate_controller_(BitrateController::CreateBitrateController(clock_)),
+ remote_estimator_proxy_(clock_, packet_router_.get()),
+ transport_feedback_adapter_(bitrate_controller_.get(), clock_),
+ min_bitrate_bps_(RemoteBitrateEstimator::kDefaultMinBitrateBps),
+ last_reported_bitrate_bps_(0),
+ last_reported_fraction_loss_(0),
+ last_reported_rtt_(0),
+ network_state_(kNetworkUp) {
+ Init();
+}
+
+CongestionController::CongestionController(
+ Clock* clock,
+ Observer* observer,
+ RemoteBitrateObserver* remote_bitrate_observer,
+ std::unique_ptr<PacketRouter> packet_router,
+ std::unique_ptr<PacedSender> pacer)
+ : clock_(clock),
+ observer_(observer),
+ packet_router_(std::move(packet_router)),
+ pacer_(std::move(pacer)),
+ remote_bitrate_estimator_(
+ new WrappingBitrateEstimator(remote_bitrate_observer, clock_)),
+ // Constructed last as this object calls the provided callback on
+ // construction.
+ bitrate_controller_(BitrateController::CreateBitrateController(clock_)),
+ remote_estimator_proxy_(clock_, packet_router_.get()),
transport_feedback_adapter_(bitrate_controller_.get(), clock_),
- min_bitrate_bps_(RemoteBitrateEstimator::kDefaultMinBitrateBps) {
+ min_bitrate_bps_(RemoteBitrateEstimator::kDefaultMinBitrateBps),
+ last_reported_bitrate_bps_(0),
+ last_reported_fraction_loss_(0),
+ last_reported_rtt_(0),
+ network_state_(kNetworkUp) {
+ Init();
+}
+
+CongestionController::~CongestionController() {}
+
+void CongestionController::Init() {
transport_feedback_adapter_.SetBitrateEstimator(
- new RemoteBitrateEstimatorAbsSendTime(&transport_feedback_adapter_,
- clock_));
+ new RemoteBitrateEstimatorAbsSendTime(&transport_feedback_adapter_));
transport_feedback_adapter_.GetBitrateEstimator()->SetMinBitrate(
min_bitrate_bps_);
}
-CongestionController::~CongestionController() {
-}
-
void CongestionController::SetBweBitrates(int min_bitrate_bps,
int start_bitrate_bps,
@@ -177,16 +223,19 @@ void CongestionController::SetBweBitrates(int min_bitrate_bps,
min_bitrate_bps = kMinBitrateBps;
if (max_bitrate_bps > 0)
max_bitrate_bps = std::max(min_bitrate_bps, max_bitrate_bps);
- if (start_bitrate_bps > 0) {
+ if (start_bitrate_bps > 0)
start_bitrate_bps = std::max(min_bitrate_bps, start_bitrate_bps);
- bitrate_controller_->SetStartBitrate(start_bitrate_bps);
- }
- bitrate_controller_->SetMinMaxBitrate(min_bitrate_bps, max_bitrate_bps);
+
+ bitrate_controller_->SetBitrates(start_bitrate_bps,
+ min_bitrate_bps,
+ max_bitrate_bps);
+
if (remote_bitrate_estimator_)
remote_bitrate_estimator_->SetMinBitrate(min_bitrate_bps);
min_bitrate_bps_ = min_bitrate_bps;
transport_feedback_adapter_.GetBitrateEstimator()->SetMinBitrate(
min_bitrate_bps_);
+ MaybeTriggerOnNetworkChanged();
}
BitrateController* CongestionController::GetBitrateController() const {
@@ -207,10 +256,9 @@ CongestionController::GetTransportFeedbackObserver() {
return &transport_feedback_adapter_;
}
-void CongestionController::UpdatePacerBitrate(int bitrate_kbps,
- int max_bitrate_kbps,
- int min_bitrate_kbps) {
- pacer_->UpdateBitrate(bitrate_kbps, max_bitrate_kbps, min_bitrate_kbps);
+void CongestionController::SetAllocatedSendBitrate(int allocated_bitrate_bps,
+ int padding_bitrate_bps) {
+ pacer_->SetAllocatedSendBitrate(allocated_bitrate_bps, padding_bitrate_bps);
}
int64_t CongestionController::GetPacerQueuingDelayMs() const {
@@ -223,6 +271,11 @@ void CongestionController::SignalNetworkState(NetworkState state) {
} else {
pacer_->Pause();
}
+ {
+ rtc::CritScope cs(&critsect_);
+ network_state_ = state;
+ }
+ MaybeTriggerOnNetworkChanged();
}
void CongestionController::OnSentPacket(const rtc::SentPacket& sent_packet) {
@@ -243,6 +296,52 @@ int64_t CongestionController::TimeUntilNextProcess() {
void CongestionController::Process() {
bitrate_controller_->Process();
remote_bitrate_estimator_->Process();
+ MaybeTriggerOnNetworkChanged();
+}
+
+void CongestionController::MaybeTriggerOnNetworkChanged() {
+ // TODO(perkj): |observer_| can be nullptr if the ctor that accepts a
+ // BitrateObserver is used. Remove this check once the ctor is removed.
+ if (!observer_)
+ return;
+
+ uint32_t bitrate_bps;
+ uint8_t fraction_loss;
+ int64_t rtt;
+ bool estimate_changed = bitrate_controller_->GetNetworkParameters(
+ &bitrate_bps, &fraction_loss, &rtt);
+ if (estimate_changed)
+ pacer_->SetEstimatedBitrate(bitrate_bps);
+
+ bitrate_bps = IsNetworkDown() || IsSendQueueFull() ? 0 : bitrate_bps;
+
+ if (HasNetworkParametersToReportChanged(bitrate_bps, fraction_loss, rtt)) {
+ observer_->OnNetworkChanged(bitrate_bps, fraction_loss, rtt);
+ }
+}
+
+bool CongestionController::HasNetworkParametersToReportChanged(
+ uint32_t bitrate_bps,
+ uint8_t fraction_loss,
+ int64_t rtt) {
+ rtc::CritScope cs(&critsect_);
+ bool changed =
+ last_reported_bitrate_bps_ != bitrate_bps ||
+ (bitrate_bps > 0 && (last_reported_fraction_loss_ != fraction_loss ||
+ last_reported_rtt_ != rtt));
+ last_reported_bitrate_bps_ = bitrate_bps;
+ last_reported_fraction_loss_ = fraction_loss;
+ last_reported_rtt_ = rtt;
+ return changed;
+}
+
+bool CongestionController::IsSendQueueFull() const {
+ return pacer_->ExpectedQueueTimeMs() > PacedSender::kMaxQueueLengthMs;
+}
+
+bool CongestionController::IsNetworkDown() const {
+ rtc::CritScope cs(&critsect_);
+ return network_state_ == kNetworkDown;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/congestion_controller_unittest.cc b/chromium/third_party/webrtc/modules/congestion_controller/congestion_controller_unittest.cc
new file mode 100644
index 00000000000..c82c75daf3c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/congestion_controller/congestion_controller_unittest.cc
@@ -0,0 +1,157 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/pacing/mock/mock_paced_sender.h"
+#include "webrtc/modules/congestion_controller/include/congestion_controller.h"
+#include "webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+using testing::_;
+using testing::NiceMock;
+using testing::Return;
+using testing::SaveArg;
+using testing::StrictMock;
+
+namespace webrtc {
+namespace test {
+
+class CongestionControllerTest : public ::testing::Test {
+ protected:
+ CongestionControllerTest() : clock_(123456) {}
+ ~CongestionControllerTest() override {}
+
+ void SetUp() override {
+ pacer_ = new NiceMock<MockPacedSender>();
+ std::unique_ptr<PacedSender> pacer(pacer_); // Passes ownership.
+ std::unique_ptr<PacketRouter> packet_router(new PacketRouter());
+ controller_.reset(
+ new CongestionController(&clock_, &observer_, &remote_bitrate_observer_,
+ std::move(packet_router), std::move(pacer)));
+ bandwidth_observer_.reset(
+ controller_->GetBitrateController()->CreateRtcpBandwidthObserver());
+
+ // Set the initial bitrate estimate and expect the |observer| and |pacer_|
+ // to be updated.
+ EXPECT_CALL(observer_, OnNetworkChanged(kInitialBitrateBps, _, _));
+ EXPECT_CALL(*pacer_, SetEstimatedBitrate(kInitialBitrateBps));
+ controller_->SetBweBitrates(0, kInitialBitrateBps, 5 * kInitialBitrateBps);
+ }
+
+ SimulatedClock clock_;
+ StrictMock<MockCongestionObserver> observer_;
+ NiceMock<MockPacedSender>* pacer_;
+ NiceMock<MockRemoteBitrateObserver> remote_bitrate_observer_;
+ std::unique_ptr<RtcpBandwidthObserver> bandwidth_observer_;
+ std::unique_ptr<CongestionController> controller_;
+ const uint32_t kInitialBitrateBps = 60000;
+};
+
+TEST_F(CongestionControllerTest, OnNetworkChanged) {
+ // Test no change.
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+
+ EXPECT_CALL(observer_, OnNetworkChanged(kInitialBitrateBps * 2, _, _));
+ EXPECT_CALL(*pacer_, SetEstimatedBitrate(kInitialBitrateBps * 2));
+ bandwidth_observer_->OnReceivedEstimatedBitrate(kInitialBitrateBps * 2);
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+
+ EXPECT_CALL(observer_, OnNetworkChanged(kInitialBitrateBps, _, _));
+ EXPECT_CALL(*pacer_, SetEstimatedBitrate(kInitialBitrateBps));
+ bandwidth_observer_->OnReceivedEstimatedBitrate(kInitialBitrateBps);
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+}
+
+TEST_F(CongestionControllerTest, OnSendQueueFull) {
+ EXPECT_CALL(*pacer_, ExpectedQueueTimeMs())
+ .WillOnce(Return(PacedSender::kMaxQueueLengthMs + 1));
+
+ EXPECT_CALL(observer_, OnNetworkChanged(0, _, _));
+ controller_->Process();
+
+ // Let the pacer not be full next time the controller checks.
+ EXPECT_CALL(*pacer_, ExpectedQueueTimeMs())
+ .WillOnce(Return(PacedSender::kMaxQueueLengthMs - 1));
+
+ EXPECT_CALL(observer_, OnNetworkChanged(kInitialBitrateBps, _, _));
+ controller_->Process();
+}
+
+TEST_F(CongestionControllerTest, OnSendQueueFullAndEstimateChange) {
+ EXPECT_CALL(*pacer_, ExpectedQueueTimeMs())
+ .WillOnce(Return(PacedSender::kMaxQueueLengthMs + 1));
+ EXPECT_CALL(observer_, OnNetworkChanged(0, _, _));
+ controller_->Process();
+
+ // Receive new estimate but let the queue still be full.
+ bandwidth_observer_->OnReceivedEstimatedBitrate(kInitialBitrateBps * 2);
+ EXPECT_CALL(*pacer_, ExpectedQueueTimeMs())
+ .WillOnce(Return(PacedSender::kMaxQueueLengthMs + 1));
+ // The send pacer should get the new estimate though.
+ EXPECT_CALL(*pacer_, SetEstimatedBitrate(kInitialBitrateBps * 2));
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+
+ // Let the pacer not be full next time the controller checks.
+ // |OnNetworkChanged| should be called with the new estimate.
+ EXPECT_CALL(*pacer_, ExpectedQueueTimeMs())
+ .WillOnce(Return(PacedSender::kMaxQueueLengthMs - 1));
+ EXPECT_CALL(observer_, OnNetworkChanged(kInitialBitrateBps * 2, _, _));
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+}
+
+TEST_F(CongestionControllerTest, SignalNetworkState) {
+ EXPECT_CALL(observer_, OnNetworkChanged(0, _, _));
+ controller_->SignalNetworkState(kNetworkDown);
+
+ EXPECT_CALL(observer_, OnNetworkChanged(kInitialBitrateBps, _, _));
+ controller_->SignalNetworkState(kNetworkUp);
+
+ EXPECT_CALL(observer_, OnNetworkChanged(0, _, _));
+ controller_->SignalNetworkState(kNetworkDown);
+}
+
+TEST_F(CongestionControllerTest,
+ SignalNetworkStateAndQueueIsFullAndEstimateChange) {
+ // Send queue is full
+ EXPECT_CALL(*pacer_, ExpectedQueueTimeMs())
+ .WillRepeatedly(Return(PacedSender::kMaxQueueLengthMs + 1));
+ EXPECT_CALL(observer_, OnNetworkChanged(0, _, _));
+ controller_->Process();
+
+ // Queue is full and network is down. Expect no bitrate change.
+ controller_->SignalNetworkState(kNetworkDown);
+ controller_->Process();
+
+ // Queue is full but network is up. Expect no bitrate change.
+ controller_->SignalNetworkState(kNetworkUp);
+ controller_->Process();
+
+ // Receive new estimate but let the queue still be full.
+ EXPECT_CALL(*pacer_, SetEstimatedBitrate(kInitialBitrateBps * 2));
+ bandwidth_observer_->OnReceivedEstimatedBitrate(kInitialBitrateBps * 2);
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+
+ // Let the pacer not be full next time the controller checks.
+ EXPECT_CALL(*pacer_, ExpectedQueueTimeMs())
+ .WillOnce(Return(PacedSender::kMaxQueueLengthMs - 1));
+ EXPECT_CALL(observer_, OnNetworkChanged(kInitialBitrateBps * 2, _, _));
+ controller_->Process();
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/include/congestion_controller.h b/chromium/third_party/webrtc/modules/congestion_controller/include/congestion_controller.h
index 65bf5be3ec1..da8719d33a7 100644
--- a/chromium/third_party/webrtc/modules/congestion_controller/include/congestion_controller.h
+++ b/chromium/third_party/webrtc/modules/congestion_controller/include/congestion_controller.h
@@ -11,13 +11,16 @@
#ifndef WEBRTC_MODULES_CONGESTION_CONTROLLER_INCLUDE_CONGESTION_CONTROLLER_H_
#define WEBRTC_MODULES_CONGESTION_CONTROLLER_INCLUDE_CONGESTION_CONTROLLER_H_
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/common_types.h"
#include "webrtc/modules/include/module.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/pacing/packet_router.h"
+#include "webrtc/modules/pacing/paced_sender.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h"
#include "webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h"
-#include "webrtc/stream.h"
namespace rtc {
struct SentPacket;
@@ -28,7 +31,6 @@ namespace webrtc {
class BitrateController;
class BitrateObserver;
class Clock;
-class PacedSender;
class ProcessThread;
class RemoteBitrateEstimator;
class RemoteBitrateObserver;
@@ -36,9 +38,33 @@ class TransportFeedbackObserver;
class CongestionController : public CallStatsObserver, public Module {
public:
+ // Observer class for bitrate changes announced due to change in bandwidth
+ // estimate or due to that the send pacer is full. Fraction loss and rtt is
+ // also part of this callback to allow the observer to optimize its settings
+ // for different types of network environments. The bitrate does not include
+ // packet headers and is measured in bits per second.
+ class Observer {
+ public:
+ virtual void OnNetworkChanged(uint32_t bitrate_bps,
+ uint8_t fraction_loss, // 0 - 255.
+ int64_t rtt_ms) = 0;
+
+ protected:
+ virtual ~Observer() {}
+ };
+ // Deprecated
+ // TODO(perkj): Remove once no other clients use this ctor.
CongestionController(Clock* clock,
BitrateObserver* bitrate_observer,
RemoteBitrateObserver* remote_bitrate_observer);
+ CongestionController(Clock* clock,
+ Observer* observer,
+ RemoteBitrateObserver* remote_bitrate_observer);
+ CongestionController(Clock* clock,
+ Observer* observer,
+ RemoteBitrateObserver* remote_bitrate_observer,
+ std::unique_ptr<PacketRouter> packet_router,
+ std::unique_ptr<PacedSender> pacer);
virtual ~CongestionController();
virtual void SetBweBitrates(int min_bitrate_bps,
@@ -50,12 +76,11 @@ class CongestionController : public CallStatsObserver, public Module {
bool send_side_bwe);
virtual int64_t GetPacerQueuingDelayMs() const;
virtual PacedSender* pacer() { return pacer_.get(); }
- virtual PacketRouter* packet_router() { return &packet_router_; }
+ virtual PacketRouter* packet_router() { return packet_router_.get(); }
virtual TransportFeedbackObserver* GetTransportFeedbackObserver();
- virtual void UpdatePacerBitrate(int bitrate_kbps,
- int max_bitrate_kbps,
- int min_bitrate_kbps);
+ void SetAllocatedSendBitrate(int allocated_bitrate_bps,
+ int padding_bitrate_bps);
virtual void OnSentPacket(const rtc::SentPacket& sent_packet);
@@ -67,14 +92,28 @@ class CongestionController : public CallStatsObserver, public Module {
void Process() override;
private:
+ void Init();
+ void MaybeTriggerOnNetworkChanged();
+
+ bool IsSendQueueFull() const;
+ bool IsNetworkDown() const;
+ bool HasNetworkParametersToReportChanged(uint32_t bitrate_bps,
+ uint8_t fraction_loss,
+ int64_t rtt);
Clock* const clock_;
- const rtc::scoped_ptr<PacedSender> pacer_;
- const rtc::scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
- const rtc::scoped_ptr<BitrateController> bitrate_controller_;
- PacketRouter packet_router_;
+ Observer* const observer_;
+ const std::unique_ptr<PacketRouter> packet_router_;
+ const std::unique_ptr<PacedSender> pacer_;
+ const std::unique_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
+ const std::unique_ptr<BitrateController> bitrate_controller_;
RemoteEstimatorProxy remote_estimator_proxy_;
TransportFeedbackAdapter transport_feedback_adapter_;
int min_bitrate_bps_;
+ rtc::CriticalSection critsect_;
+ uint32_t last_reported_bitrate_bps_ GUARDED_BY(critsect_);
+ uint8_t last_reported_fraction_loss_ GUARDED_BY(critsect_);
+ int64_t last_reported_rtt_ GUARDED_BY(critsect_);
+ NetworkState network_state_ GUARDED_BY(critsect_);
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(CongestionController);
};
diff --git a/chromium/third_party/webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h b/chromium/third_party/webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h
index 0813c3d42cd..20955ea81a8 100644
--- a/chromium/third_party/webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h
+++ b/chromium/third_party/webrtc/modules/congestion_controller/include/mock/mock_congestion_controller.h
@@ -12,20 +12,27 @@
#define WEBRTC_MODULES_CONGESTION_CONTROLLER_INCLUDE_MOCK_MOCK_CONGESTION_CONTROLLER_H_
#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/socket.h"
#include "webrtc/modules/congestion_controller/include/congestion_controller.h"
namespace webrtc {
namespace test {
+class MockCongestionObserver : public CongestionController::Observer {
+ public:
+ MOCK_METHOD3(OnNetworkChanged,
+ void(uint32_t bitrate_bps,
+ uint8_t fraction_loss,
+ int64_t rtt_ms));
+};
+
class MockCongestionController : public CongestionController {
public:
MockCongestionController(Clock* clock,
- BitrateObserver* bitrate_observer,
+ Observer* observer,
RemoteBitrateObserver* remote_bitrate_observer)
- : CongestionController(clock,
- bitrate_observer,
- remote_bitrate_observer) {}
+ : CongestionController(clock, observer, remote_bitrate_observer) {}
MOCK_METHOD3(SetBweBitrates,
void(int min_bitrate_bps,
int start_bitrate_bps,
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn b/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn
index aa33993192a..894d9308e36 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn
@@ -60,9 +60,7 @@ source_set("desktop_capture") {
"mouse_cursor_monitor.h",
"mouse_cursor_monitor_mac.mm",
"mouse_cursor_monitor_win.cc",
- "screen_capture_frame_queue.cc",
"screen_capture_frame_queue.h",
- "screen_capturer.cc",
"screen_capturer.h",
"screen_capturer_helper.cc",
"screen_capturer_helper.h",
@@ -87,7 +85,6 @@ source_set("desktop_capture") {
"win/screen_capturer_win_magnifier.h",
"win/window_capture_utils.cc",
"win/window_capture_utils.h",
- "window_capturer.cc",
"window_capturer.h",
"window_capturer_mac.mm",
"window_capturer_win.cc",
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/cropped_desktop_frame.cc b/chromium/third_party/webrtc/modules/desktop_capture/cropped_desktop_frame.cc
index f57fc572b65..733fe9b2adf 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/cropped_desktop_frame.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/cropped_desktop_frame.cc
@@ -12,6 +12,8 @@
#include "webrtc/modules/desktop_capture/cropped_desktop_frame.h"
+#include "webrtc/base/constructormagic.h"
+
namespace webrtc {
// A DesktopFrame that is a sub-rect of another DesktopFrame.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.cc b/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.cc
index 0dd564f1705..cbe7d96e5d6 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.cc
@@ -32,7 +32,7 @@ void CroppingWindowCapturer::Start(DesktopCapturer::Callback* callback) {
}
void CroppingWindowCapturer::SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) {
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {
window_capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory));
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.h b/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.h
index 177b5443a31..dfeb447e449 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/cropping_window_capturer.h
@@ -13,7 +13,6 @@
#include <memory>
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/screen_capturer.h"
#include "webrtc/modules/desktop_capture/window_capturer.h"
@@ -32,7 +31,7 @@ class CroppingWindowCapturer : public WindowCapturer,
// DesktopCapturer implementation.
void Start(DesktopCapturer::Callback* callback) override;
void SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) override;
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;
void Capture(const DesktopRegion& region) override;
void SetExcludedWindow(WindowId window) override;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
index 55afedeabf1..4c6e27e561e 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
@@ -12,6 +12,7 @@
#include <string.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_capturer.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/mouse_cursor.h"
@@ -137,7 +138,7 @@ void DesktopAndCursorComposer::Start(DesktopCapturer::Callback* callback) {
}
void DesktopAndCursorComposer::SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) {
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {
desktop_capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory));
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
index cd0b2cfbb65..dcbe6129e65 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
@@ -13,7 +13,7 @@
#include <memory>
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_capturer.h"
#include "webrtc/modules/desktop_capture/mouse_cursor_monitor.h"
@@ -36,7 +36,7 @@ class DesktopAndCursorComposer : public DesktopCapturer,
// DesktopCapturer interface.
void Start(DesktopCapturer::Callback* callback) override;
void SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) override;
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;
void Capture(const DesktopRegion& region) override;
void SetExcludedWindow(WindowId window) override;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi
index b92447c3497..c4fbabf6b24 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi
@@ -56,9 +56,7 @@
"mouse_cursor_monitor_mac.mm",
"mouse_cursor_monitor_win.cc",
"mouse_cursor_monitor_x11.cc",
- "screen_capture_frame_queue.cc",
"screen_capture_frame_queue.h",
- "screen_capturer.cc",
"screen_capturer.h",
"screen_capturer_helper.cc",
"screen_capturer_helper.h",
@@ -84,7 +82,6 @@
"win/screen_capture_utils.h",
"win/window_capture_utils.cc",
"win/window_capture_utils.h",
- "window_capturer.cc",
"window_capturer.h",
"window_capturer_mac.mm",
"window_capturer_win.cc",
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h
index 47f78dc3ff4..ba70e015537 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h
@@ -13,7 +13,8 @@
#include <stddef.h>
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
#include "webrtc/modules/desktop_capture/desktop_capture_types.h"
#include "webrtc/modules/desktop_capture/shared_memory.h"
@@ -28,11 +29,6 @@ class DesktopCapturer {
// Interface that must be implemented by the DesktopCapturer consumers.
class Callback {
public:
- // Deprecated.
- // TODO(sergeyu): Remove this method once all references to it are removed
- // from chromium.
- virtual SharedMemory* CreateSharedMemory(size_t size) { return nullptr; }
-
// Called after a frame has been captured. Handler must take ownership of
// |frame|. If capture has failed for any reason |frame| is set to NULL
// (e.g. the window has been closed).
@@ -53,7 +49,7 @@ class DesktopCapturer {
// where Capture() is called. It will be destroyed on the same thread. Shared
// memory is currently supported only by some DesktopCapturer implementations.
virtual void SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) {}
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {}
// Captures next frame. |region| specifies region of the capture target that
// should be fresh in the resulting frame. The frame may also include fresh
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.cc b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.cc
index 6bc7b2e38f6..3278ed46dcb 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.cc
@@ -84,8 +84,7 @@ std::unique_ptr<DesktopFrame> SharedMemoryDesktopFrame::Create(
size_t buffer_size =
size.width() * size.height() * DesktopFrame::kBytesPerPixel;
std::unique_ptr<SharedMemory> shared_memory;
- shared_memory = rtc::ScopedToUnique(
- shared_memory_factory->CreateSharedMemory(buffer_size));
+ shared_memory = shared_memory_factory->CreateSharedMemory(buffer_size);
if (!shared_memory)
return nullptr;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.h
index 3cd839ca1b8..53091239e40 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame.h
@@ -13,6 +13,7 @@
#include <memory>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
#include "webrtc/modules/desktop_capture/desktop_region.h"
#include "webrtc/modules/desktop_capture/shared_memory.h"
@@ -55,11 +56,6 @@ class DesktopFrame {
int64_t capture_time_ms() const { return capture_time_ms_; }
void set_capture_time_ms(int64_t time_ms) { capture_time_ms_ = time_ms; }
- // Optional shape for the frame. Frames may be shaped e.g. if
- // capturing the contents of a shaped window.
- const DesktopRegion* shape() const { return shape_.get(); }
- void set_shape(DesktopRegion* shape) { shape_.reset(shape); }
-
// Copies pixels from a buffer or another frame. |dest_rect| rect must lay
// within bounds of this frame.
void CopyPixelsFrom(uint8_t* src_buffer, int src_stride,
@@ -89,7 +85,6 @@ class DesktopFrame {
DesktopRegion updated_region_;
DesktopVector dpi_;
int64_t capture_time_ms_;
- std::unique_ptr<DesktopRegion> shape_;
private:
RTC_DISALLOW_COPY_AND_ASSIGN(DesktopFrame);
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.cc b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.cc
index f139fb5cddf..624b729203b 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.cc
@@ -49,10 +49,8 @@ DesktopFrameWin* DesktopFrameWin::Create(
std::unique_ptr<SharedMemory> shared_memory;
HANDLE section_handle = nullptr;
if (shared_memory_factory) {
- shared_memory = rtc::ScopedToUnique(
- shared_memory_factory->CreateSharedMemory(buffer_size));
- if (shared_memory)
- section_handle = shared_memory->handle();
+ shared_memory = shared_memory_factory->CreateSharedMemory(buffer_size);
+ section_handle = shared_memory->handle();
}
void* data = nullptr;
HBITMAP bitmap = CreateDIBSection(hdc, &bmi, DIB_RGB_COLORS, &data,
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.h
index 929d23c0e56..3513e14ffb7 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_frame_win.h
@@ -15,6 +15,7 @@
#include <windows.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/typedefs.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.cc b/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.cc
index bc9972660ad..e130c103811 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.cc
@@ -20,6 +20,9 @@ DesktopRegion::RowSpan::RowSpan(int32_t left, int32_t right)
: left(left), right(right) {
}
+DesktopRegion::Row::Row(const Row&) = default;
+DesktopRegion::Row::Row(Row&&) = default;
+
DesktopRegion::Row::Row(int32_t top, int32_t bottom)
: top(top), bottom(bottom) {
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h
index c86da56e173..5278159412a 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h
@@ -47,6 +47,8 @@ class DesktopRegion {
// Row represents a single row of a region. A row is set of rectangles that
// have the same vertical position.
struct Row {
+ Row(const Row&);
+ Row(Row&&);
Row(int32_t top, int32_t bottom);
~Row();
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/differ.h b/chromium/third_party/webrtc/modules/desktop_capture/differ.h
index c3dcd4b0e1d..9ab059bcaa2 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/differ.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/differ.h
@@ -14,6 +14,7 @@
#include <memory>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_region.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc
index df4e6a45c0a..543910de289 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc
@@ -11,6 +11,7 @@
#include <memory>
#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/differ.h"
#include "webrtc/modules/desktop_capture/differ_block.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h
index 2f2dd72a6e5..329beef23d3 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h
@@ -16,6 +16,7 @@
#include <memory>
#include <set>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
#include "webrtc/system_wrappers/include/atomic32.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.cc b/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.cc
index 2d3c2d90479..451acb3db5e 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.cc
@@ -15,6 +15,7 @@
#include <string>
#include "webrtc/base/macutils.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/desktop_capture/mac/window_list_utils.h"
#include "webrtc/system_wrappers/include/logging.h"
@@ -141,7 +142,7 @@ bool IsChromeWindow(CGWindowID id) {
} // namespace
FullScreenChromeWindowDetector::FullScreenChromeWindowDetector()
- : ref_count_(0) {}
+ : ref_count_(0), last_update_time_ns_(0) {}
FullScreenChromeWindowDetector::~FullScreenChromeWindowDetector() {}
@@ -161,10 +162,7 @@ CGWindowID FullScreenChromeWindowDetector::FindFullScreenWindow(
if (static_cast<CGWindowID>(it->id) != full_screen_window_id)
continue;
- int64_t time_interval =
- (TickTime::Now() - last_udpate_time_).Milliseconds();
- LOG(LS_WARNING) << "The full-screen window exists in the list, "
- << "which was updated " << time_interval << "ms ago.";
+ LOG(LS_WARNING) << "The full-screen window exists in the list.";
return kCGNullWindowID;
}
@@ -174,7 +172,7 @@ CGWindowID FullScreenChromeWindowDetector::FindFullScreenWindow(
void FullScreenChromeWindowDetector::UpdateWindowListIfNeeded(
CGWindowID original_window) {
if (IsChromeWindow(original_window) &&
- (TickTime::Now() - last_udpate_time_).Milliseconds()
+ (rtc::TimeNanos() - last_update_time_ns_) / rtc::kNumNanosecsPerMillisec
> kUpdateIntervalMs) {
previous_window_list_.clear();
previous_window_list_.swap(current_window_list_);
@@ -186,7 +184,7 @@ void FullScreenChromeWindowDetector::UpdateWindowListIfNeeded(
}
GetWindowList(&current_window_list_);
- last_udpate_time_ = TickTime::Now();
+ last_update_time_ns_ = rtc::TimeNanos();
}
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h b/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h
index 4e6008966e6..838966d46eb 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h
@@ -13,9 +13,9 @@
#include <ApplicationServices/ApplicationServices.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/window_capturer.h"
#include "webrtc/system_wrappers/include/atomic32.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -59,7 +59,7 @@ class FullScreenChromeWindowDetector {
// full-screen window exists in the list) if Capture() is called too soon.
WindowCapturer::WindowList current_window_list_;
WindowCapturer::WindowList previous_window_list_;
- TickTime last_udpate_time_;
+ int64_t last_update_time_ns_;
RTC_DISALLOW_COPY_AND_ASSIGN(FullScreenChromeWindowDetector);
};
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
index 204bb00b160..479a39a0217 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
@@ -11,10 +11,12 @@
#include "webrtc/modules/desktop_capture/mouse_cursor_monitor.h"
#include <assert.h>
+#include <string.h>
#include <memory>
#include "webrtc/modules/desktop_capture/desktop_frame.h"
+#include "webrtc/modules/desktop_capture/desktop_geometry.h"
#include "webrtc/modules/desktop_capture/mouse_cursor.h"
#include "webrtc/modules/desktop_capture/win/cursor.h"
#include "webrtc/modules/desktop_capture/win/window_capture_utils.h"
@@ -22,6 +24,17 @@
namespace webrtc {
+namespace {
+
+bool IsSameCursorShape(const CURSORINFO& left, const CURSORINFO& right) {
+ // If the cursors are not showing, we do not care the hCursor handle.
+ return left.flags == right.flags &&
+ (left.flags != CURSOR_SHOWING ||
+ left.hCursor == right.hCursor);
+}
+
+} // namespace
+
class MouseCursorMonitorWin : public MouseCursorMonitor {
public:
explicit MouseCursorMonitorWin(HWND window);
@@ -45,7 +58,8 @@ class MouseCursorMonitorWin : public MouseCursorMonitor {
HDC desktop_dc_;
- HCURSOR last_cursor_;
+ // The last CURSORINFO (converted to MouseCursor) we have sent to the client.
+ CURSORINFO last_cursor_;
};
MouseCursorMonitorWin::MouseCursorMonitorWin(HWND window)
@@ -53,8 +67,8 @@ MouseCursorMonitorWin::MouseCursorMonitorWin(HWND window)
screen_(kInvalidScreenId),
callback_(NULL),
mode_(SHAPE_AND_POSITION),
- desktop_dc_(NULL),
- last_cursor_(NULL) {
+ desktop_dc_(NULL) {
+ memset(&last_cursor_, 0, sizeof(CURSORINFO));
}
MouseCursorMonitorWin::MouseCursorMonitorWin(ScreenId screen)
@@ -62,9 +76,9 @@ MouseCursorMonitorWin::MouseCursorMonitorWin(ScreenId screen)
screen_(screen),
callback_(NULL),
mode_(SHAPE_AND_POSITION),
- desktop_dc_(NULL),
- last_cursor_(NULL) {
+ desktop_dc_(NULL) {
assert(screen >= kFullDesktopScreenId);
+ memset(&last_cursor_, 0, sizeof(CURSORINFO));
}
MouseCursorMonitorWin::~MouseCursorMonitorWin() {
@@ -92,13 +106,31 @@ void MouseCursorMonitorWin::Capture() {
return;
}
- if (last_cursor_ != cursor_info.hCursor) {
- last_cursor_ = cursor_info.hCursor;
- // Note that |cursor_info.hCursor| does not need to be freed.
- std::unique_ptr<MouseCursor> cursor(
- CreateMouseCursorFromHCursor(desktop_dc_, cursor_info.hCursor));
- if (cursor.get())
- callback_->OnMouseCursor(cursor.release());
+ if (!IsSameCursorShape(cursor_info, last_cursor_)) {
+ if (cursor_info.flags == CURSOR_SUPPRESSED) {
+ // The cursor is intentionally hidden now, send an empty bitmap.
+ last_cursor_ = cursor_info;
+ callback_->OnMouseCursor(new MouseCursor(
+ new BasicDesktopFrame(DesktopSize()), DesktopVector()));
+ } else {
+ // According to MSDN https://goo.gl/u6gyuC, HCURSOR instances returned by
+ // functions other than CreateCursor do not need to be actively destroyed.
+ // And CloseHandle function (https://goo.gl/ja5ycW) does not close a
+ // cursor, so assume a HCURSOR does not need to be closed.
+ if (cursor_info.flags == 0) {
+ // Host machine does not have a hardware mouse attached, we will send a
+ // default one instead.
+ // Note, Windows automatically caches cursor resource, so we do not need
+ // to cache the result of LoadCursor.
+ cursor_info.hCursor = LoadCursor(nullptr, IDC_ARROW);
+ }
+ std::unique_ptr<MouseCursor> cursor(
+ CreateMouseCursorFromHCursor(desktop_dc_, cursor_info.hCursor));
+ if (cursor) {
+ last_cursor_ = cursor_info;
+ callback_->OnMouseCursor(cursor.release());
+ }
+ }
}
if (mode_ != SHAPE_AND_POSITION)
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_shape.h b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_shape.h
deleted file mode 100644
index 57120a0b3fa..00000000000
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_shape.h
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_SHAPE_H_
-#define WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_SHAPE_H_
-
-// This file is no longer needed, but some code in chromium still includes it.
-// TODO(sergeyu): Cleanup dependencies in chromium and remove this file.
-
-#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_SHAPE_H_
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc
deleted file mode 100644
index 94d8a27b137..00000000000
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
-
-#include <assert.h>
-#include <algorithm>
-
-#include "webrtc/modules/desktop_capture/desktop_frame.h"
-#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
-#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-ScreenCaptureFrameQueue::ScreenCaptureFrameQueue() : current_(0) {}
-
-ScreenCaptureFrameQueue::~ScreenCaptureFrameQueue() {}
-
-void ScreenCaptureFrameQueue::MoveToNextFrame() {
- current_ = (current_ + 1) % kQueueLength;
-
- // Verify that the frame is not shared, i.e. that consumer has released it
- // before attempting to capture again.
- assert(!frames_[current_].get() || !frames_[current_]->IsShared());
-}
-
-void ScreenCaptureFrameQueue::ReplaceCurrentFrame(DesktopFrame* frame) {
- frames_[current_].reset(SharedDesktopFrame::Wrap(frame));
-}
-
-void ScreenCaptureFrameQueue::Reset() {
- for (int i = 0; i < kQueueLength; ++i)
- frames_[i].reset();
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.h b/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.h
index 21af0f320fc..97f3b810e9d 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.h
@@ -13,12 +13,12 @@
#include <memory>
-#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
-#include "webrtc/typedefs.h"
+#include "webrtc/base/constructormagic.h"
+// TODO(zijiehe): These headers are not used in this file, but to avoid build
+// break in remoting/host. We should add headers in each individual files.
+#include "webrtc/modules/desktop_capture/desktop_frame.h" // Remove
+#include "webrtc/modules/desktop_capture/shared_desktop_frame.h" // Remove
-namespace webrtc {
-class DesktopFrame;
-} // namespace webrtc
namespace webrtc {
@@ -35,28 +35,38 @@ namespace webrtc {
// Frame consumer is expected to never hold more than kQueueLength frames
// created by this function and it should release the earliest one before trying
// to capture a new frame (i.e. before MoveToNextFrame() is called).
+template <typename FrameType>
class ScreenCaptureFrameQueue {
public:
- ScreenCaptureFrameQueue();
- ~ScreenCaptureFrameQueue();
+ ScreenCaptureFrameQueue() : current_(0) {}
+ ~ScreenCaptureFrameQueue() = default;
// Moves to the next frame in the queue, moving the 'current' frame to become
// the 'previous' one.
- void MoveToNextFrame();
+ void MoveToNextFrame() {
+ current_ = (current_ + 1) % kQueueLength;
+ }
// Replaces the current frame with a new one allocated by the caller. The
// existing frame (if any) is destroyed. Takes ownership of |frame|.
- void ReplaceCurrentFrame(DesktopFrame* frame);
+ void ReplaceCurrentFrame(FrameType* frame) {
+ frames_[current_].reset(frame);
+ }
// Marks all frames obsolete and resets the previous frame pointer. No
// frames are freed though as the caller can still access them.
- void Reset();
+ void Reset() {
+ for (int i = 0; i < kQueueLength; i++) {
+ frames_[i].reset();
+ }
+ current_ = 0;
+ }
- SharedDesktopFrame* current_frame() const {
+ FrameType* current_frame() const {
return frames_[current_].get();
}
- SharedDesktopFrame* previous_frame() const {
+ FrameType* previous_frame() const {
return frames_[(current_ + kQueueLength - 1) % kQueueLength].get();
}
@@ -65,7 +75,7 @@ class ScreenCaptureFrameQueue {
int current_;
static const int kQueueLength = 2;
- std::unique_ptr<SharedDesktopFrame> frames_[kQueueLength];
+ std::unique_ptr<FrameType> frames_[kQueueLength];
RTC_DISALLOW_COPY_AND_ASSIGN(ScreenCaptureFrameQueue);
};
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.cc
deleted file mode 100644
index 97f69d3baff..00000000000
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.cc
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/desktop_capture/screen_capturer.h"
-
-#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
-
-namespace webrtc {
-
-ScreenCapturer* ScreenCapturer::Create() {
- return Create(DesktopCaptureOptions::CreateDefault());
-}
-
-#if defined(WEBRTC_LINUX)
-ScreenCapturer* ScreenCapturer::CreateWithXDamage(
- bool use_update_notifications) {
- DesktopCaptureOptions options;
- options.set_use_update_notifications(use_update_notifications);
- return Create(options);
-}
-#elif defined(WEBRTC_WIN)
-ScreenCapturer* ScreenCapturer::CreateWithDisableAero(bool disable_effects) {
- DesktopCaptureOptions options;
- options.set_disable_effects(disable_effects);
- return Create(options);
-}
-#endif
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h
index b4e34887662..48ecc31fbd8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h
@@ -48,34 +48,10 @@ class ScreenCapturer : public DesktopCapturer {
};
typedef std::vector<Screen> ScreenList;
- // TODO(sergeyu): Remove this class once all dependencies are removed from
- // chromium.
- class MouseShapeObserver {
- };
-
virtual ~ScreenCapturer() {}
- // Creates platform-specific capturer.
- //
- // TODO(sergeyu): Remove all Create() methods except the first one.
- // crbug.com/172183
+ // Creates a platform-specific capturer.
static ScreenCapturer* Create(const DesktopCaptureOptions& options);
- static ScreenCapturer* Create();
-
-#if defined(WEBRTC_LINUX)
- // Creates platform-specific capturer and instructs it whether it should use
- // X DAMAGE support.
- static ScreenCapturer* CreateWithXDamage(bool use_x_damage);
-#elif defined(WEBRTC_WIN)
- // Creates Windows-specific capturer and instructs it whether or not to
- // disable desktop compositing.
- static ScreenCapturer* CreateWithDisableAero(bool disable_aero);
-#endif // defined(WEBRTC_WIN)
-
- // TODO(sergeyu): Remove this method once all dependencies are removed from
- // chromium.
- virtual void SetMouseShapeObserver(
- MouseShapeObserver* mouse_shape_observer) {};
// Get the list of screens (not containing kFullDesktopScreenId). Returns
// false in case of a failure.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.h b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.h
index f912378333c..458bccc1ed3 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.h
@@ -13,6 +13,7 @@
#include <memory>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
#include "webrtc/modules/desktop_capture/desktop_region.h"
#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm
index c41dc4d7a3b..bf6c72950c2 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm
@@ -22,7 +22,10 @@
#include <OpenGL/CGLMacro.h>
#include <OpenGL/OpenGL.h>
+#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/macutils.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
@@ -32,8 +35,8 @@
#include "webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h"
#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
+#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -234,7 +237,7 @@ class ScreenCapturerMac : public ScreenCapturer {
ScopedPixelBufferObject pixel_buffer_object_;
// Queue of the frames buffers.
- ScreenCaptureFrameQueue queue_;
+ ScreenCaptureFrameQueue<SharedDesktopFrame> queue_;
// Current display configuration.
MacDesktopConfiguration desktop_config_;
@@ -381,9 +384,10 @@ void ScreenCapturerMac::Start(Callback* callback) {
}
void ScreenCapturerMac::Capture(const DesktopRegion& region_to_capture) {
- TickTime capture_start_time = TickTime::Now();
+ int64_t capture_start_time_nanos = rtc::TimeNanos();
queue_.MoveToNextFrame();
+ RTC_DCHECK(!queue_.current_frame() || !queue_.current_frame()->IsShared());
desktop_config_monitor_->Lock();
MacDesktopConfiguration new_config =
@@ -405,7 +409,7 @@ void ScreenCapturerMac::Capture(const DesktopRegion& region_to_capture) {
// Note that we can't reallocate other buffers at this point, since the caller
// may still be reading from them.
if (!queue_.current_frame())
- queue_.ReplaceCurrentFrame(CreateFrame());
+ queue_.ReplaceCurrentFrame(SharedDesktopFrame::Wrap(CreateFrame()));
DesktopFrame* current_frame = queue_.current_frame();
@@ -444,7 +448,8 @@ void ScreenCapturerMac::Capture(const DesktopRegion& region_to_capture) {
desktop_config_monitor_->Unlock();
new_frame->set_capture_time_ms(
- (TickTime::Now() - capture_start_time).Milliseconds());
+ (rtc::TimeNanos() - capture_start_time_nanos) /
+ rtc::kNumNanosecsPerMillisec);
callback_->OnCaptureCompleted(new_frame);
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac_unittest.cc
index 64d649cd9d1..815c7f53afe 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac_unittest.cc
@@ -16,6 +16,7 @@
#include <ostream>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
#include "webrtc/modules/desktop_capture/desktop_region.h"
@@ -38,7 +39,10 @@ class ScreenCapturerMacTest : public testing::Test {
void CaptureDoneCallback2(DesktopFrame* frame);
protected:
- void SetUp() override { capturer_.reset(ScreenCapturer::Create()); }
+ void SetUp() override {
+ capturer_.reset(
+ ScreenCapturer::Create(DesktopCaptureOptions::CreateDefault()));
+ }
std::unique_ptr<ScreenCapturer> capturer_;
MockScreenCapturerCallback callback_;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h
index b1f64e41c97..7264249e0fb 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h
@@ -12,6 +12,7 @@
#define WEBRTC_MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURER_MOCK_OBJECTS_H_
#include "testing/gmock/include/gmock/gmock.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/screen_capturer.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
index 72105acca47..bc87ed3eba8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
@@ -14,6 +14,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_region.h"
@@ -59,8 +60,8 @@ class FakeSharedMemoryFactory : public SharedMemoryFactory {
FakeSharedMemoryFactory() {}
~FakeSharedMemoryFactory() override {}
- rtc::scoped_ptr<SharedMemory> CreateSharedMemory(size_t size) override {
- return rtc::scoped_ptr<SharedMemory>(
+ std::unique_ptr<SharedMemory> CreateSharedMemory(size_t size) override {
+ return std::unique_ptr<SharedMemory>(
new FakeSharedMemory(new char[size], size));
}
@@ -117,7 +118,7 @@ TEST_F(ScreenCapturerTest, UseSharedBuffers) {
capturer_->Start(&callback_);
capturer_->SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory>(new FakeSharedMemoryFactory()));
+ std::unique_ptr<SharedMemoryFactory>(new FakeSharedMemoryFactory()));
capturer_->Capture(DesktopRegion());
ASSERT_TRUE(frame);
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc
index 65e682b6f8b..5540e6820fa 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc
@@ -21,14 +21,16 @@
#include <X11/Xutil.h>
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/differ.h"
#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
+#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
#include "webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h"
#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
namespace {
@@ -106,7 +108,7 @@ class ScreenCapturerLinux : public ScreenCapturer,
ScreenCapturerHelper helper_;
// Queue of the frames buffers.
- ScreenCaptureFrameQueue queue_;
+ ScreenCaptureFrameQueue<SharedDesktopFrame> queue_;
// Invalid region from the previous capture. This is used to synchronize the
// current with the last buffer used.
@@ -234,9 +236,10 @@ void ScreenCapturerLinux::Start(Callback* callback) {
}
void ScreenCapturerLinux::Capture(const DesktopRegion& region) {
- TickTime capture_start_time = TickTime::Now();
+ int64_t capture_start_time_nanos = rtc::TimeNanos();
queue_.MoveToNextFrame();
+ RTC_DCHECK(!queue_.current_frame() || !queue_.current_frame()->IsShared());
// Process XEvents for XDamage and cursor shape tracking.
options_.x_display()->ProcessPendingXEvents();
@@ -256,7 +259,7 @@ void ScreenCapturerLinux::Capture(const DesktopRegion& region) {
if (!queue_.current_frame()) {
std::unique_ptr<DesktopFrame> frame(
new BasicDesktopFrame(x_server_pixel_buffer_.window_size()));
- queue_.ReplaceCurrentFrame(frame.release());
+ queue_.ReplaceCurrentFrame(SharedDesktopFrame::Wrap(frame.release()));
}
// Refresh the Differ helper used by CaptureFrame(), if needed.
@@ -274,7 +277,8 @@ void ScreenCapturerLinux::Capture(const DesktopRegion& region) {
DesktopFrame* result = CaptureScreen();
last_invalid_region_ = result->updated_region();
result->set_capture_time_ms(
- (TickTime::Now() - capture_start_time).Milliseconds());
+ (rtc::TimeNanos() - capture_start_time_nanos) /
+ rtc::kNumNanosecsPerMillisec);
callback_->OnCaptureCompleted(result);
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.cc b/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.cc
index 309bac55add..8d10827e29c 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.cc
@@ -12,6 +12,7 @@
#include <memory>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/include/atomic32.h"
namespace webrtc {
@@ -48,8 +49,7 @@ class SharedDesktopFrame::Core {
SharedDesktopFrame::~SharedDesktopFrame() {}
// static
-SharedDesktopFrame* SharedDesktopFrame::Wrap(
- DesktopFrame* desktop_frame) {
+SharedDesktopFrame* SharedDesktopFrame::Wrap(DesktopFrame* desktop_frame) {
rtc::scoped_refptr<Core> core(new Core(desktop_frame));
return new SharedDesktopFrame(core);
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.h b/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.h
index 7d18db153cd..4f6a2bb7c69 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/shared_desktop_frame.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_SHARED_DESKTOP_FRAME_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_SHARED_DESKTOP_FRAME_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h b/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h
index 45f531e0d53..6e15f23f6b8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h
@@ -17,8 +17,9 @@
#include <windows.h>
#endif
+#include <memory>
+
#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -69,7 +70,7 @@ class SharedMemoryFactory {
SharedMemoryFactory() {}
virtual ~SharedMemoryFactory() {}
- virtual rtc::scoped_ptr<SharedMemory> CreateSharedMemory(size_t size) = 0;
+ virtual std::unique_ptr<SharedMemory> CreateSharedMemory(size_t size) = 0;
private:
RTC_DISALLOW_COPY_AND_ASSIGN(SharedMemoryFactory);
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc
index d3035a15ca5..9df2e5fc9b2 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc
@@ -14,6 +14,8 @@
#include <utility>
+#include "webrtc/base/checks.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
@@ -24,7 +26,6 @@
#include "webrtc/modules/desktop_capture/win/desktop.h"
#include "webrtc/modules/desktop_capture/win/screen_capture_utils.h"
#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -36,22 +37,6 @@ const UINT DWM_EC_ENABLECOMPOSITION = 1;
const wchar_t kDwmapiLibraryName[] = L"dwmapi.dll";
-// SharedMemoryFactory that creates SharedMemory using the deprecated
-// DesktopCapturer::Callback::CreateSharedMemory().
-class CallbackSharedMemoryFactory : public SharedMemoryFactory {
- public:
- CallbackSharedMemoryFactory(DesktopCapturer::Callback* callback)
- : callback_(callback) {}
- ~CallbackSharedMemoryFactory() override {}
-
- rtc::scoped_ptr<SharedMemory> CreateSharedMemory(size_t size) override {
- return rtc::scoped_ptr<SharedMemory>(callback_->CreateSharedMemory(size));
- }
-
- private:
- DesktopCapturer::Callback* callback_;
-};
-
} // namespace
ScreenCapturerWinGdi::ScreenCapturerWinGdi(const DesktopCaptureOptions& options)
@@ -89,15 +74,15 @@ ScreenCapturerWinGdi::~ScreenCapturerWinGdi() {
}
void ScreenCapturerWinGdi::SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) {
- shared_memory_factory_ =
- rtc::ScopedToUnique(std::move(shared_memory_factory));
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {
+ shared_memory_factory_ = std::move(shared_memory_factory);
}
void ScreenCapturerWinGdi::Capture(const DesktopRegion& region) {
- TickTime capture_start_time = TickTime::Now();
+ int64_t capture_start_time_nanos = rtc::TimeNanos();
queue_.MoveToNextFrame();
+ RTC_DCHECK(!queue_.current_frame() || !queue_.current_frame()->IsShared());
// Request that the system not power-down the system, or the display hardware.
if (!SetThreadExecutionState(ES_DISPLAY_REQUIRED | ES_SYSTEM_REQUIRED)) {
@@ -152,7 +137,8 @@ void ScreenCapturerWinGdi::Capture(const DesktopRegion& region) {
frame->mutable_updated_region()->Clear();
helper_.TakeInvalidRegion(frame->mutable_updated_region());
frame->set_capture_time_ms(
- (TickTime::Now() - capture_start_time).Milliseconds());
+ (rtc::TimeNanos() - capture_start_time_nanos) /
+ rtc::kNumNanosecsPerMillisec);
callback_->OnCaptureCompleted(frame);
}
@@ -172,8 +158,6 @@ void ScreenCapturerWinGdi::Start(Callback* callback) {
assert(callback);
callback_ = callback;
- if (!shared_memory_factory_)
- shared_memory_factory_.reset(new CallbackSharedMemoryFactory(callback));
// Vote to disable Aero composited desktop effects while capturing. Windows
// will restore Aero automatically if the process exits. This has no effect
@@ -265,9 +249,9 @@ bool ScreenCapturerWinGdi::CaptureImage() {
std::unique_ptr<DesktopFrame> buffer(DesktopFrameWin::Create(
size, shared_memory_factory_.get(), desktop_dc_));
- if (!buffer.get())
+ if (!buffer)
return false;
- queue_.ReplaceCurrentFrame(buffer.release());
+ queue_.ReplaceCurrentFrame(SharedDesktopFrame::Wrap(buffer.release()));
}
// Select the target bitmap into the memory dc and copy the rect from desktop
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h
index 17cb0aa1940..5a50580e690 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h
@@ -17,9 +17,10 @@
#include <windows.h>
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
+#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
#include "webrtc/modules/desktop_capture/win/scoped_thread_desktop.h"
namespace webrtc {
@@ -37,7 +38,7 @@ class ScreenCapturerWinGdi : public ScreenCapturer {
// Overridden from ScreenCapturer:
void Start(Callback* callback) override;
void SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) override;
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;
void Capture(const DesktopRegion& region) override;
bool GetScreenList(ScreenList* screens) override;
bool SelectScreen(ScreenId id) override;
@@ -71,7 +72,7 @@ class ScreenCapturerWinGdi : public ScreenCapturer {
HDC memory_dc_;
// Queue of the frames buffers.
- ScreenCaptureFrameQueue queue_;
+ ScreenCaptureFrameQueue<SharedDesktopFrame> queue_;
// Rectangle describing the bounds of the desktop device context, relative to
// the primary display's top-left.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc
index 8af9779ce80..6b4308bb7a3 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc
@@ -14,6 +14,7 @@
#include <utility>
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
@@ -24,7 +25,6 @@
#include "webrtc/modules/desktop_capture/win/desktop.h"
#include "webrtc/modules/desktop_capture/win/screen_capture_utils.h"
#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -82,13 +82,12 @@ void ScreenCapturerWinMagnifier::Start(Callback* callback) {
}
void ScreenCapturerWinMagnifier::SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) {
- shared_memory_factory_ =
- rtc::ScopedToUnique(std::move(shared_memory_factory));
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {
+ shared_memory_factory_ = std::move(shared_memory_factory);
}
void ScreenCapturerWinMagnifier::Capture(const DesktopRegion& region) {
- TickTime capture_start_time = TickTime::Now();
+ int64_t capture_start_time_nanos = rtc::TimeNanos();
queue_.MoveToNextFrame();
@@ -169,7 +168,8 @@ void ScreenCapturerWinMagnifier::Capture(const DesktopRegion& region) {
frame->mutable_updated_region()->Clear();
helper_.TakeInvalidRegion(frame->mutable_updated_region());
frame->set_capture_time_ms(
- (TickTime::Now() - capture_start_time).Milliseconds());
+ (rtc::TimeNanos() - capture_start_time_nanos) /
+ rtc::kNumNanosecsPerMillisec);
callback_->OnCaptureCompleted(frame);
}
@@ -433,7 +433,7 @@ void ScreenCapturerWinMagnifier::CreateCurrentFrameIfNecessary(
? SharedMemoryDesktopFrame::Create(size,
shared_memory_factory_.get())
: std::unique_ptr<DesktopFrame>(new BasicDesktopFrame(size));
- queue_.ReplaceCurrentFrame(frame.release());
+ queue_.ReplaceCurrentFrame(SharedDesktopFrame::Wrap(frame.release()));
}
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h
index d5e3946d627..623c8a30034 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h
@@ -18,10 +18,10 @@
#include <wincodec.h>
#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
#include "webrtc/modules/desktop_capture/screen_capturer.h"
#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
+#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
#include "webrtc/modules/desktop_capture/win/scoped_thread_desktop.h"
#include "webrtc/system_wrappers/include/atomic32.h"
@@ -47,7 +47,7 @@ class ScreenCapturerWinMagnifier : public ScreenCapturer {
// Overridden from ScreenCapturer:
void Start(Callback* callback) override;
void SetSharedMemoryFactory(
- rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) override;
+ std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;
void Capture(const DesktopRegion& region) override;
bool GetScreenList(ScreenList* screens) override;
bool SelectScreen(ScreenId id) override;
@@ -118,7 +118,7 @@ class ScreenCapturerWinMagnifier : public ScreenCapturer {
ScreenCapturerHelper helper_;
// Queue of the frames buffers.
- ScreenCaptureFrameQueue queue_;
+ ScreenCaptureFrameQueue<SharedDesktopFrame> queue_;
// Class to calculate the difference between two screen bitmaps.
std::unique_ptr<Differ> differ_;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h b/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h
index 7c80490f609..85bca4c8246 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h
@@ -10,6 +10,7 @@
#include <windows.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.cc
deleted file mode 100644
index c5176d5e605..00000000000
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.cc
+++ /dev/null
@@ -1,22 +0,0 @@
- /*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/desktop_capture/window_capturer.h"
-
-#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
-
-namespace webrtc {
-
-// static
-WindowCapturer* WindowCapturer::Create() {
- return Create(DesktopCaptureOptions::CreateDefault());
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h
index 9ba441a8ecb..eb9b9feaf04 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h
@@ -38,9 +38,6 @@ class WindowCapturer : public DesktopCapturer {
static WindowCapturer* Create(const DesktopCaptureOptions& options);
- // TODO(sergeyu): Remove this method. crbug.com/172183
- static WindowCapturer* Create();
-
virtual ~WindowCapturer() {}
// Get list of windows. Returns false in case of a failure.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm
index 22061edbe7a..ac5fdb6bc1a 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm
@@ -15,6 +15,7 @@
#include <Cocoa/Cocoa.h>
#include <CoreFoundation/CoreFoundation.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/macutils.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
@@ -23,7 +24,6 @@
#include "webrtc/modules/desktop_capture/mac/full_screen_chrome_window_detector.h"
#include "webrtc/modules/desktop_capture/mac/window_list_utils.h"
#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc
index b74f17e39bf..5f32c3d71cc 100755
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc
@@ -12,6 +12,7 @@
#include <assert.h>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc
index 0d594a2a096..702324372bd 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc
@@ -15,6 +15,7 @@
#include <memory>
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/win32.h"
#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
#include "webrtc/modules/desktop_capture/win/window_capture_utils.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc
index 68e1725db31..8ead98109a8 100755
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc
@@ -19,6 +19,7 @@
#include <algorithm>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/x11/shared_x_display.h b/chromium/third_party/webrtc/modules/desktop_capture/x11/shared_x_display.h
index d905b9e51c1..6cade2decc0 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/x11/shared_x_display.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/x11/shared_x_display.h
@@ -19,6 +19,7 @@
#include <string>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/system_wrappers/include/atomic32.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h
index d1e6632f082..b868b044a81 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h
@@ -13,6 +13,7 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_X11_X_SERVER_PIXEL_BUFFER_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_X11_X_SERVER_PIXEL_BUFFER_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
#include <X11/Xutil.h>
diff --git a/chromium/third_party/webrtc/modules/include/module_common_types.h b/chromium/third_party/webrtc/modules/include/module_common_types.h
index 82d87d5c5c7..3572cd6fc5c 100644
--- a/chromium/third_party/webrtc/modules/include/module_common_types.h
+++ b/chromium/third_party/webrtc/modules/include/module_common_types.h
@@ -71,9 +71,10 @@ struct RTPVideoHeaderVP8 {
};
enum TemporalStructureMode {
- kTemporalStructureMode1, // 1 temporal layer structure - i.e., IPPP...
- kTemporalStructureMode2, // 2 temporal layers 0-1-0-1...
- kTemporalStructureMode3 // 3 temporal layers 0-2-1-2-0-2-1-2...
+ kTemporalStructureMode1, // 1 temporal layer structure - i.e., IPPP...
+ kTemporalStructureMode2, // 2 temporal layers 01...
+ kTemporalStructureMode3, // 3 temporal layers 0212...
+ kTemporalStructureMode4 // 3 temporal layers 02120212...
};
struct GofInfoVP9 {
@@ -121,6 +122,52 @@ struct GofInfoVP9 {
pid_diff[3][0] = 1;
pid_diff[3][1] = 2;
break;
+ case kTemporalStructureMode4:
+ num_frames_in_gof = 8;
+ temporal_idx[0] = 0;
+ temporal_up_switch[0] = false;
+ num_ref_pics[0] = 1;
+ pid_diff[0][0] = 4;
+
+ temporal_idx[1] = 2;
+ temporal_up_switch[1] = true;
+ num_ref_pics[1] = 1;
+ pid_diff[1][0] = 1;
+
+ temporal_idx[2] = 1;
+ temporal_up_switch[2] = true;
+ num_ref_pics[2] = 1;
+ pid_diff[2][0] = 2;
+
+ temporal_idx[3] = 2;
+ temporal_up_switch[3] = false;
+ num_ref_pics[3] = 2;
+ pid_diff[3][0] = 1;
+ pid_diff[3][1] = 2;
+
+ temporal_idx[4] = 0;
+ temporal_up_switch[0] = false;
+ num_ref_pics[4] = 1;
+ pid_diff[4][0] = 4;
+
+ temporal_idx[5] = 2;
+ temporal_up_switch[1] = false;
+ num_ref_pics[5] = 2;
+ pid_diff[5][0] = 1;
+ pid_diff[5][1] = 2;
+
+ temporal_idx[6] = 1;
+ temporal_up_switch[2] = false;
+ num_ref_pics[6] = 2;
+ pid_diff[6][0] = 2;
+ pid_diff[6][1] = 4;
+
+ temporal_idx[7] = 2;
+ temporal_up_switch[3] = false;
+ num_ref_pics[7] = 2;
+ pid_diff[7][0] = 1;
+ pid_diff[7][1] = 2;
+ break;
default:
assert(false);
}
@@ -143,6 +190,7 @@ struct GofInfoVP9 {
bool temporal_up_switch[kMaxVp9FramesInGof];
uint8_t num_ref_pics[kMaxVp9FramesInGof];
uint8_t pid_diff[kMaxVp9FramesInGof][kMaxVp9RefPics];
+ uint16_t pid_start;
};
struct RTPVideoHeaderVP9 {
@@ -432,7 +480,6 @@ enum FecMaskType {
// Struct containing forward error correction settings.
struct FecProtectionParams {
int fec_rate;
- bool use_uep_protection;
int max_fec_frames;
FecMaskType fec_mask_type;
};
@@ -447,25 +494,6 @@ class CallStatsObserver {
virtual ~CallStatsObserver() {}
};
-struct VideoContentMetrics {
- VideoContentMetrics()
- : motion_magnitude(0.0f),
- spatial_pred_err(0.0f),
- spatial_pred_err_h(0.0f),
- spatial_pred_err_v(0.0f) {}
-
- void Reset() {
- motion_magnitude = 0.0f;
- spatial_pred_err = 0.0f;
- spatial_pred_err_h = 0.0f;
- spatial_pred_err_v = 0.0f;
- }
- float motion_magnitude;
- float spatial_pred_err;
- float spatial_pred_err_h;
- float spatial_pred_err_v;
-};
-
/* This class holds up to 60 ms of super-wideband (32 kHz) stereo audio. It
* allows for adding and subtracting frames while keeping track of the resulting
* states.
diff --git a/chromium/third_party/webrtc/modules/media_file/media_file_impl.cc b/chromium/third_party/webrtc/modules/media_file/media_file_impl.cc
index 76bcca74d2c..27fe9613a3b 100644
--- a/chromium/third_party/webrtc/modules/media_file/media_file_impl.cc
+++ b/chromium/third_party/webrtc/modules/media_file/media_file_impl.cc
@@ -14,7 +14,6 @@
#include "webrtc/modules/media_file/media_file_impl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/modules.gyp b/chromium/third_party/webrtc/modules/modules.gyp
index d5df8539051..e11bbfe88b3 100644
--- a/chromium/third_party/webrtc/modules/modules.gyp
+++ b/chromium/third_party/webrtc/modules/modules.gyp
@@ -26,7 +26,6 @@
'video_coding/video_coding.gypi',
'video_capture/video_capture.gypi',
'video_processing/video_processing.gypi',
- 'video_render/video_render.gypi',
],
'conditions': [
['include_tests==1', {
@@ -64,7 +63,6 @@
'<(webrtc_root)/common.gyp:webrtc_common',
'<(webrtc_root)/common_video/common_video.gyp:common_video',
'<(webrtc_root)/modules/video_coding/codecs/vp8/vp8.gyp:webrtc_vp8',
- '<(webrtc_root)/modules/video_coding/codecs/vp9/vp9.gyp:webrtc_vp9',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/test/metrics.gyp:metrics',
'<(webrtc_root)/test/test.gyp:test_support',
@@ -86,7 +84,6 @@
'audio_coding/test/PCMFile.cc',
'audio_coding/test/PacketLossTest.cc',
'audio_coding/test/RTPFile.cc',
- 'audio_coding/test/SpatialAudio.cc',
'audio_coding/test/TestAllCodecs.cc',
'audio_coding/test/TestRedFec.cc',
'audio_coding/test/TestStereo.cc',
@@ -133,6 +130,7 @@
'audio_processing',
'audioproc_test_utils',
'bitrate_controller',
+ 'builtin_audio_decoder_factory',
'bwe_simulator',
'cng',
'isac_fix',
@@ -167,7 +165,7 @@
'<(webrtc_root)/tools/tools.gyp:agc_test_utils',
],
'sources': [
- 'audio_coding/codecs/audio_encoder_unittest.cc',
+ 'audio_coding/codecs/audio_decoder_factory_unittest.cc',
'audio_coding/codecs/cng/audio_encoder_cng_unittest.cc',
'audio_coding/acm2/acm_receiver_unittest_oldapi.cc',
'audio_coding/acm2/audio_coding_module_unittest_oldapi.cc',
@@ -214,6 +212,7 @@
'audio_coding/neteq/post_decode_vad_unittest.cc',
'audio_coding/neteq/random_vector_unittest.cc',
'audio_coding/neteq/sync_buffer_unittest.cc',
+ 'audio_coding/neteq/tick_timer_unittest.cc',
'audio_coding/neteq/timestamp_scaler_unittest.cc',
'audio_coding/neteq/time_stretch_unittest.cc',
'audio_coding/neteq/mock/mock_audio_decoder.h',
@@ -270,6 +269,7 @@
'audio_processing/vad/voice_activity_detector_unittest.cc',
'bitrate_controller/bitrate_controller_unittest.cc',
'bitrate_controller/send_side_bandwidth_estimation_unittest.cc',
+ 'congestion_controller/congestion_controller_unittest.cc',
'media_file/media_file_unittest.cc',
'module_common_types_unittest.cc',
'pacing/bitrate_prober_unittest.cc',
@@ -338,6 +338,7 @@
'rtp_rtcp/source/rtp_format_vp8_unittest.cc',
'rtp_rtcp/source/rtp_format_vp9_unittest.cc',
'rtp_rtcp/source/rtp_packet_history_unittest.cc',
+ 'rtp_rtcp/source/rtp_packet_unittest.cc',
'rtp_rtcp/source/rtp_payload_registry_unittest.cc',
'rtp_rtcp/source/rtp_rtcp_impl_unittest.cc',
'rtp_rtcp/source/rtp_header_extension_unittest.cc',
@@ -361,9 +362,8 @@
'video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc',
'video_coding/codecs/vp8/simulcast_unittest.cc',
'video_coding/codecs/vp8/simulcast_unittest.h',
- 'video_coding/codecs/vp9/screenshare_layers_unittest.cc',
+ 'video_coding/frame_buffer2_unittest.cc',
'video_coding/include/mock/mock_vcm_callbacks.h',
- 'video_coding/bitrate_adjuster_unittest.cc',
'video_coding/decoding_state_unittest.cc',
'video_coding/histogram_unittest.cc',
'video_coding/jitter_buffer_unittest.cc',
@@ -379,19 +379,21 @@
'video_coding/video_coding_robustness_unittest.cc',
'video_coding/video_receiver_unittest.cc',
'video_coding/video_sender_unittest.cc',
- 'video_coding/qm_select_unittest.cc',
'video_coding/test/stream_generator.cc',
'video_coding/test/stream_generator.h',
'video_coding/utility/frame_dropper_unittest.cc',
+ 'video_coding/utility/ivf_file_writer_unittest.cc',
'video_coding/utility/quality_scaler_unittest.cc',
- 'video_processing/test/brightness_detection_test.cc',
- 'video_processing/test/content_metrics_test.cc',
- 'video_processing/test/deflickering_test.cc',
'video_processing/test/denoiser_test.cc',
'video_processing/test/video_processing_unittest.cc',
'video_processing/test/video_processing_unittest.h',
],
'conditions': [
+ ['libvpx_build_vp9==1', {
+ 'sources': [
+ 'video_coding/codecs/vp9/screenshare_layers_unittest.cc',
+ ],
+ }],
['enable_bwe_test_logging==1', {
'defines': [ 'BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=1' ],
}, {
@@ -444,6 +446,7 @@
'sources': [
'audio_processing/audio_processing_impl_locking_unittest.cc',
'audio_processing/audio_processing_impl_unittest.cc',
+ 'audio_processing/audio_processing_unittest.cc',
'audio_processing/echo_control_mobile_unittest.cc',
'audio_processing/echo_cancellation_unittest.cc',
'audio_processing/gain_control_unittest.cc',
@@ -453,7 +456,6 @@
'audio_processing/voice_detection_unittest.cc',
'audio_processing/test/audio_buffer_tools.cc',
'audio_processing/test/audio_buffer_tools.h',
- 'audio_processing/test/audio_processing_unittest.cc',
'audio_processing/test/bitexactness_tools.cc',
'audio_processing/test/bitexactness_tools.h',
'audio_processing/test/debug_dump_replayer.cc',
@@ -504,16 +506,6 @@
'<(DEPTH)/data/voice_engine/audio_tiny48.wav',
'<(DEPTH)/resources/att-downlink.rx',
'<(DEPTH)/resources/att-uplink.rx',
- '<(DEPTH)/resources/audio_coding/neteq4_network_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_network_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_ref.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_ref_win_32.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_ref_win_64.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_rtcp_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_rtcp_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_universal_ref.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_universal_ref_win_32.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_universal_ref_win_64.pcm',
'<(DEPTH)/resources/audio_coding/neteq_opus.rtp',
'<(DEPTH)/resources/audio_coding/neteq_universal_new.rtp',
'<(DEPTH)/resources/audio_coding/speech_mono_16kHz.pcm',
@@ -626,10 +618,17 @@
['OS=="android"', {
'targets': [
{
- 'target_name': 'modules_unittests_apk_target',
+ 'target_name': 'audio_codec_speed_tests_apk_target',
'type': 'none',
'dependencies': [
- '<(apk_tests_path):modules_unittests_apk',
+ '<(apk_tests_path):audio_codec_speed_tests_apk',
+ ],
+ },
+ {
+ 'target_name': 'audio_decoder_unittests_apk_target',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):audio_decoder_unittests_apk',
],
},
{
@@ -639,8 +638,75 @@
'<(apk_tests_path):modules_tests_apk',
],
},
+ {
+ 'target_name': 'modules_unittests_apk_target',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):modules_unittests_apk',
+ ],
+ },
],
- }],
+ 'conditions': [
+ ['test_isolation_mode != "noop"',
+ {
+ 'targets': [
+ {
+ 'target_name': 'audio_codec_speed_tests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):audio_codec_speed_tests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'audio_codec_speed_tests_apk.isolate',
+ ],
+ },
+ {
+ 'target_name': 'audio_decoder_unittests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):audio_decoder_unittests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'audio_decoder_unittests_apk.isolate',
+ ],
+ },
+ {
+ 'target_name': 'modules_tests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):modules_tests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'modules_tests_apk.isolate',
+ ],
+ },
+ {
+ 'target_name': 'modules_unittests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):modules_unittests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'modules_unittests_apk.isolate',
+ ],
+ },
+ ],
+ },
+ ],
+ ],
+ }], # OS=="android"
['test_isolation_mode != "noop"', {
'targets': [
{
@@ -708,19 +774,6 @@
'modules_unittests.isolate',
],
},
- {
- 'target_name': 'video_render_tests_run',
- 'type': 'none',
- 'dependencies': [
- 'video_render_tests',
- ],
- 'includes': [
- '../build/isolate.gypi',
- ],
- 'sources': [
- 'video_render_tests.isolate',
- ],
- },
],
}],
],
diff --git a/chromium/third_party/webrtc/modules/modules_java.gyp b/chromium/third_party/webrtc/modules/modules_java.gyp
index 060de2a0678..2a72fb30a4c 100644
--- a/chromium/third_party/webrtc/modules/modules_java.gyp
+++ b/chromium/third_party/webrtc/modules/modules_java.gyp
@@ -18,14 +18,5 @@
'includes': [ '../../build/java.gypi' ],
}, # audio_device_module_java
- {
- 'target_name': 'video_render_module_java',
- 'type': 'none',
- 'variables': {
- 'java_in_dir': 'video_render/android/java',
- 'additional_src_dirs': [ '../base/java/src', ],
- },
- 'includes': [ '../../build/java.gypi' ],
- }, # video_render_module_java
],
}
diff --git a/chromium/third_party/webrtc/modules/modules_java_chromium.gyp b/chromium/third_party/webrtc/modules/modules_java_chromium.gyp
index 32d2d8d24e8..ebc53d60ff8 100644
--- a/chromium/third_party/webrtc/modules/modules_java_chromium.gyp
+++ b/chromium/third_party/webrtc/modules/modules_java_chromium.gyp
@@ -16,13 +16,5 @@
},
'includes': [ '../../../build/java.gypi' ],
}, # audio_device_module_java
- {
- 'target_name': 'video_render_module_java',
- 'type': 'none',
- 'variables': {
- 'java_in_dir': 'video_render/android/java',
- },
- 'includes': [ '../../../build/java.gypi' ],
- }, # video_render_module_java
],
}
diff --git a/chromium/third_party/webrtc/modules/modules_tests_apk.isolate b/chromium/third_party/webrtc/modules/modules_tests_apk.isolate
new file mode 100644
index 00000000000..ffdd967b4fc
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/modules_tests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'modules_tests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_modules_tests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/modules_tests_apk/',
+ '<(PRODUCT_DIR)/bin/run_modules_tests',
+ 'modules_tests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/modules/modules_unittests.isolate b/chromium/third_party/webrtc/modules/modules_unittests.isolate
index a7fc4e8661f..af7e6ef46e8 100644
--- a/chromium/third_party/webrtc/modules/modules_unittests.isolate
+++ b/chromium/third_party/webrtc/modules/modules_unittests.isolate
@@ -11,9 +11,6 @@
'variables': {
'files': [
'<(DEPTH)/data/audio_processing/output_data_fixed.pb',
- '<(DEPTH)/resources/audio_coding/neteq4_network_stats_android.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_rtcp_stats_android.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_universal_ref_android.pcm',
],
},
}],
@@ -25,16 +22,6 @@
'<(DEPTH)/data/voice_engine/audio_tiny48.wav',
'<(DEPTH)/resources/att-downlink.rx',
'<(DEPTH)/resources/att-uplink.rx',
- '<(DEPTH)/resources/audio_coding/neteq4_network_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_network_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_ref.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_ref_win_32.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_ref_win_64.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_opus_rtcp_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_rtcp_stats.dat',
- '<(DEPTH)/resources/audio_coding/neteq4_universal_ref.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_universal_ref_win_32.pcm',
- '<(DEPTH)/resources/audio_coding/neteq4_universal_ref_win_64.pcm',
'<(DEPTH)/resources/audio_coding/neteq_opus.rtp',
'<(DEPTH)/resources/audio_coding/neteq_universal_new.rtp',
'<(DEPTH)/resources/audio_coding/speech_mono_16kHz.pcm',
diff --git a/chromium/third_party/webrtc/modules/modules_unittests_apk.isolate b/chromium/third_party/webrtc/modules/modules_unittests_apk.isolate
new file mode 100644
index 00000000000..cd60cf75c86
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/modules_unittests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'modules_unittests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_modules_unittests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/modules_unittests_apk/',
+ '<(PRODUCT_DIR)/bin/run_modules_unittests',
+ 'modules_unittests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/modules/pacing/bitrate_prober.cc b/chromium/third_party/webrtc/modules/pacing/bitrate_prober.cc
index fbd9b817419..8e8e36ea34b 100644
--- a/chromium/third_party/webrtc/modules/pacing/bitrate_prober.cc
+++ b/chromium/third_party/webrtc/modules/pacing/bitrate_prober.cc
@@ -15,13 +15,14 @@
#include <limits>
#include <sstream>
+#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/pacing/paced_sender.h"
namespace webrtc {
namespace {
-int ComputeDeltaFromBitrate(size_t packet_size, int bitrate_bps) {
+int ComputeDeltaFromBitrate(size_t packet_size, uint32_t bitrate_bps) {
assert(bitrate_bps > 0);
// Compute the time delta needed to send packet_size bytes at bitrate_bps
// bps. Result is in milliseconds.
@@ -33,8 +34,8 @@ int ComputeDeltaFromBitrate(size_t packet_size, int bitrate_bps) {
BitrateProber::BitrateProber()
: probing_state_(kDisabled),
packet_size_last_send_(0),
- time_last_send_ms_(-1) {
-}
+ time_last_send_ms_(-1),
+ next_cluster_id_(0) {}
void BitrateProber::SetEnabled(bool enable) {
if (enable) {
@@ -52,7 +53,7 @@ bool BitrateProber::IsProbing() const {
return probing_state_ == kProbing;
}
-void BitrateProber::OnIncomingPacket(int bitrate_bps,
+void BitrateProber::OnIncomingPacket(uint32_t bitrate_bps,
size_t packet_size,
int64_t now_ms) {
// Don't initialize probing unless we have something large enough to start
@@ -61,24 +62,24 @@ void BitrateProber::OnIncomingPacket(int bitrate_bps,
return;
if (probing_state_ != kAllowedToProbe)
return;
- probe_bitrates_.clear();
// Max number of packets used for probing.
const int kMaxNumProbes = 2;
const int kPacketsPerProbe = 5;
const float kProbeBitrateMultipliers[kMaxNumProbes] = {3, 6};
- int bitrates_bps[kMaxNumProbes];
std::stringstream bitrate_log;
- bitrate_log << "Start probing for bandwidth, bitrates:";
+ bitrate_log << "Start probing for bandwidth, (bitrate:packets): ";
for (int i = 0; i < kMaxNumProbes; ++i) {
- bitrates_bps[i] = kProbeBitrateMultipliers[i] * bitrate_bps;
- bitrate_log << " " << bitrates_bps[i];
- // We need one extra to get 5 deltas for the first probe.
- if (i == 0)
- probe_bitrates_.push_back(bitrates_bps[i]);
- for (int j = 0; j < kPacketsPerProbe; ++j)
- probe_bitrates_.push_back(bitrates_bps[i]);
+ ProbeCluster cluster;
+ // We need one extra to get 5 deltas for the first probe, therefore (i == 0)
+ cluster.max_probe_packets = kPacketsPerProbe + (i == 0 ? 1 : 0);
+ cluster.probe_bitrate_bps = kProbeBitrateMultipliers[i] * bitrate_bps;
+ cluster.id = next_cluster_id_++;
+
+ bitrate_log << "(" << cluster.probe_bitrate_bps << ":"
+ << cluster.max_probe_packets << ") ";
+
+ clusters_.push(cluster);
}
- bitrate_log << ", num packets: " << probe_bitrates_.size();
LOG(LS_INFO) << bitrate_log.str().c_str();
// Set last send time to current time so TimeUntilNextProbe doesn't short
// circuit due to inactivity.
@@ -87,10 +88,11 @@ void BitrateProber::OnIncomingPacket(int bitrate_bps,
}
int BitrateProber::TimeUntilNextProbe(int64_t now_ms) {
- if (probing_state_ != kDisabled && probe_bitrates_.empty()) {
+ if (probing_state_ != kDisabled && clusters_.empty()) {
probing_state_ = kWait;
}
- if (probe_bitrates_.empty() || time_last_send_ms_ == -1) {
+
+ if (clusters_.empty() || time_last_send_ms_ == -1) {
// No probe started, probe finished, or too long since last probe packet.
return -1;
}
@@ -107,8 +109,8 @@ int BitrateProber::TimeUntilNextProbe(int64_t now_ms) {
// sent before.
int time_until_probe_ms = 0;
if (packet_size_last_send_ != 0 && probing_state_ == kProbing) {
- int next_delta_ms = ComputeDeltaFromBitrate(packet_size_last_send_,
- probe_bitrates_.front());
+ int next_delta_ms = ComputeDeltaFromBitrate(
+ packet_size_last_send_, clusters_.front().probe_bitrate_bps);
time_until_probe_ms = next_delta_ms - elapsed_time_ms;
// There is no point in trying to probe with less than 1 ms between packets
// as it essentially means trying to probe at infinite bandwidth.
@@ -129,6 +131,12 @@ int BitrateProber::TimeUntilNextProbe(int64_t now_ms) {
return std::max(time_until_probe_ms, 0);
}
+int BitrateProber::CurrentClusterId() const {
+ RTC_DCHECK(!clusters_.empty());
+ RTC_DCHECK_EQ(kProbing, probing_state_);
+ return clusters_.front().id;
+}
+
size_t BitrateProber::RecommendedPacketSize() const {
return packet_size_last_send_;
}
@@ -141,7 +149,11 @@ void BitrateProber::PacketSent(int64_t now_ms, size_t packet_size) {
time_last_send_ms_ = now_ms;
if (probing_state_ != kProbing)
return;
- if (!probe_bitrates_.empty())
- probe_bitrates_.pop_front();
+ if (!clusters_.empty()) {
+ ProbeCluster* cluster = &clusters_.front();
+ ++cluster->sent_probe_packets;
+ if (cluster->sent_probe_packets == cluster->max_probe_packets)
+ clusters_.pop();
+ }
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/pacing/bitrate_prober.h b/chromium/third_party/webrtc/modules/pacing/bitrate_prober.h
index 84fbc522fc9..e8967abde45 100644
--- a/chromium/third_party/webrtc/modules/pacing/bitrate_prober.h
+++ b/chromium/third_party/webrtc/modules/pacing/bitrate_prober.h
@@ -13,6 +13,7 @@
#include <cstddef>
#include <list>
+#include <queue>
#include "webrtc/typedefs.h"
@@ -34,12 +35,17 @@ class BitrateProber {
// Initializes a new probing session if the prober is allowed to probe. Does
// not initialize the prober unless the packet size is large enough to probe
// with.
- void OnIncomingPacket(int bitrate_bps, size_t packet_size, int64_t now_ms);
+ void OnIncomingPacket(uint32_t bitrate_bps,
+ size_t packet_size,
+ int64_t now_ms);
// Returns the number of milliseconds until the next packet should be sent to
// get accurate probing.
int TimeUntilNextProbe(int64_t now_ms);
+ // Which cluster that is currently being used for probing.
+ int CurrentClusterId() const;
+
// Returns the number of bytes that the prober recommends for the next probe
// packet.
size_t RecommendedPacketSize() const;
@@ -51,13 +57,21 @@ class BitrateProber {
private:
enum ProbingState { kDisabled, kAllowedToProbe, kProbing, kWait };
+ struct ProbeCluster {
+ int max_probe_packets = 0;
+ int sent_probe_packets = 0;
+ int probe_bitrate_bps = 0;
+ int id = -1;
+ };
+
ProbingState probing_state_;
// Probe bitrate per packet. These are used to compute the delta relative to
// the previous probe packet based on the size and time when that packet was
// sent.
- std::list<int> probe_bitrates_;
+ std::queue<ProbeCluster> clusters_;
size_t packet_size_last_send_;
int64_t time_last_send_ms_;
+ int next_cluster_id_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_PACING_BITRATE_PROBER_H_
diff --git a/chromium/third_party/webrtc/modules/pacing/bitrate_prober_unittest.cc b/chromium/third_party/webrtc/modules/pacing/bitrate_prober_unittest.cc
index 59ee479973d..9e38220e012 100644
--- a/chromium/third_party/webrtc/modules/pacing/bitrate_prober_unittest.cc
+++ b/chromium/third_party/webrtc/modules/pacing/bitrate_prober_unittest.cc
@@ -26,6 +26,7 @@ TEST(BitrateProberTest, VerifyStatesAndTimeBetweenProbes) {
prober.OnIncomingPacket(300000, 1000, now_ms);
EXPECT_TRUE(prober.IsProbing());
+ EXPECT_EQ(0, prober.CurrentClusterId());
// First packet should probe as soon as possible.
EXPECT_EQ(0, prober.TimeUntilNextProbe(now_ms));
@@ -37,12 +38,14 @@ TEST(BitrateProberTest, VerifyStatesAndTimeBetweenProbes) {
EXPECT_EQ(4, prober.TimeUntilNextProbe(now_ms));
now_ms += 4;
EXPECT_EQ(0, prober.TimeUntilNextProbe(now_ms));
+ EXPECT_EQ(0, prober.CurrentClusterId());
prober.PacketSent(now_ms, 1000);
}
for (int i = 0; i < 5; ++i) {
EXPECT_EQ(4, prober.TimeUntilNextProbe(now_ms));
now_ms += 4;
EXPECT_EQ(0, prober.TimeUntilNextProbe(now_ms));
+ EXPECT_EQ(1, prober.CurrentClusterId());
prober.PacketSent(now_ms, 1000);
}
diff --git a/chromium/third_party/webrtc/modules/pacing/mock/mock_paced_sender.h b/chromium/third_party/webrtc/modules/pacing/mock/mock_paced_sender.h
index c710dbcbea5..bd7d7aaa499 100644
--- a/chromium/third_party/webrtc/modules/pacing/mock/mock_paced_sender.h
+++ b/chromium/third_party/webrtc/modules/pacing/mock/mock_paced_sender.h
@@ -22,15 +22,17 @@ namespace webrtc {
class MockPacedSender : public PacedSender {
public:
- MockPacedSender() : PacedSender(Clock::GetRealTimeClock(), NULL, 0, 0, 0) {}
+ MockPacedSender() : PacedSender(Clock::GetRealTimeClock(), nullptr) {}
MOCK_METHOD6(SendPacket, bool(Priority priority,
uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
size_t bytes,
bool retransmission));
+ MOCK_METHOD1(SetEstimatedBitrate, void(uint32_t));
MOCK_CONST_METHOD0(QueueInMs, int64_t());
MOCK_CONST_METHOD0(QueueInPackets, int());
+ MOCK_CONST_METHOD0(ExpectedQueueTimeMs, int64_t());
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/pacing/paced_sender.cc b/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
index b56d28510f2..167be23ab62 100644
--- a/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
+++ b/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
@@ -246,20 +246,18 @@ const int64_t PacedSender::kMaxQueueLengthMs = 2000;
const float PacedSender::kDefaultPaceMultiplier = 2.5f;
PacedSender::PacedSender(Clock* clock,
- Callback* callback,
- int bitrate_kbps,
- int max_bitrate_kbps,
- int min_bitrate_kbps)
+ PacketSender* packet_sender)
: clock_(clock),
- callback_(callback),
+ packet_sender_(packet_sender),
critsect_(CriticalSectionWrapper::CreateCriticalSection()),
paused_(false),
probing_enabled_(true),
- media_budget_(new paced_sender::IntervalBudget(max_bitrate_kbps)),
- padding_budget_(new paced_sender::IntervalBudget(min_bitrate_kbps)),
+ media_budget_(new paced_sender::IntervalBudget(0)),
+ padding_budget_(new paced_sender::IntervalBudget(0)),
prober_(new BitrateProber()),
- bitrate_bps_(1000 * bitrate_kbps),
- max_bitrate_kbps_(max_bitrate_kbps),
+ estimated_bitrate_bps_(0),
+ min_send_bitrate_kbps_(0u),
+ pacing_bitrate_kbps_(0),
time_last_update_us_(clock->TimeInMicroseconds()),
packets_(new paced_sender::PacketQueue(clock)),
packet_counter_(0) {
@@ -283,16 +281,22 @@ void PacedSender::SetProbingEnabled(bool enabled) {
probing_enabled_ = enabled;
}
-void PacedSender::UpdateBitrate(int bitrate_kbps,
- int max_bitrate_kbps,
- int min_bitrate_kbps) {
+void PacedSender::SetEstimatedBitrate(uint32_t bitrate_bps) {
CriticalSectionScoped cs(critsect_.get());
- // Don't set media bitrate here as it may be boosted in order to meet max
- // queue time constraint. Just update max_bitrate_kbps_ and let media_budget_
- // be updated in Process().
- padding_budget_->set_target_rate_kbps(min_bitrate_kbps);
- bitrate_bps_ = 1000 * bitrate_kbps;
- max_bitrate_kbps_ = max_bitrate_kbps;
+ estimated_bitrate_bps_ = bitrate_bps;
+ pacing_bitrate_kbps_ =
+ std::max(min_send_bitrate_kbps_, estimated_bitrate_bps_ / 1000) *
+ kDefaultPaceMultiplier;
+}
+
+void PacedSender::SetAllocatedSendBitrate(int allocated_bitrate,
+ int padding_bitrate) {
+ CriticalSectionScoped cs(critsect_.get());
+ min_send_bitrate_kbps_ = allocated_bitrate / 1000;
+ pacing_bitrate_kbps_ =
+ std::max(min_send_bitrate_kbps_, estimated_bitrate_bps_ / 1000) *
+ kDefaultPaceMultiplier;
+ padding_budget_->set_target_rate_kbps(padding_bitrate / 1000);
}
void PacedSender::InsertPacket(RtpPacketSender::Priority priority,
@@ -302,11 +306,13 @@ void PacedSender::InsertPacket(RtpPacketSender::Priority priority,
size_t bytes,
bool retransmission) {
CriticalSectionScoped cs(critsect_.get());
+ RTC_DCHECK(estimated_bitrate_bps_ > 0)
+ << "SetEstimatedBitrate must be called before InsertPacket.";
if (probing_enabled_ && !prober_->IsProbing())
prober_->SetEnabled(true);
int64_t now_ms = clock_->TimeInMilliseconds();
- prober_->OnIncomingPacket(bitrate_bps_, bytes, now_ms);
+ prober_->OnIncomingPacket(estimated_bitrate_bps_, bytes, now_ms);
if (capture_time_ms < 0)
capture_time_ms = now_ms;
@@ -318,8 +324,9 @@ void PacedSender::InsertPacket(RtpPacketSender::Priority priority,
int64_t PacedSender::ExpectedQueueTimeMs() const {
CriticalSectionScoped cs(critsect_.get());
- RTC_DCHECK_GT(max_bitrate_kbps_, 0);
- return static_cast<int64_t>(packets_->SizeInBytes() * 8 / max_bitrate_kbps_);
+ RTC_DCHECK_GT(pacing_bitrate_kbps_, 0u);
+ return static_cast<int64_t>(packets_->SizeInBytes() * 8 /
+ pacing_bitrate_kbps_);
}
size_t PacedSender::QueueSizePackets() const {
@@ -360,7 +367,7 @@ void PacedSender::Process() {
CriticalSectionScoped cs(critsect_.get());
int64_t elapsed_time_ms = (now_us - time_last_update_us_ + 500) / 1000;
time_last_update_us_ = now_us;
- int target_bitrate_kbps = max_bitrate_kbps_;
+ int target_bitrate_kbps = pacing_bitrate_kbps_;
// TODO(holmer): Remove the !paused_ check when issue 5307 has been fixed.
if (!paused_ && elapsed_time_ms > 0) {
size_t queue_size_bytes = packets_->SizeInBytes();
@@ -390,8 +397,10 @@ void PacedSender::Process() {
// element from the priority queue but keep it in storage, so that we can
// reinsert it if send fails.
const paced_sender::Packet& packet = packets_->BeginPop();
+ int probe_cluster_id =
+ prober_->IsProbing() ? prober_->CurrentClusterId() : -1;
- if (SendPacket(packet)) {
+ if (SendPacket(packet, probe_cluster_id)) {
// Send succeeded, remove it from the queue.
packets_->FinalizePop(packet);
if (prober_->IsProbing())
@@ -418,17 +427,17 @@ void PacedSender::Process() {
SendPadding(static_cast<size_t>(padding_needed));
}
-bool PacedSender::SendPacket(const paced_sender::Packet& packet) {
+bool PacedSender::SendPacket(const paced_sender::Packet& packet,
+ int probe_cluster_id) {
// TODO(holmer): Because of this bug issue 5307 we have to send audio
// packets even when the pacer is paused. Here we assume audio packets are
// always high priority and that they are the only high priority packets.
if (paused_ && packet.priority != kHighPriority)
return false;
critsect_->Leave();
- const bool success = callback_->TimeToSendPacket(packet.ssrc,
- packet.sequence_number,
- packet.capture_time_ms,
- packet.retransmission);
+ const bool success = packet_sender_->TimeToSendPacket(
+ packet.ssrc, packet.sequence_number, packet.capture_time_ms,
+ packet.retransmission, probe_cluster_id);
critsect_->Enter();
if (success) {
@@ -447,7 +456,7 @@ bool PacedSender::SendPacket(const paced_sender::Packet& packet) {
void PacedSender::SendPadding(size_t padding_needed) {
critsect_->Leave();
- size_t bytes_sent = callback_->TimeToSendPadding(padding_needed);
+ size_t bytes_sent = packet_sender_->TimeToSendPadding(padding_needed);
critsect_->Enter();
if (bytes_sent > 0) {
diff --git a/chromium/third_party/webrtc/modules/pacing/paced_sender.h b/chromium/third_party/webrtc/modules/pacing/paced_sender.h
index 16569b04045..d42b9b38489 100644
--- a/chromium/third_party/webrtc/modules/pacing/paced_sender.h
+++ b/chromium/third_party/webrtc/modules/pacing/paced_sender.h
@@ -33,7 +33,7 @@ class PacketQueue;
class PacedSender : public Module, public RtpPacketSender {
public:
- class Callback {
+ class PacketSender {
public:
// Note: packets sent as a result of a callback should not pass by this
// module again.
@@ -42,13 +42,14 @@ class PacedSender : public Module, public RtpPacketSender {
virtual bool TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
- bool retransmission) = 0;
+ bool retransmission,
+ int probe_cluster_id) = 0;
// Called when it's a good time to send a padding data.
// Returns the number of bytes sent.
virtual size_t TimeToSendPadding(size_t bytes) = 0;
protected:
- virtual ~Callback() {}
+ virtual ~PacketSender() {}
};
// Expected max pacer delay in ms. If ExpectedQueueTimeMs() is higher than
@@ -56,8 +57,6 @@ class PacedSender : public Module, public RtpPacketSender {
// encoding them). Bitrate sent may temporarily exceed target set by
// UpdateBitrate() so that this limit will be upheld.
static const int64_t kMaxQueueLengthMs;
- // Pace in kbits/s until we receive first estimate.
- static const int kDefaultInitialPaceKbps = 2000;
// Pacing-rate relative to our target send rate.
// Multiplicative factor that is applied to the target bitrate to calculate
// the number of bytes that can be transmitted per interval.
@@ -68,10 +67,7 @@ class PacedSender : public Module, public RtpPacketSender {
static const size_t kMinProbePacketSize = 200;
PacedSender(Clock* clock,
- Callback* callback,
- int bitrate_kbps,
- int max_bitrate_kbps,
- int min_bitrate_kbps);
+ PacketSender* packet_sender);
virtual ~PacedSender();
@@ -86,14 +82,20 @@ class PacedSender : public Module, public RtpPacketSender {
// effect.
void SetProbingEnabled(bool enabled);
- // Set target bitrates for the pacer.
- // We will pace out bursts of packets at a bitrate of |max_bitrate_kbps|.
- // |bitrate_kbps| is our estimate of what we are allowed to send on average.
- // Padding packets will be utilized to reach |min_bitrate| unless enough media
- // packets are available.
- void UpdateBitrate(int bitrate_kbps,
- int max_bitrate_kbps,
- int min_bitrate_kbps);
+ // Sets the estimated capacity of the network. Must be called once before
+ // packets can be sent.
+ // |bitrate_bps| is our estimate of what we are allowed to send on average.
+ // We will pace out bursts of packets at a bitrate of
+ // |bitrate_bps| * kDefaultPaceMultiplier.
+ virtual void SetEstimatedBitrate(uint32_t bitrate_bps);
+
+ // Sets the bitrate that has been allocated for encoders.
+ // |allocated_bitrate| might be higher that the estimated available network
+ // bitrate and if so, the pacer will send with |allocated_bitrate|.
+ // Padding packets will be utilized to reach |padding_bitrate| unless enough
+ // media packets are available.
+ void SetAllocatedSendBitrate(int allocated_bitrate_bps,
+ int padding_bitrate_bps);
// Returns true if we send the packet now, else it will add the packet
// information to the queue and call TimeToSendPacket when it's time to send.
@@ -129,12 +131,12 @@ class PacedSender : public Module, public RtpPacketSender {
void UpdateBytesPerInterval(int64_t delta_time_in_ms)
EXCLUSIVE_LOCKS_REQUIRED(critsect_);
- bool SendPacket(const paced_sender::Packet& packet)
+ bool SendPacket(const paced_sender::Packet& packet, int probe_cluster_id)
EXCLUSIVE_LOCKS_REQUIRED(critsect_);
void SendPadding(size_t padding_needed) EXCLUSIVE_LOCKS_REQUIRED(critsect_);
Clock* const clock_;
- Callback* const callback_;
+ PacketSender* const packet_sender_;
std::unique_ptr<CriticalSectionWrapper> critsect_;
bool paused_ GUARDED_BY(critsect_);
@@ -152,8 +154,9 @@ class PacedSender : public Module, public RtpPacketSender {
std::unique_ptr<BitrateProber> prober_ GUARDED_BY(critsect_);
// Actual configured bitrates (media_budget_ may temporarily be higher in
// order to meet pace time constraint).
- int bitrate_bps_ GUARDED_BY(critsect_);
- int max_bitrate_kbps_ GUARDED_BY(critsect_);
+ uint32_t estimated_bitrate_bps_ GUARDED_BY(critsect_);
+ uint32_t min_send_bitrate_kbps_ GUARDED_BY(critsect_);
+ uint32_t pacing_bitrate_kbps_ GUARDED_BY(critsect_);
int64_t time_last_update_us_ GUARDED_BY(critsect_);
diff --git a/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc b/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
index 941c81335b6..6a0a006c326 100644
--- a/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
@@ -22,32 +22,33 @@ using testing::Return;
namespace webrtc {
namespace test {
-static const int kTargetBitrate = 800;
-static const float kPaceMultiplier = 1.5f;
+static const int kTargetBitrateBps = 800000;
-class MockPacedSenderCallback : public PacedSender::Callback {
+class MockPacedSenderCallback : public PacedSender::PacketSender {
public:
- MOCK_METHOD4(TimeToSendPacket,
+ MOCK_METHOD5(TimeToSendPacket,
bool(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
- bool retransmission));
+ bool retransmission,
+ int probe_cluster_id));
MOCK_METHOD1(TimeToSendPadding,
size_t(size_t bytes));
};
-class PacedSenderPadding : public PacedSender::Callback {
+class PacedSenderPadding : public PacedSender::PacketSender {
public:
PacedSenderPadding() : padding_sent_(0) {}
bool TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
- bool retransmission) {
+ bool retransmission,
+ int probe_cluster_id) override {
return true;
}
- size_t TimeToSendPadding(size_t bytes) {
+ size_t TimeToSendPadding(size_t bytes) override {
const size_t kPaddingPacketSize = 224;
size_t num_packets = (bytes + kPaddingPacketSize - 1) / kPaddingPacketSize;
padding_sent_ += kPaddingPacketSize * num_packets;
@@ -60,7 +61,7 @@ class PacedSenderPadding : public PacedSender::Callback {
size_t padding_sent_;
};
-class PacedSenderProbing : public PacedSender::Callback {
+class PacedSenderProbing : public PacedSender::PacketSender {
public:
PacedSenderProbing(const std::list<int>& expected_deltas, Clock* clock)
: prev_packet_time_ms_(-1),
@@ -71,12 +72,13 @@ class PacedSenderProbing : public PacedSender::Callback {
bool TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
- bool retransmission) {
+ bool retransmission,
+ int probe_cluster_id) override {
ExpectAndCountPacket();
return true;
}
- size_t TimeToSendPadding(size_t bytes) {
+ size_t TimeToSendPadding(size_t bytes) override {
ExpectAndCountPacket();
return bytes;
}
@@ -108,15 +110,14 @@ class PacedSenderTest : public ::testing::Test {
PacedSenderTest() : clock_(123456) {
srand(0);
// Need to initialize PacedSender after we initialize clock.
- send_bucket_.reset(new PacedSender(&clock_,
- &callback_,
- kTargetBitrate,
- kPaceMultiplier * kTargetBitrate,
- 0));
+ send_bucket_.reset(new PacedSender(&clock_, &callback_));
// Default to bitrate probing disabled for testing purposes. Probing tests
// have to enable probing, either by creating a new PacedSender instance or
// by calling SetProbingEnabled(true).
send_bucket_->SetProbingEnabled(false);
+ send_bucket_->SetEstimatedBitrate(kTargetBitrateBps);
+
+ clock_.AdvanceTimeMilliseconds(send_bucket_->TimeUntilNextProcess());
}
void SendAndExpectPacket(PacedSender::Priority priority,
@@ -127,8 +128,8 @@ class PacedSenderTest : public ::testing::Test {
bool retransmission) {
send_bucket_->InsertPacket(priority, ssrc, sequence_number, capture_time_ms,
size, retransmission);
- EXPECT_CALL(callback_,
- TimeToSendPacket(ssrc, sequence_number, capture_time_ms, false))
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number,
+ capture_time_ms, false, _))
.Times(1)
.WillRepeatedly(Return(true));
}
@@ -141,29 +142,21 @@ class PacedSenderTest : public ::testing::Test {
TEST_F(PacedSenderTest, QueuePacket) {
uint32_t ssrc = 12345;
uint16_t sequence_number = 1234;
- // Due to the multiplicative factor we can send 3 packets not 2 packets.
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
+ // Due to the multiplicative factor we can send 5 packets during a send
+ // interval. (network capacity * multiplier / (8 bits per byte *
+ // (packet size * #send intervals per second)
+ const size_t packets_to_send =
+ kTargetBitrateBps * PacedSender::kDefaultPaceMultiplier / (8 * 250 * 200);
+ for (size_t i = 0; i < packets_to_send; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
+ }
+
int64_t queued_packet_timestamp = clock_.TimeInMilliseconds();
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number, queued_packet_timestamp, 250,
false);
+ EXPECT_EQ(packets_to_send + 1, send_bucket_->QueueSizePackets());
send_bucket_->Process();
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
@@ -171,86 +164,79 @@ TEST_F(PacedSenderTest, QueuePacket) {
EXPECT_EQ(1, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(1);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
- EXPECT_CALL(
- callback_,
- TimeToSendPacket(ssrc, sequence_number++, queued_packet_timestamp, false))
+ EXPECT_EQ(1u, send_bucket_->QueueSizePackets());
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number++,
+ queued_packet_timestamp, false, _))
.Times(1)
.WillRepeatedly(Return(true));
send_bucket_->Process();
sequence_number++;
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
+ EXPECT_EQ(0u, send_bucket_->QueueSizePackets());
+
+ // We can send packets_to_send -1 packets of size 250 during the current
+ // interval since one packet has already been sent.
+ for (size_t i = 0; i < packets_to_send - 1; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
+ }
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number++, clock_.TimeInMilliseconds(),
250, false);
+ EXPECT_EQ(packets_to_send, send_bucket_->QueueSizePackets());
send_bucket_->Process();
+ EXPECT_EQ(1u, send_bucket_->QueueSizePackets());
}
TEST_F(PacedSenderTest, PaceQueuedPackets) {
uint32_t ssrc = 12345;
uint16_t sequence_number = 1234;
- // Due to the multiplicative factor we can send 3 packets not 2 packets.
- for (int i = 0; i < 3; ++i) {
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
+ // Due to the multiplicative factor we can send 5 packets during a send
+ // interval. (network capacity * multiplier / (8 bits per byte *
+ // (packet size * #send intervals per second)
+ const size_t packets_to_send_per_interval =
+ kTargetBitrateBps * PacedSender::kDefaultPaceMultiplier / (8 * 250 * 200);
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
}
- for (int j = 0; j < 30; ++j) {
+
+ for (size_t j = 0; j < packets_to_send_per_interval * 10; ++j) {
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number++, clock_.TimeInMilliseconds(),
250, false);
}
+ EXPECT_EQ(packets_to_send_per_interval + packets_to_send_per_interval * 10,
+ send_bucket_->QueueSizePackets());
send_bucket_->Process();
+ EXPECT_EQ(packets_to_send_per_interval * 10,
+ send_bucket_->QueueSizePackets());
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
for (int k = 0; k < 10; ++k) {
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
- EXPECT_CALL(callback_, TimeToSendPacket(ssrc, _, _, false))
- .Times(3)
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, _, _, false, _))
+ .Times(packets_to_send_per_interval)
.WillRepeatedly(Return(true));
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
send_bucket_->Process();
}
+ EXPECT_EQ(0u, send_bucket_->QueueSizePackets());
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
+ EXPECT_EQ(0u, send_bucket_->QueueSizePackets());
send_bucket_->Process();
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
+
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
+ }
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number, clock_.TimeInMilliseconds(), 250,
false);
send_bucket_->Process();
+ EXPECT_EQ(1u, send_bucket_->QueueSizePackets());
}
TEST_F(PacedSenderTest, PaceQueuedPacketsWithDuplicates) {
@@ -258,18 +244,18 @@ TEST_F(PacedSenderTest, PaceQueuedPacketsWithDuplicates) {
uint16_t sequence_number = 1234;
uint16_t queued_sequence_number;
- // Due to the multiplicative factor we can send 3 packets not 2 packets.
- for (int i = 0; i < 3; ++i) {
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
+ // Due to the multiplicative factor we can send 5 packets during a send
+ // interval. (network capacity * multiplier / (8 bits per byte *
+ // (packet size * #send intervals per second)
+ const size_t packets_to_send_per_interval =
+ kTargetBitrateBps * PacedSender::kDefaultPaceMultiplier / (8 * 250 * 200);
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
}
queued_sequence_number = sequence_number;
- for (int j = 0; j < 30; ++j) {
+ for (size_t j = 0; j < packets_to_send_per_interval * 10; ++j) {
// Send in duplicate packets.
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number, clock_.TimeInMilliseconds(),
@@ -284,9 +270,9 @@ TEST_F(PacedSenderTest, PaceQueuedPacketsWithDuplicates) {
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
- for (int i = 0; i < 3; ++i) {
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
EXPECT_CALL(callback_,
- TimeToSendPacket(ssrc, queued_sequence_number++, _, false))
+ TimeToSendPacket(ssrc, queued_sequence_number++, _, false, _))
.Times(1)
.WillRepeatedly(Return(true));
}
@@ -297,28 +283,16 @@ TEST_F(PacedSenderTest, PaceQueuedPacketsWithDuplicates) {
clock_.AdvanceTimeMilliseconds(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
send_bucket_->Process();
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
+
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
+ }
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number++, clock_.TimeInMilliseconds(),
250, false);
send_bucket_->Process();
+ EXPECT_EQ(1u, send_bucket_->QueueSizePackets());
}
TEST_F(PacedSenderTest, CanQueuePacketsWithSameSequenceNumberOnDifferentSsrcs) {
@@ -348,29 +322,27 @@ TEST_F(PacedSenderTest, Padding) {
uint32_t ssrc = 12345;
uint16_t sequence_number = 1234;
- send_bucket_->UpdateBitrate(
- kTargetBitrate, kPaceMultiplier * kTargetBitrate, kTargetBitrate);
- // Due to the multiplicative factor we can send 3 packets not 2 packets.
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- clock_.TimeInMilliseconds(),
- 250,
- false);
+ send_bucket_->SetEstimatedBitrate(kTargetBitrateBps);
+ send_bucket_->SetAllocatedSendBitrate(kTargetBitrateBps, kTargetBitrateBps);
+
+ // Due to the multiplicative factor we can send 5 packets during a send
+ // interval. (network capacity * multiplier / (8 bits per byte *
+ // (packet size * #send intervals per second)
+ const size_t packets_to_send_per_interval =
+ kTargetBitrateBps * PacedSender::kDefaultPaceMultiplier / (8 * 250 * 200);
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
+ }
// No padding is expected since we have sent too much already.
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
+ EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
+ send_bucket_->Process();
+ EXPECT_EQ(0u, send_bucket_->QueueSizePackets());
+
+ // 5 milliseconds later should not send padding since we filled the buffers
+ // initially.
+ EXPECT_CALL(callback_, TimeToSendPadding(250)).Times(0);
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
@@ -391,8 +363,9 @@ TEST_F(PacedSenderTest, VerifyPaddingUpToBitrate) {
int64_t capture_time_ms = 56789;
const int kTimeStep = 5;
const int64_t kBitrateWindow = 100;
- send_bucket_->UpdateBitrate(
- kTargetBitrate, kPaceMultiplier * kTargetBitrate, kTargetBitrate);
+ send_bucket_->SetEstimatedBitrate(kTargetBitrateBps);
+ send_bucket_->SetAllocatedSendBitrate(kTargetBitrateBps, kTargetBitrateBps);
+
int64_t start_time = clock_.TimeInMilliseconds();
while (clock_.TimeInMilliseconds() - start_time < kBitrateWindow) {
SendAndExpectPacket(PacedSender::kNormalPriority,
@@ -401,10 +374,10 @@ TEST_F(PacedSenderTest, VerifyPaddingUpToBitrate) {
capture_time_ms,
250,
false);
- clock_.AdvanceTimeMilliseconds(kTimeStep);
EXPECT_CALL(callback_, TimeToSendPadding(250)).Times(1).
WillOnce(Return(250));
send_bucket_->Process();
+ clock_.AdvanceTimeMilliseconds(kTimeStep);
}
}
@@ -415,11 +388,11 @@ TEST_F(PacedSenderTest, VerifyAverageBitrateVaryingMediaPayload) {
const int kTimeStep = 5;
const int64_t kBitrateWindow = 10000;
PacedSenderPadding callback;
- send_bucket_.reset(new PacedSender(
- &clock_, &callback, kTargetBitrate, kPaceMultiplier * kTargetBitrate, 0));
+ send_bucket_.reset(new PacedSender(&clock_, &callback));
send_bucket_->SetProbingEnabled(false);
- send_bucket_->UpdateBitrate(
- kTargetBitrate, kPaceMultiplier * kTargetBitrate, kTargetBitrate);
+ send_bucket_->SetEstimatedBitrate(kTargetBitrateBps);
+ send_bucket_->SetAllocatedSendBitrate(kTargetBitrateBps, kTargetBitrateBps);
+
int64_t start_time = clock_.TimeInMilliseconds();
size_t media_bytes = 0;
while (clock_.TimeInMilliseconds() - start_time < kBitrateWindow) {
@@ -432,9 +405,10 @@ TEST_F(PacedSenderTest, VerifyAverageBitrateVaryingMediaPayload) {
clock_.AdvanceTimeMilliseconds(kTimeStep);
send_bucket_->Process();
}
- EXPECT_NEAR(kTargetBitrate,
+ EXPECT_NEAR(kTargetBitrateBps / 1000,
static_cast<int>(8 * (media_bytes + callback.padding_sent()) /
- kBitrateWindow), 1);
+ kBitrateWindow),
+ 1);
}
TEST_F(PacedSenderTest, Priority) {
@@ -444,54 +418,45 @@ TEST_F(PacedSenderTest, Priority) {
int64_t capture_time_ms = 56789;
int64_t capture_time_ms_low_priority = 1234567;
- // Due to the multiplicative factor we can send 3 packets not 2 packets.
- SendAndExpectPacket(PacedSender::kLowPriority,
- ssrc,
- sequence_number++,
- capture_time_ms,
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- capture_time_ms,
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- capture_time_ms,
- 250,
- false);
+ // Due to the multiplicative factor we can send 5 packets during a send
+ // interval. (network capacity * multiplier / (8 bits per byte *
+ // (packet size * #send intervals per second)
+ const size_t packets_to_send_per_interval =
+ kTargetBitrateBps * PacedSender::kDefaultPaceMultiplier / (8 * 250 * 200);
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
+ }
send_bucket_->Process();
+ EXPECT_EQ(0u, send_bucket_->QueueSizePackets());
// Expect normal and low priority to be queued and high to pass through.
send_bucket_->InsertPacket(PacedSender::kLowPriority, ssrc_low_priority,
sequence_number++, capture_time_ms_low_priority,
250, false);
- send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
- sequence_number++, capture_time_ms, 250, false);
- send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
- sequence_number++, capture_time_ms, 250, false);
- send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
- sequence_number++, capture_time_ms, 250, false);
+
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
+ sequence_number++, capture_time_ms, 250, false);
+ }
send_bucket_->InsertPacket(PacedSender::kHighPriority, ssrc,
sequence_number++, capture_time_ms, 250, false);
// Expect all high and normal priority to be sent out first.
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
- EXPECT_CALL(callback_, TimeToSendPacket(ssrc, _, capture_time_ms, false))
- .Times(4)
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, _, capture_time_ms, false, _))
+ .Times(packets_to_send_per_interval + 1)
.WillRepeatedly(Return(true));
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
send_bucket_->Process();
+ EXPECT_EQ(1u, send_bucket_->QueueSizePackets());
EXPECT_CALL(callback_,
- TimeToSendPacket(
- ssrc_low_priority, _, capture_time_ms_low_priority, false))
+ TimeToSendPacket(ssrc_low_priority, _,
+ capture_time_ms_low_priority, false, _))
.Times(1)
.WillRepeatedly(Return(true));
@@ -513,23 +478,30 @@ TEST_F(PacedSenderTest, HighPrioDoesntAffectBudget) {
capture_time_ms, 250, false);
}
send_bucket_->Process();
- // Low prio packets does affect the budget, so we should only be able to send
- // 3 at once, the 4th should be queued.
- for (int i = 0; i < 3; ++i) {
+ // Low prio packets does affect the budget.
+ // Due to the multiplicative factor we can send 5 packets during a send
+ // interval. (network capacity * multiplier / (8 bits per byte *
+ // (packet size * #send intervals per second)
+ const size_t packets_to_send_per_interval =
+ kTargetBitrateBps * PacedSender::kDefaultPaceMultiplier / (8 * 250 * 200);
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
SendAndExpectPacket(PacedSender::kLowPriority, ssrc, sequence_number++,
- capture_time_ms, 250, false);
+ clock_.TimeInMilliseconds(), 250, false);
}
send_bucket_->InsertPacket(PacedSender::kLowPriority, ssrc, sequence_number,
capture_time_ms, 250, false);
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
send_bucket_->Process();
- EXPECT_CALL(callback_,
- TimeToSendPacket(ssrc, sequence_number++, capture_time_ms, false))
- .Times(1);
+ EXPECT_EQ(1u, send_bucket_->QueueSizePackets());
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number++,
+ capture_time_ms, false, _))
+ .Times(1)
+ .WillRepeatedly(Return(true));
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
clock_.AdvanceTimeMilliseconds(5);
send_bucket_->Process();
+ EXPECT_EQ(0u, send_bucket_->QueueSizePackets());
}
TEST_F(PacedSenderTest, Pause) {
@@ -540,25 +512,16 @@ TEST_F(PacedSenderTest, Pause) {
EXPECT_EQ(0, send_bucket_->QueueInMs());
- // Due to the multiplicative factor we can send 3 packets not 2 packets.
- SendAndExpectPacket(PacedSender::kLowPriority,
- ssrc,
- sequence_number++,
- capture_time_ms,
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- capture_time_ms,
- 250,
- false);
- SendAndExpectPacket(PacedSender::kNormalPriority,
- ssrc,
- sequence_number++,
- capture_time_ms,
- 250,
- false);
+ // Due to the multiplicative factor we can send 5 packets during a send
+ // interval. (network capacity * multiplier / (8 bits per byte *
+ // (packet size * #send intervals per second)
+ const size_t packets_to_send_per_interval =
+ kTargetBitrateBps * PacedSender::kDefaultPaceMultiplier / (8 * 250 * 200);
+ for (size_t i = 0; i < packets_to_send_per_interval; ++i) {
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), 250, false);
+ }
+
send_bucket_->Process();
send_bucket_->Pause();
@@ -583,7 +546,7 @@ TEST_F(PacedSenderTest, Pause) {
// Expect no packet to come out while paused.
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
- EXPECT_CALL(callback_, TimeToSendPacket(_, _, _, _)).Times(0);
+ EXPECT_CALL(callback_, TimeToSendPacket(_, _, _, _, _)).Times(0);
for (int i = 0; i < 10; ++i) {
clock_.AdvanceTimeMilliseconds(5);
@@ -592,10 +555,11 @@ TEST_F(PacedSenderTest, Pause) {
}
// Expect high prio packets to come out first followed by all packets in the
// way they were added.
- EXPECT_CALL(callback_, TimeToSendPacket(_, _, capture_time_ms, false))
+ EXPECT_CALL(callback_, TimeToSendPacket(_, _, capture_time_ms, false, _))
.Times(3)
.WillRepeatedly(Return(true));
- EXPECT_CALL(callback_, TimeToSendPacket(_, _, second_capture_time_ms, false))
+ EXPECT_CALL(callback_,
+ TimeToSendPacket(_, _, second_capture_time_ms, false, _))
.Times(1)
.WillRepeatedly(Return(true));
send_bucket_->Resume();
@@ -624,8 +588,8 @@ TEST_F(PacedSenderTest, ResendPacket) {
EXPECT_EQ(clock_.TimeInMilliseconds() - capture_time_ms,
send_bucket_->QueueInMs());
// Fails to send first packet so only one call.
- EXPECT_CALL(callback_,
- TimeToSendPacket(ssrc, sequence_number, capture_time_ms, false))
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number,
+ capture_time_ms, false, _))
.Times(1)
.WillOnce(Return(false));
clock_.AdvanceTimeMilliseconds(10000);
@@ -636,13 +600,12 @@ TEST_F(PacedSenderTest, ResendPacket) {
send_bucket_->QueueInMs());
// Fails to send second packet.
- EXPECT_CALL(callback_,
- TimeToSendPacket(ssrc, sequence_number, capture_time_ms, false))
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number,
+ capture_time_ms, false, _))
.Times(1)
.WillOnce(Return(true));
- EXPECT_CALL(
- callback_,
- TimeToSendPacket(ssrc, sequence_number + 1, capture_time_ms + 1, false))
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number + 1,
+ capture_time_ms + 1, false, _))
.Times(1)
.WillOnce(Return(false));
clock_.AdvanceTimeMilliseconds(10000);
@@ -653,9 +616,8 @@ TEST_F(PacedSenderTest, ResendPacket) {
send_bucket_->QueueInMs());
// Send second packet and queue becomes empty.
- EXPECT_CALL(
- callback_,
- TimeToSendPacket(ssrc, sequence_number + 1, capture_time_ms + 1, false))
+ EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number + 1,
+ capture_time_ms + 1, false, _))
.Times(1)
.WillOnce(Return(true));
clock_.AdvanceTimeMilliseconds(10000);
@@ -668,18 +630,18 @@ TEST_F(PacedSenderTest, ExpectedQueueTimeMs) {
uint16_t sequence_number = 1234;
const size_t kNumPackets = 60;
const size_t kPacketSize = 1200;
- const int32_t kMaxBitrate = kPaceMultiplier * 30;
+ const int32_t kMaxBitrate = PacedSender::kDefaultPaceMultiplier * 30000;
EXPECT_EQ(0, send_bucket_->ExpectedQueueTimeMs());
- send_bucket_->UpdateBitrate(30, kMaxBitrate, 0);
+ send_bucket_->SetEstimatedBitrate(30000);
for (size_t i = 0; i < kNumPackets; ++i) {
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
clock_.TimeInMilliseconds(), kPacketSize, false);
}
- // Queue in ms = 1000 * (bytes in queue) / (kbit per second * 1000 / 8)
+ // Queue in ms = 1000 * (bytes in queue) *8 / (bits per second)
int64_t queue_in_ms =
- static_cast<int64_t>(kNumPackets * kPacketSize * 8 / kMaxBitrate);
+ static_cast<int64_t>(1000 * kNumPackets * kPacketSize * 8 / kMaxBitrate);
EXPECT_EQ(queue_in_ms, send_bucket_->ExpectedQueueTimeMs());
int64_t time_start = clock_.TimeInMilliseconds();
@@ -697,7 +659,7 @@ TEST_F(PacedSenderTest, ExpectedQueueTimeMs) {
// Allow for aliasing, duration should be within one pack of max time limit.
EXPECT_NEAR(duration, PacedSender::kMaxQueueLengthMs,
- static_cast<int64_t>(kPacketSize * 8 / kMaxBitrate));
+ static_cast<int64_t>(1000 * kPacketSize * 8 / kMaxBitrate));
}
TEST_F(PacedSenderTest, QueueTimeGrowsOverTime) {
@@ -705,7 +667,7 @@ TEST_F(PacedSenderTest, QueueTimeGrowsOverTime) {
uint16_t sequence_number = 1234;
EXPECT_EQ(0, send_bucket_->QueueInMs());
- send_bucket_->UpdateBitrate(30, kPaceMultiplier * 30, 0);
+ send_bucket_->SetEstimatedBitrate(30000);
SendAndExpectPacket(PacedSender::kNormalPriority,
ssrc,
sequence_number,
@@ -723,25 +685,23 @@ TEST_F(PacedSenderTest, ProbingWithInitialFrame) {
const int kNumPackets = 11;
const int kNumDeltas = kNumPackets - 1;
const size_t kPacketSize = 1200;
- const int kInitialBitrateKbps = 300;
+ const int kInitialBitrateBps = 300000;
uint32_t ssrc = 12346;
uint16_t sequence_number = 1234;
+
const int expected_deltas[kNumDeltas] = {10, 10, 10, 10, 10, 5, 5, 5, 5, 5};
std::list<int> expected_deltas_list(expected_deltas,
expected_deltas + kNumDeltas);
PacedSenderProbing callback(expected_deltas_list, &clock_);
- send_bucket_.reset(
- new PacedSender(&clock_,
- &callback,
- kInitialBitrateKbps,
- kPaceMultiplier * kInitialBitrateKbps,
- 0));
+ send_bucket_.reset(new PacedSender(&clock_, &callback));
+ send_bucket_->SetEstimatedBitrate(kInitialBitrateBps);
for (int i = 0; i < kNumPackets; ++i) {
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
sequence_number++, clock_.TimeInMilliseconds(),
kPacketSize, false);
}
+
while (callback.packets_sent() < kNumPackets) {
int time_until_process = send_bucket_->TimeUntilNextProcess();
if (time_until_process <= 0) {
@@ -756,15 +716,15 @@ TEST_F(PacedSenderTest, ProbingWithTooSmallInitialFrame) {
const int kNumPackets = 11;
const int kNumDeltas = kNumPackets - 1;
const size_t kPacketSize = 1200;
- const int kInitialBitrateKbps = 300;
+ const int kInitialBitrateBps = 300000;
uint32_t ssrc = 12346;
uint16_t sequence_number = 1234;
const int expected_deltas[kNumDeltas] = {10, 10, 10, 10, 10, 5, 5, 5, 5, 5};
std::list<int> expected_deltas_list(expected_deltas,
expected_deltas + kNumDeltas);
PacedSenderProbing callback(expected_deltas_list, &clock_);
- send_bucket_.reset(new PacedSender(&clock_, &callback, kInitialBitrateKbps,
- kPaceMultiplier * kInitialBitrateKbps, 0));
+ send_bucket_.reset(new PacedSender(&clock_, &callback));
+ send_bucket_->SetEstimatedBitrate(kInitialBitrateBps);
for (int i = 0; i < kNumPackets - 5; ++i) {
send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
@@ -810,18 +770,22 @@ TEST_F(PacedSenderTest, PriorityInversion) {
// Packets from earlier frames should be sent first.
{
::testing::InSequence sequence;
- EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number,
- clock_.TimeInMilliseconds(), true))
+ EXPECT_CALL(callback_,
+ TimeToSendPacket(ssrc, sequence_number,
+ clock_.TimeInMilliseconds(), true, _))
+ .WillOnce(Return(true));
+ EXPECT_CALL(callback_,
+ TimeToSendPacket(ssrc, sequence_number + 1,
+ clock_.TimeInMilliseconds(), true, _))
+ .WillOnce(Return(true));
+ EXPECT_CALL(callback_,
+ TimeToSendPacket(ssrc, sequence_number + 3,
+ clock_.TimeInMilliseconds() + 33, true, _))
.WillOnce(Return(true));
- EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number + 1,
- clock_.TimeInMilliseconds(), true))
+ EXPECT_CALL(callback_,
+ TimeToSendPacket(ssrc, sequence_number + 2,
+ clock_.TimeInMilliseconds() + 33, true, _))
.WillOnce(Return(true));
- EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number + 3,
- clock_.TimeInMilliseconds() + 33,
- true)).WillOnce(Return(true));
- EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number + 2,
- clock_.TimeInMilliseconds() + 33,
- true)).WillOnce(Return(true));
while (send_bucket_->QueueSizePackets() > 0) {
int time_until_process = send_bucket_->TimeUntilNextProcess();
@@ -839,21 +803,22 @@ TEST_F(PacedSenderTest, PaddingOveruse) {
uint16_t sequence_number = 1234;
const size_t kPacketSize = 1200;
- // Min bitrate 0 => no padding, padding budget will stay at 0.
- send_bucket_->UpdateBitrate(60, 90, 0);
+ send_bucket_->Process();
+ send_bucket_->SetEstimatedBitrate(60000);
+ send_bucket_->SetAllocatedSendBitrate(60000, 0);
+
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
clock_.TimeInMilliseconds(), kPacketSize, false);
send_bucket_->Process();
// Add 30kbit padding. When increasing budget, media budget will increase from
- // negative (overuse) while padding budget will increase form 0.
+ // negative (overuse) while padding budget will increase from 0.
clock_.AdvanceTimeMilliseconds(5);
- send_bucket_->UpdateBitrate(60, 90, 30);
-
- send_bucket_->InsertPacket(PacedSender::kHighPriority, ssrc,
- sequence_number++, clock_.TimeInMilliseconds(),
- kPacketSize, false);
+ send_bucket_->SetAllocatedSendBitrate(60000, 30000);
+ SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
+ clock_.TimeInMilliseconds(), kPacketSize, false);
+ EXPECT_LT(5u, send_bucket_->ExpectedQueueTimeMs());
// Don't send padding if queue is non-empty, even if padding budget > 0.
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
send_bucket_->Process();
@@ -864,9 +829,8 @@ TEST_F(PacedSenderTest, AverageQueueTime) {
uint16_t sequence_number = 1234;
const size_t kPacketSize = 1200;
const int kBitrateBps = 10 * kPacketSize * 8; // 10 packets per second.
- const int kBitrateKbps = (kBitrateBps + 500) / 1000;
- send_bucket_->UpdateBitrate(kBitrateKbps, kBitrateKbps, kBitrateKbps);
+ send_bucket_->SetEstimatedBitrate(kBitrateBps);
EXPECT_EQ(0, send_bucket_->AverageQueueTimeMs());
@@ -885,7 +849,7 @@ TEST_F(PacedSenderTest, AverageQueueTime) {
// Only first packet (queued for 20ms) should be removed, leave the second
// packet (queued for 10ms) alone in the queue.
EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number,
- first_capture_time, false))
+ first_capture_time, false, _))
.Times(1)
.WillRepeatedly(Return(true));
send_bucket_->Process();
@@ -894,7 +858,7 @@ TEST_F(PacedSenderTest, AverageQueueTime) {
clock_.AdvanceTimeMilliseconds(10);
EXPECT_CALL(callback_, TimeToSendPacket(ssrc, sequence_number + 1,
- first_capture_time + 10, false))
+ first_capture_time + 10, false, _))
.Times(1)
.WillRepeatedly(Return(true));
for (int i = 0; i < 3; ++i) {
@@ -905,5 +869,37 @@ TEST_F(PacedSenderTest, AverageQueueTime) {
EXPECT_EQ(0, send_bucket_->AverageQueueTimeMs());
}
+TEST_F(PacedSenderTest, ProbeClusterId) {
+ uint32_t ssrc = 12346;
+ uint16_t sequence_number = 1234;
+ const size_t kPacketSize = 1200;
+
+ send_bucket_->SetProbingEnabled(true);
+ for (int i = 0; i < 11; ++i) {
+ send_bucket_->InsertPacket(PacedSender::kNormalPriority, ssrc,
+ sequence_number + i, clock_.TimeInMilliseconds(),
+ kPacketSize, false);
+ }
+
+ // First probing cluster.
+ EXPECT_CALL(callback_, TimeToSendPacket(_, _, _, _, 0))
+ .Times(6)
+ .WillRepeatedly(Return(true));
+ for (int i = 0; i < 6; ++i)
+ send_bucket_->Process();
+
+ // Second probing cluster.
+ EXPECT_CALL(callback_, TimeToSendPacket(_, _, _, _, 1))
+ .Times(5)
+ .WillRepeatedly(Return(true));
+ for (int i = 0; i < 5; ++i)
+ send_bucket_->Process();
+
+ // No more probing packets.
+ EXPECT_CALL(callback_, TimeToSendPadding(_))
+ .Times(1);
+ send_bucket_->Process();
+}
+
} // namespace test
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/pacing/packet_router.cc b/chromium/third_party/webrtc/modules/pacing/packet_router.cc
index 5c7a7ab29a3..1884958aca4 100644
--- a/chromium/third_party/webrtc/modules/pacing/packet_router.cc
+++ b/chromium/third_party/webrtc/modules/pacing/packet_router.cc
@@ -43,7 +43,8 @@ void PacketRouter::RemoveRtpModule(RtpRtcp* rtp_module) {
bool PacketRouter::TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_timestamp,
- bool retransmission) {
+ bool retransmission,
+ int probe_cluster_id) {
RTC_DCHECK(pacer_thread_checker_.CalledOnValidThread());
rtc::CritScope cs(&modules_crit_);
for (auto* rtp_module : rtp_modules_) {
diff --git a/chromium/third_party/webrtc/modules/pacing/packet_router.h b/chromium/third_party/webrtc/modules/pacing/packet_router.h
index 635b931225c..81d85404eee 100644
--- a/chromium/third_party/webrtc/modules/pacing/packet_router.h
+++ b/chromium/third_party/webrtc/modules/pacing/packet_router.h
@@ -30,7 +30,7 @@ class TransportFeedback;
// PacketRouter routes outgoing data to the correct sending RTP module, based
// on the simulcast layer in RTPVideoHeader.
-class PacketRouter : public PacedSender::Callback,
+class PacketRouter : public PacedSender::PacketSender,
public TransportSequenceNumberAllocator {
public:
PacketRouter();
@@ -43,7 +43,8 @@ class PacketRouter : public PacedSender::Callback,
bool TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_timestamp,
- bool retransmission) override;
+ bool retransmission,
+ int probe_cluster_id) override;
size_t TimeToSendPadding(size_t bytes) override;
diff --git a/chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc b/chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc
index faf270ced37..006b9f2bf48 100644
--- a/chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc
+++ b/chromium/third_party/webrtc/modules/pacing/packet_router_unittest.cc
@@ -53,7 +53,7 @@ TEST_F(PacketRouterTest, TimeToSendPacket) {
.WillOnce(Return(true));
EXPECT_CALL(rtp_2, TimeToSendPacket(_, _, _, _)).Times(0);
EXPECT_TRUE(packet_router_->TimeToSendPacket(kSsrc1, sequence_number,
- timestamp, retransmission));
+ timestamp, retransmission, -1));
// Send on the second module by letting rtp_2 be sending, but not rtp_1.
++sequence_number;
@@ -69,7 +69,7 @@ TEST_F(PacketRouterTest, TimeToSendPacket) {
.Times(1)
.WillOnce(Return(true));
EXPECT_TRUE(packet_router_->TimeToSendPacket(kSsrc2, sequence_number,
- timestamp, retransmission));
+ timestamp, retransmission, -1));
// No module is sending, hence no packet should be sent.
EXPECT_CALL(rtp_1, SendingMedia()).Times(1).WillOnce(Return(false));
@@ -77,7 +77,7 @@ TEST_F(PacketRouterTest, TimeToSendPacket) {
EXPECT_CALL(rtp_2, SendingMedia()).Times(1).WillOnce(Return(false));
EXPECT_CALL(rtp_2, TimeToSendPacket(_, _, _, _)).Times(0);
EXPECT_TRUE(packet_router_->TimeToSendPacket(kSsrc1, sequence_number,
- timestamp, retransmission));
+ timestamp, retransmission, -1));
// Add a packet with incorrect ssrc and test it's dropped in the router.
EXPECT_CALL(rtp_1, SendingMedia()).Times(1).WillOnce(Return(true));
@@ -87,7 +87,7 @@ TEST_F(PacketRouterTest, TimeToSendPacket) {
EXPECT_CALL(rtp_1, TimeToSendPacket(_, _, _, _)).Times(0);
EXPECT_CALL(rtp_2, TimeToSendPacket(_, _, _, _)).Times(0);
EXPECT_TRUE(packet_router_->TimeToSendPacket(kSsrc1 + kSsrc2, sequence_number,
- timestamp, retransmission));
+ timestamp, retransmission, -1));
packet_router_->RemoveRtpModule(&rtp_1);
@@ -97,7 +97,7 @@ TEST_F(PacketRouterTest, TimeToSendPacket) {
EXPECT_CALL(rtp_2, SSRC()).Times(1).WillOnce(Return(kSsrc2));
EXPECT_CALL(rtp_2, TimeToSendPacket(_, _, _, _)).Times(0);
EXPECT_TRUE(packet_router_->TimeToSendPacket(kSsrc1, sequence_number,
- timestamp, retransmission));
+ timestamp, retransmission, -1));
packet_router_->RemoveRtpModule(&rtp_2);
}
@@ -167,7 +167,7 @@ TEST_F(PacketRouterTest, SenderOnlyFunctionsRespectSendingMedia) {
// Verify that TimeToSendPacket does not end up in a receiver.
EXPECT_CALL(rtp, TimeToSendPacket(_, _, _, _)).Times(0);
- EXPECT_TRUE(packet_router_->TimeToSendPacket(kSsrc, 1, 1, false));
+ EXPECT_TRUE(packet_router_->TimeToSendPacket(kSsrc, 1, 1, false, -1));
// Verify that TimeToSendPadding does not end up in a receiver.
EXPECT_CALL(rtp, TimeToSendPadding(_)).Times(0);
EXPECT_EQ(0u, packet_router_->TimeToSendPadding(200));
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc
index 4967913558c..d44788be274 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc
@@ -11,6 +11,7 @@
#include <memory>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test.h"
#include "webrtc/modules/remote_bitrate_estimator/test/packet_receiver.h"
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h
index 2c35df872bc..61773037d74 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_estimator.h
@@ -28,7 +28,7 @@ class MockRemoteBitrateEstimator : public RemoteBitrateEstimator {
public:
MOCK_METHOD1(IncomingPacketFeedbackVector,
void(const std::vector<PacketInfo>&));
- MOCK_METHOD4(IncomingPacket, void(int64_t, size_t, const RTPHeader&, bool));
+ MOCK_METHOD3(IncomingPacket, void(int64_t, size_t, const RTPHeader&));
MOCK_METHOD1(RemoveStream, void(uint32_t));
MOCK_CONST_METHOD2(LatestEstimate, bool(std::vector<uint32_t>*, uint32_t*));
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
index e56c273fc4b..d7d8d2c8d2e 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
@@ -58,8 +58,7 @@ class RemoteBitrateEstimator : public CallStatsObserver, public Module {
// Note that |arrival_time_ms| can be of an arbitrary time base.
virtual void IncomingPacket(int64_t arrival_time_ms,
size_t payload_size,
- const RTPHeader& header,
- bool was_paced) = 0;
+ const RTPHeader& header) = 0;
// Removes all data for |ssrc|.
virtual void RemoveStream(uint32_t ssrc) = 0;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/send_time_history.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/send_time_history.h
index a643c1f1030..83e87f95712 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/send_time_history.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/send_time_history.h
@@ -24,7 +24,7 @@ class SendTimeHistory {
SendTimeHistory(Clock* clock, int64_t packet_age_limit);
virtual ~SendTimeHistory();
- void AddAndRemoveOld(uint16_t sequence_number, size_t length, bool was_paced);
+ void AddAndRemoveOld(uint16_t sequence_number, size_t length);
bool OnSentPacket(uint16_t sequence_number, int64_t timestamp);
// Look up PacketInfo for a sent packet, based on the sequence number, and
// populate all fields except for receive_time. The packet parameter must
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/inter_arrival.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/inter_arrival.cc
index f75bc2b03ea..1b7ce07583d 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/inter_arrival.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/inter_arrival.cc
@@ -18,7 +18,7 @@
namespace webrtc {
-static const int kBurstDeltaThresholdMs = 5;
+static const int kBurstDeltaThresholdMs = 5;
InterArrival::InterArrival(uint32_t timestamp_group_length_ticks,
double timestamp_to_ms_coeff,
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
index 32663d729b8..7b20cf7f9db 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
@@ -62,6 +62,7 @@
'type': 'static_library',
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/testing/gmock.gyp:gmock',
],
'sources': [
'test/bwe.cc',
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
index 7c2abb2f08f..82335055b43 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc
@@ -14,12 +14,12 @@
#include <algorithm>
+#include "webrtc/base/checks.h"
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/pacing/paced_sender.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
-#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
@@ -79,23 +79,19 @@ bool RemoteBitrateEstimatorAbsSendTime::IsWithinClusterBounds(
}
RemoteBitrateEstimatorAbsSendTime::RemoteBitrateEstimatorAbsSendTime(
- RemoteBitrateObserver* observer,
- Clock* clock)
+ RemoteBitrateObserver* observer)
: observer_(observer),
inter_arrival_(),
- estimator_(OverUseDetectorOptions()),
+ estimator_(),
detector_(OverUseDetectorOptions()),
incoming_bitrate_(kBitrateWindowMs, 8000),
total_probes_received_(0),
first_packet_time_ms_(-1),
last_update_ms_(-1),
- ssrcs_(),
- clock_(clock) {
+ ssrcs_() {
RTC_DCHECK(observer_);
- RTC_DCHECK(clock_);
LOG(LS_INFO) << "RemoteBitrateEstimatorAbsSendTime: Instantiating.";
network_thread_.DetachFromThread();
- process_thread_.DetachFromThread();
}
void RemoteBitrateEstimatorAbsSendTime::ComputeClusters(
@@ -180,8 +176,7 @@ RemoteBitrateEstimatorAbsSendTime::ProcessClusters(int64_t now_ms) {
std::min(best_it->GetSendBitrateBps(), best_it->GetRecvBitrateBps());
// Make sure that a probe sent on a lower bitrate than our estimate can't
// reduce the estimate.
- if (IsBitrateImproving(probe_bitrate_bps) &&
- probe_bitrate_bps > static_cast<int>(incoming_bitrate_.Rate(now_ms))) {
+ if (IsBitrateImproving(probe_bitrate_bps)) {
LOG(LS_INFO) << "Probe successful, sent at "
<< best_it->GetSendBitrateBps() << " bps, received at "
<< best_it->GetRecvBitrateBps()
@@ -215,14 +210,14 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketFeedbackVector(
for (const auto& packet_info : packet_feedback_vector) {
IncomingPacketInfo(packet_info.arrival_time_ms,
ConvertMsTo24Bits(packet_info.send_time_ms),
- packet_info.payload_size, 0, packet_info.was_paced);
+ packet_info.payload_size, 0);
}
}
-void RemoteBitrateEstimatorAbsSendTime::IncomingPacket(int64_t arrival_time_ms,
- size_t payload_size,
- const RTPHeader& header,
- bool was_paced) {
+void RemoteBitrateEstimatorAbsSendTime::IncomingPacket(
+ int64_t arrival_time_ms,
+ size_t payload_size,
+ const RTPHeader& header) {
RTC_DCHECK(network_thread_.CalledOnValidThread());
if (!header.extension.hasAbsoluteSendTime) {
LOG(LS_WARNING) << "RemoteBitrateEstimatorAbsSendTimeImpl: Incoming packet "
@@ -230,36 +225,31 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacket(int64_t arrival_time_ms,
return;
}
IncomingPacketInfo(arrival_time_ms, header.extension.absoluteSendTime,
- payload_size, header.ssrc, was_paced);
+ payload_size, header.ssrc);
}
void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo(
int64_t arrival_time_ms,
uint32_t send_time_24bits,
size_t payload_size,
- uint32_t ssrc,
- bool was_paced) {
+ uint32_t ssrc) {
assert(send_time_24bits < (1ul << 24));
// Shift up send time to use the full 32 bits that inter_arrival works with,
// so wrapping works properly.
uint32_t timestamp = send_time_24bits << kAbsSendTimeInterArrivalUpshift;
int64_t send_time_ms = static_cast<int64_t>(timestamp) * kTimestampToMs;
- int64_t now_ms = clock_->TimeInMilliseconds();
+ int64_t now_ms = arrival_time_ms;
// TODO(holmer): SSRCs are only needed for REMB, should be broken out from
// here.
incoming_bitrate_.Update(payload_size, now_ms);
if (first_packet_time_ms_ == -1)
- first_packet_time_ms_ = clock_->TimeInMilliseconds();
+ first_packet_time_ms_ = arrival_time_ms;
uint32_t ts_delta = 0;
int64_t t_delta = 0;
int size_delta = 0;
- // For now only try to detect probes while we don't have a valid estimate, and
- // make sure the packet was paced. We currently assume that only packets
- // larger than 200 bytes are paced by the sender.
- was_paced = was_paced && payload_size > PacedSender::kMinProbePacketSize;
bool update_estimate = false;
uint32_t target_bitrate_bps = 0;
std::vector<uint32_t> ssrcs;
@@ -267,9 +257,14 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo(
rtc::CritScope lock(&crit_);
TimeoutStreams(now_ms);
+ RTC_DCHECK(inter_arrival_.get());
+ RTC_DCHECK(estimator_.get());
ssrcs_[ssrc] = now_ms;
- if (was_paced &&
+ // For now only try to detect probes while we don't have a valid estimate.
+ // We currently assume that only packets larger than 200 bytes are paced by
+ // the sender.
+ if (payload_size > PacedSender::kMinProbePacketSize &&
(!remote_rate_.ValidEstimate() ||
now_ms - first_packet_time_ms_ < kInitialProbingIntervalMs)) {
// TODO(holmer): Use a map instead to get correct order?
@@ -295,9 +290,9 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo(
if (inter_arrival_->ComputeDeltas(timestamp, arrival_time_ms, payload_size,
&ts_delta, &t_delta, &size_delta)) {
double ts_delta_ms = (1000.0 * ts_delta) / (1 << kInterArrivalShift);
- estimator_.Update(t_delta, ts_delta_ms, size_delta, detector_.State());
- detector_.Detect(estimator_.offset(), ts_delta_ms,
- estimator_.num_of_deltas(), arrival_time_ms);
+ estimator_->Update(t_delta, ts_delta_ms, size_delta, detector_.State());
+ detector_.Detect(estimator_->offset(), ts_delta_ms,
+ estimator_->num_of_deltas(), arrival_time_ms);
}
if (!update_estimate) {
@@ -319,7 +314,7 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo(
// and the target bitrate is too high compared to what we are receiving.
const RateControlInput input(detector_.State(),
incoming_bitrate_.Rate(now_ms),
- estimator_.var_noise());
+ estimator_->var_noise());
remote_rate_.Update(&input, now_ms);
target_bitrate_bps = remote_rate_.UpdateBandwidthEstimate(now_ms);
update_estimate = remote_rate_.ValidEstimate();
@@ -352,6 +347,7 @@ void RemoteBitrateEstimatorAbsSendTime::TimeoutStreams(int64_t now_ms) {
inter_arrival_.reset(
new InterArrival((kTimestampGroupLengthMs << kInterArrivalShift) / 1000,
kTimestampToMs, true));
+ estimator_.reset(new OveruseEstimator(OverUseDetectorOptions()));
// We deliberately don't reset the first_packet_time_ms_ here for now since
// we only probe for bandwidth in the beginning of a call right now.
}
@@ -359,7 +355,6 @@ void RemoteBitrateEstimatorAbsSendTime::TimeoutStreams(int64_t now_ms) {
void RemoteBitrateEstimatorAbsSendTime::OnRttUpdate(int64_t avg_rtt_ms,
int64_t max_rtt_ms) {
- RTC_DCHECK(process_thread_.CalledOnValidThread());
rtc::CritScope lock(&crit_);
remote_rate_.SetRtt(avg_rtt_ms);
}
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h
index 1f47dc3b244..9403a6cc8b9 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h
@@ -17,6 +17,7 @@
#include <vector>
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/rate_statistics.h"
#include "webrtc/base/thread_checker.h"
@@ -67,8 +68,7 @@ struct Cluster {
class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
public:
- RemoteBitrateEstimatorAbsSendTime(RemoteBitrateObserver* observer,
- Clock* clock);
+ explicit RemoteBitrateEstimatorAbsSendTime(RemoteBitrateObserver* observer);
virtual ~RemoteBitrateEstimatorAbsSendTime() {}
void IncomingPacketFeedbackVector(
@@ -76,8 +76,7 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
void IncomingPacket(int64_t arrival_time_ms,
size_t payload_size,
- const RTPHeader& header,
- bool was_paced) override;
+ const RTPHeader& header) override;
// This class relies on Process() being called periodically (at least once
// every other second) for streams to be timed out properly. Therefore it
// shouldn't be detached from the ProcessThread except if it's about to be
@@ -102,8 +101,7 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
void IncomingPacketInfo(int64_t arrival_time_ms,
uint32_t send_time_24bits,
size_t payload_size,
- uint32_t ssrc,
- bool was_paced);
+ uint32_t ssrc);
void ComputeClusters(std::list<Cluster>* clusters) const;
@@ -121,7 +119,7 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
rtc::ThreadChecker network_thread_;
RemoteBitrateObserver* const observer_;
std::unique_ptr<InterArrival> inter_arrival_;
- OveruseEstimator estimator_;
+ std::unique_ptr<OveruseEstimator> estimator_;
OveruseDetector detector_;
RateStatistics incoming_bitrate_;
std::vector<int> recent_propagation_delta_ms_;
@@ -131,11 +129,9 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator {
int64_t first_packet_time_ms_;
int64_t last_update_ms_;
- rtc::ThreadChecker process_thread_;
rtc::CriticalSection crit_;
Ssrcs ssrcs_ GUARDED_BY(&crit_);
AimdRateControl remote_rate_ GUARDED_BY(&crit_);
- Clock* const clock_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RemoteBitrateEstimatorAbsSendTime);
};
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc
index e8026a5764d..a4e4150e769 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc
@@ -20,7 +20,7 @@ class RemoteBitrateEstimatorAbsSendTimeTest :
RemoteBitrateEstimatorAbsSendTimeTest() {}
virtual void SetUp() {
bitrate_estimator_.reset(new RemoteBitrateEstimatorAbsSendTime(
- bitrate_observer_.get(), &clock_));
+ bitrate_observer_.get()));
}
protected:
RTC_DISALLOW_COPY_AND_ASSIGN(RemoteBitrateEstimatorAbsSendTimeTest);
@@ -35,15 +35,15 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, RateIncreaseReordering) {
}
TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, RateIncreaseRtpTimestamps) {
- RateIncreaseRtpTimestampsTestHelper(1232);
+ RateIncreaseRtpTimestampsTestHelper(1229);
}
TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, CapacityDropOneStream) {
- CapacityDropTestHelper(1, false, 633);
+ CapacityDropTestHelper(1, false, 667);
}
TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, CapacityDropOneStreamWrap) {
- CapacityDropTestHelper(1, true, 633);
+ CapacityDropTestHelper(1, true, 667);
}
TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, CapacityDropTwoStreamsWrap) {
@@ -90,7 +90,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, TestProcessAfterTimeout) {
// RemoteBitrateEstimator.
const int64_t kStreamTimeOutMs = 2000;
const int64_t kProcessIntervalMs = 1000;
- IncomingPacket(0, 1000, clock_.TimeInMilliseconds(), 0, 0, true);
+ IncomingPacket(0, 1000, clock_.TimeInMilliseconds(), 0, 0);
clock_.AdvanceTimeMilliseconds(kStreamTimeOutMs + 1);
// Trigger timeout.
bitrate_estimator_->Process();
@@ -106,16 +106,14 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, TestProbeDetection) {
for (int i = 0; i < kProbeLength; ++i) {
clock_.AdvanceTimeMilliseconds(10);
now_ms = clock_.TimeInMilliseconds();
- IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000),
- true);
+ IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000));
}
// Second burst sent at 8 * 1000 / 5 = 1600 kbps.
for (int i = 0; i < kProbeLength; ++i) {
clock_.AdvanceTimeMilliseconds(5);
now_ms = clock_.TimeInMilliseconds();
- IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000),
- true);
+ IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000));
}
bitrate_estimator_->Process();
@@ -132,12 +130,10 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest,
for (int i = 0; i < kProbeLength; ++i) {
clock_.AdvanceTimeMilliseconds(5);
now_ms = clock_.TimeInMilliseconds();
- IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000),
- true);
+ IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000));
// Non-paced packet, arriving 5 ms after.
clock_.AdvanceTimeMilliseconds(5);
- IncomingPacket(0, 100, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000),
- false);
+ IncomingPacket(0, 100, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000));
}
bitrate_estimator_->Process();
@@ -158,7 +154,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest,
now_ms = clock_.TimeInMilliseconds();
send_time_ms += 10;
IncomingPacket(0, 1000, now_ms, 90 * send_time_ms,
- AbsSendTime(send_time_ms, 1000), true);
+ AbsSendTime(send_time_ms, 1000));
}
// Second burst sent at 8 * 1000 / 5 = 1600 kbps, arriving at 8 * 1000 / 8 =
@@ -168,7 +164,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest,
now_ms = clock_.TimeInMilliseconds();
send_time_ms += 5;
IncomingPacket(0, 1000, now_ms, send_time_ms,
- AbsSendTime(send_time_ms, 1000), true);
+ AbsSendTime(send_time_ms, 1000));
}
bitrate_estimator_->Process();
@@ -188,7 +184,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest,
send_time_ms += 10;
now_ms = clock_.TimeInMilliseconds();
IncomingPacket(0, 1000, now_ms, 90 * send_time_ms,
- AbsSendTime(send_time_ms, 1000), true);
+ AbsSendTime(send_time_ms, 1000));
}
bitrate_estimator_->Process();
@@ -207,7 +203,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, TestProbeDetectionFasterArrival) {
send_time_ms += 10;
now_ms = clock_.TimeInMilliseconds();
IncomingPacket(0, 1000, now_ms, 90 * send_time_ms,
- AbsSendTime(send_time_ms, 1000), true);
+ AbsSendTime(send_time_ms, 1000));
}
bitrate_estimator_->Process();
@@ -225,7 +221,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, TestProbeDetectionSlowerArrival) {
send_time_ms += 5;
now_ms = clock_.TimeInMilliseconds();
IncomingPacket(0, 1000, now_ms, 90 * send_time_ms,
- AbsSendTime(send_time_ms, 1000), true);
+ AbsSendTime(send_time_ms, 1000));
}
bitrate_estimator_->Process();
@@ -245,7 +241,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest,
send_time_ms += 1;
now_ms = clock_.TimeInMilliseconds();
IncomingPacket(0, 1000, now_ms, 90 * send_time_ms,
- AbsSendTime(send_time_ms, 1000), true);
+ AbsSendTime(send_time_ms, 1000));
}
bitrate_estimator_->Process();
@@ -261,8 +257,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, ProbingIgnoresSmallPackets) {
for (int i = 0; i < kProbeLength; ++i) {
clock_.AdvanceTimeMilliseconds(10);
now_ms = clock_.TimeInMilliseconds();
- IncomingPacket(0, 200, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000),
- true);
+ IncomingPacket(0, 200, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000));
}
bitrate_estimator_->Process();
@@ -273,8 +268,7 @@ TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, ProbingIgnoresSmallPackets) {
for (int i = 0; i < kProbeLength; ++i) {
clock_.AdvanceTimeMilliseconds(10);
now_ms = clock_.TimeInMilliseconds();
- IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000),
- true);
+ IncomingPacket(0, 1000, now_ms, 90 * now_ms, AbsSendTime(now_ms, 1000));
}
// Wait long enough so that we can call Process again.
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
index f38ef783067..b5adb9fa70a 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
@@ -66,10 +66,10 @@ RemoteBitrateEstimatorSingleStream::~RemoteBitrateEstimatorSingleStream() {
}
}
-void RemoteBitrateEstimatorSingleStream::IncomingPacket(int64_t arrival_time_ms,
- size_t payload_size,
- const RTPHeader& header,
- bool was_paced) {
+void RemoteBitrateEstimatorSingleStream::IncomingPacket(
+ int64_t arrival_time_ms,
+ size_t payload_size,
+ const RTPHeader& header) {
uint32_t ssrc = header.ssrc;
uint32_t rtp_timestamp = header.timestamp +
header.extension.transmissionTimeOffset;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h
index 5516ea781da..2f74e1c4af0 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h
@@ -15,6 +15,7 @@
#include <memory>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/rate_statistics.h"
#include "webrtc/modules/remote_bitrate_estimator/aimd_rate_control.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
@@ -30,8 +31,7 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator {
void IncomingPacket(int64_t arrival_time_ms,
size_t payload_size,
- const RTPHeader& header,
- bool was_paced) override;
+ const RTPHeader& header) override;
void Process() override;
int64_t TimeUntilNextProcess() override;
void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
index 6fd0ad11b57..97e3abaa32b 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
@@ -47,7 +47,7 @@ TEST_F(RemoteBitrateEstimatorSingleTest, CapacityDropOneStreamWrap) {
}
TEST_F(RemoteBitrateEstimatorSingleTest, CapacityDropTwoStreamsWrap) {
- CapacityDropTestHelper(2, true, 767);
+ CapacityDropTestHelper(2, true, 600);
}
TEST_F(RemoteBitrateEstimatorSingleTest, CapacityDropThreeStreamsWrap) {
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
index 8bfb8ed0fd3..4530053a869 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
@@ -221,8 +221,7 @@ void RemoteBitrateEstimatorTest::IncomingPacket(uint32_t ssrc,
size_t payload_size,
int64_t arrival_time,
uint32_t rtp_timestamp,
- uint32_t absolute_send_time,
- bool was_paced) {
+ uint32_t absolute_send_time) {
RTPHeader header;
memset(&header, 0, sizeof(header));
header.ssrc = ssrc;
@@ -230,7 +229,7 @@ void RemoteBitrateEstimatorTest::IncomingPacket(uint32_t ssrc,
header.extension.hasAbsoluteSendTime = true;
header.extension.absoluteSendTime = absolute_send_time;
bitrate_estimator_->IncomingPacket(arrival_time + kArrivalTimeClockOffsetMs,
- payload_size, header, was_paced);
+ payload_size, header);
}
// Generates a frame of packets belonging to a stream at a given bitrate and
@@ -255,7 +254,7 @@ bool RemoteBitrateEstimatorTest::GenerateAndProcessFrame(uint32_t ssrc,
clock_.TimeInMicroseconds());
IncomingPacket(packet->ssrc, packet->size,
(packet->arrival_time + 500) / 1000, packet->rtp_timestamp,
- AbsSendTime(packet->send_time, 1000000), true);
+ AbsSendTime(packet->send_time, 1000000));
if (bitrate_observer_->updated()) {
if (bitrate_observer_->latest_bitrate() < bitrate_bps)
overuse = true;
@@ -319,7 +318,7 @@ void RemoteBitrateEstimatorTest::InitialBehaviorTestHelper(
clock_.AdvanceTimeMilliseconds(1000);
// Inserting a packet. Still no valid estimate. We need to wait 5 seconds.
IncomingPacket(kDefaultSsrc, kMtu, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
bitrate_estimator_->Process();
EXPECT_FALSE(bitrate_estimator_->LatestEstimate(&ssrcs, &bitrate_bps));
EXPECT_EQ(0u, ssrcs.size());
@@ -328,7 +327,7 @@ void RemoteBitrateEstimatorTest::InitialBehaviorTestHelper(
// Inserting packets for 5 seconds to get a valid estimate.
for (int i = 0; i < 5 * kFramerate + 1; ++i) {
IncomingPacket(kDefaultSsrc, kMtu, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
clock_.AdvanceTimeMilliseconds(1000 / kFramerate);
timestamp += 90 * kFrameIntervalMs;
absolute_send_time = AddAbsSendTime(absolute_send_time,
@@ -356,13 +355,13 @@ void RemoteBitrateEstimatorTest::RateIncreaseReorderingTestHelper(
uint32_t timestamp = 0;
uint32_t absolute_send_time = 0;
IncomingPacket(kDefaultSsrc, 1000, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
bitrate_estimator_->Process();
EXPECT_FALSE(bitrate_observer_->updated()); // No valid estimate.
// Inserting packets for one second to get a valid estimate.
for (int i = 0; i < 5 * kFramerate + 1; ++i) {
IncomingPacket(kDefaultSsrc, kMtu, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
timestamp += 90 * kFrameIntervalMs;
absolute_send_time = AddAbsSendTime(absolute_send_time,
@@ -379,12 +378,12 @@ void RemoteBitrateEstimatorTest::RateIncreaseReorderingTestHelper(
absolute_send_time = AddAbsSendTime(absolute_send_time,
2 * kFrameIntervalAbsSendTime);
IncomingPacket(kDefaultSsrc, 1000, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
- IncomingPacket(kDefaultSsrc, 1000, clock_.TimeInMilliseconds(),
- timestamp - 90 * kFrameIntervalMs,
- AddAbsSendTime(absolute_send_time,
- -static_cast<int>(kFrameIntervalAbsSendTime)),
- true);
+ absolute_send_time);
+ IncomingPacket(
+ kDefaultSsrc, 1000, clock_.TimeInMilliseconds(),
+ timestamp - 90 * kFrameIntervalMs,
+ AddAbsSendTime(absolute_send_time,
+ -static_cast<int>(kFrameIntervalAbsSendTime)));
}
bitrate_estimator_->Process();
EXPECT_TRUE(bitrate_observer_->updated());
@@ -517,7 +516,7 @@ void RemoteBitrateEstimatorTest::TestTimestampGroupingTestHelper() {
// time for the first estimate to be generated and for Process() to be called.
for (int i = 0; i <= 6 * kFramerate; ++i) {
IncomingPacket(kDefaultSsrc, 1000, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
bitrate_estimator_->Process();
clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
timestamp += 90 * kFrameIntervalMs;
@@ -538,7 +537,7 @@ void RemoteBitrateEstimatorTest::TestTimestampGroupingTestHelper() {
// Insert |kTimestampGroupLength| frames with just 1 timestamp ticks in
// between. Should be treated as part of the same group by the estimator.
IncomingPacket(kDefaultSsrc, 100, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
clock_.AdvanceTimeMilliseconds(kFrameIntervalMs / kTimestampGroupLength);
timestamp += 1;
absolute_send_time = AddAbsSendTime(absolute_send_time,
@@ -568,7 +567,7 @@ void RemoteBitrateEstimatorTest::TestWrappingHelper(
for (size_t i = 0; i < 3000; ++i) {
IncomingPacket(kDefaultSsrc, 1000, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
timestamp += kFrameIntervalMs;
clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
absolute_send_time = AddAbsSendTime(absolute_send_time,
@@ -583,9 +582,9 @@ void RemoteBitrateEstimatorTest::TestWrappingHelper(
absolute_send_time = AddAbsSendTime(absolute_send_time,
AbsSendTime(silence_time_s, 1));
bitrate_estimator_->Process();
- for (size_t i = 0; i < 10; ++i) {
+ for (size_t i = 0; i < 21; ++i) {
IncomingPacket(kDefaultSsrc, 1000, clock_.TimeInMilliseconds(), timestamp,
- absolute_send_time, true);
+ absolute_send_time);
timestamp += kFrameIntervalMs;
clock_.AdvanceTimeMilliseconds(2 * kFrameIntervalMs);
absolute_send_time = AddAbsSendTime(absolute_send_time,
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
index 12ac9e8502d..b4bff670e75 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
@@ -173,8 +173,7 @@ class RemoteBitrateEstimatorTest : public ::testing::Test {
size_t payload_size,
int64_t arrival_time,
uint32_t rtp_timestamp,
- uint32_t absolute_send_time,
- bool was_paced);
+ uint32_t absolute_send_time);
// Generates a frame of packets belonging to a stream at a given bitrate and
// with a given ssrc. The stream is pushed through a very simple simulated
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
index 31cd9f98028..e307242f8d3 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
@@ -16,6 +16,7 @@
#include <algorithm>
#include <sstream>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/random.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test.h"
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
index eace9fc8b0a..2172bce9380 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.cc
@@ -43,8 +43,7 @@ void RemoteEstimatorProxy::IncomingPacketFeedbackVector(
void RemoteEstimatorProxy::IncomingPacket(int64_t arrival_time_ms,
size_t payload_size,
- const RTPHeader& header,
- bool was_paced) {
+ const RTPHeader& header) {
if (!header.extension.hasTransportSequenceNumber) {
LOG(LS_WARNING) << "RemoteEstimatorProxy: Incoming packet "
"is missing the transport sequence number extension!";
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h
index 93d5244b67b..66373e29778 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy.h
@@ -39,8 +39,7 @@ class RemoteEstimatorProxy : public RemoteBitrateEstimator {
const std::vector<PacketInfo>& packet_feedback_vector) override;
void IncomingPacket(int64_t arrival_time_ms,
size_t payload_size,
- const RTPHeader& header,
- bool was_paced) override;
+ const RTPHeader& header) override;
void RemoveStream(uint32_t ssrc) override {}
bool LatestEstimate(std::vector<unsigned int>* ssrcs,
unsigned int* bitrate_bps) const override;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
index 3c3c7297e18..a1264b2ff95 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc
@@ -37,7 +37,7 @@ class RemoteEstimatorProxyTest : public ::testing::Test {
header.extension.hasTransportSequenceNumber = true;
header.extension.transportSequenceNumber = seq;
header.ssrc = kMediaSsrc;
- proxy_.IncomingPacket(time_ms, kDefaultPacketSize, header, true);
+ proxy_.IncomingPacket(time_ms, kDefaultPacketSize, header);
}
void Process() {
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history.cc
index a58d12a1600..f4fe2208a81 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history.cc
@@ -26,9 +26,7 @@ void SendTimeHistory::Clear() {
history_.clear();
}
-void SendTimeHistory::AddAndRemoveOld(uint16_t sequence_number,
- size_t length,
- bool was_paced) {
+void SendTimeHistory::AddAndRemoveOld(uint16_t sequence_number, size_t length) {
EraseOld();
if (history_.empty())
@@ -36,7 +34,7 @@ void SendTimeHistory::AddAndRemoveOld(uint16_t sequence_number,
history_.insert(std::pair<uint16_t, PacketInfo>(
sequence_number, PacketInfo(clock_->TimeInMilliseconds(), 0, -1,
- sequence_number, length, was_paced)));
+ sequence_number, length)));
}
bool SendTimeHistory::OnSentPacket(uint16_t sequence_number,
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history_unittest.cc
index b525813cdca..7500f575fb7 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history_unittest.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/send_time_history_unittest.cc
@@ -33,9 +33,8 @@ class SendTimeHistoryTest : public ::testing::Test {
void AddPacketWithSendTime(uint16_t sequence_number,
size_t length,
- bool was_paced,
int64_t send_time_ms) {
- history_.AddAndRemoveOld(sequence_number, length, was_paced);
+ history_.AddAndRemoveOld(sequence_number, length);
history_.OnSentPacket(sequence_number, send_time_ms);
}
@@ -46,42 +45,40 @@ class SendTimeHistoryTest : public ::testing::Test {
// Help class extended so we can do EXPECT_EQ and collections.
class PacketInfo : public webrtc::PacketInfo {
public:
- PacketInfo() : webrtc::PacketInfo(-1, 0, 0, 0, 0, false) {}
+ PacketInfo() : webrtc::PacketInfo(-1, 0, 0, 0, 0) {}
PacketInfo(int64_t arrival_time_ms, uint16_t sequence_number)
- : PacketInfo(arrival_time_ms, 0, sequence_number, 0, false) {}
+ : PacketInfo(arrival_time_ms, 0, sequence_number, 0) {}
PacketInfo(int64_t arrival_time_ms,
int64_t send_time_ms,
uint16_t sequence_number,
- size_t payload_size,
- bool was_paced)
+ size_t payload_size)
: webrtc::PacketInfo(-1,
arrival_time_ms,
send_time_ms,
sequence_number,
- payload_size,
- was_paced) {}
+ payload_size) {}
bool operator==(const PacketInfo& other) const {
return arrival_time_ms == other.arrival_time_ms &&
send_time_ms == other.send_time_ms &&
sequence_number == other.sequence_number &&
- payload_size == other.payload_size && was_paced == other.was_paced;
+ payload_size == other.payload_size;
}
};
TEST_F(SendTimeHistoryTest, AddRemoveOne) {
const uint16_t kSeqNo = 10;
- const PacketInfo kSentPacket(0, 1, kSeqNo, 1, true);
- AddPacketWithSendTime(kSeqNo, 1, true, 1);
+ const PacketInfo kSentPacket(0, 1, kSeqNo, 1);
+ AddPacketWithSendTime(kSeqNo, 1, 1);
- PacketInfo received_packet(0, 0, kSeqNo, 0, false);
+ PacketInfo received_packet(0, 0, kSeqNo, 0);
EXPECT_TRUE(history_.GetInfo(&received_packet, false));
EXPECT_EQ(kSentPacket, received_packet);
- PacketInfo received_packet2(0, 0, kSeqNo, 0, false);
+ PacketInfo received_packet2(0, 0, kSeqNo, 0);
EXPECT_TRUE(history_.GetInfo(&received_packet2, true));
EXPECT_EQ(kSentPacket, received_packet2);
- PacketInfo received_packet3(0, 0, kSeqNo, 0, false);
+ PacketInfo received_packet3(0, 0, kSeqNo, 0);
EXPECT_FALSE(history_.GetInfo(&received_packet3, true));
}
@@ -90,9 +87,8 @@ TEST_F(SendTimeHistoryTest, PopulatesExpectedFields) {
const int64_t kSendTime = 1000;
const int64_t kReceiveTime = 2000;
const size_t kPayloadSize = 42;
- const bool kPaced = true;
- AddPacketWithSendTime(kSeqNo, kPayloadSize, kPaced, kSendTime);
+ AddPacketWithSendTime(kSeqNo, kPayloadSize, kSendTime);
PacketInfo info(kReceiveTime, kSeqNo);
EXPECT_TRUE(history_.GetInfo(&info, true));
@@ -100,7 +96,6 @@ TEST_F(SendTimeHistoryTest, PopulatesExpectedFields) {
EXPECT_EQ(kSendTime, info.send_time_ms);
EXPECT_EQ(kSeqNo, info.sequence_number);
EXPECT_EQ(kPayloadSize, info.payload_size);
- EXPECT_EQ(kPaced, info.was_paced);
}
TEST_F(SendTimeHistoryTest, AddThenRemoveOutOfOrder) {
@@ -109,19 +104,16 @@ TEST_F(SendTimeHistoryTest, AddThenRemoveOutOfOrder) {
const size_t num_items = 100;
const size_t kPacketSize = 400;
const size_t kTransmissionTime = 1234;
- const bool kPaced = true;
for (size_t i = 0; i < num_items; ++i) {
sent_packets.push_back(PacketInfo(0, static_cast<int64_t>(i),
- static_cast<uint16_t>(i), kPacketSize,
- kPaced));
+ static_cast<uint16_t>(i), kPacketSize));
received_packets.push_back(
PacketInfo(static_cast<int64_t>(i) + kTransmissionTime, 0,
- static_cast<uint16_t>(i), kPacketSize, false));
+ static_cast<uint16_t>(i), kPacketSize));
}
for (size_t i = 0; i < num_items; ++i) {
history_.AddAndRemoveOld(sent_packets[i].sequence_number,
- sent_packets[i].payload_size,
- sent_packets[i].was_paced);
+ sent_packets[i].payload_size);
}
for (size_t i = 0; i < num_items; ++i)
history_.OnSentPacket(sent_packets[i].sequence_number,
@@ -143,19 +135,19 @@ TEST_F(SendTimeHistoryTest, HistorySize) {
const int kItems = kDefaultHistoryLengthMs / 100;
for (int i = 0; i < kItems; ++i) {
clock_.AdvanceTimeMilliseconds(100);
- AddPacketWithSendTime(i, 0, false, i * 100);
+ AddPacketWithSendTime(i, 0, i * 100);
}
for (int i = 0; i < kItems; ++i) {
- PacketInfo info(0, 0, static_cast<uint16_t>(i), 0, false);
+ PacketInfo info(0, 0, static_cast<uint16_t>(i), 0);
EXPECT_TRUE(history_.GetInfo(&info, false));
EXPECT_EQ(i * 100, info.send_time_ms);
}
clock_.AdvanceTimeMilliseconds(101);
- AddPacketWithSendTime(kItems, 0, false, kItems * 101);
- PacketInfo info(0, 0, 0, 0, false);
+ AddPacketWithSendTime(kItems, 0, kItems * 101);
+ PacketInfo info(0, 0, 0, 0);
EXPECT_FALSE(history_.GetInfo(&info, false));
for (int i = 1; i < (kItems + 1); ++i) {
- PacketInfo info2(0, 0, static_cast<uint16_t>(i), 0, false);
+ PacketInfo info2(0, 0, static_cast<uint16_t>(i), 0);
EXPECT_TRUE(history_.GetInfo(&info2, false));
int64_t expected_time_ms = (i == kItems) ? i * 101 : i * 100;
EXPECT_EQ(expected_time_ms, info2.send_time_ms);
@@ -164,16 +156,16 @@ TEST_F(SendTimeHistoryTest, HistorySize) {
TEST_F(SendTimeHistoryTest, HistorySizeWithWraparound) {
const uint16_t kMaxSeqNo = std::numeric_limits<uint16_t>::max();
- AddPacketWithSendTime(kMaxSeqNo - 2, 0, false, 0);
+ AddPacketWithSendTime(kMaxSeqNo - 2, 0, 0);
clock_.AdvanceTimeMilliseconds(100);
- AddPacketWithSendTime(kMaxSeqNo - 1, 1, false, 100);
+ AddPacketWithSendTime(kMaxSeqNo - 1, 1, 100);
clock_.AdvanceTimeMilliseconds(100);
- AddPacketWithSendTime(kMaxSeqNo, 0, false, 200);
+ AddPacketWithSendTime(kMaxSeqNo, 0, 200);
clock_.AdvanceTimeMilliseconds(kDefaultHistoryLengthMs - 200 + 1);
- AddPacketWithSendTime(0, 0, false, kDefaultHistoryLengthMs);
+ AddPacketWithSendTime(0, 0, kDefaultHistoryLengthMs);
PacketInfo info(0, static_cast<uint16_t>(kMaxSeqNo - 2));
EXPECT_FALSE(history_.GetInfo(&info, false));
@@ -189,7 +181,7 @@ TEST_F(SendTimeHistoryTest, HistorySizeWithWraparound) {
EXPECT_TRUE(history_.GetInfo(&info5, true));
clock_.AdvanceTimeMilliseconds(100);
- AddPacketWithSendTime(1, 0, false, 1100);
+ AddPacketWithSendTime(1, 0, 1100);
PacketInfo info6(0, static_cast<uint16_t>(kMaxSeqNo - 2));
EXPECT_FALSE(history_.GetInfo(&info6, false));
@@ -206,26 +198,26 @@ TEST_F(SendTimeHistoryTest, HistorySizeWithWraparound) {
TEST_F(SendTimeHistoryTest, InterlievedGetAndRemove) {
const uint16_t kSeqNo = 1;
const int64_t kTimestamp = 2;
- PacketInfo packets[3] = {{0, kTimestamp, kSeqNo, 0, false},
- {0, kTimestamp + 1, kSeqNo + 1, 0, false},
- {0, kTimestamp + 2, kSeqNo + 2, 0, false}};
+ PacketInfo packets[3] = {{0, kTimestamp, kSeqNo, 0},
+ {0, kTimestamp + 1, kSeqNo + 1, 0},
+ {0, kTimestamp + 2, kSeqNo + 2, 0}};
AddPacketWithSendTime(packets[0].sequence_number, packets[0].payload_size,
- packets[0].was_paced, packets[0].send_time_ms);
+ packets[0].send_time_ms);
AddPacketWithSendTime(packets[1].sequence_number, packets[1].payload_size,
- packets[1].was_paced, packets[1].send_time_ms);
- PacketInfo info(0, 0, packets[0].sequence_number, 0, false);
+ packets[1].send_time_ms);
+ PacketInfo info(0, 0, packets[0].sequence_number, 0);
EXPECT_TRUE(history_.GetInfo(&info, true));
EXPECT_EQ(packets[0], info);
AddPacketWithSendTime(packets[2].sequence_number, packets[2].payload_size,
- packets[2].was_paced, packets[2].send_time_ms);
+ packets[2].send_time_ms);
- PacketInfo info2(0, 0, packets[1].sequence_number, 0, false);
+ PacketInfo info2(0, 0, packets[1].sequence_number, 0);
EXPECT_TRUE(history_.GetInfo(&info2, true));
EXPECT_EQ(packets[1], info2);
- PacketInfo info3(0, 0, packets[2].sequence_number, 0, false);
+ PacketInfo info3(0, 0, packets[2].sequence_number, 0);
EXPECT_TRUE(history_.GetInfo(&info3, true));
EXPECT_EQ(packets[2], info3);
}
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.cc
index 5e282c6f087..b21a269ec6b 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.cc
@@ -13,6 +13,7 @@
#include <limits>
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h"
#include "webrtc/modules/remote_bitrate_estimator/test/estimators/remb.h"
#include "webrtc/modules/remote_bitrate_estimator/test/estimators/send_side.h"
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.h
index bd016cf318b..fc3018f9967 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe.h
@@ -16,6 +16,7 @@
#include <sstream>
#include <string>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/gtest_prod_util.h"
#include "webrtc/modules/remote_bitrate_estimator/test/packet.h"
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc
index 41bf836c9e9..a9fd617118a 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.cc
@@ -14,6 +14,8 @@
#include <sstream>
+#include "webrtc/base/constructormagic.h"
+
namespace webrtc {
namespace testing {
namespace bwe {
@@ -97,18 +99,14 @@ Packet::Packet()
creation_time_us_(-1),
send_time_us_(-1),
sender_timestamp_us_(-1),
- payload_size_(0),
- paced_(false) {
-}
+ payload_size_(0) {}
Packet::Packet(int flow_id, int64_t send_time_us, size_t payload_size)
: flow_id_(flow_id),
creation_time_us_(send_time_us),
send_time_us_(send_time_us),
sender_timestamp_us_(send_time_us),
- payload_size_(payload_size),
- paced_(false) {
-}
+ payload_size_(payload_size) {}
Packet::~Packet() {
}
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h
index 223b20f21c5..1fe3a228e4b 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/bwe_test_framework.h
@@ -25,6 +25,7 @@
#include <vector>
#include "webrtc/base/common.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/random.h"
#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
#include "webrtc/modules/include/module_common_types.h"
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h
index 37009c77018..2a33440bc3b 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/nada.h
@@ -21,6 +21,7 @@
#include <map>
#include <memory>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe.h"
#include "webrtc/voice_engine/channel.h"
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc
index d469e675e41..e2d3da9632a 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.cc
@@ -69,7 +69,7 @@ RembReceiver::RembReceiver(int flow_id, bool plot)
recv_stats_(ReceiveStatistics::Create(&clock_)),
latest_estimate_bps_(-1),
last_feedback_ms_(-1),
- estimator_(new RemoteBitrateEstimatorAbsSendTime(this, &clock_)) {
+ estimator_(new RemoteBitrateEstimatorAbsSendTime(this)) {
std::stringstream ss;
ss << "Estimate_" << flow_id_ << "#1";
estimate_log_prefix_ = ss.str();
@@ -95,7 +95,7 @@ void RembReceiver::ReceivePacket(int64_t arrival_time_ms,
step_ms = std::max<int64_t>(estimator_->TimeUntilNextProcess(), 0);
}
estimator_->IncomingPacket(arrival_time_ms, media_packet.payload_size(),
- media_packet.header(), true);
+ media_packet.header());
clock_.AdvanceTimeMilliseconds(arrival_time_ms - clock_.TimeInMilliseconds());
ASSERT_TRUE(arrival_time_ms == clock_.TimeInMilliseconds());
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.h
index 5e6b6edb256..3dc4f388c8f 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/remb.h
@@ -15,6 +15,7 @@
#include <string>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/send_side.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/send_side.cc
index 36dff1fb2ac..c54a7b05949 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/send_side.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/estimators/send_side.cc
@@ -23,7 +23,7 @@ const int kFeedbackIntervalMs = 50;
FullBweSender::FullBweSender(int kbps, BitrateObserver* observer, Clock* clock)
: bitrate_controller_(
BitrateController::CreateBitrateController(clock, observer)),
- rbe_(new RemoteBitrateEstimatorAbsSendTime(this, clock)),
+ rbe_(new RemoteBitrateEstimatorAbsSendTime(this)),
feedback_observer_(bitrate_controller_->CreateRtcpBandwidthObserver()),
clock_(clock),
send_time_history_(clock_, 10000),
@@ -93,8 +93,7 @@ void FullBweSender::OnPacketsSent(const Packets& packets) {
if (packet->GetPacketType() == Packet::kMedia) {
MediaPacket* media_packet = static_cast<MediaPacket*>(packet);
send_time_history_.AddAndRemoveOld(media_packet->header().sequenceNumber,
- media_packet->payload_size(),
- packet->paced());
+ media_packet->payload_size());
send_time_history_.OnSentPacket(media_packet->header().sequenceNumber,
media_packet->sender_timestamp_ms());
}
@@ -126,7 +125,7 @@ void SendSideBweReceiver::ReceivePacket(int64_t arrival_time_ms,
const MediaPacket& media_packet) {
packet_feedback_vector_.push_back(PacketInfo(
-1, arrival_time_ms, media_packet.sender_timestamp_ms(),
- media_packet.header().sequenceNumber, media_packet.payload_size(), true));
+ media_packet.header().sequenceNumber, media_packet.payload_size()));
// Log received packet information.
BweReceiver::ReceivePacket(arrival_time_ms, media_packet);
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet.h
index 4a361c4dc20..9aa596c1bd7 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet.h
@@ -42,8 +42,6 @@ class Packet {
virtual void set_sender_timestamp_us(int64_t sender_timestamp_us) {
sender_timestamp_us_ = sender_timestamp_us;
}
- virtual void set_paced(bool paced) { paced_ = paced; }
- virtual bool paced() const { return paced_; }
virtual int64_t creation_time_ms() const {
return (creation_time_us_ + 500) / 1000;
}
@@ -58,7 +56,6 @@ class Packet {
int64_t send_time_us_; // Time the packet left last processor touching it.
int64_t sender_timestamp_us_; // Time the packet left the Sender.
size_t payload_size_; // Size of the (non-existent, simulated) payload.
- bool paced_; // True if sent through paced sender.
};
class MediaPacket : public Packet {
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc
index 3bcbc0a071b..7ffeb5584b0 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.cc
@@ -157,13 +157,9 @@ PacedVideoSender::PacedVideoSender(PacketProcessorListener* listener,
VideoSource* source,
BandwidthEstimatorType estimator)
: VideoSender(listener, source, estimator),
- pacer_(&clock_,
- this,
- source->bits_per_second() / 1000,
- PacedSender::kDefaultPaceMultiplier * source->bits_per_second() /
- 1000,
- 0) {
+ pacer_(&clock_, this) {
modules_.push_back(&pacer_);
+ pacer_.SetEstimatedBitrate(source->bits_per_second());
}
PacedVideoSender::~PacedVideoSender() {
@@ -271,8 +267,6 @@ void PacedVideoSender::QueuePackets(Packets* batch,
}
Packets to_transfer;
to_transfer.splice(to_transfer.begin(), queue_, queue_.begin(), it);
- for (Packet* packet : to_transfer)
- packet->set_paced(true);
bwe_->OnPacketsSent(to_transfer);
batch->merge(to_transfer, DereferencingComparator<Packet>);
}
@@ -280,7 +274,8 @@ void PacedVideoSender::QueuePackets(Packets* batch,
bool PacedVideoSender::TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
- bool retransmission) {
+ bool retransmission,
+ int probe_cluster_id) {
for (Packets::iterator it = pacer_queue_.begin(); it != pacer_queue_.end();
++it) {
MediaPacket* media_packet = static_cast<MediaPacket*>(*it);
@@ -310,9 +305,7 @@ void PacedVideoSender::OnNetworkChanged(uint32_t target_bitrate_bps,
uint8_t fraction_lost,
int64_t rtt) {
VideoSender::OnNetworkChanged(target_bitrate_bps, fraction_lost, rtt);
- pacer_.UpdateBitrate(
- target_bitrate_bps / 1000,
- PacedSender::kDefaultPaceMultiplier * target_bitrate_bps / 1000, 0);
+ pacer_.SetEstimatedBitrate(target_bitrate_bps);
}
const int kNoLimit = std::numeric_limits<int>::max();
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.h
index 5ed4a3bc380..4990574bdec 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/test/packet_sender.h
@@ -100,7 +100,7 @@ class VideoSender : public PacketSender, public BitrateObserver {
RTC_DISALLOW_COPY_AND_ASSIGN(VideoSender);
};
-class PacedVideoSender : public VideoSender, public PacedSender::Callback {
+class PacedVideoSender : public VideoSender, public PacedSender::PacketSender {
public:
PacedVideoSender(PacketProcessorListener* listener,
VideoSource* source,
@@ -113,7 +113,8 @@ class PacedVideoSender : public VideoSender, public PacedSender::Callback {
bool TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
- bool retransmission) override;
+ bool retransmission,
+ int probe_cluster_id) override;
size_t TimeToSendPadding(size_t bytes) override;
// Implements BitrateObserver.
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
index f138035de53..7ae6ede363e 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
@@ -117,7 +117,7 @@ bool ParseArgsAndSetupEstimator(int argc,
switch (extension) {
case webrtc::kRtpExtensionAbsoluteSendTime: {
*estimator =
- new webrtc::RemoteBitrateEstimatorAbsSendTime(observer, clock);
+ new webrtc::RemoteBitrateEstimatorAbsSendTime(observer);
*estimator_used = "AbsoluteSendTimeRemoteBitrateEstimator";
break;
}
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc
index 08dc0e63a02..f5dbaef3717 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc
@@ -83,7 +83,7 @@ int main(int argc, char** argv) {
packet_length = packet.original_length;
}
rbe->IncomingPacket(clock.TimeInMilliseconds(),
- packet_length - header.headerLength, header, true);
+ packet_length - header.headerLength, header);
++packet_counter;
}
if (!rtp_reader->NextPacket(&packet)) {
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc
index f7e07a5dc51..a02f407d072 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h"
+#include <algorithm>
#include <limits>
#include "webrtc/base/checks.h"
@@ -26,6 +27,17 @@ const int64_t kBaseTimestampScaleFactor =
rtcp::TransportFeedback::kDeltaScaleFactor * (1 << 8);
const int64_t kBaseTimestampRangeSizeUs = kBaseTimestampScaleFactor * (1 << 24);
+class PacketInfoComparator {
+ public:
+ inline bool operator()(const PacketInfo& lhs, const PacketInfo& rhs) {
+ if (lhs.arrival_time_ms != rhs.arrival_time_ms)
+ return lhs.arrival_time_ms < rhs.arrival_time_ms;
+ if (lhs.send_time_ms != rhs.send_time_ms)
+ return lhs.send_time_ms < rhs.send_time_ms;
+ return lhs.sequence_number < rhs.sequence_number;
+ }
+};
+
TransportFeedbackAdapter::TransportFeedbackAdapter(
BitrateController* bitrate_controller,
Clock* clock)
@@ -46,10 +58,9 @@ void TransportFeedbackAdapter::SetBitrateEstimator(
}
void TransportFeedbackAdapter::AddPacket(uint16_t sequence_number,
- size_t length,
- bool was_paced) {
+ size_t length) {
rtc::CritScope cs(&lock_);
- send_time_history_.AddAndRemoveOld(sequence_number, length, was_paced);
+ send_time_history_.AddAndRemoveOld(sequence_number, length);
}
void TransportFeedbackAdapter::OnSentPacket(uint16_t sequence_number,
@@ -104,6 +115,8 @@ void TransportFeedbackAdapter::OnTransportFeedback(
}
++sequence_number;
}
+ std::sort(packet_feedback_vector.begin(), packet_feedback_vector.end(),
+ PacketInfoComparator());
RTC_DCHECK(delta_it == delta_vec.end());
if (failed_lookups > 0) {
LOG(LS_WARNING) << "Failed to lookup send time for " << failed_lookups
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h
index c97ef57cf05..867570f26b9 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter.h
@@ -38,9 +38,7 @@ class TransportFeedbackAdapter : public TransportFeedbackObserver,
}
// Implements TransportFeedbackObserver.
- void AddPacket(uint16_t sequence_number,
- size_t length,
- bool was_paced) override;
+ void AddPacket(uint16_t sequence_number, size_t length) override;
void OnSentPacket(uint16_t sequence_number, int64_t send_time_ms);
void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc
index f3be09206ec..239f2ec561d 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/transport_feedback_adapter_unittest.cc
@@ -92,15 +92,13 @@ class TransportFeedbackAdapterTest : public ::testing::Test {
EXPECT_EQ(truth[i].send_time_ms, input[i].send_time_ms);
EXPECT_EQ(truth[i].sequence_number, input[i].sequence_number);
EXPECT_EQ(truth[i].payload_size, input[i].payload_size);
- EXPECT_EQ(truth[i].was_paced, input[i].was_paced);
}
}
// Utility method, to reset arrival_time_ms before adding send time.
void OnSentPacket(PacketInfo info) {
info.arrival_time_ms = 0;
- adapter_->AddPacket(info.sequence_number, info.payload_size,
- info.was_paced);
+ adapter_->AddPacket(info.sequence_number, info.payload_size);
adapter_->OnSentPacket(info.sequence_number, info.send_time_ms);
}
@@ -114,11 +112,11 @@ class TransportFeedbackAdapterTest : public ::testing::Test {
TEST_F(TransportFeedbackAdapterTest, AdaptsFeedbackAndPopulatesSendTimes) {
std::vector<PacketInfo> packets;
- packets.push_back(PacketInfo(100, 200, 0, 1500, true));
- packets.push_back(PacketInfo(110, 210, 1, 1500, true));
- packets.push_back(PacketInfo(120, 220, 2, 1500, true));
- packets.push_back(PacketInfo(130, 230, 3, 1500, true));
- packets.push_back(PacketInfo(140, 240, 4, 1500, true));
+ packets.push_back(PacketInfo(100, 200, 0, 1500));
+ packets.push_back(PacketInfo(110, 210, 1, 1500));
+ packets.push_back(PacketInfo(120, 220, 2, 1500));
+ packets.push_back(PacketInfo(130, 230, 3, 1500));
+ packets.push_back(PacketInfo(140, 240, 4, 1500));
for (const PacketInfo& packet : packets)
OnSentPacket(packet);
@@ -145,11 +143,11 @@ TEST_F(TransportFeedbackAdapterTest, AdaptsFeedbackAndPopulatesSendTimes) {
TEST_F(TransportFeedbackAdapterTest, HandlesDroppedPackets) {
std::vector<PacketInfo> packets;
- packets.push_back(PacketInfo(100, 200, 0, 1500, true));
- packets.push_back(PacketInfo(110, 210, 1, 1500, true));
- packets.push_back(PacketInfo(120, 220, 2, 1500, true));
- packets.push_back(PacketInfo(130, 230, 3, 1500, true));
- packets.push_back(PacketInfo(140, 240, 4, 1500, true));
+ packets.push_back(PacketInfo(100, 200, 0, 1500));
+ packets.push_back(PacketInfo(110, 210, 1, 1500));
+ packets.push_back(PacketInfo(120, 220, 2, 1500));
+ packets.push_back(PacketInfo(130, 230, 3, 1500));
+ packets.push_back(PacketInfo(140, 240, 4, 1500));
const uint16_t kSendSideDropBefore = 1;
const uint16_t kReceiveSideDropAfter = 3;
@@ -190,9 +188,9 @@ TEST_F(TransportFeedbackAdapterTest, SendTimeWrapsBothWays) {
static_cast<int64_t>(1 << 8) *
static_cast<int64_t>((1 << 23) - 1) / 1000;
std::vector<PacketInfo> packets;
- packets.push_back(PacketInfo(kHighArrivalTimeMs - 64, 200, 0, 1500, true));
- packets.push_back(PacketInfo(kHighArrivalTimeMs + 64, 210, 1, 1500, true));
- packets.push_back(PacketInfo(kHighArrivalTimeMs, 220, 2, 1500, true));
+ packets.push_back(PacketInfo(kHighArrivalTimeMs - 64, 200, 0, 1500));
+ packets.push_back(PacketInfo(kHighArrivalTimeMs + 64, 210, 1, 1500));
+ packets.push_back(PacketInfo(kHighArrivalTimeMs, 220, 2, 1500));
for (const PacketInfo& packet : packets)
OnSentPacket(packet);
@@ -207,8 +205,8 @@ TEST_F(TransportFeedbackAdapterTest, SendTimeWrapsBothWays) {
packets[i].sequence_number, packets[i].arrival_time_ms * 1000));
rtc::Buffer raw_packet = feedback->Build();
- feedback = rtc::ScopedToUnique(rtcp::TransportFeedback::ParseFrom(
- raw_packet.data(), raw_packet.size()));
+ feedback = rtcp::TransportFeedback::ParseFrom(raw_packet.data(),
+ raw_packet.size());
std::vector<PacketInfo> expected_packets;
expected_packets.push_back(packets[i]);
@@ -223,6 +221,39 @@ TEST_F(TransportFeedbackAdapterTest, SendTimeWrapsBothWays) {
}
}
+TEST_F(TransportFeedbackAdapterTest, HandlesReordering) {
+ std::vector<PacketInfo> packets;
+ packets.push_back(PacketInfo(120, 200, 0, 1500));
+ packets.push_back(PacketInfo(110, 210, 1, 1500));
+ packets.push_back(PacketInfo(100, 220, 2, 1500));
+ std::vector<PacketInfo> expected_packets;
+ expected_packets.push_back(packets[2]);
+ expected_packets.push_back(packets[1]);
+ expected_packets.push_back(packets[0]);
+
+ for (const PacketInfo& packet : packets)
+ OnSentPacket(packet);
+
+ rtcp::TransportFeedback feedback;
+ feedback.WithBase(packets[0].sequence_number,
+ packets[0].arrival_time_ms * 1000);
+
+ for (const PacketInfo& packet : packets) {
+ EXPECT_TRUE(feedback.WithReceivedPacket(packet.sequence_number,
+ packet.arrival_time_ms * 1000));
+ }
+
+ feedback.Build();
+
+ EXPECT_CALL(*bitrate_estimator_, IncomingPacketFeedbackVector(_))
+ .Times(1)
+ .WillOnce(Invoke([expected_packets,
+ this](const std::vector<PacketInfo>& feedback_vector) {
+ ComparePacketVectors(expected_packets, feedback_vector);
+ }));
+ adapter_->OnTransportFeedback(feedback);
+}
+
TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) {
std::vector<PacketInfo> sent_packets;
const int64_t kSmallDeltaUs =
@@ -257,6 +288,14 @@ TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) {
info.arrival_time_ms += (kLargePositiveDeltaUs + 1000) / 1000;
++info.sequence_number;
+ // Expected to be ordered on arrival time when the feedback message has been
+ // parsed.
+ std::vector<PacketInfo> expected_packets;
+ expected_packets.push_back(sent_packets[0]);
+ expected_packets.push_back(sent_packets[3]);
+ expected_packets.push_back(sent_packets[1]);
+ expected_packets.push_back(sent_packets[2]);
+
// Packets will be added to send history.
for (const PacketInfo& packet : sent_packets)
OnSentPacket(packet);
@@ -276,17 +315,17 @@ TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) {
info.arrival_time_ms * 1000));
rtc::Buffer raw_packet = feedback->Build();
- feedback = rtc::ScopedToUnique(
- rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size()));
+ feedback =
+ rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size());
std::vector<PacketInfo> received_feedback;
EXPECT_TRUE(feedback.get() != nullptr);
EXPECT_CALL(*bitrate_estimator_, IncomingPacketFeedbackVector(_))
.Times(1)
- .WillOnce(Invoke([sent_packets, &received_feedback](
+ .WillOnce(Invoke([expected_packets, &received_feedback](
const std::vector<PacketInfo>& feedback_vector) {
- EXPECT_EQ(sent_packets.size(), feedback_vector.size());
+ EXPECT_EQ(expected_packets.size(), feedback_vector.size());
received_feedback = feedback_vector;
}));
adapter_->OnTransportFeedback(*feedback.get());
@@ -297,8 +336,8 @@ TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) {
EXPECT_TRUE(feedback->WithReceivedPacket(info.sequence_number,
info.arrival_time_ms * 1000));
raw_packet = feedback->Build();
- feedback = rtc::ScopedToUnique(
- rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size()));
+ feedback =
+ rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size());
EXPECT_TRUE(feedback.get() != nullptr);
EXPECT_CALL(*bitrate_estimator_, IncomingPacketFeedbackVector(_))
@@ -310,9 +349,9 @@ TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) {
}));
adapter_->OnTransportFeedback(*feedback.get());
- sent_packets.push_back(info);
+ expected_packets.push_back(info);
- ComparePacketVectors(sent_packets, received_feedback);
+ ComparePacketVectors(expected_packets, received_feedback);
}
} // namespace test
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn b/chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn
index d386951cb00..9d69811ef31 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/BUILD.gn
@@ -118,9 +118,15 @@ source_set("rtp_rtcp") {
"source/rtp_format_vp9.h",
"source/rtp_header_extension.cc",
"source/rtp_header_extension.h",
+ "source/rtp_header_extensions.cc",
+ "source/rtp_header_extensions.h",
"source/rtp_header_parser.cc",
+ "source/rtp_packet.cc",
+ "source/rtp_packet.h",
"source/rtp_packet_history.cc",
"source/rtp_packet_history.h",
+ "source/rtp_packet_received.h",
+ "source/rtp_packet_to_send.h",
"source/rtp_payload_registry.cc",
"source/rtp_receiver_audio.cc",
"source/rtp_receiver_audio.h",
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h b/chromium/third_party/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h
index 56c6e48691c..207e749a02a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h
@@ -11,7 +11,9 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_INCLUDE_REMOTE_NTP_TIME_ESTIMATOR_H_
#define WEBRTC_MODULES_RTP_RTCP_INCLUDE_REMOTE_NTP_TIME_ESTIMATOR_H_
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/include/rtp_to_ntp.h"
namespace webrtc {
@@ -40,7 +42,7 @@ class RemoteNtpTimeEstimator {
private:
Clock* clock_;
- rtc::scoped_ptr<TimestampExtrapolator> ts_extrapolator_;
+ std::unique_ptr<TimestampExtrapolator> ts_extrapolator_;
RtcpList rtcp_list_;
int64_t last_timing_log_ms_;
RTC_DISALLOW_COPY_AND_ASSIGN(RemoteNtpTimeEstimator);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h
index fae864107f0..a199755aafe 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h
@@ -12,8 +12,9 @@
#define WEBRTC_MODULES_RTP_RTCP_INCLUDE_RTP_PAYLOAD_REGISTRY_H_
#include <map>
+#include <memory>
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
@@ -85,15 +86,6 @@ class RTPPayloadRegistry {
bool IsRtx(const RTPHeader& header) const;
- // DEPRECATED. Use RestoreOriginalPacket below that takes a uint8_t*
- // restored_packet, instead of a uint8_t**.
- // TODO(noahric): Remove this when all callers have been updated.
- bool RestoreOriginalPacket(uint8_t** restored_packet,
- const uint8_t* packet,
- size_t* packet_length,
- uint32_t original_ssrc,
- const RTPHeader& header) const;
-
bool RestoreOriginalPacket(uint8_t* restored_packet,
const uint8_t* packet,
size_t* packet_length,
@@ -110,19 +102,10 @@ class RTPPayloadRegistry {
int GetPayloadTypeFrequency(uint8_t payload_type) const;
- // DEPRECATED. Use PayloadTypeToPayload below that returns const Payload*
- // instead of taking output parameter.
- // TODO(danilchap): Remove this when all callers have been updated.
- bool PayloadTypeToPayload(const uint8_t payload_type,
- RtpUtility::Payload*& payload) const { // NOLINT
- payload =
- const_cast<RtpUtility::Payload*>(PayloadTypeToPayload(payload_type));
- return payload != nullptr;
- }
const RtpUtility::Payload* PayloadTypeToPayload(uint8_t payload_type) const;
void ResetLastReceivedPayloadTypes() {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
last_received_payload_type_ = -1;
last_received_media_payload_type_ = -1;
}
@@ -136,34 +119,34 @@ class RTPPayloadRegistry {
bool ReportMediaPayloadType(uint8_t media_payload_type);
int8_t red_payload_type() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return red_payload_type_;
}
int8_t ulpfec_payload_type() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return ulpfec_payload_type_;
}
int8_t last_received_payload_type() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return last_received_payload_type_;
}
void set_last_received_payload_type(int8_t last_received_payload_type) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
last_received_payload_type_ = last_received_payload_type;
}
int8_t last_received_media_payload_type() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return last_received_media_payload_type_;
}
bool use_rtx_payload_mapping_on_restore() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return use_rtx_payload_mapping_on_restore_;
}
void set_use_rtx_payload_mapping_on_restore(bool val) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
use_rtx_payload_mapping_on_restore_ = val;
}
@@ -178,9 +161,9 @@ class RTPPayloadRegistry {
bool IsRtxInternal(const RTPHeader& header) const;
- rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ rtc::CriticalSection crit_sect_;
RtpUtility::PayloadTypeMap payload_type_map_;
- rtc::scoped_ptr<RTPPayloadStrategy> rtp_payload_strategy_;
+ std::unique_ptr<RTPPayloadStrategy> rtp_payload_strategy_;
int8_t red_payload_type_;
int8_t ulpfec_payload_type_;
int8_t incoming_payload_type_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_receiver.h b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_receiver.h
index f393e41eabd..9db1c63da78 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_receiver.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_receiver.h
@@ -75,12 +75,6 @@ class RtpReceiver {
PayloadUnion payload_specific,
bool in_order) = 0;
- // Returns the currently configured NACK method.
- virtual NACKMethod NACK() const = 0;
-
- // Turn negative acknowledgement (NACK) requests on/off.
- virtual void SetNACKStatus(const NACKMethod method) = 0;
-
// Gets the last received timestamp. Returns true if a packet has been
// received, false otherwise.
virtual bool Timestamp(uint32_t* timestamp) const = 0;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h
index d01465b9f8f..66589888bda 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp.h
@@ -16,6 +16,7 @@
#include <utility>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/include/module.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
@@ -77,7 +78,7 @@ class RtpRtcp : public Module {
FrameCountObserver* send_frame_count_observer;
SendSideDelayObserver* send_side_delay_observer;
RtcEventLog* event_log;
-
+ SendPacketObserver* send_packet_observer;
RTC_DISALLOW_COPY_AND_ASSIGN(Configuration);
};
@@ -210,10 +211,10 @@ class RtpRtcp : public Module {
*/
virtual void SetSequenceNumber(uint16_t seq) = 0;
- // Returns true if the ssrc matched this module, false otherwise.
- virtual bool SetRtpStateForSsrc(uint32_t ssrc,
- const RtpState& rtp_state) = 0;
- virtual bool GetRtpStateForSsrc(uint32_t ssrc, RtpState* rtp_state) = 0;
+ virtual void SetRtpState(const RtpState& rtp_state) = 0;
+ virtual void SetRtxState(const RtpState& rtp_state) = 0;
+ virtual RtpState GetRtpState() const = 0;
+ virtual RtpState GetRtxState() const = 0;
/*
* Get SSRC
@@ -539,6 +540,9 @@ class RtpRtcp : public Module {
/*
* Send NACK for the packets specified.
+ *
+ * Note: This assumes the caller keeps track of timing and doesn't rely on
+ * the RTP module to do this.
*/
virtual void SendNack(const std::vector<uint16_t>& sequence_numbers) = 0;
@@ -594,12 +598,6 @@ class RtpRtcp : public Module {
*
* return -1 on failure else 0
*/
- // DEPRECATED. Use SendREDPayloadType below that takes output parameter
- // by pointer instead of by reference.
- // TODO(danilchap): Remove this when all callers have been updated.
- int32_t SendREDPayloadType(int8_t& payloadType) const { // NOLINT
- return SendREDPayloadType(&payloadType);
- }
virtual int32_t SendREDPayloadType(int8_t* payload_type) const = 0;
/*
* Store the audio level in dBov for header-extension-for-audio-level-
@@ -632,14 +630,6 @@ class RtpRtcp : public Module {
/*
* Get generic FEC setting
*/
- // DEPRECATED. Use GenericFECStatus below that takes output parameters
- // by pointers instead of by references.
- // TODO(danilchap): Remove this when all callers have been updated.
- void GenericFECStatus(bool& enable, // NOLINT
- uint8_t& payloadTypeRED, // NOLINT
- uint8_t& payloadTypeFEC) { // NOLINT
- GenericFECStatus(&enable, &payloadTypeRED, &payloadTypeFEC);
- }
virtual void GenericFECStatus(bool* enable,
uint8_t* payload_type_red,
uint8_t* payload_type_fec) = 0;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
index 9acef79f383..8dc8b0651d2 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h
@@ -99,8 +99,6 @@ enum KeyFrameRequestMethod { kKeyFrameReqPliRtcp, kKeyFrameReqFirRtcp };
enum RtpRtcpPacketType { kPacketRtp = 0, kPacketKeepAlive = 1 };
-enum NACKMethod { kNackOff = 0, kNackRtcp = 2 };
-
enum RetransmissionMode : uint8_t {
kRetransmitOff = 0x0,
kRetransmitFECPackets = 0x1,
@@ -247,32 +245,28 @@ class RtcpBandwidthObserver {
struct PacketInfo {
PacketInfo(int64_t arrival_time_ms, uint16_t sequence_number)
- : PacketInfo(-1, arrival_time_ms, -1, sequence_number, 0, false) {}
+ : PacketInfo(-1, arrival_time_ms, -1, sequence_number, 0) {}
PacketInfo(int64_t arrival_time_ms,
int64_t send_time_ms,
uint16_t sequence_number,
- size_t payload_size,
- bool was_paced)
+ size_t payload_size)
: PacketInfo(-1,
arrival_time_ms,
send_time_ms,
sequence_number,
- payload_size,
- was_paced) {}
+ payload_size) {}
PacketInfo(int64_t creation_time_ms,
int64_t arrival_time_ms,
int64_t send_time_ms,
uint16_t sequence_number,
- size_t payload_size,
- bool was_paced)
+ size_t payload_size)
: creation_time_ms(creation_time_ms),
arrival_time_ms(arrival_time_ms),
send_time_ms(send_time_ms),
sequence_number(sequence_number),
- payload_size(payload_size),
- was_paced(was_paced) {}
+ payload_size(payload_size) {}
// Time corresponding to when this object was created.
int64_t creation_time_ms;
@@ -287,8 +281,6 @@ struct PacketInfo {
uint16_t sequence_number;
// Size of the packet excluding RTP headers.
size_t payload_size;
- // True if the packet was paced out by the pacer.
- bool was_paced;
};
class TransportFeedbackObserver {
@@ -298,9 +290,7 @@ class TransportFeedbackObserver {
// Note: Transport-wide sequence number as sequence number. Arrival time
// must be set to 0.
- virtual void AddPacket(uint16_t sequence_number,
- size_t length,
- bool was_paced) = 0;
+ virtual void AddPacket(uint16_t sequence_number, size_t length) = 0;
virtual void OnTransportFeedback(const rtcp::TransportFeedback& feedback) = 0;
};
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
index 9991aa21108..bf5e9369215 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
@@ -85,12 +85,12 @@ class MockRtpRtcp : public RtpRtcp {
MOCK_CONST_METHOD0(StartTimestamp,
uint32_t());
MOCK_METHOD1(SetStartTimestamp, void(const uint32_t timestamp));
- MOCK_CONST_METHOD0(SequenceNumber,
- uint16_t());
+ MOCK_CONST_METHOD0(SequenceNumber, uint16_t());
MOCK_METHOD1(SetSequenceNumber, void(const uint16_t seq));
- MOCK_METHOD2(SetRtpStateForSsrc,
- bool(uint32_t ssrc, const RtpState& rtp_state));
- MOCK_METHOD2(GetRtpStateForSsrc, bool(uint32_t ssrc, RtpState* rtp_state));
+ MOCK_METHOD1(SetRtpState, void(const RtpState& rtp_state));
+ MOCK_METHOD1(SetRtxState, void(const RtpState& rtp_state));
+ MOCK_CONST_METHOD0(GetRtpState, RtpState());
+ MOCK_CONST_METHOD0(GetRtxState, RtpState());
MOCK_CONST_METHOD0(SSRC,
uint32_t());
MOCK_METHOD1(SetSSRC,
@@ -202,10 +202,6 @@ class MockRtpRtcp : public RtpRtcp {
MOCK_METHOD1(SetTMMBRStatus, void(const bool enable));
MOCK_METHOD1(OnBandwidthEstimateUpdate,
void(uint16_t bandWidthKbit));
- MOCK_CONST_METHOD0(NACK,
- NACKMethod());
- MOCK_METHOD2(SetNACKStatus,
- int32_t(const NACKMethod method, int oldestSequenceNumberToNack));
MOCK_CONST_METHOD0(SelectiveRetransmissions,
int());
MOCK_METHOD1(SetSelectiveRetransmissions,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi b/chromium/third_party/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi
index 23a64b752f7..3f1e935b2a6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/rtp_rtcp.gypi
@@ -35,10 +35,6 @@
'source/receive_statistics_impl.cc',
'source/receive_statistics_impl.h',
'source/remote_ntp_time_estimator.cc',
- 'source/rtp_header_parser.cc',
- 'source/rtp_rtcp_config.h',
- 'source/rtp_rtcp_impl.cc',
- 'source/rtp_rtcp_impl.h',
'source/rtcp_packet.cc',
'source/rtcp_packet.h',
'source/rtcp_packet/app.cc',
@@ -103,8 +99,18 @@
'source/rtcp_utility.h',
'source/rtp_header_extension.cc',
'source/rtp_header_extension.h',
+ 'source/rtp_header_extensions.cc',
+ 'source/rtp_header_extensions.h',
+ 'source/rtp_header_parser.cc',
+ 'source/rtp_packet.cc',
+ 'source/rtp_packet.h',
+ 'source/rtp_packet_received.h',
+ 'source/rtp_packet_to_send.h',
'source/rtp_receiver_impl.cc',
'source/rtp_receiver_impl.h',
+ 'source/rtp_rtcp_config.h',
+ 'source/rtp_rtcp_impl.cc',
+ 'source/rtp_rtcp_impl.h',
'source/rtp_sender.cc',
'source/rtp_sender.h',
'source/rtp_utility.cc',
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.cc
index 4e9fc72c1f8..49a23592bfe 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.cc
@@ -11,13 +11,11 @@
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
Bitrate::Bitrate(Clock* clock, Observer* observer)
: clock_(clock),
- crit_(CriticalSectionWrapper::CreateCriticalSection()),
packet_rate_(0),
bitrate_(0),
bitrate_next_idx_(0),
@@ -33,23 +31,23 @@ Bitrate::Bitrate(Clock* clock, Observer* observer)
Bitrate::~Bitrate() {}
void Bitrate::Update(const size_t bytes) {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
bytes_count_ += bytes;
packet_count_++;
}
uint32_t Bitrate::PacketRate() const {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
return packet_rate_;
}
uint32_t Bitrate::BitrateLast() const {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
return bitrate_;
}
uint32_t Bitrate::BitrateNow() const {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
int64_t now = clock_->TimeInMilliseconds();
int64_t diff_ms = now - time_last_rate_update_;
@@ -67,7 +65,7 @@ uint32_t Bitrate::BitrateNow() const {
}
int64_t Bitrate::time_last_rate_update() const {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
return time_last_rate_update_;
}
@@ -75,7 +73,7 @@ int64_t Bitrate::time_last_rate_update() const {
void Bitrate::Process() {
BitrateStatistics stats;
{
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
int64_t now = clock_->CurrentNtpInMilliseconds();
int64_t diff_ms = now - time_last_rate_update_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.h
index 393d05d3e3e..7aaaead42d2 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/bitrate.h
@@ -15,7 +15,7 @@
#include <list>
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "webrtc/typedefs.h"
@@ -23,7 +23,6 @@
namespace webrtc {
class Clock;
-class CriticalSectionWrapper;
class Bitrate {
public:
@@ -60,7 +59,7 @@ class Bitrate {
Clock* clock_;
private:
- rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+ rtc::CriticalSection crit_;
uint32_t packet_rate_;
uint32_t bitrate_;
uint8_t bitrate_next_idx_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.cc
index ab21b8704a5..81e8b5926e2 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.cc
@@ -13,20 +13,16 @@
#include <string.h>
namespace webrtc {
-DTMFqueue::DTMFqueue()
- : dtmf_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
- next_empty_index_(0) {
+DTMFqueue::DTMFqueue() : next_empty_index_(0) {
memset(dtmf_key_, 0, sizeof(dtmf_key_));
memset(dtmf_length, 0, sizeof(dtmf_length));
memset(dtmf_level_, 0, sizeof(dtmf_level_));
}
-DTMFqueue::~DTMFqueue() {
- delete dtmf_critsect_;
-}
+DTMFqueue::~DTMFqueue() {}
int32_t DTMFqueue::AddDTMF(uint8_t key, uint16_t len, uint8_t level) {
- CriticalSectionScoped lock(dtmf_critsect_);
+ rtc::CritScope lock(&dtmf_critsect_);
if (next_empty_index_ >= DTMF_OUTBAND_MAX) {
return -1;
@@ -40,7 +36,7 @@ int32_t DTMFqueue::AddDTMF(uint8_t key, uint16_t len, uint8_t level) {
}
int8_t DTMFqueue::NextDTMF(uint8_t* dtmf_key, uint16_t* len, uint8_t* level) {
- CriticalSectionScoped lock(dtmf_critsect_);
+ rtc::CritScope lock(&dtmf_critsect_);
if (next_empty_index_ == 0)
return -1;
@@ -60,12 +56,12 @@ int8_t DTMFqueue::NextDTMF(uint8_t* dtmf_key, uint16_t* len, uint8_t* level) {
}
bool DTMFqueue::PendingDTMF() {
- CriticalSectionScoped lock(dtmf_critsect_);
+ rtc::CritScope lock(&dtmf_critsect_);
return next_empty_index_ > 0;
}
void DTMFqueue::ResetDTMF() {
- CriticalSectionScoped lock(dtmf_critsect_);
+ rtc::CritScope lock(&dtmf_critsect_);
next_empty_index_ = 0;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.h
index d1b3f5667cf..c0e616f9827 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/dtmf_queue.h
@@ -11,8 +11,8 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_DTMF_QUEUE_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_DTMF_QUEUE_H_
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -27,7 +27,7 @@ class DTMFqueue {
void ResetDTMF();
private:
- CriticalSectionWrapper* dtmf_critsect_;
+ rtc::CriticalSection dtmf_critsect_;
uint8_t next_empty_index_;
uint8_t dtmf_key_[DTMF_OUTBAND_MAX];
uint16_t dtmf_length[DTMF_OUTBAND_MAX];
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
index 2109574e396..83bd2849df4 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
@@ -12,11 +12,11 @@
#include <assert.h>
+#include <memory>
+
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
// RFC 5109
namespace webrtc {
@@ -26,8 +26,7 @@ FecReceiver* FecReceiver::Create(RtpData* callback) {
}
FecReceiverImpl::FecReceiverImpl(RtpData* callback)
- : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- recovered_packet_callback_(callback),
+ : recovered_packet_callback_(callback),
fec_(new ForwardErrorCorrection()) {}
FecReceiverImpl::~FecReceiverImpl() {
@@ -42,7 +41,7 @@ FecReceiverImpl::~FecReceiverImpl() {
}
FecPacketCounter FecReceiverImpl::GetPacketCounter() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return packet_counter_;
}
@@ -77,7 +76,7 @@ FecPacketCounter FecReceiverImpl::GetPacketCounter() const {
int32_t FecReceiverImpl::AddReceivedRedPacket(
const RTPHeader& header, const uint8_t* incoming_rtp_packet,
size_t packet_length, uint8_t ulpfec_payload_type) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
uint8_t REDHeaderLength = 1;
size_t payload_data_length = packet_length - header.headerLength;
@@ -89,7 +88,7 @@ int32_t FecReceiverImpl::AddReceivedRedPacket(
// Add to list without RED header, aka a virtual RTP packet
// we remove the RED header
- rtc::scoped_ptr<ForwardErrorCorrection::ReceivedPacket> received_packet(
+ std::unique_ptr<ForwardErrorCorrection::ReceivedPacket> received_packet(
new ForwardErrorCorrection::ReceivedPacket);
received_packet->pkt = new ForwardErrorCorrection::Packet;
@@ -137,7 +136,7 @@ int32_t FecReceiverImpl::AddReceivedRedPacket(
}
++packet_counter_.num_packets;
- rtc::scoped_ptr<ForwardErrorCorrection::ReceivedPacket>
+ std::unique_ptr<ForwardErrorCorrection::ReceivedPacket>
second_received_packet;
if (blockLength > 0) {
// handle block length, split into 2 packets
@@ -219,21 +218,21 @@ int32_t FecReceiverImpl::AddReceivedRedPacket(
}
int32_t FecReceiverImpl::ProcessReceivedFec() {
- crit_sect_->Enter();
+ crit_sect_.Enter();
if (!received_packet_list_.empty()) {
// Send received media packet to VCM.
if (!received_packet_list_.front()->is_fec) {
ForwardErrorCorrection::Packet* packet =
received_packet_list_.front()->pkt;
- crit_sect_->Leave();
+ crit_sect_.Leave();
if (!recovered_packet_callback_->OnRecoveredPacket(packet->data,
packet->length)) {
return -1;
}
- crit_sect_->Enter();
+ crit_sect_.Enter();
}
if (fec_->DecodeFEC(&received_packet_list_, &recovered_packet_list_) != 0) {
- crit_sect_->Leave();
+ crit_sect_.Leave();
return -1;
}
assert(received_packet_list_.empty());
@@ -246,15 +245,15 @@ int32_t FecReceiverImpl::ProcessReceivedFec() {
continue;
ForwardErrorCorrection::Packet* packet = (*it)->pkt;
++packet_counter_.num_recovered_packets;
- crit_sect_->Leave();
+ crit_sect_.Leave();
if (!recovered_packet_callback_->OnRecoveredPacket(packet->data,
packet->length)) {
return -1;
}
- crit_sect_->Enter();
+ crit_sect_.Enter();
(*it)->returned = true;
}
- crit_sect_->Leave();
+ crit_sect_.Leave();
return 0;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
index 6a63813f408..0ebca9bce2d 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
@@ -13,7 +13,7 @@
// This header is included to get the nested declaration of Packet structure.
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/include/fec_receiver.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
@@ -21,8 +21,6 @@
namespace webrtc {
-class CriticalSectionWrapper;
-
class FecReceiverImpl : public FecReceiver {
public:
explicit FecReceiverImpl(RtpData* callback);
@@ -38,7 +36,7 @@ class FecReceiverImpl : public FecReceiver {
FecPacketCounter GetPacketCounter() const override;
private:
- rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ rtc::CriticalSection crit_sect_;
RtpData* recovered_packet_callback_;
ForwardErrorCorrection* fec_;
// TODO(holmer): In the current version received_packet_list_ is never more
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
index ee8f408720e..cd60d9b094c 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
@@ -11,10 +11,10 @@
#include <string.h>
#include <list>
+#include <memory>
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/fec_receiver.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
@@ -92,9 +92,9 @@ class ReceiverFecTest : public ::testing::Test {
uint8_t ulpfec_payload_type);
MockRtpData rtp_data_callback_;
- rtc::scoped_ptr<ForwardErrorCorrection> fec_;
- rtc::scoped_ptr<FecReceiver> receiver_fec_;
- rtc::scoped_ptr<FrameGenerator> generator_;
+ std::unique_ptr<ForwardErrorCorrection> fec_;
+ std::unique_ptr<FecReceiver> receiver_fec_;
+ std::unique_ptr<FrameGenerator> generator_;
};
void DeletePackets(std::list<Packet*>* packets) {
@@ -415,12 +415,12 @@ void ReceiverFecTest::SurvivesMaliciousPacket(const uint8_t* data,
size_t length,
uint8_t ulpfec_payload_type) {
webrtc::RTPHeader header;
- rtc::scoped_ptr<webrtc::RtpHeaderParser> parser(
+ std::unique_ptr<webrtc::RtpHeaderParser> parser(
webrtc::RtpHeaderParser::Create());
ASSERT_TRUE(parser->Parse(data, length, &header));
webrtc::NullRtpData null_callback;
- rtc::scoped_ptr<webrtc::FecReceiver> receiver_fec(
+ std::unique_ptr<webrtc::FecReceiver> receiver_fec(
webrtc::FecReceiver::Create(&null_callback));
receiver_fec->AddReceivedRedPacket(header, data, length, ulpfec_payload_type);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
index b85d813790d..623c658a174 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
@@ -15,6 +15,7 @@
#include <algorithm>
#include <iterator>
+#include <memory>
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
@@ -163,7 +164,7 @@ int32_t ForwardErrorCorrection::GenerateFEC(const PacketList& media_packet_list,
// -- Generate packet masks --
// Always allocate space for a large mask.
- rtc::scoped_ptr<uint8_t[]> packet_mask(
+ std::unique_ptr<uint8_t[]> packet_mask(
new uint8_t[num_fec_packets * kMaskSizeLBitSet]);
memset(packet_mask.get(), 0, num_fec_packets * num_mask_bytes);
internal::GeneratePacketMasks(num_media_packets, num_fec_packets,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.cc
index 6d8b4074598..e23a3fa629b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.cc
@@ -9,13 +9,13 @@
*/
#include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
+#include <memory>
#include <vector>
#include "webrtc/base/bitbuffer.h"
#include "webrtc/base/bytebuffer.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
namespace webrtc {
namespace {
@@ -103,7 +103,7 @@ bool H264BitstreamParser::ParseSpsNalu(const uint8_t* sps, size_t length) {
sps_parsed_ = false;
// Parse out the SPS RBSP. It should be small, so it's ok that we create a
// copy. We'll eventually write this back.
- rtc::scoped_ptr<rtc::ByteBufferWriter> sps_rbsp(
+ std::unique_ptr<rtc::ByteBufferWriter> sps_rbsp(
ParseRbsp(sps + kNaluHeaderAndTypeSize, length - kNaluHeaderAndTypeSize));
rtc::BitBuffer sps_parser(reinterpret_cast<const uint8_t*>(sps_rbsp->Data()),
sps_rbsp->Length());
@@ -209,7 +209,7 @@ bool H264BitstreamParser::ParsePpsNalu(const uint8_t* pps, size_t length) {
// We're starting a new stream, so reset picture type rewriting values.
pps_ = PpsState();
pps_parsed_ = false;
- rtc::scoped_ptr<rtc::ByteBufferWriter> buffer(
+ std::unique_ptr<rtc::ByteBufferWriter> buffer(
ParseRbsp(pps + kNaluHeaderAndTypeSize, length - kNaluHeaderAndTypeSize));
rtc::BitBuffer parser(reinterpret_cast<const uint8_t*>(buffer->Data()),
buffer->Length());
@@ -317,7 +317,7 @@ bool H264BitstreamParser::ParseNonParameterSetNalu(const uint8_t* source,
RTC_CHECK(sps_parsed_);
RTC_CHECK(pps_parsed_);
last_slice_qp_delta_parsed_ = false;
- rtc::scoped_ptr<rtc::ByteBufferWriter> slice_rbsp(ParseRbsp(
+ std::unique_ptr<rtc::ByteBufferWriter> slice_rbsp(ParseRbsp(
source + kNaluHeaderAndTypeSize, source_length - kNaluHeaderAndTypeSize));
rtc::BitBuffer slice_reader(
reinterpret_cast<const uint8_t*>(slice_rbsp->Data()),
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
index e19c31bfece..9a7b9d3f7ec 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
@@ -11,10 +11,10 @@
#include <algorithm>
#include <iterator>
#include <list>
+#include <memory>
#include <set>
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
@@ -105,7 +105,7 @@ class RtxLoopBackTransport : public webrtc::Transport {
size_t packet_length = len;
uint8_t restored_packet[1500];
RTPHeader header;
- rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ std::unique_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
if (!parser->Parse(ptr, len, &header)) {
return false;
}
@@ -191,7 +191,6 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
rtp_rtcp_module_->SetSSRC(kTestSsrc);
rtp_rtcp_module_->SetRTCPStatus(RtcpMode::kCompound);
- rtp_receiver_->SetNACKStatus(kNackRtcp);
rtp_rtcp_module_->SetStorePacketsStatus(true, 600);
EXPECT_EQ(0, rtp_rtcp_module_->SetSendingStatus(true));
rtp_rtcp_module_->SetSequenceNumber(kTestSequenceNumber);
@@ -279,11 +278,11 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
void TearDown() override { delete rtp_rtcp_module_; }
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics_;
RTPPayloadRegistry rtp_payload_registry_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver_;
+ std::unique_ptr<RtpReceiver> rtp_receiver_;
RtpRtcp* rtp_rtcp_module_;
- rtc::scoped_ptr<TestRtpFeedback> rtp_feedback_;
+ std::unique_ptr<TestRtpFeedback> rtp_feedback_;
RtxLoopBackTransport transport_;
VerifyingRtxReceiver receiver_;
uint8_t payload_data[65000];
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec.cc
index 69a28ed4dbc..c7ea19db586 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec.cc
@@ -157,12 +157,11 @@ int ProducerFec::AddRtpPacketAndGenerateFec(const uint8_t* data_buffer,
(ExcessOverheadBelowMax() && MinimumMediaPacketsReached()))) {
assert(num_first_partition_ <=
static_cast<int>(ForwardErrorCorrection::kMaxMediaPackets));
- int ret = fec_->GenerateFEC(media_packets_fec_,
- params_.fec_rate,
- num_first_partition_,
- params_.use_uep_protection,
- params_.fec_mask_type,
- &fec_packets_);
+ // TODO(pbos): Consider whether unequal protection should be enabled or not,
+ // it is currently always disabled.
+ int ret = fec_->GenerateFEC(media_packets_fec_, params_.fec_rate,
+ num_first_partition_, false,
+ params_.fec_mask_type, &fec_packets_);
if (fec_packets_.empty()) {
num_frames_ = 0;
DeletePackets();
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
index fad0f502f59..ec5228afd5b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
@@ -9,6 +9,7 @@
*/
#include <list>
+#include <memory>
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
@@ -78,7 +79,7 @@ TEST_F(ProducerFecTest, NoEmptyFecWithSeqNumGaps) {
protected_packets.push_back({12, 3, 54, 0});
protected_packets.push_back({21, 0, 55, 0});
protected_packets.push_back({13, 3, 57, 1});
- FecProtectionParams params = {117, 0, 3, kFecMaskBursty};
+ FecProtectionParams params = {117, 3, kFecMaskBursty};
producer_->SetFecParameters(&params, 0);
uint8_t packet[28] = {0};
for (Packet p : protected_packets) {
@@ -111,7 +112,7 @@ TEST_F(ProducerFecTest, OneFrameFec) {
// of packets is within |kMaxExcessOverhead|, and (2) the total number of
// media packets for 1 frame is at least |minimum_media_packets_fec_|.
const int kNumPackets = 4;
- FecProtectionParams params = {15, false, 3};
+ FecProtectionParams params = {15, 3, kFecMaskRandom};
std::list<test::RawRtpPacket*> rtp_packets;
generator_->NewFrame(kNumPackets);
producer_->SetFecParameters(&params, 0); // Expecting one FEC packet.
@@ -152,7 +153,7 @@ TEST_F(ProducerFecTest, TwoFrameFec) {
const int kNumPackets = 2;
const int kNumFrames = 2;
- FecProtectionParams params = {15, 0, 3};
+ FecProtectionParams params = {15, 3, kFecMaskRandom};
std::list<test::RawRtpPacket*> rtp_packets;
producer_->SetFecParameters(&params, 0); // Expecting one FEC packet.
uint32_t last_timestamp = 0;
@@ -188,7 +189,7 @@ TEST_F(ProducerFecTest, TwoFrameFec) {
TEST_F(ProducerFecTest, BuildRedPacket) {
generator_->NewFrame(1);
test::RawRtpPacket* packet = generator_->NextPacket(0, 10);
- rtc::scoped_ptr<RedPacket> red_packet(producer_->BuildRedPacket(
+ std::unique_ptr<RedPacket> red_packet(producer_->BuildRedPacket(
packet->data, packet->length - kRtpHeaderSize, kRtpHeaderSize,
kRedPayloadType));
EXPECT_EQ(packet->length + 1, red_packet->length());
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
index 022fc9610f4..932be1bb9e1 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
@@ -12,10 +12,10 @@
#include <math.h>
-#include "webrtc/base/scoped_ptr.h"
+#include <cstdlib>
+
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/time_util.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -29,7 +29,6 @@ StreamStatisticianImpl::StreamStatisticianImpl(
RtcpStatisticsCallback* rtcp_callback,
StreamDataCountersCallback* rtp_callback)
: clock_(clock),
- stream_lock_(CriticalSectionWrapper::CreateCriticalSection()),
incoming_bitrate_(clock, NULL),
ssrc_(0),
max_reordering_threshold_(kDefaultMaxReorderingThreshold),
@@ -59,7 +58,7 @@ void StreamStatisticianImpl::IncomingPacket(const RTPHeader& header,
void StreamStatisticianImpl::UpdateCounters(const RTPHeader& header,
size_t packet_length,
bool retransmitted) {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
bool in_order = InOrderPacketInternal(header.sequenceNumber);
ssrc_ = header.ssrc;
incoming_bitrate_.Update(packet_length);
@@ -116,7 +115,7 @@ void StreamStatisticianImpl::UpdateJitter(const RTPHeader& header,
int32_t time_diff_samples = (receive_time_rtp - last_receive_time_rtp) -
(header.timestamp - last_received_timestamp_);
- time_diff_samples = abs(time_diff_samples);
+ time_diff_samples = std::abs(time_diff_samples);
// lib_jingle sometimes deliver crazy jumps in TS for the same stream.
// If this happens, don't update jitter value. Use 5 secs video frequency
@@ -136,7 +135,7 @@ void StreamStatisticianImpl::UpdateJitter(const RTPHeader& header,
(last_received_timestamp_ +
last_received_transmission_time_offset_));
- time_diff_samples_ext = abs(time_diff_samples_ext);
+ time_diff_samples_ext = std::abs(time_diff_samples_ext);
if (time_diff_samples_ext < 450000) {
int32_t jitter_diffQ4TransmissionTimeOffset =
@@ -150,7 +149,7 @@ void StreamStatisticianImpl::NotifyRtpCallback() {
StreamDataCounters data;
uint32_t ssrc;
{
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
data = receive_counters_;
ssrc = ssrc_;
}
@@ -161,7 +160,7 @@ void StreamStatisticianImpl::NotifyRtcpCallback() {
RtcpStatistics data;
uint32_t ssrc;
{
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
data = last_reported_statistics_;
ssrc = ssrc_;
}
@@ -171,7 +170,7 @@ void StreamStatisticianImpl::NotifyRtcpCallback() {
void StreamStatisticianImpl::FecPacketReceived(const RTPHeader& header,
size_t packet_length) {
{
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
receive_counters_.fec.AddPacket(packet_length, header);
}
NotifyRtpCallback();
@@ -179,14 +178,14 @@ void StreamStatisticianImpl::FecPacketReceived(const RTPHeader& header,
void StreamStatisticianImpl::SetMaxReorderingThreshold(
int max_reordering_threshold) {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
max_reordering_threshold_ = max_reordering_threshold;
}
bool StreamStatisticianImpl::GetStatistics(RtcpStatistics* statistics,
bool reset) {
{
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
if (received_seq_first_ == 0 &&
receive_counters_.transmitted.payload_bytes == 0) {
// We have not received anything.
@@ -282,7 +281,7 @@ RtcpStatistics StreamStatisticianImpl::CalculateRtcpStatistics() {
void StreamStatisticianImpl::GetDataCounters(
size_t* bytes_received, uint32_t* packets_received) const {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
if (bytes_received) {
*bytes_received = receive_counters_.transmitted.payload_bytes +
receive_counters_.transmitted.header_bytes +
@@ -295,30 +294,30 @@ void StreamStatisticianImpl::GetDataCounters(
void StreamStatisticianImpl::GetReceiveStreamDataCounters(
StreamDataCounters* data_counters) const {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
*data_counters = receive_counters_;
}
uint32_t StreamStatisticianImpl::BitrateReceived() const {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
return incoming_bitrate_.BitrateNow();
}
void StreamStatisticianImpl::ProcessBitrate() {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
incoming_bitrate_.Process();
}
void StreamStatisticianImpl::LastReceiveTimeNtp(uint32_t* secs,
uint32_t* frac) const {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
*secs = last_receive_time_ntp_.seconds();
*frac = last_receive_time_ntp_.fractions();
}
bool StreamStatisticianImpl::IsRetransmitOfOldPacket(
const RTPHeader& header, int64_t min_rtt) const {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
if (InOrderPacketInternal(header.sequenceNumber)) {
return false;
}
@@ -352,7 +351,7 @@ bool StreamStatisticianImpl::IsRetransmitOfOldPacket(
}
bool StreamStatisticianImpl::IsPacketInOrder(uint16_t sequence_number) const {
- CriticalSectionScoped cs(stream_lock_.get());
+ rtc::CritScope cs(&stream_lock_);
return InOrderPacketInternal(sequence_number);
}
@@ -377,7 +376,6 @@ ReceiveStatistics* ReceiveStatistics::Create(Clock* clock) {
ReceiveStatisticsImpl::ReceiveStatisticsImpl(Clock* clock)
: clock_(clock),
- receive_statistics_lock_(CriticalSectionWrapper::CreateCriticalSection()),
last_rate_update_ms_(0),
rtcp_stats_callback_(NULL),
rtp_stats_callback_(NULL) {}
@@ -394,7 +392,7 @@ void ReceiveStatisticsImpl::IncomingPacket(const RTPHeader& header,
bool retransmitted) {
StreamStatisticianImpl* impl;
{
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
StatisticianImplMap::iterator it = statisticians_.find(header.ssrc);
if (it != statisticians_.end()) {
impl = it->second;
@@ -412,7 +410,7 @@ void ReceiveStatisticsImpl::IncomingPacket(const RTPHeader& header,
void ReceiveStatisticsImpl::FecPacketReceived(const RTPHeader& header,
size_t packet_length) {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
StatisticianImplMap::iterator it = statisticians_.find(header.ssrc);
// Ignore FEC if it is the first packet.
if (it != statisticians_.end()) {
@@ -421,7 +419,7 @@ void ReceiveStatisticsImpl::FecPacketReceived(const RTPHeader& header,
}
StatisticianMap ReceiveStatisticsImpl::GetActiveStatisticians() const {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
StatisticianMap active_statisticians;
for (StatisticianImplMap::const_iterator it = statisticians_.begin();
it != statisticians_.end(); ++it) {
@@ -438,7 +436,7 @@ StatisticianMap ReceiveStatisticsImpl::GetActiveStatisticians() const {
StreamStatistician* ReceiveStatisticsImpl::GetStatistician(
uint32_t ssrc) const {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
StatisticianImplMap::const_iterator it = statisticians_.find(ssrc);
if (it == statisticians_.end())
return NULL;
@@ -447,7 +445,7 @@ StreamStatistician* ReceiveStatisticsImpl::GetStatistician(
void ReceiveStatisticsImpl::SetMaxReorderingThreshold(
int max_reordering_threshold) {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
for (StatisticianImplMap::iterator it = statisticians_.begin();
it != statisticians_.end(); ++it) {
it->second->SetMaxReorderingThreshold(max_reordering_threshold);
@@ -455,7 +453,7 @@ void ReceiveStatisticsImpl::SetMaxReorderingThreshold(
}
void ReceiveStatisticsImpl::Process() {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
for (StatisticianImplMap::iterator it = statisticians_.begin();
it != statisticians_.end(); ++it) {
it->second->ProcessBitrate();
@@ -464,7 +462,7 @@ void ReceiveStatisticsImpl::Process() {
}
int64_t ReceiveStatisticsImpl::TimeUntilNextProcess() {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
int64_t time_since_last_update = clock_->TimeInMilliseconds() -
last_rate_update_ms_;
return std::max<int64_t>(
@@ -473,7 +471,7 @@ int64_t ReceiveStatisticsImpl::TimeUntilNextProcess() {
void ReceiveStatisticsImpl::RegisterRtcpStatisticsCallback(
RtcpStatisticsCallback* callback) {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
if (callback != NULL)
assert(rtcp_stats_callback_ == NULL);
rtcp_stats_callback_ = callback;
@@ -481,20 +479,20 @@ void ReceiveStatisticsImpl::RegisterRtcpStatisticsCallback(
void ReceiveStatisticsImpl::StatisticsUpdated(const RtcpStatistics& statistics,
uint32_t ssrc) {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
if (rtcp_stats_callback_)
rtcp_stats_callback_->StatisticsUpdated(statistics, ssrc);
}
void ReceiveStatisticsImpl::CNameChanged(const char* cname, uint32_t ssrc) {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
if (rtcp_stats_callback_)
rtcp_stats_callback_->CNameChanged(cname, ssrc);
}
void ReceiveStatisticsImpl::RegisterRtpStatisticsCallback(
StreamDataCountersCallback* callback) {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
if (callback != NULL)
assert(rtp_stats_callback_ == NULL);
rtp_stats_callback_ = callback;
@@ -502,7 +500,7 @@ void ReceiveStatisticsImpl::RegisterRtpStatisticsCallback(
void ReceiveStatisticsImpl::DataCountersUpdated(const StreamDataCounters& stats,
uint32_t ssrc) {
- CriticalSectionScoped cs(receive_statistics_lock_.get());
+ rtc::CritScope cs(&receive_statistics_lock_);
if (rtp_stats_callback_) {
rtp_stats_callback_->DataCountersUpdated(stats, ssrc);
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
index 6da8334da6e..39679673d08 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
@@ -16,15 +16,12 @@
#include <algorithm>
#include <map>
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/ntp_time.h"
namespace webrtc {
-class CriticalSectionWrapper;
-
class StreamStatisticianImpl : public StreamStatistician {
public:
StreamStatisticianImpl(Clock* clock,
@@ -57,11 +54,11 @@ class StreamStatisticianImpl : public StreamStatistician {
void UpdateCounters(const RTPHeader& rtp_header,
size_t packet_length,
bool retransmitted);
- void NotifyRtpCallback() LOCKS_EXCLUDED(stream_lock_.get());
- void NotifyRtcpCallback() LOCKS_EXCLUDED(stream_lock_.get());
+ void NotifyRtpCallback() LOCKS_EXCLUDED(stream_lock_);
+ void NotifyRtcpCallback() LOCKS_EXCLUDED(stream_lock_);
Clock* clock_;
- rtc::scoped_ptr<CriticalSectionWrapper> stream_lock_;
+ rtc::CriticalSection stream_lock_;
Bitrate incoming_bitrate_;
uint32_t ssrc_;
int max_reordering_threshold_; // In number of packets or sequence numbers.
@@ -131,7 +128,7 @@ class ReceiveStatisticsImpl : public ReceiveStatistics,
typedef std::map<uint32_t, StreamStatisticianImpl*> StatisticianImplMap;
Clock* clock_;
- rtc::scoped_ptr<CriticalSectionWrapper> receive_statistics_lock_;
+ rtc::CriticalSection receive_statistics_lock_;
int64_t last_rate_update_ms_;
StatisticianImplMap statisticians_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
index c265c17c04b..f6cbe74e478 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
@@ -8,9 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/system_wrappers/include/clock.h"
@@ -36,7 +37,7 @@ class ReceiveStatisticsTest : public ::testing::Test {
protected:
SimulatedClock clock_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics_;
RTPHeader header1_;
RTPHeader header2_;
};
@@ -156,8 +157,8 @@ TEST_F(ReceiveStatisticsTest, RtcpCallbacks) {
: RtcpStatisticsCallback(), num_calls_(0), ssrc_(0), stats_() {}
virtual ~TestCallback() {}
- virtual void StatisticsUpdated(const RtcpStatistics& statistics,
- uint32_t ssrc) {
+ void StatisticsUpdated(const RtcpStatistics& statistics,
+ uint32_t ssrc) override {
ssrc_ = ssrc;
stats_ = statistics;
++num_calls_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
index 183076ff591..bbfb52c6cc9 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
@@ -74,13 +76,13 @@ class RtcpFormatRembTest : public ::testing::Test {
OverUseDetectorOptions over_use_detector_options_;
Clock* system_clock_;
ModuleRtpRtcpImpl* dummy_rtp_rtcp_impl_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics_;
RTCPSender* rtcp_sender_;
RTCPReceiver* rtcp_receiver_;
TestTransport* test_transport_;
test::NullTransport null_transport_;
MockRemoteBitrateObserver remote_bitrate_observer_;
- rtc::scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
+ std::unique_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
};
void RtcpFormatRembTest::SetUp() {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h
index 16bd3fc2a2d..f5a885cfdd7 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/app.h
@@ -12,6 +12,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_APP_H_
#include "webrtc/base/buffer.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h
index af3fbacc809..ad28cb39e7b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/bye.h
@@ -15,6 +15,7 @@
#include <string>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h
index 4fb92facc73..06b67c375cf 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h
@@ -15,6 +15,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h
index 49de7be1a80..34eb57f3484 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h
@@ -14,6 +14,7 @@
#include <vector>
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h
index a85576db2da..d7e715bc9ec 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h
@@ -13,6 +13,7 @@
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h
index fb2be113a2b..b6acae5aab1 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/nack.h
@@ -14,6 +14,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h
index 10fafd229da..64caf1b5c41 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/pli.h
@@ -11,6 +11,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_PLI_H_
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h
index 866eb2ce529..8568e7327c7 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h
@@ -12,6 +12,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RAPID_RESYNC_REQUEST_H_
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h
index 237d923cd7a..0630adbac24 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h
@@ -14,6 +14,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.h"
@@ -49,7 +50,7 @@ class ReceiverReport : public RtcpPacket {
static const size_t kRrBaseLength = 4;
static const size_t kMaxNumberOfReportBlocks = 0x1F;
- size_t BlockLength() const {
+ size_t BlockLength() const override {
return kHeaderLength + kRrBaseLength +
report_blocks_.size() * ReportBlock::kLength;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.cc
index 3b33982a838..2f59fbbd55f 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.cc
@@ -13,11 +13,11 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
-
-using webrtc::RTCPUtility::RtcpCommonHeader;
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/common_header.h"
namespace webrtc {
namespace rtcp {
+constexpr uint8_t Remb::kFeedbackMessageType;
// Receiver Estimated Max Bitrate (REMB) (draft-alvestrand-rmcat-remb).
//
// 0 1 2 3
@@ -36,32 +36,39 @@ namespace rtcp {
// 16 | SSRC feedback |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// : ... :
-bool Remb::Parse(const RtcpCommonHeader& header, const uint8_t* payload) {
- RTC_DCHECK(header.packet_type == kPacketType);
- RTC_DCHECK(header.count_or_format == kFeedbackMessageType);
+bool Remb::Parse(const CommonHeader& packet) {
+ RTC_DCHECK(packet.type() == kPacketType);
+ RTC_DCHECK_EQ(packet.fmt(), kFeedbackMessageType);
- if (header.payload_size_bytes < 16) {
- LOG(LS_WARNING) << "Payload length " << header.payload_size_bytes
+ if (packet.payload_size_bytes() < 16) {
+ LOG(LS_WARNING) << "Payload length " << packet.payload_size_bytes()
<< " is too small for Remb packet.";
return false;
}
+ const uint8_t* const payload = packet.payload();
if (kUniqueIdentifier != ByteReader<uint32_t>::ReadBigEndian(&payload[8])) {
LOG(LS_WARNING) << "REMB identifier not found, not a REMB packet.";
return false;
}
uint8_t number_of_ssrcs = payload[12];
- if (header.payload_size_bytes !=
+ if (packet.payload_size_bytes() !=
kCommonFeedbackLength + (2 + number_of_ssrcs) * 4) {
- LOG(LS_WARNING) << "Payload size " << header.payload_size_bytes
+ LOG(LS_WARNING) << "Payload size " << packet.payload_size_bytes()
<< " does not match " << number_of_ssrcs << " ssrcs.";
return false;
}
ParseCommonFeedback(payload);
uint8_t exponenta = payload[13] >> 2;
- uint32_t mantissa = (static_cast<uint32_t>(payload[13] & 0x03) << 16) |
+ uint64_t mantissa = (static_cast<uint32_t>(payload[13] & 0x03) << 16) |
ByteReader<uint16_t>::ReadBigEndian(&payload[14]);
bitrate_bps_ = (mantissa << exponenta);
+ bool shift_overflow = (bitrate_bps_ >> exponenta) != mantissa;
+ if (shift_overflow) {
+ LOG(LS_ERROR) << "Invalid remb bitrate value : " << mantissa
+ << "*2^" << static_cast<int>(exponenta);
+ return false;
+ }
const uint8_t* next_ssrc = payload + 16;
ssrcs_.clear();
@@ -111,7 +118,7 @@ bool Remb::Create(uint8_t* packet,
ByteWriter<uint32_t>::WriteBigEndian(packet + *index, kUniqueIdentifier);
*index += sizeof(uint32_t);
const uint32_t kMaxMantissa = 0x3ffff; // 18 bits.
- uint32_t mantissa = bitrate_bps_;
+ uint64_t mantissa = bitrate_bps_;
uint8_t exponenta = 0;
while (mantissa > kMaxMantissa) {
mantissa >>= 1;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.h
index d58f052b145..9f10921c994 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb.h
@@ -14,28 +14,29 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h"
-#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
namespace webrtc {
namespace rtcp {
+class CommonHeader;
+
// Receiver Estimated Max Bitrate (REMB) (draft-alvestrand-rmcat-remb).
class Remb : public Psfb {
public:
- static const uint8_t kFeedbackMessageType = 15;
+ static constexpr uint8_t kFeedbackMessageType = 15;
Remb() : bitrate_bps_(0) {}
~Remb() override {}
// Parse assumes header is already parsed and validated.
- bool Parse(const RTCPUtility::RtcpCommonHeader& header,
- const uint8_t* payload); // Size of the payload is in the header.
+ bool Parse(const CommonHeader& packet);
bool AppliesTo(uint32_t ssrc);
bool AppliesToMany(const std::vector<uint32_t>& ssrcs);
- void WithBitrateBps(uint32_t bitrate_bps) { bitrate_bps_ = bitrate_bps; }
+ void WithBitrateBps(uint64_t bitrate_bps) { bitrate_bps_ = bitrate_bps; }
- uint32_t bitrate_bps() const { return bitrate_bps_; }
+ uint64_t bitrate_bps() const { return bitrate_bps_; }
const std::vector<uint32_t>& ssrcs() const { return ssrcs_; }
protected:
@@ -49,14 +50,14 @@ class Remb : public Psfb {
}
private:
- static const size_t kMaxNumberOfSsrcs = 0xff;
- static const uint32_t kUniqueIdentifier = 0x52454D42; // 'R' 'E' 'M' 'B'.
+ static constexpr size_t kMaxNumberOfSsrcs = 0xff;
+ static constexpr uint32_t kUniqueIdentifier = 0x52454D42; // 'R' 'E' 'M' 'B'.
// Media ssrc is unused, shadow base class setter and getter.
void To(uint32_t);
uint32_t media_ssrc() const;
- uint32_t bitrate_bps_;
+ uint64_t bitrate_bps_;
std::vector<uint32_t> ssrcs_;
RTC_DISALLOW_COPY_AND_ASSIGN(Remb);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc
index ee06972e2b0..d504143f6f6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc
@@ -12,32 +12,25 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/test/rtcp_packet_parser.h"
using testing::ElementsAreArray;
using testing::IsEmpty;
using testing::make_tuple;
using webrtc::rtcp::Remb;
-using webrtc::RTCPUtility::RtcpCommonHeader;
-using webrtc::RTCPUtility::RtcpParseCommonHeader;
namespace webrtc {
namespace {
-
const uint32_t kSenderSsrc = 0x12345678;
const uint32_t kRemoteSsrcs[] = {0x23456789, 0x2345678a, 0x2345678b};
const uint32_t kBitrateBps = 0x3fb93 * 2; // 522022;
+const uint64_t kBitrateBps64bit = 0x3fb93ULL << 30;
const uint8_t kPacket[] = {0x8f, 206, 0x00, 0x07, 0x12, 0x34, 0x56, 0x78,
0x00, 0x00, 0x00, 0x00, 'R', 'E', 'M', 'B',
0x03, 0x07, 0xfb, 0x93, 0x23, 0x45, 0x67, 0x89,
0x23, 0x45, 0x67, 0x8a, 0x23, 0x45, 0x67, 0x8b};
const size_t kPacketLength = sizeof(kPacket);
-
-bool ParseRemb(const uint8_t* buffer, size_t length, Remb* remb) {
- RtcpCommonHeader header;
- EXPECT_TRUE(RtcpParseCommonHeader(buffer, length, &header));
- EXPECT_EQ(length, header.BlockSize());
- return remb->Parse(header, buffer + RtcpCommonHeader::kHeaderSizeBytes);
-}
+} // namespace
TEST(RtcpPacketRembTest, Create) {
Remb remb;
@@ -55,7 +48,7 @@ TEST(RtcpPacketRembTest, Create) {
TEST(RtcpPacketRembTest, Parse) {
Remb remb;
- EXPECT_TRUE(ParseRemb(kPacket, kPacketLength, &remb));
+ EXPECT_TRUE(test::ParseSinglePacket(kPacket, &remb));
const Remb& parsed = remb;
EXPECT_EQ(kSenderSsrc, parsed.sender_ssrc());
@@ -70,19 +63,31 @@ TEST(RtcpPacketRembTest, CreateAndParseWithoutSsrcs) {
rtc::Buffer packet = remb.Build();
Remb parsed;
- EXPECT_TRUE(ParseRemb(packet.data(), packet.size(), &parsed));
+ EXPECT_TRUE(test::ParseSinglePacket(packet, &parsed));
EXPECT_EQ(kSenderSsrc, parsed.sender_ssrc());
EXPECT_EQ(kBitrateBps, parsed.bitrate_bps());
EXPECT_THAT(parsed.ssrcs(), IsEmpty());
}
+TEST(RtcpPacketRembTest, CreateAndParse64bitBitrate) {
+ Remb remb;
+ remb.WithBitrateBps(kBitrateBps64bit);
+ rtc::Buffer packet = remb.Build();
+
+ Remb parsed;
+ EXPECT_TRUE(test::ParseSinglePacket(packet, &parsed));
+ EXPECT_EQ(kBitrateBps64bit, parsed.bitrate_bps());
+}
+
TEST(RtcpPacketRembTest, ParseFailsOnTooSmallPacketToBeRemb) {
- uint8_t packet[kPacketLength];
- memcpy(packet, kPacket, kPacketLength);
- packet[3] = 3; // Make it too small.
+ // Make it too small.
+ constexpr size_t kTooSmallSize = (1 + 3) * 4;
+ uint8_t packet[kTooSmallSize];
+ memcpy(packet, kPacket, kTooSmallSize);
+ packet[3] = 3;
Remb remb;
- EXPECT_FALSE(ParseRemb(packet, (1 + 3) * 4, &remb));
+ EXPECT_FALSE(test::ParseSinglePacket(packet, &remb));
}
TEST(RtcpPacketRembTest, ParseFailsWhenUniqueIdentifierIsNotRemb) {
@@ -91,7 +96,17 @@ TEST(RtcpPacketRembTest, ParseFailsWhenUniqueIdentifierIsNotRemb) {
packet[12] = 'N'; // Swap 'R' -> 'N' in the 'REMB' unique identifier.
Remb remb;
- EXPECT_FALSE(ParseRemb(packet, kPacketLength, &remb));
+ EXPECT_FALSE(test::ParseSinglePacket(packet, &remb));
+}
+
+TEST(RtcpPacketRembTest, ParseFailsWhenBitrateDoNotFitIn64bits) {
+ uint8_t packet[kPacketLength];
+ memcpy(packet, kPacket, kPacketLength);
+ packet[17] |= 0xfc; // Set exponenta component to maximum of 63.
+ packet[19] |= 0x02; // Ensure mantissa is at least 2.
+
+ Remb remb;
+ EXPECT_FALSE(test::ParseSinglePacket(packet, &remb));
}
TEST(RtcpPacketRembTest, ParseFailsWhenSsrcCountMismatchLength) {
@@ -100,7 +115,7 @@ TEST(RtcpPacketRembTest, ParseFailsWhenSsrcCountMismatchLength) {
packet[16]++; // Swap 3 -> 4 in the ssrcs count.
Remb remb;
- EXPECT_FALSE(ParseRemb(packet, kPacketLength, &remb));
+ EXPECT_FALSE(test::ParseSinglePacket(packet, &remb));
}
TEST(RtcpPacketRembTest, TooManySsrcs) {
@@ -126,5 +141,5 @@ TEST(RtcpPacketRembTest, TooManySsrcsForBatchAssign) {
// But not for another one.
EXPECT_FALSE(remb.AppliesTo(kRemoteSsrc));
}
-} // namespace
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rpsi.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rpsi.h
index 1fa3352335e..7d4895b09cd 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rpsi.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/rpsi.h
@@ -12,6 +12,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RPSI_H_
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.h
index 5940edbb116..19d5b42b557 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sdes.h
@@ -15,6 +15,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sender_report.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sender_report.h
index e11bdb9a942..a544017f149 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sender_report.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sender_report.h
@@ -13,6 +13,7 @@
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/report_block.h"
#include "webrtc/system_wrappers/include/ntp_time.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h
index 5d9e6c93e95..7b6b24f3b8d 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/sli.h
@@ -15,6 +15,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/psfb.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h
index 26a44082d35..c84d0dfe0b9 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h
@@ -14,6 +14,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h
index 4028563d071..15bfc5856fa 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h
@@ -14,6 +14,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/rtpfb.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc
index 4ad49561b80..5cdaa3aaa47 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc
@@ -651,10 +651,10 @@ bool TransportFeedback::Create(uint8_t* packet,
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// De-serialize packet.
-rtc::scoped_ptr<TransportFeedback> TransportFeedback::ParseFrom(
+std::unique_ptr<TransportFeedback> TransportFeedback::ParseFrom(
const uint8_t* buffer,
size_t length) {
- rtc::scoped_ptr<TransportFeedback> packet(new TransportFeedback());
+ std::unique_ptr<TransportFeedback> packet(new TransportFeedback());
if (length < kMinSizeBytes) {
LOG(LS_WARNING) << "Buffer too small (" << length
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h
index ad6fd166f2b..7a74d7ffe83 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h
@@ -12,6 +12,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TRANSPORT_FEEDBACK_H_
#include <deque>
+#include <memory>
#include <vector>
#include "webrtc/base/constructormagic.h"
@@ -58,7 +59,7 @@ class TransportFeedback : public RtcpPacket {
static const uint8_t kFeedbackMessageType = 15; // TODO(sprang): IANA reg?
static const uint8_t kPayloadType = 205; // RTPFB, see RFC4585.
- static rtc::scoped_ptr<TransportFeedback> ParseFrom(const uint8_t* buffer,
+ static std::unique_ptr<TransportFeedback> ParseFrom(const uint8_t* buffer,
size_t length);
protected:
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc
index 3615065351d..203d70fab15 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc
@@ -11,6 +11,7 @@
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
#include <limits>
+#include <memory>
#include "testing/gtest/include/gtest/gtest.h"
@@ -43,7 +44,7 @@ class FeedbackTester {
void WithInput(const uint16_t received_seq[],
const int64_t received_ts[],
uint16_t length) {
- rtc::scoped_ptr<int64_t[]> temp_deltas;
+ std::unique_ptr<int64_t[]> temp_deltas;
if (received_ts == nullptr) {
temp_deltas.reset(new int64_t[length]);
GenerateDeltas(received_seq, length, temp_deltas.get());
@@ -136,7 +137,7 @@ class FeedbackTester {
std::vector<int64_t> expected_deltas_;
size_t expected_size_;
int64_t default_delta_;
- rtc::scoped_ptr<TransportFeedback> feedback_;
+ std::unique_ptr<TransportFeedback> feedback_;
rtc::Buffer serialized_;
};
@@ -356,7 +357,7 @@ TEST(RtcpPacketTest, TransportFeedback_Aliasing) {
TEST(RtcpPacketTest, TransportFeedback_Limits) {
// Sequence number wrap above 0x8000.
- rtc::scoped_ptr<TransportFeedback> packet(new TransportFeedback());
+ std::unique_ptr<TransportFeedback> packet(new TransportFeedback());
packet->WithBase(0, 0);
EXPECT_TRUE(packet->WithReceivedPacket(0x8000, 1000));
@@ -446,7 +447,7 @@ TEST(RtcpPacketTest, TransportFeedback_Padding) {
&mod_buffer[2], ByteReader<uint16_t>::ReadBigEndian(&mod_buffer[2]) +
((kPaddingBytes + 3) / 4));
- rtc::scoped_ptr<TransportFeedback> parsed_packet(
+ std::unique_ptr<TransportFeedback> parsed_packet(
TransportFeedback::ParseFrom(mod_buffer, kExpectedSizeWithPadding));
ASSERT_TRUE(parsed_packet.get() != nullptr);
EXPECT_EQ(kExpectedSizeWords * 4, packet.size()); // Padding not included.
@@ -468,7 +469,7 @@ TEST(RtcpPacketTest, TransportFeedback_CorrectlySplitsVectorChunks) {
feedback.WithReceivedPacket(deltas, deltas * 1000 + kLargeTimeDelta);
rtc::Buffer serialized_packet = feedback.Build();
- rtc::scoped_ptr<TransportFeedback> deserialized_packet =
+ std::unique_ptr<TransportFeedback> deserialized_packet =
TransportFeedback::ParseFrom(serialized_packet.data(),
serialized_packet.size());
EXPECT_TRUE(deserialized_packet.get() != nullptr);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc
index 1b5d4f32a62..58218ddbfd1 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc
@@ -6,8 +6,6 @@
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
- *
- * This file includes unit tests for the RtcpPacket.
*/
#include "testing/gmock/include/gmock/gmock.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
index 0faf2a42575..3e8e47fbd37 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
@@ -49,13 +49,9 @@ RTCPReceiver::RTCPReceiver(
receiver_only_(receiver_only),
_lastReceived(0),
_rtpRtcp(*owner),
- _criticalSectionFeedbacks(
- CriticalSectionWrapper::CreateCriticalSection()),
_cbRtcpBandwidthObserver(rtcp_bandwidth_observer),
_cbRtcpIntraFrameObserver(rtcp_intra_frame_observer),
_cbTransportFeedbackObserver(transport_feedback_observer),
- _criticalSectionRTCPReceiver(
- CriticalSectionWrapper::CreateCriticalSection()),
main_ssrc_(0),
_remoteSSRC(0),
_remoteSenderInfo(),
@@ -76,9 +72,6 @@ RTCPReceiver::RTCPReceiver(
}
RTCPReceiver::~RTCPReceiver() {
- delete _criticalSectionRTCPReceiver;
- delete _criticalSectionFeedbacks;
-
ReportBlockMap::iterator it = _receivedReportBlockMap.begin();
for (; it != _receivedReportBlockMap.end(); ++it) {
ReportBlockInfoMap* info_map = &(it->second);
@@ -103,12 +96,12 @@ RTCPReceiver::~RTCPReceiver() {
}
int64_t RTCPReceiver::LastReceived() {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
return _lastReceived;
}
int64_t RTCPReceiver::LastReceivedReceiverReport() const {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
int64_t last_received_rr = -1;
for (ReceivedInfoMap::const_iterator it = _receivedInfoMap.begin();
it != _receivedInfoMap.end(); ++it) {
@@ -120,7 +113,7 @@ int64_t RTCPReceiver::LastReceivedReceiverReport() const {
}
void RTCPReceiver::SetRemoteSSRC(uint32_t ssrc) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
// new SSRC reset old reports
memset(&_remoteSenderInfo, 0, sizeof(_remoteSenderInfo));
@@ -131,7 +124,7 @@ void RTCPReceiver::SetRemoteSSRC(uint32_t ssrc) {
}
uint32_t RTCPReceiver::RemoteSSRC() const {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
return _remoteSSRC;
}
@@ -139,7 +132,7 @@ void RTCPReceiver::SetSsrcs(uint32_t main_ssrc,
const std::set<uint32_t>& registered_ssrcs) {
uint32_t old_ssrc = 0;
{
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
old_ssrc = main_ssrc_;
main_ssrc_ = main_ssrc;
registered_ssrcs_ = registered_ssrcs;
@@ -156,7 +149,7 @@ int32_t RTCPReceiver::RTT(uint32_t remoteSSRC,
int64_t* avgRTT,
int64_t* minRTT,
int64_t* maxRTT) const {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
RTCPReportBlockInformation* reportBlock =
GetReportBlockInformation(remoteSSRC, main_ssrc_);
@@ -180,13 +173,13 @@ int32_t RTCPReceiver::RTT(uint32_t remoteSSRC,
}
void RTCPReceiver::SetRtcpXrRrtrStatus(bool enable) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
xr_rrtr_status_ = enable;
}
bool RTCPReceiver::GetAndResetXrRrRtt(int64_t* rtt_ms) {
assert(rtt_ms);
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
if (xr_rr_rtt_ms_ == 0) {
return false;
}
@@ -202,7 +195,7 @@ bool RTCPReceiver::NTP(uint32_t* ReceivedNTPsecs,
uint32_t* RTCPArrivalTimeFrac,
uint32_t* rtcp_timestamp) const
{
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
if(ReceivedNTPsecs)
{
*ReceivedNTPsecs = _remoteSenderInfo.NTPseconds; // NTP from incoming SendReport
@@ -228,7 +221,7 @@ bool RTCPReceiver::NTP(uint32_t* ReceivedNTPsecs,
bool RTCPReceiver::LastReceivedXrReferenceTimeInfo(
RtcpReceiveTimeInfo* info) const {
assert(info);
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
if (_lastReceivedXRNTPsecs == 0 && _lastReceivedXRNTPfrac == 0) {
return false;
}
@@ -251,7 +244,7 @@ bool RTCPReceiver::LastReceivedXrReferenceTimeInfo(
int32_t RTCPReceiver::SenderInfoReceived(RTCPSenderInfo* senderInfo) const {
assert(senderInfo);
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
if (_lastReceivedSRNTPsecs == 0) {
return -1;
}
@@ -264,7 +257,7 @@ int32_t RTCPReceiver::SenderInfoReceived(RTCPSenderInfo* senderInfo) const {
int32_t RTCPReceiver::StatisticsReceived(
std::vector<RTCPReportBlock>* receiveBlocks) const {
assert(receiveBlocks);
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
ReportBlockMap::const_iterator it = _receivedReportBlockMap.begin();
for (; it != _receivedReportBlockMap.end(); ++it) {
const ReportBlockInfoMap* info_map = &(it->second);
@@ -280,7 +273,7 @@ int32_t
RTCPReceiver::IncomingRTCPPacket(RTCPPacketInformation& rtcpPacketInformation,
RTCPUtility::RTCPParserV2* rtcpParser)
{
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
_lastReceived = _clock->TimeInMilliseconds();
@@ -590,7 +583,7 @@ RTCPReportBlockInformation* RTCPReceiver::GetReportBlockInformation(
RTCPCnameInformation*
RTCPReceiver::CreateCnameInformation(uint32_t remoteSSRC) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
std::map<uint32_t, RTCPCnameInformation*>::iterator it =
_receivedCnameMap.find(remoteSSRC);
@@ -606,7 +599,7 @@ RTCPReceiver::CreateCnameInformation(uint32_t remoteSSRC) {
RTCPCnameInformation*
RTCPReceiver::GetCnameInformation(uint32_t remoteSSRC) const {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
std::map<uint32_t, RTCPCnameInformation*>::const_iterator it =
_receivedCnameMap.find(remoteSSRC);
@@ -619,7 +612,7 @@ RTCPReceiver::GetCnameInformation(uint32_t remoteSSRC) const {
RTCPReceiveInformation*
RTCPReceiver::CreateReceiveInformation(uint32_t remoteSSRC) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
std::map<uint32_t, RTCPReceiveInformation*>::iterator it =
_receivedInfoMap.find(remoteSSRC);
@@ -634,7 +627,7 @@ RTCPReceiver::CreateReceiveInformation(uint32_t remoteSSRC) {
RTCPReceiveInformation*
RTCPReceiver::GetReceiveInformation(uint32_t remoteSSRC) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
std::map<uint32_t, RTCPReceiveInformation*>::iterator it =
_receivedInfoMap.find(remoteSSRC);
@@ -651,7 +644,7 @@ void RTCPReceiver::UpdateReceiveInformation(
}
bool RTCPReceiver::RtcpRrTimeout(int64_t rtcp_interval_ms) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
if (_lastReceivedRrMs == 0)
return false;
@@ -665,7 +658,7 @@ bool RTCPReceiver::RtcpRrTimeout(int64_t rtcp_interval_ms) {
}
bool RTCPReceiver::RtcpRrSequenceNumberTimeout(int64_t rtcp_interval_ms) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
if (_lastIncreasedSequenceNumberMs == 0)
return false;
@@ -680,7 +673,7 @@ bool RTCPReceiver::RtcpRrSequenceNumberTimeout(int64_t rtcp_interval_ms) {
}
bool RTCPReceiver::UpdateRTCPReceiveInformationTimers() {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
bool updateBoundingSet = false;
int64_t timeNow = _clock->TimeInMilliseconds();
@@ -724,7 +717,7 @@ bool RTCPReceiver::UpdateRTCPReceiveInformationTimers() {
}
int32_t RTCPReceiver::BoundingSet(bool* tmmbrOwner, TMMBRSet* boundingSetRec) {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
std::map<uint32_t, RTCPReceiveInformation*>::iterator receiveInfoIt =
_receivedInfoMap.find(_remoteSSRC);
@@ -773,7 +766,7 @@ void RTCPReceiver::HandleSDESChunk(RTCPUtility::RTCPParserV2& rtcpParser) {
cnameInfo->name[RTCP_CNAME_SIZE - 1] = 0;
strncpy(cnameInfo->name, rtcpPacket.CName.CName, RTCP_CNAME_SIZE - 1);
{
- CriticalSectionScoped lock(_criticalSectionFeedbacks);
+ rtc::CritScope lock(&_criticalSectionFeedbacks);
if (stats_callback_ != NULL) {
stats_callback_->CNameChanged(rtcpPacket.CName.CName,
rtcpPacket.CName.SenderSSRC);
@@ -1283,12 +1276,12 @@ int32_t RTCPReceiver::UpdateTMMBR() {
void RTCPReceiver::RegisterRtcpStatisticsCallback(
RtcpStatisticsCallback* callback) {
- CriticalSectionScoped cs(_criticalSectionFeedbacks);
+ rtc::CritScope cs(&_criticalSectionFeedbacks);
stats_callback_ = callback;
}
RtcpStatisticsCallback* RTCPReceiver::GetRtcpStatisticsCallback() {
- CriticalSectionScoped cs(_criticalSectionFeedbacks);
+ rtc::CritScope cs(&_criticalSectionFeedbacks);
return stats_callback_;
}
@@ -1305,7 +1298,7 @@ void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
std::set<uint32_t> registered_ssrcs;
{
// We don't want to hold this critsect when triggering the callbacks below.
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
local_ssrc = main_ssrc_;
registered_ssrcs = registered_ssrcs_;
}
@@ -1378,7 +1371,7 @@ void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
}
if (!receiver_only_) {
- CriticalSectionScoped cs(_criticalSectionFeedbacks);
+ rtc::CritScope cs(&_criticalSectionFeedbacks);
if (stats_callback_) {
for (ReportBlockList::const_iterator it =
rtcpPacketInformation.report_blocks.begin();
@@ -1400,7 +1393,7 @@ int32_t RTCPReceiver::CNAME(uint32_t remoteSSRC,
char cName[RTCP_CNAME_SIZE]) const {
assert(cName);
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
RTCPCnameInformation* cnameInfo = GetCnameInformation(remoteSSRC);
if (cnameInfo == NULL) {
return -1;
@@ -1414,7 +1407,7 @@ int32_t RTCPReceiver::CNAME(uint32_t remoteSSRC,
int32_t RTCPReceiver::TMMBRReceived(uint32_t size,
uint32_t accNumCandidates,
TMMBRSet* candidateSet) const {
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ rtc::CritScope lock(&_criticalSectionRTCPReceiver);
std::map<uint32_t, RTCPReceiveInformation*>::const_iterator
receiveInfoIt = _receivedInfoMap.begin();
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
index 475ab1e26f7..28c28cb69bb 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
@@ -15,6 +15,7 @@
#include <set>
#include <vector>
+#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h"
@@ -267,12 +268,12 @@ protected:
int64_t _lastReceived;
ModuleRtpRtcpImpl& _rtpRtcp;
- CriticalSectionWrapper* _criticalSectionFeedbacks;
+ rtc::CriticalSection _criticalSectionFeedbacks;
RtcpBandwidthObserver* const _cbRtcpBandwidthObserver;
RtcpIntraFrameObserver* const _cbRtcpIntraFrameObserver;
TransportFeedbackObserver* const _cbTransportFeedbackObserver;
- CriticalSectionWrapper* _criticalSectionRTCPReceiver;
+ rtc::CriticalSection _criticalSectionRTCPReceiver;
uint32_t main_ssrc_ GUARDED_BY(_criticalSectionRTCPReceiver);
uint32_t _remoteSSRC GUARDED_BY(_criticalSectionRTCPReceiver);
std::set<uint32_t> registered_ssrcs_ GUARDED_BY(_criticalSectionRTCPReceiver);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc
index a5c0e282828..bfcc1bdfde0 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.cc
@@ -42,14 +42,13 @@ RTCPPacketInformation::RTCPPacketInformation()
RTCPPacketInformation::~RTCPPacketInformation()
{
delete [] applicationData;
- delete VoIPMetric;
}
void
RTCPPacketInformation::AddVoIPMetric(const RTCPVoIPMetric* metric)
{
- VoIPMetric = new RTCPVoIPMetric();
- memcpy(VoIPMetric, metric, sizeof(RTCPVoIPMetric));
+ VoIPMetric.reset(new RTCPVoIPMetric());
+ memcpy(VoIPMetric.get(), metric, sizeof(RTCPVoIPMetric));
}
void RTCPPacketInformation::AddApplicationData(const uint8_t* data,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
index a7928419627..40d1220069c 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
@@ -12,10 +12,10 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_HELP_H_
#include <list>
+#include <memory>
#include <vector>
#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h" // RTCPReportBlock
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/tmmbr_help.h"
@@ -87,9 +87,9 @@ public:
uint32_t xr_originator_ssrc;
bool xr_dlrr_item;
- RTCPVoIPMetric* VoIPMetric;
+ std::unique_ptr<RTCPVoIPMetric> VoIPMetric;
- rtc::scoped_ptr<rtcp::TransportFeedback> transport_feedback_;
+ std::unique_ptr<rtcp::TransportFeedback> transport_feedback_;
private:
RTC_DISALLOW_COPY_AND_ASSIGN(RTCPPacketInformation);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
index 08f109bc297..924d009883d 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
@@ -8,10 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
-/*
- * This file includes unit tests for the RTCPReceiver.
- */
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -136,7 +134,7 @@ class RtcpReceiverTest : public ::testing::Test {
rtcp_packet_info_.rtp_timestamp = rtcpPacketInformation.rtp_timestamp;
rtcp_packet_info_.xr_dlrr_item = rtcpPacketInformation.xr_dlrr_item;
if (rtcpPacketInformation.VoIPMetric)
- rtcp_packet_info_.AddVoIPMetric(rtcpPacketInformation.VoIPMetric);
+ rtcp_packet_info_.AddVoIPMetric(rtcpPacketInformation.VoIPMetric.get());
rtcp_packet_info_.transport_feedback_.reset(
rtcpPacketInformation.transport_feedback_.release());
return 0;
@@ -149,7 +147,7 @@ class RtcpReceiverTest : public ::testing::Test {
TestTransport* test_transport_;
RTCPHelp::RTCPPacketInformation rtcp_packet_info_;
MockRemoteBitrateObserver remote_bitrate_observer_;
- rtc::scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
+ std::unique_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
};
@@ -1244,6 +1242,20 @@ TEST_F(RtcpReceiverTest, ReceivesTransportFeedback) {
EXPECT_TRUE(rtcp_packet_info_.transport_feedback_.get() != nullptr);
}
+TEST_F(RtcpReceiverTest, ReceivesRemb) {
+ const uint32_t kSenderSsrc = 0x123456;
+ const uint32_t kBitrateBps = 500000;
+ rtcp::Remb remb;
+ remb.From(kSenderSsrc);
+ remb.WithBitrateBps(kBitrateBps);
+ rtc::Buffer built_packet = remb.Build();
+
+ EXPECT_EQ(0, InjectRtcpPacket(built_packet.data(), built_packet.size()));
+
+ EXPECT_EQ(kRtcpRemb, rtcp_packet_info_.rtcpPacketTypeFlags & kRtcpRemb);
+ EXPECT_EQ(kBitrateBps, rtcp_packet_info_.receiverEstimatedMaxBitrate);
+}
+
TEST_F(RtcpReceiverTest, HandlesInvalidTransportFeedback) {
const uint32_t kSenderSsrc = 0x10203;
const uint32_t kSourceSsrc = 0x123456;
@@ -1261,7 +1273,7 @@ TEST_F(RtcpReceiverTest, HandlesInvalidTransportFeedback) {
static uint32_t kBitrateBps = 50000;
rtcp::Remb remb;
- remb.From(kSourceSsrc);
+ remb.From(kSenderSsrc);
remb.WithBitrateBps(kBitrateBps);
rtcp::CompoundPacket compound;
compound.Append(&packet);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
index 95bfeeea1f6..4a509b001ea 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
@@ -13,6 +13,7 @@
#include <string.h> // memcpy
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/call.h"
@@ -638,16 +639,11 @@ std::unique_ptr<rtcp::RtcpPacket> RTCPSender::BuildTMMBR(
std::unique_ptr<rtcp::RtcpPacket> RTCPSender::BuildTMMBN(
const RtcpContext& ctx) {
- TMMBRSet* boundingSet = tmmbr_help_.BoundingSetToSend();
- if (boundingSet == nullptr)
- return nullptr;
-
rtcp::Tmmbn* tmmbn = new rtcp::Tmmbn();
tmmbn->From(ssrc_);
- for (uint32_t i = 0; i < boundingSet->lengthOfSet(); i++) {
- if (boundingSet->Tmmbr(i) > 0) {
- tmmbn->WithTmmbr(boundingSet->Ssrc(i), boundingSet->Tmmbr(i),
- boundingSet->PacketOH(i));
+ for (const rtcp::TmmbItem& tmmbr : tmmbn_to_send_) {
+ if (tmmbr.bitrate_bps() > 0) {
+ tmmbn->WithTmmbr(tmmbr);
}
}
@@ -871,11 +867,13 @@ void RTCPSender::PrepareReport(const std::set<RTCPPacketType>& packetTypes,
random_.Rand(minIntervalMs * 1 / 2, minIntervalMs * 3 / 2);
next_time_to_send_rtcp_ = clock_->TimeInMilliseconds() + timeToNext;
- StatisticianMap statisticians =
- receive_statistics_->GetActiveStatisticians();
- RTC_DCHECK(report_blocks_.empty());
- for (auto& it : statisticians) {
- AddReportBlock(feedback_state, it.first, it.second);
+ if (receive_statistics_) {
+ StatisticianMap statisticians =
+ receive_statistics_->GetActiveStatisticians();
+ RTC_DCHECK(report_blocks_.empty());
+ for (auto& it : statisticians) {
+ AddReportBlock(feedback_state, it.first, it.second);
+ }
}
}
}
@@ -972,14 +970,14 @@ bool RTCPSender::RtcpXrReceiverReferenceTime() const {
}
// no callbacks allowed inside this function
-int32_t RTCPSender::SetTMMBN(const TMMBRSet* boundingSet) {
+void RTCPSender::SetTMMBN(const std::vector<rtcp::TmmbItem>* bounding_set) {
rtc::CritScope lock(&critical_section_rtcp_sender_);
-
- if (0 == tmmbr_help_.SetTMMBRBoundingSetToSend(boundingSet)) {
- SetFlag(kRtcpTmmbn, true);
- return 0;
+ if (bounding_set) {
+ tmmbn_to_send_ = *bounding_set;
+ } else {
+ tmmbn_to_send_.clear();
}
- return -1;
+ SetFlag(kRtcpTmmbn, true);
}
void RTCPSender::SetFlag(RTCPPacketType type, bool is_volatile) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
index ba6fb700558..02719aa1c50 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
@@ -18,6 +18,7 @@
#include <string>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/random.h"
#include "webrtc/base/thread_annotations.h"
@@ -134,7 +135,7 @@ class RTCPSender {
void SetMaxPayloadLength(size_t max_payload_length);
- int32_t SetTMMBN(const TMMBRSet* boundingSet);
+ void SetTMMBN(const std::vector<rtcp::TmmbItem>* boundingSet);
int32_t SetApplicationSpecificData(uint8_t subType,
uint32_t name,
@@ -240,6 +241,8 @@ class RTCPSender {
std::vector<uint32_t> remb_ssrcs_ GUARDED_BY(critical_section_rtcp_sender_);
TMMBRHelp tmmbr_help_ GUARDED_BY(critical_section_rtcp_sender_);
+ std::vector<rtcp::TmmbItem> tmmbn_to_send_
+ GUARDED_BY(critical_section_rtcp_sender_);
uint32_t tmmbr_send_ GUARDED_BY(critical_section_rtcp_sender_);
uint32_t packet_oh_send_ GUARDED_BY(critical_section_rtcp_sender_);
size_t max_payload_length_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
index dafc2e0be66..a4d6e59c8f8 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
@@ -8,10 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-
-/*
- * This file includes unit tests for the RTCPSender.
- */
+#include <memory>
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -261,9 +258,9 @@ class RtcpSenderTest : public ::testing::Test {
SimulatedClock clock_;
TestTransport test_transport_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics_;
- rtc::scoped_ptr<ModuleRtpRtcpImpl> rtp_rtcp_impl_;
- rtc::scoped_ptr<RTCPSender> rtcp_sender_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics_;
+ std::unique_ptr<ModuleRtpRtcpImpl> rtp_rtcp_impl_;
+ std::unique_ptr<RTCPSender> rtcp_sender_;
};
TEST_F(RtcpSenderTest, SetRtcpStatus) {
@@ -691,13 +688,14 @@ TEST_F(RtcpSenderTest, TmmbrIncludedInCompoundPacketIfEnabled) {
TEST_F(RtcpSenderTest, SendTmmbn) {
rtcp_sender_->SetRTCPStatus(RtcpMode::kCompound);
- TMMBRSet bounding_set;
- bounding_set.VerifyAndAllocateSet(1);
+ std::vector<rtcp::TmmbItem> bounding_set;
const uint32_t kBitrateKbps = 32768;
const uint32_t kPacketOh = 40;
const uint32_t kSourceSsrc = 12345;
- bounding_set.AddEntry(kBitrateKbps, kPacketOh, kSourceSsrc);
- EXPECT_EQ(0, rtcp_sender_->SetTMMBN(&bounding_set));
+ const rtcp::TmmbItem tmmbn(kSourceSsrc, kBitrateKbps * 1000, kPacketOh);
+ bounding_set.push_back(tmmbn);
+ rtcp_sender_->SetTMMBN(&bounding_set);
+
EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state(), kRtcpSr));
EXPECT_EQ(1, parser()->sender_report()->num_packets());
EXPECT_EQ(1, parser()->tmmbn()->num_packets());
@@ -716,8 +714,8 @@ TEST_F(RtcpSenderTest, SendTmmbn) {
// situation where this caused confusion.
TEST_F(RtcpSenderTest, SendsTmmbnIfSetAndEmpty) {
rtcp_sender_->SetRTCPStatus(RtcpMode::kCompound);
- TMMBRSet bounding_set;
- EXPECT_EQ(0, rtcp_sender_->SetTMMBN(&bounding_set));
+ std::vector<rtcp::TmmbItem> bounding_set;
+ rtcp_sender_->SetTMMBN(&bounding_set);
EXPECT_EQ(0, rtcp_sender_->SendRTCP(feedback_state(), kRtcpSr));
EXPECT_EQ(1, parser()->sender_report()->num_packets());
EXPECT_EQ(1, parser()->tmmbn()->num_packets());
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
index 9b5a83515f4..c4f688aac44 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
@@ -14,12 +14,17 @@
#include <math.h> // ceil
#include <string.h> // memcpy
+#include <limits>
+
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h"
namespace webrtc {
+namespace {
+constexpr uint64_t kMaxBitrateBps = std::numeric_limits<uint32_t>::max();
+} // namespace
namespace RTCPUtility {
@@ -1440,14 +1445,23 @@ RTCPUtility::RTCPParserV2::ParsePsfbREMBItem()
}
_packet.REMBItem.NumberOfSSRCs = *_ptrRTCPData++;
- const uint8_t brExp = (_ptrRTCPData[0] >> 2) & 0x3F;
+ const uint8_t exp = (_ptrRTCPData[0] >> 2) & 0x3F;
- uint32_t brMantissa = (_ptrRTCPData[0] & 0x03) << 16;
- brMantissa += (_ptrRTCPData[1] << 8);
- brMantissa += (_ptrRTCPData[2]);
+ uint64_t mantissa = (_ptrRTCPData[0] & 0x03) << 16;
+ mantissa += (_ptrRTCPData[1] << 8);
+ mantissa += (_ptrRTCPData[2]);
_ptrRTCPData += 3; // Fwd read data
- _packet.REMBItem.BitRate = (brMantissa << brExp);
+ uint64_t bitrate_bps = (mantissa << exp);
+ bool shift_overflow = exp > 0 && (mantissa >> (64 - exp)) != 0;
+ if (shift_overflow || bitrate_bps > kMaxBitrateBps) {
+ LOG(LS_ERROR) << "Unhandled remb bitrate value : " << mantissa
+ << "*2^" << static_cast<int>(exp);
+ _state = ParseState::State_TopLevel;
+ EndCurrentBlock();
+ return false;
+ }
+ _packet.REMBItem.BitRate = bitrate_bps;
const ptrdiff_t length_ssrcs = _ptrRTCPBlockEnd - _ptrRTCPData;
if (length_ssrcs < 4 * _packet.REMBItem.NumberOfSSRCs)
@@ -1492,18 +1506,28 @@ RTCPUtility::RTCPParserV2::ParseTMMBRItem()
_packet.TMMBRItem.SSRC += *_ptrRTCPData++ << 8;
_packet.TMMBRItem.SSRC += *_ptrRTCPData++;
- uint8_t mxtbrExp = (_ptrRTCPData[0] >> 2) & 0x3F;
+ uint8_t exp = (_ptrRTCPData[0] >> 2) & 0x3F;
- uint32_t mxtbrMantissa = (_ptrRTCPData[0] & 0x03) << 15;
- mxtbrMantissa += (_ptrRTCPData[1] << 7);
- mxtbrMantissa += (_ptrRTCPData[2] >> 1) & 0x7F;
+ uint64_t mantissa = (_ptrRTCPData[0] & 0x03) << 15;
+ mantissa += (_ptrRTCPData[1] << 7);
+ mantissa += (_ptrRTCPData[2] >> 1) & 0x7F;
uint32_t measuredOH = (_ptrRTCPData[2] & 0x01) << 8;
measuredOH += _ptrRTCPData[3];
_ptrRTCPData += 4; // Fwd read data
- _packet.TMMBRItem.MaxTotalMediaBitRate = ((mxtbrMantissa << mxtbrExp) / 1000);
+ uint64_t bitrate_bps = (mantissa << exp);
+ bool shift_overflow = exp > 0 && (mantissa >> (64 - exp)) != 0;
+ if (shift_overflow || bitrate_bps > kMaxBitrateBps) {
+ LOG(LS_ERROR) << "Unhandled tmmbr bitrate value : " << mantissa
+ << "*2^" << static_cast<int>(exp);
+ _state = ParseState::State_TopLevel;
+ EndCurrentBlock();
+ return false;
+ }
+
+ _packet.TMMBRItem.MaxTotalMediaBitRate = bitrate_bps / 1000;
_packet.TMMBRItem.MeasuredOverhead = measuredOH;
return true;
@@ -1531,18 +1555,28 @@ RTCPUtility::RTCPParserV2::ParseTMMBNItem()
_packet.TMMBNItem.SSRC += *_ptrRTCPData++ << 8;
_packet.TMMBNItem.SSRC += *_ptrRTCPData++;
- uint8_t mxtbrExp = (_ptrRTCPData[0] >> 2) & 0x3F;
+ uint8_t exp = (_ptrRTCPData[0] >> 2) & 0x3F;
- uint32_t mxtbrMantissa = (_ptrRTCPData[0] & 0x03) << 15;
- mxtbrMantissa += (_ptrRTCPData[1] << 7);
- mxtbrMantissa += (_ptrRTCPData[2] >> 1) & 0x7F;
+ uint64_t mantissa = (_ptrRTCPData[0] & 0x03) << 15;
+ mantissa += (_ptrRTCPData[1] << 7);
+ mantissa += (_ptrRTCPData[2] >> 1) & 0x7F;
uint32_t measuredOH = (_ptrRTCPData[2] & 0x01) << 8;
measuredOH += _ptrRTCPData[3];
_ptrRTCPData += 4; // Fwd read data
- _packet.TMMBNItem.MaxTotalMediaBitRate = ((mxtbrMantissa << mxtbrExp) / 1000);
+ uint64_t bitrate_bps = (mantissa << exp);
+ bool shift_overflow = exp > 0 && (mantissa >> (64 - exp)) != 0;
+ if (shift_overflow || bitrate_bps > kMaxBitrateBps) {
+ LOG(LS_ERROR) << "Unhandled tmmbn bitrate value : " << mantissa
+ << "*2^" << static_cast<int>(exp);
+ _state = ParseState::State_TopLevel;
+ EndCurrentBlock();
+ return false;
+ }
+
+ _packet.TMMBNItem.MaxTotalMediaBitRate = bitrate_bps / 1000;
_packet.TMMBNItem.MeasuredOverhead = measuredOH;
return true;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.h
index 4067a40886f..629de4e99ec 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.h
@@ -13,7 +13,8 @@
#include <stddef.h> // size_t, ptrdiff_t
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "webrtc/typedefs.h"
@@ -468,7 +469,7 @@ class RTCPParserV2 {
RTCPPacketTypes _packetType;
RTCPPacket _packet;
- rtc::scoped_ptr<webrtc::rtcp::RtcpPacket> rtcp_packet_;
+ std::unique_ptr<webrtc::rtcp::RtcpPacket> rtcp_packet_;
};
class RTCPPacketIterator {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h
index e32433fe904..88258df8bcd 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h
@@ -14,6 +14,7 @@
#include <queue>
#include <string>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc
index d29e3d4f212..12c2db564bd 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <vector>
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
@@ -72,7 +72,7 @@ void VerifyFua(size_t fua_index,
void TestFua(size_t frame_size,
size_t max_payload_size,
const std::vector<size_t>& expected_sizes) {
- rtc::scoped_ptr<uint8_t[]> frame;
+ std::unique_ptr<uint8_t[]> frame;
frame.reset(new uint8_t[frame_size]);
frame[0] = 0x05; // F=0, NRI=0, Type=5.
for (size_t i = 0; i < frame_size - kNalHeaderSize; ++i) {
@@ -82,11 +82,11 @@ void TestFua(size_t frame_size,
fragmentation.VerifyAndAllocateFragmentationHeader(1);
fragmentation.fragmentationOffset[0] = 0;
fragmentation.fragmentationLength[0] = frame_size;
- rtc::scoped_ptr<RtpPacketizer> packetizer(RtpPacketizer::Create(
+ std::unique_ptr<RtpPacketizer> packetizer(RtpPacketizer::Create(
kRtpVideoH264, max_payload_size, NULL, kEmptyFrame));
packetizer->SetPayloadData(frame.get(), frame_size, &fragmentation);
- rtc::scoped_ptr<uint8_t[]> packet(new uint8_t[max_payload_size]);
+ std::unique_ptr<uint8_t[]> packet(new uint8_t[max_payload_size]);
size_t length = 0;
bool last = false;
size_t offset = kNalHeaderSize;
@@ -156,7 +156,7 @@ TEST(RtpPacketizerH264Test, TestSingleNalu) {
fragmentation.VerifyAndAllocateFragmentationHeader(1);
fragmentation.fragmentationOffset[0] = 0;
fragmentation.fragmentationLength[0] = sizeof(frame);
- rtc::scoped_ptr<RtpPacketizer> packetizer(
+ std::unique_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kEmptyFrame));
packetizer->SetPayloadData(frame, sizeof(frame), &fragmentation);
uint8_t packet[kMaxPayloadSize] = {0};
@@ -185,7 +185,7 @@ TEST(RtpPacketizerH264Test, TestSingleNaluTwoPackets) {
frame[fragmentation.fragmentationOffset[0]] = 0x01;
frame[fragmentation.fragmentationOffset[1]] = 0x01;
- rtc::scoped_ptr<RtpPacketizer> packetizer(
+ std::unique_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kEmptyFrame));
packetizer->SetPayloadData(frame, kFrameSize, &fragmentation);
@@ -222,7 +222,7 @@ TEST(RtpPacketizerH264Test, TestStapA) {
fragmentation.fragmentationOffset[2] = 4;
fragmentation.fragmentationLength[2] =
kNalHeaderSize + kFrameSize - kPayloadOffset;
- rtc::scoped_ptr<RtpPacketizer> packetizer(
+ std::unique_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kEmptyFrame));
packetizer->SetPayloadData(frame, kFrameSize, &fragmentation);
@@ -257,7 +257,7 @@ TEST(RtpPacketizerH264Test, TestTooSmallForStapAHeaders) {
fragmentation.fragmentationOffset[2] = 4;
fragmentation.fragmentationLength[2] =
kNalHeaderSize + kFrameSize - kPayloadOffset;
- rtc::scoped_ptr<RtpPacketizer> packetizer(
+ std::unique_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kEmptyFrame));
packetizer->SetPayloadData(frame, kFrameSize, &fragmentation);
@@ -305,7 +305,7 @@ TEST(RtpPacketizerH264Test, TestMixedStapA_FUA) {
frame[nalu_offset + j] = i + j;
}
}
- rtc::scoped_ptr<RtpPacketizer> packetizer(
+ std::unique_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(kRtpVideoH264, kMaxPayloadSize, NULL, kEmptyFrame));
packetizer->SetPayloadData(frame, kFrameSize, &fragmentation);
@@ -394,7 +394,7 @@ class RtpDepacketizerH264Test : public ::testing::Test {
::testing::ElementsAreArray(data, length));
}
- rtc::scoped_ptr<RtpDepacketizer> depacketizer_;
+ std::unique_ptr<RtpDepacketizer> depacketizer_;
};
TEST_F(RtpDepacketizerH264Test, TestSingleNalu) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h
index 3bf72e9dd35..e72fe310cfc 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h
@@ -12,6 +12,7 @@
#include <string>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
#include "webrtc/typedefs.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc
index 4283a778d00..079d9647545 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc
@@ -8,9 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-/*
- * This file includes unit tests for the VP8 packetizer.
- */
+#include <memory>
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -421,7 +419,7 @@ class RtpDepacketizerVp8Test : public ::testing::Test {
::testing::ElementsAreArray(data, length));
}
- rtc::scoped_ptr<RtpDepacketizer> depacketizer_;
+ std::unique_ptr<RtpDepacketizer> depacketizer_;
};
TEST_F(RtpDepacketizerVp8Test, BasicHeader) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc
index 5bbafe459d2..f9514ad4bdb 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <vector>
#include "testing/gmock/include/gmock/gmock.h"
@@ -76,7 +77,7 @@ void ParseAndCheckPacket(const uint8_t* packet,
const RTPVideoHeaderVP9& expected,
size_t expected_hdr_length,
size_t expected_length) {
- rtc::scoped_ptr<RtpDepacketizer> depacketizer(new RtpDepacketizerVp9());
+ std::unique_ptr<RtpDepacketizer> depacketizer(new RtpDepacketizerVp9());
RtpDepacketizer::ParsedPayload parsed;
ASSERT_TRUE(depacketizer->Parse(&parsed, packet, expected_length));
EXPECT_EQ(kRtpVideoVp9, parsed.type.Video.codec);
@@ -127,12 +128,12 @@ class RtpPacketizerVp9Test : public ::testing::Test {
expected_.InitRTPVideoHeaderVP9();
}
- rtc::scoped_ptr<uint8_t[]> packet_;
- rtc::scoped_ptr<uint8_t[]> payload_;
+ std::unique_ptr<uint8_t[]> packet_;
+ std::unique_ptr<uint8_t[]> payload_;
size_t payload_size_;
size_t payload_pos_;
RTPVideoHeaderVP9 expected_;
- rtc::scoped_ptr<RtpPacketizerVp9> packetizer_;
+ std::unique_ptr<RtpPacketizerVp9> packetizer_;
void Init(size_t payload_size, size_t packet_size) {
payload_.reset(new uint8_t[payload_size]);
@@ -469,7 +470,7 @@ class RtpDepacketizerVp9Test : public ::testing::Test {
}
RTPVideoHeaderVP9 expected_;
- rtc::scoped_ptr<RtpDepacketizer> depacketizer_;
+ std::unique_ptr<RtpDepacketizer> depacketizer_;
};
TEST_F(RtpDepacketizerVp9Test, ParseBasicHeader) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
index 8605925785e..2c2a0a13566 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.cc
@@ -112,6 +112,14 @@ int32_t RtpHeaderExtensionMap::GetType(const uint8_t id,
return 0;
}
+RTPExtensionType RtpHeaderExtensionMap::GetType(uint8_t id) const {
+ auto it = extensionMap_.find(id);
+ if (it == extensionMap_.end()) {
+ return kInvalidType;
+ }
+ return it->second->type;
+}
+
int32_t RtpHeaderExtensionMap::GetId(const RTPExtensionType type,
uint8_t* id) const {
assert(id);
@@ -129,6 +137,14 @@ int32_t RtpHeaderExtensionMap::GetId(const RTPExtensionType type,
return -1;
}
+uint8_t RtpHeaderExtensionMap::GetId(RTPExtensionType type) const {
+ for (auto kv : extensionMap_) {
+ if (kv.second->type == type)
+ return kv.first;
+ }
+ return kInvalidId;
+}
+
size_t RtpHeaderExtensionMap::GetTotalLengthInBytes() const {
// Get length for each extension block.
size_t length = 0;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
index 342e38a1f2a..beaf989c895 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
@@ -70,6 +70,8 @@ struct HeaderExtension {
class RtpHeaderExtensionMap {
public:
+ static constexpr RTPExtensionType kInvalidType = kRtpExtensionNone;
+ static constexpr uint8_t kInvalidId = 0;
RtpHeaderExtensionMap();
~RtpHeaderExtensionMap();
@@ -89,8 +91,12 @@ class RtpHeaderExtensionMap {
bool IsRegistered(RTPExtensionType type) const;
int32_t GetType(const uint8_t id, RTPExtensionType* type) const;
+ // Return kInvalidType if not found.
+ RTPExtensionType GetType(uint8_t id) const;
int32_t GetId(const RTPExtensionType type, uint8_t* id) const;
+ // Return kInvalidId if not found.
+ uint8_t GetId(RTPExtensionType type) const;
//
// Methods below ignore any inactive rtp header extensions.
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc
index ca37750621c..0b4f893e2c8 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc
@@ -8,11 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-
-/*
- * This file includes unit tests for the RtpHeaderExtensionMap.
- */
-
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
new file mode 100644
index 00000000000..a551b15617b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
@@ -0,0 +1,203 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+// Absolute send time in RTP streams.
+//
+// The absolute send time is signaled to the receiver in-band using the
+// general mechanism for RTP header extensions [RFC5285]. The payload
+// of this extension (the transmitted value) is a 24-bit unsigned integer
+// containing the sender's current time in seconds as a fixed point number
+// with 18 bits fractional part.
+//
+// The form of the absolute send time extension block:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | ID | len=2 | absolute send time |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+const char* AbsoluteSendTime::kName =
+ "http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time";
+bool AbsoluteSendTime::IsSupportedFor(MediaType type) {
+ return true;
+}
+
+bool AbsoluteSendTime::Parse(const uint8_t* data, uint32_t* value) {
+ *value = ByteReader<uint32_t, 3>::ReadBigEndian(data);
+ return true;
+}
+
+bool AbsoluteSendTime::Write(uint8_t* data, int64_t time_ms) {
+ const uint32_t kAbsSendTimeFraction = 18;
+ uint32_t time_24_bits =
+ static_cast<uint32_t>(((time_ms << kAbsSendTimeFraction) + 500) / 1000) &
+ 0x00FFFFFF;
+
+ ByteWriter<uint32_t, 3>::WriteBigEndian(data, time_24_bits);
+ return true;
+}
+
+// An RTP Header Extension for Client-to-Mixer Audio Level Indication
+//
+// https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/
+//
+// The form of the audio level extension block:
+//
+// 0 1
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | ID | len=0 |V| level |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+const char* AudioLevel::kName = "urn:ietf:params:rtp-hdrext:ssrc-audio-level";
+bool AudioLevel::IsSupportedFor(MediaType type) {
+ switch (type) {
+ case MediaType::ANY:
+ case MediaType::AUDIO:
+ return true;
+ case MediaType::VIDEO:
+ case MediaType::DATA:
+ return false;
+ }
+ RTC_NOTREACHED();
+ return false;
+}
+
+bool AudioLevel::Parse(const uint8_t* data,
+ bool* voice_activity,
+ uint8_t* audio_level) {
+ *voice_activity = (data[0] & 0x80) != 0;
+ *audio_level = data[0] & 0x7F;
+ return true;
+}
+
+bool AudioLevel::Write(uint8_t* data,
+ bool voice_activity,
+ uint8_t audio_level) {
+ RTC_CHECK_LE(audio_level, 0x7f);
+ data[0] = (voice_activity ? 0x80 : 0x00) | audio_level;
+ return true;
+}
+
+// From RFC 5450: Transmission Time Offsets in RTP Streams.
+//
+// The transmission time is signaled to the receiver in-band using the
+// general mechanism for RTP header extensions [RFC5285]. The payload
+// of this extension (the transmitted value) is a 24-bit signed integer.
+// When added to the RTP timestamp of the packet, it represents the
+// "effective" RTP transmission time of the packet, on the RTP
+// timescale.
+//
+// The form of the transmission offset extension block:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | ID | len=2 | transmission offset |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+const char* TransmissionOffset::kName = "urn:ietf:params:rtp-hdrext:toffset";
+bool TransmissionOffset::IsSupportedFor(MediaType type) {
+ switch (type) {
+ case MediaType::ANY:
+ case MediaType::VIDEO:
+ return true;
+ case MediaType::AUDIO:
+ case MediaType::DATA:
+ return false;
+ }
+ RTC_NOTREACHED();
+ return false;
+}
+
+bool TransmissionOffset::Parse(const uint8_t* data, int32_t* value) {
+ *value = ByteReader<int32_t, 3>::ReadBigEndian(data);
+ return true;
+}
+
+bool TransmissionOffset::Write(uint8_t* data, int64_t value) {
+ RTC_CHECK_LE(value, 0x00ffffff);
+ ByteWriter<int32_t, 3>::WriteBigEndian(data, value);
+ return true;
+}
+
+// 0 1 2
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | ID | L=1 |transport wide sequence number |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+const char* TransportSequenceNumber::kName =
+ "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions";
+bool TransportSequenceNumber::IsSupportedFor(MediaType type) {
+ return true;
+}
+
+bool TransportSequenceNumber::Parse(const uint8_t* data, uint16_t* value) {
+ *value = ByteReader<uint16_t>::ReadBigEndian(data);
+ return true;
+}
+
+bool TransportSequenceNumber::Write(uint8_t* data, uint16_t value) {
+ ByteWriter<uint16_t>::WriteBigEndian(data, value);
+ return true;
+}
+
+// Coordination of Video Orientation in RTP streams.
+//
+// Coordination of Video Orientation consists in signaling of the current
+// orientation of the image captured on the sender side to the receiver for
+// appropriate rendering and displaying.
+//
+// 0 1
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | ID | len=0 |0 0 0 0 C F R R|
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+const char* VideoOrientation::kName = "urn:3gpp:video-orientation";
+bool VideoOrientation::IsSupportedFor(MediaType type) {
+ switch (type) {
+ case MediaType::ANY:
+ case MediaType::VIDEO:
+ return true;
+ case MediaType::AUDIO:
+ case MediaType::DATA:
+ return false;
+ }
+ RTC_NOTREACHED();
+ return false;
+}
+
+bool VideoOrientation::Parse(const uint8_t* data, VideoRotation* rotation) {
+ *rotation = ConvertCVOByteToVideoRotation(data[0] & 0x03);
+ return true;
+}
+
+bool VideoOrientation::Write(uint8_t* data, VideoRotation rotation) {
+ data[0] = ConvertVideoRotationToCVOByte(rotation);
+ return true;
+}
+
+bool VideoOrientation::Parse(const uint8_t* data, uint8_t* value) {
+ *value = data[0];
+ return true;
+}
+
+bool VideoOrientation::Write(uint8_t* data, uint8_t value) {
+ data[0] = value;
+ return true;
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
new file mode 100644
index 00000000000..cdbf806170d
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_HEADER_EXTENSIONS_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_HEADER_EXTENSIONS_H_
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/call.h"
+#include "webrtc/common_video/rotation.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+class AbsoluteSendTime {
+ public:
+ static constexpr RTPExtensionType kId = kRtpExtensionAbsoluteSendTime;
+ static constexpr uint8_t kValueSizeBytes = 3;
+ static const char* kName;
+ static bool IsSupportedFor(MediaType type);
+ static bool Parse(const uint8_t* data, uint32_t* time_ms);
+ static bool Write(uint8_t* data, int64_t time_ms);
+};
+
+class AudioLevel {
+ public:
+ static constexpr RTPExtensionType kId = kRtpExtensionAudioLevel;
+ static constexpr uint8_t kValueSizeBytes = 1;
+ static const char* kName;
+ static bool IsSupportedFor(MediaType type);
+ static bool Parse(const uint8_t* data,
+ bool* voice_activity,
+ uint8_t* audio_level);
+ static bool Write(uint8_t* data, bool voice_activity, uint8_t audio_level);
+};
+
+class TransmissionOffset {
+ public:
+ static constexpr RTPExtensionType kId = kRtpExtensionTransmissionTimeOffset;
+ static constexpr uint8_t kValueSizeBytes = 3;
+ static const char* kName;
+ static bool IsSupportedFor(MediaType type);
+ static bool Parse(const uint8_t* data, int32_t* time_ms);
+ static bool Write(uint8_t* data, int64_t time_ms);
+};
+
+class TransportSequenceNumber {
+ public:
+ static constexpr RTPExtensionType kId = kRtpExtensionTransportSequenceNumber;
+ static constexpr uint8_t kValueSizeBytes = 2;
+ static const char* kName;
+ static bool IsSupportedFor(MediaType type);
+ static bool Parse(const uint8_t* data, uint16_t* value);
+ static bool Write(uint8_t* data, uint16_t value);
+};
+
+class VideoOrientation {
+ public:
+ static constexpr RTPExtensionType kId = kRtpExtensionVideoRotation;
+ static constexpr uint8_t kValueSizeBytes = 1;
+ static const char* kName;
+ static bool IsSupportedFor(MediaType type);
+ static bool Parse(const uint8_t* data, VideoRotation* value);
+ static bool Write(uint8_t* data, VideoRotation value);
+ static bool Parse(const uint8_t* data, uint8_t* value);
+ static bool Write(uint8_t* data, uint8_t value);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_HEADER_EXTENSIONS_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
index d4cbe544cc6..2cec8a3e0f6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
@@ -9,10 +9,9 @@
*/
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -30,7 +29,7 @@ class RtpHeaderParserImpl : public RtpHeaderParser {
bool DeregisterRtpHeaderExtension(RTPExtensionType type) override;
private:
- rtc::scoped_ptr<CriticalSectionWrapper> critical_section_;
+ rtc::CriticalSection critical_section_;
RtpHeaderExtensionMap rtp_header_extension_map_ GUARDED_BY(critical_section_);
};
@@ -38,8 +37,7 @@ RtpHeaderParser* RtpHeaderParser::Create() {
return new RtpHeaderParserImpl;
}
-RtpHeaderParserImpl::RtpHeaderParserImpl()
- : critical_section_(CriticalSectionWrapper::CreateCriticalSection()) {}
+RtpHeaderParserImpl::RtpHeaderParserImpl() {}
bool RtpHeaderParser::IsRtcp(const uint8_t* packet, size_t length) {
RtpUtility::RtpHeaderParser rtp_parser(packet, length);
@@ -54,7 +52,7 @@ bool RtpHeaderParserImpl::Parse(const uint8_t* packet,
RtpHeaderExtensionMap map;
{
- CriticalSectionScoped cs(critical_section_.get());
+ rtc::CritScope cs(&critical_section_);
rtp_header_extension_map_.GetCopy(&map);
}
@@ -67,12 +65,12 @@ bool RtpHeaderParserImpl::Parse(const uint8_t* packet,
bool RtpHeaderParserImpl::RegisterRtpHeaderExtension(RTPExtensionType type,
uint8_t id) {
- CriticalSectionScoped cs(critical_section_.get());
+ rtc::CritScope cs(&critical_section_);
return rtp_header_extension_map_.Register(type, id) == 0;
}
bool RtpHeaderParserImpl::DeregisterRtpHeaderExtension(RTPExtensionType type) {
- CriticalSectionScoped cs(critical_section_.get());
+ rtc::CritScope cs(&critical_section_);
return rtp_header_extension_map_.Deregister(type) == 0;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
new file mode 100644
index 00000000000..f6634867f68
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.cc
@@ -0,0 +1,509 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtp_packet.h"
+
+#include <cstring>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/random.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+namespace rtp {
+namespace {
+constexpr size_t kFixedHeaderSize = 12;
+constexpr uint8_t kRtpVersion = 2;
+constexpr uint16_t kOneByteExtensionId = 0xBEDE;
+constexpr size_t kOneByteHeaderSize = 1;
+constexpr size_t kDefaultPacketSize = 1500;
+} // namespace
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P|X| CC |M| PT | sequence number |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | timestamp |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | synchronization source (SSRC) identifier |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | Contributing source (CSRC) identifiers |
+// | .... |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// |One-byte eXtensions id = 0xbede| length in 32bits |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | Extensions |
+// | .... |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | Payload |
+// | .... : padding... |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | padding | Padding size |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+Packet::Packet(const ExtensionManager* extensions)
+ : extensions_(extensions), buffer_(kDefaultPacketSize) {
+ Clear();
+}
+
+Packet::Packet(const ExtensionManager* extensions, size_t capacity)
+ : extensions_(extensions), buffer_(capacity) {
+ RTC_DCHECK_GE(capacity, kFixedHeaderSize);
+ Clear();
+}
+
+Packet::~Packet() {}
+
+void Packet::IdentifyExtensions(const ExtensionManager* extensions) {
+ RTC_DCHECK(extensions);
+ extensions_ = extensions;
+ for (size_t i = 0; i < num_extensions_; ++i) {
+ uint8_t id = data()[extension_entries_[i].offset - 1] >> 4;
+ extension_entries_[i].type = extensions_->GetType(id);
+ }
+}
+
+bool Packet::Parse(const uint8_t* buffer, size_t buffer_size) {
+ if (!ParseBuffer(buffer, buffer_size)) {
+ Clear();
+ return false;
+ }
+ RTC_DCHECK_EQ(size(), buffer_size);
+ buffer_.SetData(buffer, buffer_size);
+ return true;
+}
+
+bool Packet::Parse(rtc::Buffer buffer) {
+ if (!ParseBuffer(buffer.data(), buffer.size())) {
+ Clear();
+ return false;
+ }
+ RTC_DCHECK_EQ(size(), buffer.size());
+ buffer_ = std::move(buffer);
+ return true;
+}
+
+bool Packet::Marker() const {
+ RTC_DCHECK_EQ(marker_, (data()[1] & 0x80) != 0);
+ return marker_;
+}
+
+uint8_t Packet::PayloadType() const {
+ RTC_DCHECK_EQ(payload_type_, data()[1] & 0x7f);
+ return payload_type_;
+}
+
+uint16_t Packet::SequenceNumber() const {
+ RTC_DCHECK_EQ(sequence_number_,
+ ByteReader<uint16_t>::ReadBigEndian(data() + 2));
+ return sequence_number_;
+}
+
+uint32_t Packet::Timestamp() const {
+ RTC_DCHECK_EQ(timestamp_, ByteReader<uint32_t>::ReadBigEndian(data() + 4));
+ return timestamp_;
+}
+
+uint32_t Packet::Ssrc() const {
+ RTC_DCHECK_EQ(ssrc_, ByteReader<uint32_t>::ReadBigEndian(data() + 8));
+ return ssrc_;
+}
+
+std::vector<uint32_t> Packet::Csrcs() const {
+ size_t num_csrc = data()[0] & 0x0F;
+ RTC_DCHECK_GE(capacity(), kFixedHeaderSize + num_csrc * 4);
+ std::vector<uint32_t> csrcs(num_csrc);
+ for (size_t i = 0; i < num_csrc; ++i) {
+ csrcs[i] =
+ ByteReader<uint32_t>::ReadBigEndian(&data()[kFixedHeaderSize + i * 4]);
+ }
+ return csrcs;
+}
+
+void Packet::GetHeader(RTPHeader* header) const {
+ header->markerBit = Marker();
+ header->payloadType = PayloadType();
+ header->sequenceNumber = SequenceNumber();
+ header->timestamp = Timestamp();
+ header->ssrc = Ssrc();
+ std::vector<uint32_t> csrcs = Csrcs();
+ header->numCSRCs = csrcs.size();
+ for (size_t i = 0; i < csrcs.size(); ++i) {
+ header->arrOfCSRCs[i] = csrcs[i];
+ }
+ header->paddingLength = padding_size();
+ header->headerLength = headers_size();
+ header->payload_type_frequency = 0;
+ header->extension.hasTransmissionTimeOffset =
+ GetExtension<TransmissionOffset>(
+ &header->extension.transmissionTimeOffset);
+ header->extension.hasAbsoluteSendTime =
+ GetExtension<AbsoluteSendTime>(&header->extension.absoluteSendTime);
+ header->extension.hasTransportSequenceNumber =
+ GetExtension<TransportSequenceNumber>(
+ &header->extension.transportSequenceNumber);
+ header->extension.hasAudioLevel = GetExtension<AudioLevel>(
+ &header->extension.voiceActivity, &header->extension.audioLevel);
+ header->extension.hasVideoRotation =
+ GetExtension<VideoOrientation>(&header->extension.videoRotation);
+}
+
+size_t Packet::headers_size() const {
+ return payload_offset_;
+}
+
+size_t Packet::payload_size() const {
+ return payload_size_;
+}
+
+size_t Packet::padding_size() const {
+ return padding_size_;
+}
+
+const uint8_t* Packet::payload() const {
+ return data() + payload_offset_;
+}
+
+size_t Packet::capacity() const {
+ return buffer_.size();
+}
+
+size_t Packet::size() const {
+ return payload_offset_ + payload_size_ + padding_size_;
+}
+
+const uint8_t* Packet::data() const {
+ return buffer_.data();
+}
+
+size_t Packet::FreeCapacity() const {
+ return capacity() - size();
+}
+
+size_t Packet::MaxPayloadSize() const {
+ return capacity() - payload_offset_;
+}
+
+void Packet::CopyHeader(const Packet& packet) {
+ RTC_DCHECK_GE(capacity(), packet.headers_size());
+
+ marker_ = packet.marker_;
+ payload_type_ = packet.payload_type_;
+ sequence_number_ = packet.sequence_number_;
+ timestamp_ = packet.timestamp_;
+ ssrc_ = packet.ssrc_;
+ payload_offset_ = packet.payload_offset_;
+ num_extensions_ = packet.num_extensions_;
+ for (size_t i = 0; i < num_extensions_; ++i) {
+ extension_entries_[i] = packet.extension_entries_[i];
+ }
+ extensions_size_ = packet.extensions_size_;
+ buffer_.SetData(packet.data(), packet.headers_size());
+ // Reset payload and padding.
+ payload_size_ = 0;
+ padding_size_ = 0;
+}
+
+void Packet::SetMarker(bool marker_bit) {
+ marker_ = marker_bit;
+ if (marker_) {
+ WriteAt(1, data()[1] | 0x80);
+ } else {
+ WriteAt(1, data()[1] & 0x7F);
+ }
+}
+
+void Packet::SetPayloadType(uint8_t payload_type) {
+ RTC_DCHECK_LE(payload_type, 0x7Fu);
+ payload_type_ = payload_type;
+ WriteAt(1, (data()[1] & 0x80) | payload_type);
+}
+
+void Packet::SetSequenceNumber(uint16_t seq_no) {
+ sequence_number_ = seq_no;
+ ByteWriter<uint16_t>::WriteBigEndian(WriteAt(2), seq_no);
+}
+
+void Packet::SetTimestamp(uint32_t timestamp) {
+ timestamp_ = timestamp;
+ ByteWriter<uint32_t>::WriteBigEndian(WriteAt(4), timestamp);
+}
+
+void Packet::SetSsrc(uint32_t ssrc) {
+ ssrc_ = ssrc;
+ ByteWriter<uint32_t>::WriteBigEndian(WriteAt(8), ssrc);
+}
+
+void Packet::SetCsrcs(const std::vector<uint32_t>& csrcs) {
+ RTC_DCHECK_EQ(num_extensions_, 0u);
+ RTC_DCHECK_EQ(payload_size_, 0u);
+ RTC_DCHECK_EQ(padding_size_, 0u);
+ RTC_DCHECK_LE(csrcs.size(), 0x0fu);
+ RTC_DCHECK_LE(kFixedHeaderSize + 4 * csrcs.size(), capacity());
+ payload_offset_ = kFixedHeaderSize + 4 * csrcs.size();
+ WriteAt(0, (data()[0] & 0xF0) | csrcs.size());
+ size_t offset = kFixedHeaderSize;
+ for (uint32_t csrc : csrcs) {
+ ByteWriter<uint32_t>::WriteBigEndian(WriteAt(offset), csrc);
+ offset += 4;
+ }
+}
+
+uint8_t* Packet::AllocatePayload(size_t size_bytes) {
+ RTC_DCHECK_EQ(padding_size_, 0u);
+ if (payload_offset_ + size_bytes > capacity()) {
+ LOG(LS_WARNING) << "Cannot set payload, not enough space in buffer.";
+ return nullptr;
+ }
+ payload_size_ = size_bytes;
+ return WriteAt(payload_offset_);
+}
+
+void Packet::SetPayloadSize(size_t size_bytes) {
+ RTC_DCHECK_EQ(padding_size_, 0u);
+ RTC_DCHECK_LE(size_bytes, payload_size_);
+ payload_size_ = size_bytes;
+}
+
+bool Packet::SetPadding(uint8_t size_bytes, Random* random) {
+ RTC_DCHECK(random);
+ if (payload_offset_ + payload_size_ + size_bytes > capacity()) {
+ LOG(LS_WARNING) << "Cannot set padding size " << size_bytes << ", only "
+ << (capacity() - payload_offset_ - payload_size_)
+ << " bytes left in buffer.";
+ return false;
+ }
+ padding_size_ = size_bytes;
+ if (padding_size_ > 0) {
+ size_t padding_offset = payload_offset_ + payload_size_;
+ size_t padding_end = padding_offset + padding_size_;
+ for (size_t offset = padding_offset; offset < padding_end - 1; ++offset) {
+ WriteAt(offset, random->Rand<uint8_t>());
+ }
+ WriteAt(padding_end - 1, padding_size_);
+ WriteAt(0, data()[0] | 0x20); // Set padding bit.
+ } else {
+ WriteAt(0, data()[0] & ~0x20); // Clear padding bit.
+ }
+ return true;
+}
+
+void Packet::Clear() {
+ marker_ = false;
+ payload_type_ = 0;
+ sequence_number_ = 0;
+ timestamp_ = 0;
+ ssrc_ = 0;
+ payload_offset_ = kFixedHeaderSize;
+ payload_size_ = 0;
+ padding_size_ = 0;
+ num_extensions_ = 0;
+ extensions_size_ = 0;
+
+ memset(WriteAt(0), 0, kFixedHeaderSize);
+ WriteAt(0, kRtpVersion << 6);
+}
+
+bool Packet::ParseBuffer(const uint8_t* buffer, size_t size) {
+ if (size < kFixedHeaderSize) {
+ return false;
+ }
+ const uint8_t version = buffer[0] >> 6;
+ if (version != kRtpVersion) {
+ return false;
+ }
+ const bool has_padding = (buffer[0] & 0x20) != 0;
+ const bool has_extension = (buffer[0] & 0x10) != 0;
+ const uint8_t number_of_crcs = buffer[0] & 0x0f;
+ marker_ = (buffer[1] & 0x80) != 0;
+ payload_type_ = buffer[1] & 0x7f;
+
+ sequence_number_ = ByteReader<uint16_t>::ReadBigEndian(&buffer[2]);
+ timestamp_ = ByteReader<uint32_t>::ReadBigEndian(&buffer[4]);
+ ssrc_ = ByteReader<uint32_t>::ReadBigEndian(&buffer[8]);
+ if (size < kFixedHeaderSize + number_of_crcs * 4) {
+ return false;
+ }
+ payload_offset_ = kFixedHeaderSize + number_of_crcs * 4;
+
+ if (has_padding) {
+ padding_size_ = buffer[size - 1];
+ if (padding_size_ == 0) {
+ LOG(LS_WARNING) << "Padding was set, but padding size is zero";
+ return false;
+ }
+ } else {
+ padding_size_ = 0;
+ }
+
+ num_extensions_ = 0;
+ extensions_size_ = 0;
+ if (has_extension) {
+ /* RTP header extension, RFC 3550.
+ 0 1 2 3
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | defined by profile | length |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | header extension |
+ | .... |
+ */
+ size_t extension_offset = payload_offset_ + 4;
+ if (extension_offset > size) {
+ return false;
+ }
+ uint16_t profile =
+ ByteReader<uint16_t>::ReadBigEndian(&buffer[payload_offset_]);
+ size_t extensions_capacity =
+ ByteReader<uint16_t>::ReadBigEndian(&buffer[payload_offset_ + 2]);
+ extensions_capacity *= 4;
+ if (extension_offset + extensions_capacity > size) {
+ return false;
+ }
+ if (profile != kOneByteExtensionId) {
+ LOG(LS_WARNING) << "Unsupported rtp extension " << profile;
+ } else {
+ constexpr uint8_t kPaddingId = 0;
+ constexpr uint8_t kReservedId = 15;
+ while (extensions_size_ + kOneByteHeaderSize < extensions_capacity) {
+ uint8_t id = buffer[extension_offset + extensions_size_] >> 4;
+ if (id == kReservedId) {
+ break;
+ } else if (id == kPaddingId) {
+ extensions_size_++;
+ continue;
+ }
+ uint8_t length =
+ 1 + (buffer[extension_offset + extensions_size_] & 0xf);
+ extensions_size_ += kOneByteHeaderSize;
+ if (num_extensions_ >= kMaxExtensionHeaders) {
+ LOG(LS_WARNING) << "Too many extensions.";
+ return false;
+ }
+ extension_entries_[num_extensions_].type =
+ extensions_ ? extensions_->GetType(id)
+ : ExtensionManager::kInvalidType;
+ extension_entries_[num_extensions_].length = length;
+ extension_entries_[num_extensions_].offset =
+ extension_offset + extensions_size_;
+ num_extensions_++;
+ extensions_size_ += length;
+ }
+ }
+ payload_offset_ = extension_offset + extensions_capacity;
+ }
+
+ if (payload_offset_ + padding_size_ > size) {
+ return false;
+ }
+ payload_size_ = size - payload_offset_ - padding_size_;
+ return true;
+}
+
+bool Packet::FindExtension(ExtensionType type,
+ uint8_t length,
+ uint16_t* offset) const {
+ RTC_DCHECK(offset);
+ for (size_t i = 0; i < num_extensions_; ++i) {
+ if (extension_entries_[i].type == type) {
+ RTC_CHECK_EQ(length, extension_entries_[i].length)
+ << "Length mismatch for extension '" << type << "'"
+ << "should be " << length << ", received "
+ << extension_entries_[i].length;
+ *offset = extension_entries_[i].offset;
+ return true;
+ }
+ }
+ return false;
+}
+
+bool Packet::AllocateExtension(ExtensionType type,
+ uint8_t length,
+ uint16_t* offset) {
+ if (!extensions_) {
+ return false;
+ }
+ if (FindExtension(type, length, offset)) {
+ return true;
+ }
+
+ // Can't add new extension after payload/padding was set.
+ if (payload_size_ > 0) {
+ return false;
+ }
+ if (padding_size_ > 0) {
+ return false;
+ }
+
+ uint8_t extension_id = extensions_->GetId(type);
+ if (extension_id == ExtensionManager::kInvalidId) {
+ return false;
+ }
+ RTC_DCHECK_GT(length, 0u);
+ RTC_DCHECK_LE(length, 16u);
+
+ size_t num_csrc = data()[0] & 0x0F;
+ size_t extensions_offset = kFixedHeaderSize + (num_csrc * 4) + 4;
+ if (extensions_offset + extensions_size_ + kOneByteHeaderSize + length >
+ capacity()) {
+ LOG(LS_WARNING) << "Extension cannot be registered: "
+ "Not enough space left in buffer.";
+ return false;
+ }
+
+ uint16_t new_extensions_size =
+ extensions_size_ + kOneByteHeaderSize + length;
+ uint16_t extensions_words =
+ (new_extensions_size + 3) / 4; // Wrap up to 32bit.
+
+ // All checks passed, write down the extension.
+ if (num_extensions_ == 0) {
+ RTC_DCHECK_EQ(payload_offset_, kFixedHeaderSize + (num_csrc * 4));
+ RTC_DCHECK_EQ(extensions_size_, 0);
+ WriteAt(0, data()[0] | 0x10); // Set extension bit.
+ // Profile specific ID always set to OneByteExtensionHeader.
+ ByteWriter<uint16_t>::WriteBigEndian(WriteAt(extensions_offset - 4),
+ kOneByteExtensionId);
+ }
+
+ WriteAt(extensions_offset + extensions_size_,
+ (extension_id << 4) | (length - 1));
+ RTC_DCHECK(num_extensions_ < kMaxExtensionHeaders);
+ extension_entries_[num_extensions_].type = type;
+ extension_entries_[num_extensions_].length = length;
+ *offset = extensions_offset + kOneByteHeaderSize + extensions_size_;
+ extension_entries_[num_extensions_].offset = *offset;
+ ++num_extensions_;
+ extensions_size_ = new_extensions_size;
+
+ // Update header length field.
+ ByteWriter<uint16_t>::WriteBigEndian(WriteAt(extensions_offset - 2),
+ extensions_words);
+ // Fill extension padding place with zeroes.
+ size_t extension_padding_size = 4 * extensions_words - extensions_size_;
+ memset(WriteAt(extensions_offset + extensions_size_), 0,
+ extension_padding_size);
+ payload_offset_ = extensions_offset + 4 * extensions_words;
+ return true;
+}
+
+uint8_t* Packet::WriteAt(size_t offset) {
+ return buffer_.data() + offset;
+}
+
+void Packet::WriteAt(size_t offset, uint8_t byte) {
+ buffer_.data()[offset] = byte;
+}
+
+} // namespace rtp
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.h
new file mode 100644
index 00000000000..b2687ca9bab
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet.h
@@ -0,0 +1,187 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_H_
+
+#include <vector>
+
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/buffer.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+
+namespace webrtc {
+struct RTPHeader;
+class RtpHeaderExtensionMap;
+class Random;
+
+namespace rtp {
+class Packet {
+ public:
+ using ExtensionType = RTPExtensionType;
+ using ExtensionManager = RtpHeaderExtensionMap;
+ static constexpr size_t kMaxExtensionHeaders = 14;
+
+ // Parse and copy given buffer into Packet.
+ bool Parse(const uint8_t* buffer, size_t size);
+
+ // Parse and move given buffer into Packet.
+ bool Parse(rtc::Buffer packet);
+
+ // Maps parsed extensions to their types to allow use of GetExtension.
+ // Used after parsing when |extensions| can't be provided until base rtp
+ // header is parsed.
+ void IdentifyExtensions(const ExtensionManager* extensions);
+
+ // Header.
+ bool Marker() const;
+ uint8_t PayloadType() const;
+ uint16_t SequenceNumber() const;
+ uint32_t Timestamp() const;
+ uint32_t Ssrc() const;
+ std::vector<uint32_t> Csrcs() const;
+
+ // TODO(danilchap): Remove this function when all code update to use RtpPacket
+ // directly. Function is there just for easier backward compatibilty.
+ void GetHeader(RTPHeader* header) const;
+
+ size_t headers_size() const;
+
+ // Payload.
+ size_t payload_size() const;
+ size_t padding_size() const;
+ const uint8_t* payload() const;
+
+ // Buffer.
+ size_t capacity() const;
+ size_t size() const;
+ const uint8_t* data() const;
+ size_t FreeCapacity() const;
+ size_t MaxPayloadSize() const;
+
+ // Reset fields and buffer.
+ void Clear();
+
+ // Header setters.
+ void CopyHeader(const Packet& packet);
+ void SetMarker(bool marker_bit);
+ void SetPayloadType(uint8_t payload_type);
+ void SetSequenceNumber(uint16_t seq_no);
+ void SetTimestamp(uint32_t timestamp);
+ void SetSsrc(uint32_t ssrc);
+
+ // Writes csrc list. Assumes:
+ // a) There is enough room left in buffer.
+ // b) Extension headers, payload or padding data has not already been added.
+ void SetCsrcs(const std::vector<uint32_t>& csrcs);
+
+ // Header extensions.
+ template <typename Extension, typename... Values>
+ bool GetExtension(Values...) const;
+
+ template <typename Extension, typename... Values>
+ bool SetExtension(Values...);
+
+ template <typename Extension>
+ bool ReserveExtension();
+
+ // Reserve size_bytes for payload. Returns nullptr on failure.
+ uint8_t* AllocatePayload(size_t size_bytes);
+ void SetPayloadSize(size_t size_bytes);
+ bool SetPadding(uint8_t size_bytes, Random* random);
+
+ protected:
+ // |extensions| required for SetExtension/ReserveExtension functions during
+ // packet creating and used if available in Parse function.
+ // Adding and getting extensions will fail until |extensions| is
+ // provided via constructor or IdentifyExtensions function.
+ explicit Packet(const ExtensionManager* extensions);
+ Packet(const ExtensionManager* extensions, size_t capacity);
+ virtual ~Packet();
+
+ private:
+ struct ExtensionInfo {
+ ExtensionType type;
+ uint16_t offset;
+ uint8_t length;
+ };
+
+ // Helper function for Parse. Fill header fields using data in given buffer,
+ // but does not touch packet own buffer, leaving packet in invalid state.
+ bool ParseBuffer(const uint8_t* buffer, size_t size);
+
+ // Find an extension based on the type field of the parameter.
+ // If found, length field would be validated, the offset field will be set
+ // and true returned,
+ // otherwise the parameter will be unchanged and false is returned.
+ bool FindExtension(ExtensionType type,
+ uint8_t length,
+ uint16_t* offset) const;
+
+ // Find or allocate an extension, based on the type field of the parameter.
+ // If found, the length field be checked against what is already registered
+ // and the offset field will be set, then true is returned. If allocated, the
+ // length field will be used for allocation and the offset update to indicate
+ // position, the true is returned.
+ // If not found and allocations fails, false is returned and parameter remains
+ // unchanged.
+ bool AllocateExtension(ExtensionType type, uint8_t length, uint16_t* offset);
+
+ uint8_t* WriteAt(size_t offset);
+ void WriteAt(size_t offset, uint8_t byte);
+
+ const ExtensionManager* extensions_;
+
+ // Header.
+ bool marker_;
+ uint8_t payload_type_;
+ uint8_t padding_size_;
+ uint16_t sequence_number_;
+ uint32_t timestamp_;
+ uint32_t ssrc_;
+ size_t payload_offset_; // Match header size with csrcs and extensions.
+ size_t payload_size_;
+
+ uint8_t num_extensions_ = 0;
+ ExtensionInfo extension_entries_[kMaxExtensionHeaders];
+ uint16_t extensions_size_ = 0; // Unaligned.
+ rtc::Buffer buffer_;
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(Packet);
+};
+
+template <typename Extension, typename... Values>
+bool Packet::GetExtension(Values... values) const {
+ uint16_t offset = 0;
+ if (!FindExtension(Extension::kId, Extension::kValueSizeBytes, &offset))
+ return false;
+ return Extension::Parse(data() + offset, values...);
+}
+
+template <typename Extension, typename... Values>
+bool Packet::SetExtension(Values... values) {
+ uint16_t offset = 0;
+ if (!AllocateExtension(Extension::kId, Extension::kValueSizeBytes, &offset))
+ return false;
+ return Extension::Write(WriteAt(offset), values...);
+}
+
+template <typename Extension>
+bool Packet::ReserveExtension() {
+ uint16_t offset = 0;
+ if (!AllocateExtension(Extension::kId, Extension::kValueSizeBytes, &offset))
+ return false;
+ memset(WriteAt(offset), 0, Extension::kValueSizeBytes);
+ return true;
+}
+} // namespace rtp
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
index 49f9d8530a9..713fba87707 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
@@ -21,7 +21,6 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -29,7 +28,6 @@ static const int kMinPacketRequestBytes = 50;
RTPPacketHistory::RTPPacketHistory(Clock* clock)
: clock_(clock),
- critsect_(CriticalSectionWrapper::CreateCriticalSection()),
store_(false),
prev_index_(0) {}
@@ -38,7 +36,7 @@ RTPPacketHistory::~RTPPacketHistory() {
void RTPPacketHistory::SetStorePacketsStatus(bool enable,
uint16_t number_to_store) {
- CriticalSectionScoped cs(critsect_.get());
+ rtc::CritScope cs(&critsect_);
if (enable) {
if (store_) {
LOG(LS_WARNING) << "Purging packet history in order to re-set status.";
@@ -70,7 +68,7 @@ void RTPPacketHistory::Free() {
}
bool RTPPacketHistory::StorePackets() const {
- CriticalSectionScoped cs(critsect_.get());
+ rtc::CritScope cs(&critsect_);
return store_;
}
@@ -78,7 +76,7 @@ int32_t RTPPacketHistory::PutRTPPacket(const uint8_t* packet,
size_t packet_length,
int64_t capture_time_ms,
StorageType type) {
- CriticalSectionScoped cs(critsect_.get());
+ rtc::CritScope cs(&critsect_);
if (!store_) {
return 0;
}
@@ -131,7 +129,7 @@ int32_t RTPPacketHistory::PutRTPPacket(const uint8_t* packet,
}
bool RTPPacketHistory::HasRTPPacket(uint16_t sequence_number) const {
- CriticalSectionScoped cs(critsect_.get());
+ rtc::CritScope cs(&critsect_);
if (!store_) {
return false;
}
@@ -150,7 +148,7 @@ bool RTPPacketHistory::HasRTPPacket(uint16_t sequence_number) const {
}
bool RTPPacketHistory::SetSent(uint16_t sequence_number) {
- CriticalSectionScoped cs(critsect_.get());
+ rtc::CritScope cs(&critsect_);
if (!store_) {
return false;
}
@@ -176,7 +174,7 @@ bool RTPPacketHistory::GetPacketAndSetSendTime(uint16_t sequence_number,
uint8_t* packet,
size_t* packet_length,
int64_t* stored_time_ms) {
- CriticalSectionScoped cs(critsect_.get());
+ rtc::CritScope cs(&critsect_);
RTC_CHECK_GE(*packet_length, static_cast<size_t>(IP_PACKET_SIZE));
if (!store_)
return false;
@@ -232,7 +230,7 @@ void RTPPacketHistory::GetPacket(int index,
bool RTPPacketHistory::GetBestFittingPacket(uint8_t* packet,
size_t* packet_length,
int64_t* stored_time_ms) {
- CriticalSectionScoped cs(critsect_.get());
+ rtc::CritScope cs(&critsect_);
if (!store_)
return false;
int index = FindBestFittingPacket(*packet_length);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
index 8e1a732b199..b4d48aa2ced 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
@@ -15,6 +15,7 @@
#include <vector>
+#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@@ -23,7 +24,6 @@
namespace webrtc {
class Clock;
-class CriticalSectionWrapper;
static const size_t kMaxHistoryCapacity = 9600;
@@ -71,19 +71,19 @@ class RTPPacketHistory {
uint8_t* packet,
size_t* packet_length,
int64_t* stored_time_ms) const
- EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
- void Allocate(size_t number_to_store) EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
- void Free() EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_);
+ void Allocate(size_t number_to_store) EXCLUSIVE_LOCKS_REQUIRED(critsect_);
+ void Free() EXCLUSIVE_LOCKS_REQUIRED(critsect_);
void VerifyAndAllocatePacketLength(size_t packet_length, uint32_t start_index)
- EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_);
bool FindSeqNum(uint16_t sequence_number, int32_t* index) const
- EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_);
int FindBestFittingPacket(size_t size) const
- EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_);
private:
Clock* clock_;
- rtc::scoped_ptr<CriticalSectionWrapper> critsect_;
+ rtc::CriticalSection critsect_;
bool store_ GUARDED_BY(critsect_);
uint32_t prev_index_ GUARDED_BY(critsect_);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
index a406d8bc9b4..7580a809235 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
@@ -6,8 +6,6 @@
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
- *
- * This file includes unit tests for the RTPPacketHistory.
*/
#include "testing/gtest/include/gtest/gtest.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h
new file mode 100644
index 00000000000..e2222b9200a
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_RECEIVED_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_RECEIVED_H_
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_packet.h"
+#include "webrtc/system_wrappers/include/ntp_time.h"
+
+namespace webrtc {
+// Class to hold rtp packet with metadata for receiver side.
+class RtpPacketReceived : public rtp::Packet {
+ public:
+ RtpPacketReceived() : Packet(nullptr) {}
+ explicit RtpPacketReceived(const ExtensionManager* extensions)
+ : Packet(extensions) {}
+
+ void GetHeader(RTPHeader* header) const {
+ Packet::GetHeader(header);
+ header->payload_type_frequency = payload_type_frequency();
+ }
+
+ // Time in local time base as close as it can to packet arrived on the
+ // network.
+ int64_t arrival_time_ms() const { return arrival_time_ms_; }
+ void set_arrival_time_ms(int64_t time) { arrival_time_ms_ = time; }
+
+ // Estimated from Timestamp() using rtcp Sender Reports.
+ NtpTime capture_ntp_time() const { return capture_time_; }
+ void set_capture_ntp_time(NtpTime time) { capture_time_ = time; }
+
+ // Flag if packet arrived via rtx.
+ bool retransmit() const { return retransmit_; }
+ void set_retransmit(bool value) { retransmit_ = value; }
+
+ int payload_type_frequency() const { return payload_type_frequency_; }
+ void set_payload_type_frequency(int value) {
+ payload_type_frequency_ = value;
+ }
+
+ private:
+ NtpTime capture_time_;
+ int64_t arrival_time_ms_ = 0;
+ int payload_type_frequency_ = 0;
+ bool retransmit_ = false;
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_RECEIVED_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h
new file mode 100644
index 00000000000..ad749ffb61e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_TO_SEND_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_TO_SEND_H_
+
+#include "webrtc/modules/rtp_rtcp/source/rtp_packet.h"
+
+namespace webrtc {
+// Class to hold rtp packet with metadata for sender side.
+class RtpPacketToSend : public rtp::Packet {
+ public:
+ explicit RtpPacketToSend(const ExtensionManager* extensions)
+ : Packet(extensions) {}
+ RtpPacketToSend(const ExtensionManager* extensions, size_t capacity)
+ : Packet(extensions, capacity) {}
+
+ // Time in local time base as close as it can to frame capture time.
+ int64_t capture_time_ms() const { return capture_time_ms_; }
+ void set_capture_time_ms(int64_t time) { capture_time_ms_ = time; }
+
+ private:
+ int64_t capture_time_ms_ = 0;
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_PACKET_TO_SEND_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc
new file mode 100644
index 00000000000..b992d2da909
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_unittest.cc
@@ -0,0 +1,252 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/random.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
+
+using testing::ElementsAreArray;
+using testing::make_tuple;
+
+namespace webrtc {
+namespace {
+constexpr int8_t kPayloadType = 100;
+constexpr uint32_t kSsrc = 0x12345678;
+constexpr uint16_t kSeqNum = 88;
+constexpr uint32_t kTimestamp = 0x65431278;
+constexpr uint8_t kTransmissionOffsetExtensionId = 1;
+constexpr uint8_t kAudioLevelExtensionId = 9;
+constexpr int32_t kTimeOffset = 0x56ce;
+constexpr bool kVoiceActive = true;
+constexpr uint8_t kAudioLevel = 0x5a;
+constexpr size_t kMaxPaddingSize = 224u;
+constexpr uint8_t kMinimumPacket[] = {
+ 0x80, kPayloadType, 0x00, kSeqNum,
+ 0x65, 0x43, 0x12, 0x78,
+ 0x12, 0x34, 0x56, 0x78};
+constexpr uint8_t kPacketWithTO[] = {
+ 0x90, kPayloadType, 0x00, kSeqNum,
+ 0x65, 0x43, 0x12, 0x78,
+ 0x12, 0x34, 0x56, 0x78,
+ 0xbe, 0xde, 0x00, 0x01,
+ 0x12, 0x00, 0x56, 0xce};
+
+constexpr uint8_t kPacketWithTOAndAL[] = {
+ 0x90, kPayloadType, 0x00, kSeqNum,
+ 0x65, 0x43, 0x12, 0x78,
+ 0x12, 0x34, 0x56, 0x78,
+ 0xbe, 0xde, 0x00, 0x02,
+ 0x12, 0x00, 0x56, 0xce,
+ 0x90, 0x80|kAudioLevel, 0x00, 0x00};
+
+constexpr uint32_t kCsrcs[] = {0x34567890, 0x32435465};
+constexpr uint8_t kPayload[] = {'p', 'a', 'y', 'l', 'o', 'a', 'd'};
+constexpr uint8_t kPacketPaddingSize = 8;
+constexpr uint8_t kPacket[] = {
+ 0xb2, kPayloadType, 0x00, kSeqNum,
+ 0x65, 0x43, 0x12, 0x78,
+ 0x12, 0x34, 0x56, 0x78,
+ 0x34, 0x56, 0x78, 0x90,
+ 0x32, 0x43, 0x54, 0x65,
+ 0xbe, 0xde, 0x00, 0x01,
+ 0x12, 0x00, 0x56, 0xce,
+ 'p', 'a', 'y', 'l', 'o', 'a', 'd',
+ 'p', 'a', 'd', 'd', 'i', 'n', 'g', kPacketPaddingSize};
+
+} // namespace
+
+TEST(RtpPacketTest, CreateMinimum) {
+ RtpPacketToSend packet(nullptr);
+ packet.SetPayloadType(kPayloadType);
+ packet.SetSequenceNumber(kSeqNum);
+ packet.SetTimestamp(kTimestamp);
+ packet.SetSsrc(kSsrc);
+ EXPECT_THAT(kMinimumPacket, ElementsAreArray(packet.data(), packet.size()));
+}
+
+TEST(RtpPacketTest, CreateWithExtension) {
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionOffsetExtensionId);
+ RtpPacketToSend packet(&extensions);
+ packet.SetPayloadType(kPayloadType);
+ packet.SetSequenceNumber(kSeqNum);
+ packet.SetTimestamp(kTimestamp);
+ packet.SetSsrc(kSsrc);
+ packet.SetExtension<TransmissionOffset>(kTimeOffset);
+ EXPECT_THAT(kPacketWithTO, ElementsAreArray(packet.data(), packet.size()));
+}
+
+TEST(RtpPacketTest, CreateWith2Extensions) {
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionOffsetExtensionId);
+ extensions.Register(kRtpExtensionAudioLevel, kAudioLevelExtensionId);
+ RtpPacketToSend packet(&extensions);
+ packet.SetPayloadType(kPayloadType);
+ packet.SetSequenceNumber(kSeqNum);
+ packet.SetTimestamp(kTimestamp);
+ packet.SetSsrc(kSsrc);
+ packet.SetExtension<TransmissionOffset>(kTimeOffset);
+ packet.SetExtension<AudioLevel>(kVoiceActive, kAudioLevel);
+ EXPECT_THAT(kPacketWithTOAndAL,
+ ElementsAreArray(packet.data(), packet.size()));
+}
+
+TEST(RtpPacketTest, SetReservedExtensionsAfterPayload) {
+ const size_t kPayloadSize = 4;
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionOffsetExtensionId);
+ extensions.Register(kRtpExtensionAudioLevel, kAudioLevelExtensionId);
+ RtpPacketToSend packet(&extensions);
+
+ EXPECT_TRUE(packet.ReserveExtension<TransmissionOffset>());
+ packet.AllocatePayload(kPayloadSize);
+ // Can't set extension after payload.
+ EXPECT_FALSE(packet.SetExtension<AudioLevel>(kVoiceActive, kAudioLevel));
+ // Unless reserved.
+ EXPECT_TRUE(packet.SetExtension<TransmissionOffset>(kTimeOffset));
+}
+
+TEST(RtpPacketTest, CreatePurePadding) {
+ const size_t kPaddingSize = kMaxPaddingSize - 1;
+ RtpPacketToSend packet(nullptr, 12 + kPaddingSize);
+ packet.SetPayloadType(kPayloadType);
+ packet.SetSequenceNumber(kSeqNum);
+ packet.SetTimestamp(kTimestamp);
+ packet.SetSsrc(kSsrc);
+ Random random(0x123456789);
+
+ EXPECT_LT(packet.size(), packet.capacity());
+ EXPECT_FALSE(packet.SetPadding(kPaddingSize + 1, &random));
+ EXPECT_TRUE(packet.SetPadding(kPaddingSize, &random));
+ EXPECT_EQ(packet.size(), packet.capacity());
+}
+
+TEST(RtpPacketTest, CreateUnalignedPadding) {
+ const size_t kPayloadSize = 3; // Make padding start at unaligned address.
+ RtpPacketToSend packet(nullptr, 12 + kPayloadSize + kMaxPaddingSize);
+ packet.SetPayloadType(kPayloadType);
+ packet.SetSequenceNumber(kSeqNum);
+ packet.SetTimestamp(kTimestamp);
+ packet.SetSsrc(kSsrc);
+ packet.AllocatePayload(kPayloadSize);
+ Random r(0x123456789);
+
+ EXPECT_LT(packet.size(), packet.capacity());
+ EXPECT_TRUE(packet.SetPadding(kMaxPaddingSize, &r));
+ EXPECT_EQ(packet.size(), packet.capacity());
+}
+
+TEST(RtpPacketTest, ParseMinimum) {
+ RtpPacketReceived packet;
+ EXPECT_TRUE(packet.Parse(kMinimumPacket, sizeof(kMinimumPacket)));
+ EXPECT_EQ(kPayloadType, packet.PayloadType());
+ EXPECT_EQ(kSeqNum, packet.SequenceNumber());
+ EXPECT_EQ(kTimestamp, packet.Timestamp());
+ EXPECT_EQ(kSsrc, packet.Ssrc());
+ EXPECT_EQ(0u, packet.padding_size());
+ EXPECT_EQ(0u, packet.payload_size());
+}
+
+TEST(RtpPacketTest, ParseBuffer) {
+ rtc::Buffer unparsed(kMinimumPacket);
+ const uint8_t* raw = unparsed.data();
+
+ RtpPacketReceived packet;
+ EXPECT_TRUE(packet.Parse(std::move(unparsed)));
+ EXPECT_EQ(raw, packet.data()); // Expect packet took over the buffer.
+ EXPECT_EQ(kSeqNum, packet.SequenceNumber());
+ EXPECT_EQ(kTimestamp, packet.Timestamp());
+ EXPECT_EQ(kSsrc, packet.Ssrc());
+ EXPECT_EQ(0u, packet.padding_size());
+ EXPECT_EQ(0u, packet.payload_size());
+}
+
+TEST(RtpPacketTest, ParseWithExtension) {
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionOffsetExtensionId);
+
+ RtpPacketReceived packet(&extensions);
+ EXPECT_TRUE(packet.Parse(kPacketWithTO, sizeof(kPacketWithTO)));
+ EXPECT_EQ(kPayloadType, packet.PayloadType());
+ EXPECT_EQ(kSeqNum, packet.SequenceNumber());
+ EXPECT_EQ(kTimestamp, packet.Timestamp());
+ EXPECT_EQ(kSsrc, packet.Ssrc());
+ int32_t time_offset;
+ EXPECT_TRUE(packet.GetExtension<TransmissionOffset>(&time_offset));
+ EXPECT_EQ(kTimeOffset, time_offset);
+ EXPECT_EQ(0u, packet.payload_size());
+ EXPECT_EQ(0u, packet.padding_size());
+}
+
+TEST(RtpPacketTest, ParseWith2Extensions) {
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionOffsetExtensionId);
+ extensions.Register(kRtpExtensionAudioLevel, kAudioLevelExtensionId);
+ RtpPacketReceived packet(&extensions);
+ EXPECT_TRUE(packet.Parse(kPacketWithTOAndAL, sizeof(kPacketWithTOAndAL)));
+ int32_t time_offset;
+ EXPECT_TRUE(packet.GetExtension<TransmissionOffset>(&time_offset));
+ EXPECT_EQ(kTimeOffset, time_offset);
+ bool voice_active;
+ uint8_t audio_level;
+ EXPECT_TRUE(packet.GetExtension<AudioLevel>(&voice_active, &audio_level));
+ EXPECT_EQ(kVoiceActive, voice_active);
+ EXPECT_EQ(kAudioLevel, audio_level);
+}
+
+TEST(RtpPacketTest, ParseWithAllFeatures) {
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionOffsetExtensionId);
+ RtpPacketReceived packet(&extensions);
+ EXPECT_TRUE(packet.Parse(kPacket, sizeof(kPacket)));
+ EXPECT_EQ(kPayloadType, packet.PayloadType());
+ EXPECT_EQ(kSeqNum, packet.SequenceNumber());
+ EXPECT_EQ(kTimestamp, packet.Timestamp());
+ EXPECT_EQ(kSsrc, packet.Ssrc());
+ EXPECT_THAT(packet.Csrcs(), ElementsAreArray(kCsrcs));
+ EXPECT_THAT(make_tuple(packet.payload(), packet.payload_size()),
+ ElementsAreArray(kPayload));
+ EXPECT_EQ(kPacketPaddingSize, packet.padding_size());
+ int32_t time_offset;
+ EXPECT_TRUE(packet.GetExtension<TransmissionOffset>(&time_offset));
+}
+
+TEST(RtpPacketTest, ParseWithExtensionDelayed) {
+ RtpPacketReceived packet;
+ EXPECT_TRUE(packet.Parse(kPacketWithTO, sizeof(kPacketWithTO)));
+ EXPECT_EQ(kPayloadType, packet.PayloadType());
+ EXPECT_EQ(kSeqNum, packet.SequenceNumber());
+ EXPECT_EQ(kTimestamp, packet.Timestamp());
+ EXPECT_EQ(kSsrc, packet.Ssrc());
+
+ RtpPacketToSend::ExtensionManager extensions;
+ extensions.Register(kRtpExtensionTransmissionTimeOffset,
+ kTransmissionOffsetExtensionId);
+
+ int32_t time_offset;
+ EXPECT_FALSE(packet.GetExtension<TransmissionOffset>(&time_offset));
+ packet.IdentifyExtensions(&extensions);
+ EXPECT_TRUE(packet.GetExtension<TransmissionOffset>(&time_offset));
+ EXPECT_EQ(kTimeOffset, time_offset);
+ EXPECT_EQ(0u, packet.payload_size());
+ EXPECT_EQ(0u, packet.padding_size());
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
index f3793d0901e..283c2846e1b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
@@ -16,8 +16,7 @@
namespace webrtc {
RTPPayloadRegistry::RTPPayloadRegistry(RTPPayloadStrategy* rtp_payload_strategy)
- : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- rtp_payload_strategy_(rtp_payload_strategy),
+ : rtp_payload_strategy_(rtp_payload_strategy),
red_payload_type_(-1),
ulpfec_payload_type_(-1),
incoming_payload_type_(-1),
@@ -67,7 +66,7 @@ int32_t RTPPayloadRegistry::RegisterReceivePayload(
size_t payload_name_length = strlen(payload_name);
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
RtpUtility::PayloadTypeMap::iterator it =
payload_type_map_.find(payload_type);
@@ -122,7 +121,7 @@ int32_t RTPPayloadRegistry::RegisterReceivePayload(
int32_t RTPPayloadRegistry::DeRegisterReceivePayload(
const int8_t payload_type) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
RtpUtility::PayloadTypeMap::iterator it =
payload_type_map_.find(payload_type);
assert(it != payload_type_map_.end());
@@ -176,7 +175,7 @@ int32_t RTPPayloadRegistry::ReceivePayloadType(
assert(payload_type);
size_t payload_name_length = strlen(payload_name);
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
RtpUtility::PayloadTypeMap::const_iterator it = payload_type_map_.begin();
@@ -218,12 +217,12 @@ int32_t RTPPayloadRegistry::ReceivePayloadType(
}
bool RTPPayloadRegistry::RtxEnabled() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return rtx_;
}
bool RTPPayloadRegistry::IsRtx(const RTPHeader& header) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return IsRtxInternal(header);
}
@@ -231,15 +230,6 @@ bool RTPPayloadRegistry::IsRtxInternal(const RTPHeader& header) const {
return rtx_ && ssrc_rtx_ == header.ssrc;
}
-bool RTPPayloadRegistry::RestoreOriginalPacket(uint8_t** restored_packet,
- const uint8_t* packet,
- size_t* packet_length,
- uint32_t original_ssrc,
- const RTPHeader& header) const {
- return RestoreOriginalPacket(*restored_packet, packet, packet_length,
- original_ssrc, header);
-}
-
bool RTPPayloadRegistry::RestoreOriginalPacket(uint8_t* restored_packet,
const uint8_t* packet,
size_t* packet_length,
@@ -264,7 +254,7 @@ bool RTPPayloadRegistry::RestoreOriginalPacket(uint8_t* restored_packet,
original_sequence_number);
ByteWriter<uint32_t>::WriteBigEndian(restored_packet + 8, original_ssrc);
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
if (!rtx_)
return true;
@@ -290,20 +280,20 @@ bool RTPPayloadRegistry::RestoreOriginalPacket(uint8_t* restored_packet,
}
void RTPPayloadRegistry::SetRtxSsrc(uint32_t ssrc) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
ssrc_rtx_ = ssrc;
rtx_ = true;
}
bool RTPPayloadRegistry::GetRtxSsrc(uint32_t* ssrc) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
*ssrc = ssrc_rtx_;
return rtx_;
}
void RTPPayloadRegistry::SetRtxPayloadType(int payload_type,
int associated_payload_type) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
if (payload_type < 0) {
LOG(LS_ERROR) << "Invalid RTX payload type: " << payload_type;
return;
@@ -315,7 +305,7 @@ void RTPPayloadRegistry::SetRtxPayloadType(int payload_type,
}
bool RTPPayloadRegistry::IsRed(const RTPHeader& header) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return red_payload_type_ == header.payloadType;
}
@@ -325,7 +315,7 @@ bool RTPPayloadRegistry::IsEncapsulated(const RTPHeader& header) const {
bool RTPPayloadRegistry::GetPayloadSpecifics(uint8_t payload_type,
PayloadUnion* payload) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
RtpUtility::PayloadTypeMap::const_iterator it =
payload_type_map_.find(payload_type);
@@ -343,13 +333,13 @@ int RTPPayloadRegistry::GetPayloadTypeFrequency(
if (!payload) {
return -1;
}
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
return rtp_payload_strategy_->GetPayloadTypeFrequency(*payload);
}
const RtpUtility::Payload* RTPPayloadRegistry::PayloadTypeToPayload(
uint8_t payload_type) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
RtpUtility::PayloadTypeMap::const_iterator it =
payload_type_map_.find(payload_type);
@@ -363,13 +353,13 @@ const RtpUtility::Payload* RTPPayloadRegistry::PayloadTypeToPayload(
}
void RTPPayloadRegistry::SetIncomingPayloadType(const RTPHeader& header) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
if (!IsRtxInternal(header))
incoming_payload_type_ = header.payloadType;
}
bool RTPPayloadRegistry::ReportMediaPayloadType(uint8_t media_payload_type) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
if (last_received_media_payload_type_ == media_payload_type) {
// Media type unchanged.
return true;
@@ -416,7 +406,8 @@ class RTPPayloadAudioStrategy : public RTPPayloadStrategy {
return payload;
}
- int GetPayloadTypeFrequency(const RtpUtility::Payload& payload) const {
+ int GetPayloadTypeFrequency(
+ const RtpUtility::Payload& payload) const override {
return payload.typeSpecific.Audio.frequency;
}
};
@@ -466,7 +457,8 @@ class RTPPayloadVideoStrategy : public RTPPayloadStrategy {
return payload;
}
- int GetPayloadTypeFrequency(const RtpUtility::Payload& payload) const {
+ int GetPayloadTypeFrequency(
+ const RtpUtility::Payload& payload) const override {
return kVideoPayloadTypeFrequency;
}
};
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
index cbded6872d3..5bbe97a32ce 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
@@ -8,11 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/mock/mock_rtp_payload_strategy.h"
@@ -58,7 +59,7 @@ class RtpPayloadRegistryTest : public ::testing::Test {
return returned_payload_on_heap;
}
- rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
+ std::unique_ptr<RTPPayloadRegistry> rtp_payload_registry_;
testing::NiceMock<MockRTPPayloadStrategy>* mock_payload_strategy_;
};
@@ -296,9 +297,9 @@ void TestRtxPacket(RTPPayloadRegistry* rtp_payload_registry,
uint16_t original_sequence_number = 1234;
uint32_t original_ssrc = 500;
- rtc::scoped_ptr<const uint8_t[]> packet(GenerateRtxPacket(
+ std::unique_ptr<const uint8_t[]> packet(GenerateRtxPacket(
header_length, payload_length, original_sequence_number));
- rtc::scoped_ptr<uint8_t[]> restored_packet(
+ std::unique_ptr<uint8_t[]> restored_packet(
new uint8_t[header_length + payload_length]);
size_t length = original_length;
bool success = rtp_payload_registry->RestoreOriginalPacket(
@@ -312,7 +313,7 @@ void TestRtxPacket(RTPPayloadRegistry* rtp_payload_registry,
EXPECT_EQ(original_length - kRtxHeaderSize, length)
<< "The restored packet should be exactly kRtxHeaderSize smaller.";
- rtc::scoped_ptr<RtpHeaderParser> header_parser(RtpHeaderParser::Create());
+ std::unique_ptr<RtpHeaderParser> header_parser(RtpHeaderParser::Create());
RTPHeader restored_header;
ASSERT_TRUE(
header_parser->Parse(restored_packet.get(), length, &restored_header));
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc
index 7a3b6fd8290..38b2830b79c 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc
@@ -16,7 +16,6 @@
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
RTPReceiverStrategy* RTPReceiverStrategy::CreateAudioStrategy(
@@ -46,26 +45,26 @@ RTPReceiverAudio::RTPReceiverAudio(RtpData* data_callback)
// Outband TelephoneEvent(DTMF) detection
void RTPReceiverAudio::SetTelephoneEventForwardToDecoder(
bool forward_to_decoder) {
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
telephone_event_forward_to_decoder_ = forward_to_decoder;
}
// Is forwarding of outband telephone events turned on/off?
bool RTPReceiverAudio::TelephoneEventForwardToDecoder() const {
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
return telephone_event_forward_to_decoder_;
}
bool RTPReceiverAudio::TelephoneEventPayloadType(
int8_t payload_type) const {
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
return telephone_event_payload_type_ == payload_type;
}
bool RTPReceiverAudio::CNGPayloadType(int8_t payload_type,
uint32_t* frequency,
bool* cng_payload_type_has_changed) {
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
*cng_payload_type_has_changed = false;
// We can have four CNG on 8000Hz, 16000Hz, 32000Hz and 48000Hz.
@@ -152,7 +151,7 @@ int32_t RTPReceiverAudio::OnNewPayloadTypeCreated(
const char payload_name[RTP_PAYLOAD_NAME_SIZE],
int8_t payload_type,
uint32_t frequency) {
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
if (RtpUtility::StringCompare(payload_name, "telephone-event", 15)) {
telephone_event_payload_type_ = payload_type;
@@ -194,6 +193,10 @@ int32_t RTPReceiverAudio::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
rtp_header->type.Audio.numEnergy);
}
+ if (first_packet_received_()) {
+ LOG(LS_INFO) << "Received first audio RTP packet";
+ }
+
return ParseAudioCodecSpecific(rtp_header,
payload,
payload_length,
@@ -202,7 +205,7 @@ int32_t RTPReceiverAudio::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
}
int RTPReceiverAudio::GetPayloadTypeFrequency() const {
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
if (last_received_g722_) {
return 8000;
}
@@ -245,7 +248,7 @@ void RTPReceiverAudio::CheckPayloadChanged(int8_t payload_type,
}
int RTPReceiverAudio::Energy(uint8_t array_of_energy[kRtpCsrcSize]) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
assert(num_energy_ <= kRtpCsrcSize);
@@ -287,7 +290,7 @@ int32_t RTPReceiverAudio::ParseAudioCodecSpecific(
bool telephone_event_packet =
TelephoneEventPayloadType(rtp_header->header.payloadType);
if (telephone_event_packet) {
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
// RFC 4733 2.3
// 0 1 2 3
@@ -332,7 +335,7 @@ int32_t RTPReceiverAudio::ParseAudioCodecSpecific(
}
{
- CriticalSectionScoped lock(crit_sect_.get());
+ rtc::CritScope lock(&crit_sect_);
if (!telephone_event_packet) {
last_received_frequency_ = audio_specific.frequency;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h
index b68febbb82b..d5d89bae2d4 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.h
@@ -13,7 +13,7 @@
#include <set>
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/onetimeevent.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
@@ -22,8 +22,6 @@
namespace webrtc {
-class CriticalSectionWrapper;
-
// Handles audio RTP packets. This class is thread-safe.
class RTPReceiverAudio : public RTPReceiverStrategy,
public TelephoneEventHandler {
@@ -33,15 +31,15 @@ class RTPReceiverAudio : public RTPReceiverStrategy,
// The following three methods implement the TelephoneEventHandler interface.
// Forward DTMFs to decoder for playout.
- void SetTelephoneEventForwardToDecoder(bool forward_to_decoder);
+ void SetTelephoneEventForwardToDecoder(bool forward_to_decoder) override;
// Is forwarding of outband telephone events turned on/off?
- bool TelephoneEventForwardToDecoder() const;
+ bool TelephoneEventForwardToDecoder() const override;
// Is TelephoneEvent configured with payload type payload_type
- bool TelephoneEventPayloadType(const int8_t payload_type) const;
+ bool TelephoneEventPayloadType(const int8_t payload_type) const override;
- TelephoneEventHandler* GetTelephoneEventHandler() { return this; }
+ TelephoneEventHandler* GetTelephoneEventHandler() override { return this; }
// Returns true if CNG is configured with payload type payload_type. If so,
// the frequency and cng_payload_type_has_changed are filled in.
@@ -118,6 +116,8 @@ class RTPReceiverAudio : public RTPReceiverStrategy,
uint8_t num_energy_;
uint8_t current_remote_energy_[kRtpCsrcSize];
+
+ ThreadUnsafeOneTimeEvent first_packet_received_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
index 6f2efe783a1..190449b3ddf 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
@@ -61,8 +61,6 @@ RtpReceiverImpl::RtpReceiverImpl(
rtp_payload_registry_(rtp_payload_registry),
rtp_media_receiver_(rtp_media_receiver),
cb_rtp_feedback_(incoming_messages_callback),
- critical_section_rtp_receiver_(
- CriticalSectionWrapper::CreateCriticalSection()),
last_receive_time_(0),
last_received_payload_length_(0),
ssrc_(0),
@@ -70,8 +68,7 @@ RtpReceiverImpl::RtpReceiverImpl(
current_remote_csrc_(),
last_received_timestamp_(0),
last_received_frame_time_ms_(-1),
- last_received_sequence_number_(0),
- nack_method_(kNackOff) {
+ last_received_sequence_number_(0) {
assert(incoming_messages_callback);
memset(current_remote_csrc_, 0, sizeof(current_remote_csrc_));
@@ -89,7 +86,7 @@ int32_t RtpReceiverImpl::RegisterReceivePayload(
const uint32_t frequency,
const size_t channels,
const uint32_t rate) {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
// TODO(phoglund): Try to streamline handling of the RED codec and some other
// cases which makes it necessary to keep track of whether we created a
@@ -111,29 +108,18 @@ int32_t RtpReceiverImpl::RegisterReceivePayload(
int32_t RtpReceiverImpl::DeRegisterReceivePayload(
const int8_t payload_type) {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
return rtp_payload_registry_->DeRegisterReceivePayload(payload_type);
}
-NACKMethod RtpReceiverImpl::NACK() const {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
- return nack_method_;
-}
-
-// Turn negative acknowledgment requests on/off.
-void RtpReceiverImpl::SetNACKStatus(const NACKMethod method) {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
- nack_method_ = method;
-}
-
uint32_t RtpReceiverImpl::SSRC() const {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
return ssrc_;
}
// Get remote CSRC.
int32_t RtpReceiverImpl::CSRCs(uint32_t array_of_csrcs[kRtpCsrcSize]) const {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
assert(num_csrcs_ <= kRtpCsrcSize);
@@ -179,7 +165,7 @@ bool RtpReceiverImpl::IncomingRtpPacket(
bool is_first_packet_in_frame = false;
{
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
if (HaveReceivedFrame()) {
is_first_packet_in_frame =
last_received_sequence_number_ + 1 == rtp_header.sequenceNumber &&
@@ -198,7 +184,7 @@ bool RtpReceiverImpl::IncomingRtpPacket(
}
{
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
last_receive_time_ = clock_->TimeInMilliseconds();
last_received_payload_length_ = payload_data_length;
@@ -219,7 +205,7 @@ TelephoneEventHandler* RtpReceiverImpl::GetTelephoneEventHandler() {
}
bool RtpReceiverImpl::Timestamp(uint32_t* timestamp) const {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
if (!HaveReceivedFrame())
return false;
*timestamp = last_received_timestamp_;
@@ -227,7 +213,7 @@ bool RtpReceiverImpl::Timestamp(uint32_t* timestamp) const {
}
bool RtpReceiverImpl::LastReceivedTimeMs(int64_t* receive_time_ms) const {
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
if (!HaveReceivedFrame())
return false;
*receive_time_ms = last_received_frame_time_ms_;
@@ -247,7 +233,7 @@ void RtpReceiverImpl::CheckSSRCChanged(const RTPHeader& rtp_header) {
uint32_t rate = 0;
{
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
int8_t last_received_payload_type =
rtp_payload_registry_->last_received_payload_type();
@@ -318,7 +304,7 @@ int32_t RtpReceiverImpl::CheckPayloadChanged(const RTPHeader& rtp_header,
int8_t payload_type = rtp_header.payloadType;
{
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
int8_t last_received_payload_type =
rtp_payload_registry_->last_received_payload_type();
@@ -401,7 +387,7 @@ void RtpReceiverImpl::CheckCSRC(const WebRtcRTPHeader& rtp_header) {
uint8_t old_num_csrcs = 0;
{
- CriticalSectionScoped lock(critical_section_rtp_receiver_.get());
+ rtc::CritScope lock(&critical_section_rtp_receiver_);
if (!rtp_media_receiver_->ShouldReportCsrcChanges(
rtp_header.header.payloadType)) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h
index 63b65fefd8e..1ae1c9168a6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.h
@@ -11,11 +11,12 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_IMPL_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_IMPL_H_
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -46,11 +47,6 @@ class RtpReceiverImpl : public RtpReceiver {
PayloadUnion payload_specific,
bool in_order) override;
- NACKMethod NACK() const override;
-
- // Turn negative acknowledgement requests on/off.
- void SetNACKStatus(const NACKMethod method) override;
-
// Returns the last received timestamp.
bool Timestamp(uint32_t* timestamp) const override;
bool LastReceivedTimeMs(int64_t* receive_time_ms) const override;
@@ -75,11 +71,11 @@ class RtpReceiverImpl : public RtpReceiver {
Clock* clock_;
RTPPayloadRegistry* rtp_payload_registry_;
- rtc::scoped_ptr<RTPReceiverStrategy> rtp_media_receiver_;
+ std::unique_ptr<RTPReceiverStrategy> rtp_media_receiver_;
RtpFeedback* cb_rtp_feedback_;
- rtc::scoped_ptr<CriticalSectionWrapper> critical_section_rtp_receiver_;
+ rtc::CriticalSection critical_section_rtp_receiver_;
int64_t last_receive_time_;
size_t last_received_payload_length_;
@@ -91,8 +87,6 @@ class RtpReceiverImpl : public RtpReceiver {
uint32_t last_received_timestamp_;
int64_t last_received_frame_time_ms_;
uint16_t last_received_sequence_number_;
-
- NACKMethod nack_method_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.cc
index 3797b1bcc20..69d079f9aa3 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.cc
@@ -12,25 +12,22 @@
#include <stdlib.h>
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-
namespace webrtc {
RTPReceiverStrategy::RTPReceiverStrategy(RtpData* data_callback)
- : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- data_callback_(data_callback) {
+ : data_callback_(data_callback) {
memset(&last_payload_, 0, sizeof(last_payload_));
}
void RTPReceiverStrategy::GetLastMediaSpecificPayload(
PayloadUnion* payload) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
memcpy(payload, &last_payload_, sizeof(*payload));
}
void RTPReceiverStrategy::SetLastMediaSpecificPayload(
const PayloadUnion& payload) {
- CriticalSectionScoped cs(crit_sect_.get());
+ rtc::CritScope cs(&crit_sect_);
memcpy(&last_payload_, &payload, sizeof(last_payload_));
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
index f2a60ff855a..663b883295d 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
@@ -11,11 +11,10 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_STRATEGY_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_STRATEGY_H_
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -95,7 +94,7 @@ class RTPReceiverStrategy {
// packet.
explicit RTPReceiverStrategy(RtpData* data_callback);
- rtc::scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ rtc::CriticalSection crit_sect_;
PayloadUnion last_payload_;
RtpData* data_callback_;
};
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
index 406acc23c20..9d76c1a6163 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
@@ -13,6 +13,8 @@
#include <assert.h>
#include <string.h>
+#include <memory>
+
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
@@ -21,7 +23,6 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_format.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -70,8 +71,12 @@ int32_t RTPReceiverVideo::ParseRtpPacket(WebRtcRTPHeader* rtp_header,
: -1;
}
+ if (first_packet_received_()) {
+ LOG(LS_INFO) << "Received first video RTP packet";
+ }
+
// We are not allowed to hold a critical section when calling below functions.
- rtc::scoped_ptr<RtpDepacketizer> depacketizer(
+ std::unique_ptr<RtpDepacketizer> depacketizer(
RtpDepacketizer::Create(rtp_header->type.Video.codec));
if (depacketizer.get() == NULL) {
LOG(LS_ERROR) << "Failed to create depacketizer.";
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
index 56f761a2e1c..486eced3641 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/onetimeevent.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
@@ -34,7 +34,7 @@ class RTPReceiverVideo : public RTPReceiverStrategy {
int64_t timestamp,
bool is_first_packet) override;
- TelephoneEventHandler* GetTelephoneEventHandler() { return NULL; }
+ TelephoneEventHandler* GetTelephoneEventHandler() override { return NULL; }
int GetPayloadTypeFrequency() const override;
@@ -54,6 +54,9 @@ class RTPReceiverVideo : public RTPReceiverStrategy {
const PayloadUnion& specific_payload) const override;
void SetPacketOverHead(uint16_t packet_over_head);
+
+ private:
+ OneTimeEvent first_packet_received_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index 5875529cfb2..214472f81ae 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -60,7 +60,8 @@ RtpRtcp::Configuration::Configuration()
send_bitrate_observer(nullptr),
send_frame_count_observer(nullptr),
send_side_delay_observer(nullptr),
- event_log(nullptr) {}
+ event_log(nullptr),
+ send_packet_observer(nullptr) {}
RtpRtcp* RtpRtcp::CreateRtpRtcp(const RtpRtcp::Configuration& configuration) {
if (configuration.clock) {
@@ -85,7 +86,8 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
configuration.send_bitrate_observer,
configuration.send_frame_count_observer,
configuration.send_side_delay_observer,
- configuration.event_log),
+ configuration.event_log,
+ configuration.send_packet_observer),
rtcp_sender_(configuration.audio,
configuration.clock,
configuration.receive_statistics,
@@ -105,14 +107,13 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
last_process_time_(configuration.clock->TimeInMilliseconds()),
last_bitrate_process_time_(configuration.clock->TimeInMilliseconds()),
last_rtt_process_time_(configuration.clock->TimeInMilliseconds()),
- packet_overhead_(28), // IPV4 UDP.
+ packet_overhead_(28), // IPV4 UDP.
nack_last_time_sent_full_(0),
nack_last_time_sent_full_prev_(0),
nack_last_seq_number_sent_(0),
key_frame_req_method_(kKeyFrameReqPliRtcp),
remote_bitrate_(configuration.remote_bitrate_estimator),
rtt_stats_(configuration.rtt_stats),
- critical_section_rtt_(CriticalSectionWrapper::CreateCriticalSection()),
rtt_ms_(0) {
// Make sure that RTCP objects are aware of our SSRC.
uint32_t SSRC = rtp_sender_.SSRC();
@@ -301,30 +302,21 @@ void ModuleRtpRtcpImpl::SetSequenceNumber(const uint16_t seq_num) {
rtp_sender_.SetSequenceNumber(seq_num);
}
-bool ModuleRtpRtcpImpl::SetRtpStateForSsrc(uint32_t ssrc,
- const RtpState& rtp_state) {
- if (rtp_sender_.SSRC() == ssrc) {
- SetStartTimestamp(rtp_state.start_timestamp);
- rtp_sender_.SetRtpState(rtp_state);
- return true;
- }
- if (rtp_sender_.RtxSsrc() == ssrc) {
- rtp_sender_.SetRtxRtpState(rtp_state);
- return true;
- }
- return false;
+void ModuleRtpRtcpImpl::SetRtpState(const RtpState& rtp_state) {
+ SetStartTimestamp(rtp_state.start_timestamp);
+ rtp_sender_.SetRtpState(rtp_state);
}
-bool ModuleRtpRtcpImpl::GetRtpStateForSsrc(uint32_t ssrc, RtpState* rtp_state) {
- if (rtp_sender_.SSRC() == ssrc) {
- *rtp_state = rtp_sender_.GetRtpState();
- return true;
- }
- if (rtp_sender_.RtxSsrc() == ssrc) {
- *rtp_state = rtp_sender_.GetRtxRtpState();
- return true;
- }
- return false;
+void ModuleRtpRtcpImpl::SetRtxState(const RtpState& rtp_state) {
+ rtp_sender_.SetRtxRtpState(rtp_state);
+}
+
+RtpState ModuleRtpRtcpImpl::GetRtpState() const {
+ return rtp_sender_.GetRtpState();
+}
+
+RtpState ModuleRtpRtcpImpl::GetRtxState() const {
+ return rtp_sender_.GetRtxRtpState();
}
uint32_t ModuleRtpRtcpImpl::SSRC() const {
@@ -688,8 +680,9 @@ void ModuleRtpRtcpImpl::SetTMMBRStatus(const bool enable) {
rtcp_sender_.SetTMMBRStatus(enable);
}
-int32_t ModuleRtpRtcpImpl::SetTMMBN(const TMMBRSet* bounding_set) {
- return rtcp_sender_.SetTMMBN(bounding_set);
+void ModuleRtpRtcpImpl::SetTMMBN(
+ const std::vector<rtcp::TmmbItem>* bounding_set) {
+ rtcp_sender_.SetTMMBN(bounding_set);
}
// Returns the currently configured retransmission mode.
@@ -982,12 +975,12 @@ void ModuleRtpRtcpImpl::SetRtcpReceiverSsrcs(uint32_t main_ssrc) {
}
void ModuleRtpRtcpImpl::set_rtt_ms(int64_t rtt_ms) {
- CriticalSectionScoped cs(critical_section_rtt_.get());
+ rtc::CritScope cs(&critical_section_rtt_);
rtt_ms_ = rtt_ms;
}
int64_t ModuleRtpRtcpImpl::rtt_ms() const {
- CriticalSectionScoped cs(critical_section_rtt_.get());
+ rtc::CritScope cs(&critical_section_rtt_);
return rtt_ms_;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
index 76faca0f7eb..7bbb06e5289 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
@@ -16,8 +16,8 @@
#include <utility>
#include <vector>
+#include "webrtc/base/criticalsection.h"
#include "webrtc/base/gtest_prod_util.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/packet_loss_stats.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver.h"
@@ -75,8 +75,10 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Set SequenceNumber, default is a random number.
void SetSequenceNumber(uint16_t seq) override;
- bool SetRtpStateForSsrc(uint32_t ssrc, const RtpState& rtp_state) override;
- bool GetRtpStateForSsrc(uint32_t ssrc, RtpState* rtp_state) override;
+ void SetRtpState(const RtpState& rtp_state) override;
+ void SetRtxState(const RtpState& rtp_state) override;
+ RtpState GetRtpState() const override;
+ RtpState GetRtxState() const override;
uint32_t SSRC() const override;
@@ -200,7 +202,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
void SetTMMBRStatus(bool enable) override;
- int32_t SetTMMBN(const TMMBRSet* bounding_set);
+ void SetTMMBN(const std::vector<rtcp::TmmbItem>* bounding_set);
uint16_t MaxPayloadLength() const override;
@@ -362,7 +364,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
PacketLossStats receive_loss_stats_;
// The processed RTT from RtcpRttStats.
- rtc::scoped_ptr<CriticalSectionWrapper> critical_section_rtt_;
+ rtc::CriticalSection critical_section_rtt_;
int64_t rtt_ms_;
};
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
index 708b9af1e09..7e0ac312c80 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
@@ -9,6 +9,7 @@
*/
#include <map>
+#include <memory>
#include <set>
#include "testing/gmock/include/gmock/gmock.h"
@@ -68,7 +69,7 @@ class SendTransport : public Transport,
size_t len,
const PacketOptions& options) override {
RTPHeader header;
- rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ std::unique_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
EXPECT_TRUE(parser->Parse(static_cast<const uint8_t*>(data), len, &header));
++rtp_packets_sent_;
last_rtp_header_ = header;
@@ -115,10 +116,10 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver {
RtcpPacketTypeCounter packets_sent_;
RtcpPacketTypeCounter packets_received_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics_;
SendTransport transport_;
RtcpRttStatsTestImpl rtt_stats_;
- rtc::scoped_ptr<ModuleRtpRtcpImpl> impl_;
+ std::unique_ptr<ModuleRtpRtcpImpl> impl_;
uint32_t remote_ssrc_;
void SetRemoteSsrc(uint32_t ssrc) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
index 3fbca7b67d8..b58a94d457f 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
@@ -17,6 +17,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/call.h"
#include "webrtc/call/rtc_event_log.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
@@ -24,8 +25,6 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_video.h"
#include "webrtc/modules/rtp_rtcp/source/time_util.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -114,12 +113,11 @@ RTPSender::RTPSender(
BitrateStatisticsObserver* bitrate_callback,
FrameCountObserver* frame_count_observer,
SendSideDelayObserver* send_side_delay_observer,
- RtcEventLog* event_log)
+ RtcEventLog* event_log,
+ SendPacketObserver* send_packet_observer)
: clock_(clock),
- // TODO(holmer): Remove this conversion when we remove the use of
- // TickTime.
- clock_delta_ms_(clock_->TimeInMilliseconds() -
- TickTime::MillisecondTimestamp()),
+ // TODO(holmer): Remove this conversion?
+ clock_delta_ms_(clock_->TimeInMilliseconds() - rtc::TimeMillis()),
random_(clock_->TimeInMicroseconds()),
bitrates_(bitrate_callback),
total_bitrate_sent_(clock, bitrates_.total_bitrate_observer()),
@@ -147,11 +145,11 @@ RTPSender::RTPSender(
nack_bitrate_(clock, bitrates_.retransmit_bitrate_observer()),
packet_history_(clock),
// Statistics
- statistics_crit_(CriticalSectionWrapper::CreateCriticalSection()),
rtp_stats_callback_(NULL),
frame_count_observer_(frame_count_observer),
send_side_delay_observer_(send_side_delay_observer),
event_log_(event_log),
+ send_packet_observer_(send_packet_observer),
// RTP variables
start_timestamp_forced_(false),
start_timestamp_(0),
@@ -166,7 +164,6 @@ RTPSender::RTPSender(
last_packet_marker_bit_(false),
csrcs_(),
rtx_(kRtxOff),
- target_bitrate_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
target_bitrate_(0) {
memset(nack_byte_count_times_, 0, sizeof(nack_byte_count_times_));
memset(nack_byte_count_, 0, sizeof(nack_byte_count_));
@@ -210,12 +207,12 @@ RTPSender::~RTPSender() {
}
void RTPSender::SetTargetBitrate(uint32_t bitrate) {
- CriticalSectionScoped cs(target_bitrate_critsect_.get());
+ rtc::CritScope cs(&target_bitrate_critsect_);
target_bitrate_ = bitrate;
}
uint32_t RTPSender::GetTargetBitrate() {
- CriticalSectionScoped cs(target_bitrate_critsect_.get());
+ rtc::CritScope cs(&target_bitrate_critsect_);
return target_bitrate_;
}
@@ -532,7 +529,7 @@ int32_t RTPSender::SendOutgoingData(FrameType frame_type,
payload_size, fragmentation, rtp_hdr);
}
- CriticalSectionScoped cs(statistics_crit_.get());
+ rtc::CritScope cs(&statistics_crit_);
// Note: This is currently only counting for video.
if (frame_type == kVideoFrameKey) {
++frame_counts_.key_frames;
@@ -675,13 +672,12 @@ size_t RTPSender::SendPadData(size_t bytes,
UpdateAbsoluteSendTime(padding_packet, length, rtp_header, now_ms);
PacketOptions options;
- if (using_transport_seq) {
- options.packet_id =
- UpdateTransportSequenceNumber(padding_packet, length, rtp_header);
- }
-
- if (using_transport_seq && transport_feedback_observer_) {
- transport_feedback_observer_->AddPacket(options.packet_id, length, true);
+ if (AllocateTransportSequenceNumber(&options.packet_id)) {
+ if (UpdateTransportSequenceNumber(options.packet_id, padding_packet,
+ length, rtp_header)) {
+ if (transport_feedback_observer_)
+ transport_feedback_observer_->AddPacket(options.packet_id, length);
+ }
}
if (!SendPacketToNetwork(padding_packet, length, options))
@@ -886,9 +882,7 @@ bool RTPSender::TimeToSendPacket(uint16_t sequence_number,
// Packet cannot be found. Allow sending to continue.
return true;
}
- if (!retransmission && capture_time_ms > 0) {
- UpdateDelayStatistics(capture_time_ms, clock_->TimeInMilliseconds());
- }
+
int rtx;
{
rtc::CritScope lock(&send_critsect_);
@@ -932,19 +926,18 @@ bool RTPSender::PrepareAndSendPacket(uint8_t* buffer,
diff_ms);
UpdateAbsoluteSendTime(buffer_to_send_ptr, length, rtp_header, now_ms);
- // TODO(sprang): Potentially too much overhead in IsRegistered()?
- bool using_transport_seq = rtp_header_extension_map_.IsRegistered(
- kRtpExtensionTransportSequenceNumber) &&
- transport_sequence_number_allocator_;
-
PacketOptions options;
- if (using_transport_seq) {
- options.packet_id =
- UpdateTransportSequenceNumber(buffer_to_send_ptr, length, rtp_header);
+ if (AllocateTransportSequenceNumber(&options.packet_id)) {
+ if (UpdateTransportSequenceNumber(options.packet_id, buffer_to_send_ptr,
+ length, rtp_header)) {
+ if (transport_feedback_observer_)
+ transport_feedback_observer_->AddPacket(options.packet_id, length);
+ }
}
- if (using_transport_seq && transport_feedback_observer_) {
- transport_feedback_observer_->AddPacket(options.packet_id, length, true);
+ if (!is_retransmit && !send_over_rtx) {
+ UpdateDelayStatistics(capture_time_ms, now_ms);
+ UpdateOnSendPacket(options.packet_id, capture_time_ms, rtp_header.ssrc);
}
bool ret = SendPacketToNetwork(buffer_to_send_ptr, length, options);
@@ -966,7 +959,7 @@ void RTPSender::UpdateRtpStats(const uint8_t* buffer,
// Get ssrc before taking statistics_crit_ to avoid possible deadlock.
uint32_t ssrc = is_rtx ? RtxSsrc() : SSRC();
- CriticalSectionScoped lock(statistics_crit_.get());
+ rtc::CritScope lock(&statistics_crit_);
if (is_rtx) {
counters = &rtx_rtp_stats_;
} else {
@@ -1061,23 +1054,17 @@ int32_t RTPSender::SendToNetwork(uint8_t* buffer,
}
return 0;
}
- if (capture_time_ms > 0) {
- UpdateDelayStatistics(capture_time_ms, now_ms);
- }
-
- // TODO(sprang): Potentially too much overhead in IsRegistered()?
- bool using_transport_seq = rtp_header_extension_map_.IsRegistered(
- kRtpExtensionTransportSequenceNumber) &&
- transport_sequence_number_allocator_;
PacketOptions options;
- if (using_transport_seq) {
- options.packet_id =
- UpdateTransportSequenceNumber(buffer, length, rtp_header);
- if (transport_feedback_observer_) {
- transport_feedback_observer_->AddPacket(options.packet_id, length, true);
+ if (AllocateTransportSequenceNumber(&options.packet_id)) {
+ if (UpdateTransportSequenceNumber(options.packet_id, buffer, length,
+ rtp_header)) {
+ if (transport_feedback_observer_)
+ transport_feedback_observer_->AddPacket(options.packet_id, length);
}
}
+ UpdateDelayStatistics(capture_time_ms, now_ms);
+ UpdateOnSendPacket(options.packet_id, capture_time_ms, rtp_header.ssrc);
bool sent = SendPacketToNetwork(buffer, length, options);
@@ -1098,7 +1085,7 @@ int32_t RTPSender::SendToNetwork(uint8_t* buffer,
}
void RTPSender::UpdateDelayStatistics(int64_t capture_time_ms, int64_t now_ms) {
- if (!send_side_delay_observer_)
+ if (!send_side_delay_observer_ || capture_time_ms <= 0)
return;
uint32_t ssrc;
@@ -1109,7 +1096,7 @@ void RTPSender::UpdateDelayStatistics(int64_t capture_time_ms, int64_t now_ms) {
ssrc = ssrc_;
}
{
- CriticalSectionScoped cs(statistics_crit_.get());
+ rtc::CritScope cs(&statistics_crit_);
// TODO(holmer): Compute this iteratively instead.
send_delays_[now_ms] = now_ms - capture_time_ms;
send_delays_.erase(send_delays_.begin(),
@@ -1130,6 +1117,15 @@ void RTPSender::UpdateDelayStatistics(int64_t capture_time_ms, int64_t now_ms) {
ssrc);
}
+void RTPSender::UpdateOnSendPacket(int packet_id,
+ int64_t capture_time_ms,
+ uint32_t ssrc) {
+ if (!send_packet_observer_ || capture_time_ms <= 0 || packet_id == -1)
+ return;
+
+ send_packet_observer_->OnSendPacket(packet_id, capture_time_ms, ssrc);
+}
+
void RTPSender::ProcessBitrate() {
rtc::CritScope lock(&send_critsect_);
total_bitrate_sent_.Process();
@@ -1157,7 +1153,7 @@ uint16_t RTPSender::AllocateSequenceNumber(uint16_t packets_to_send) {
void RTPSender::GetDataCounters(StreamDataCounters* rtp_stats,
StreamDataCounters* rtx_stats) const {
- CriticalSectionScoped lock(statistics_crit_.get());
+ rtc::CritScope lock(&statistics_crit_);
*rtp_stats = rtp_stats_;
*rtx_stats = rtx_rtp_stats_;
}
@@ -1613,7 +1609,8 @@ void RTPSender::UpdateAbsoluteSendTime(uint8_t* rtp_packet,
ConvertMsTo24Bits(now_ms));
}
-uint16_t RTPSender::UpdateTransportSequenceNumber(
+bool RTPSender::UpdateTransportSequenceNumber(
+ uint16_t sequence_number,
uint8_t* rtp_packet,
size_t rtp_packet_length,
const RTPHeader& rtp_header) const {
@@ -1624,19 +1621,26 @@ uint16_t RTPSender::UpdateTransportSequenceNumber(
rtp_packet_length, rtp_header,
kTransportSequenceNumberLength, &offset)) {
case ExtensionStatus::kNotRegistered:
- return 0;
+ return false;
case ExtensionStatus::kError:
LOG(LS_WARNING) << "Failed to update transport sequence number";
- return 0;
+ return false;
case ExtensionStatus::kOk:
break;
default:
RTC_NOTREACHED();
}
- uint16_t seq = transport_sequence_number_allocator_->AllocateSequenceNumber();
- BuildTransportSequenceNumberExtension(rtp_packet + offset, seq);
- return seq;
+ BuildTransportSequenceNumberExtension(rtp_packet + offset, sequence_number);
+ return true;
+}
+
+bool RTPSender::AllocateTransportSequenceNumber(int* packet_id) const {
+ if (!transport_sequence_number_allocator_)
+ return false;
+
+ *packet_id = transport_sequence_number_allocator_->AllocateSequenceNumber();
+ return true;
}
void RTPSender::SetSendingStatus(bool enabled) {
@@ -1858,12 +1862,12 @@ void RTPSender::BuildRtxPacket(uint8_t* buffer, size_t* length,
void RTPSender::RegisterRtpStatisticsCallback(
StreamDataCountersCallback* callback) {
- CriticalSectionScoped cs(statistics_crit_.get());
+ rtc::CritScope cs(&statistics_crit_);
rtp_stats_callback_ = callback;
}
StreamDataCountersCallback* RTPSender::GetRtpStatisticsCallback() const {
- CriticalSectionScoped cs(statistics_crit_.get());
+ rtc::CritScope cs(&statistics_crit_);
return rtp_stats_callback_;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
index 4344df67451..f501d27a723 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
@@ -13,9 +13,11 @@
#include <list>
#include <map>
+#include <memory>
#include <utility>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/random.h"
#include "webrtc/base/thread_annotations.h"
@@ -95,7 +97,9 @@ class RTPSender : public RTPSenderInterface {
BitrateStatisticsObserver* bitrate_callback,
FrameCountObserver* frame_count_observer,
SendSideDelayObserver* send_side_delay_observer,
- RtcEventLog* event_log);
+ RtcEventLog* event_log,
+ SendPacketObserver* send_packet_observer);
+
virtual ~RTPSender();
void ProcessBitrate();
@@ -351,6 +355,9 @@ class RTPSender : public RTPSenderInterface {
const PacketOptions& options);
void UpdateDelayStatistics(int64_t capture_time_ms, int64_t now_ms);
+ void UpdateOnSendPacket(int packet_id,
+ int64_t capture_time_ms,
+ uint32_t ssrc);
// Find the byte position of the RTP extension as indicated by |type| in
// |rtp_packet|. Return false if such extension doesn't exist.
@@ -368,12 +375,13 @@ class RTPSender : public RTPSenderInterface {
size_t rtp_packet_length,
const RTPHeader& rtp_header,
int64_t now_ms) const;
- // Update the transport sequence number of the packet using a new sequence
- // number allocated by SequenceNumberAllocator. Returns the assigned sequence
- // number, or 0 if extension could not be updated.
- uint16_t UpdateTransportSequenceNumber(uint8_t* rtp_packet,
- size_t rtp_packet_length,
- const RTPHeader& rtp_header) const;
+
+ bool UpdateTransportSequenceNumber(uint16_t sequence_number,
+ uint8_t* rtp_packet,
+ size_t rtp_packet_length,
+ const RTPHeader& rtp_header) const;
+
+ bool AllocateTransportSequenceNumber(int* packet_id) const;
void UpdateRtpStats(const uint8_t* buffer,
size_t packet_length,
@@ -422,8 +430,8 @@ class RTPSender : public RTPSenderInterface {
Bitrate total_bitrate_sent_;
const bool audio_configured_;
- const rtc::scoped_ptr<RTPSenderAudio> audio_;
- const rtc::scoped_ptr<RTPSenderVideo> video_;
+ const std::unique_ptr<RTPSenderAudio> audio_;
+ const std::unique_ptr<RTPSenderVideo> video_;
RtpPacketSender* const paced_sender_;
TransportSequenceNumberAllocator* const transport_sequence_number_allocator_;
@@ -454,7 +462,7 @@ class RTPSender : public RTPSenderInterface {
RTPPacketHistory packet_history_;
// Statistics
- rtc::scoped_ptr<CriticalSectionWrapper> statistics_crit_;
+ rtc::CriticalSection statistics_crit_;
SendDelayMap send_delays_ GUARDED_BY(statistics_crit_);
FrameCounts frame_counts_ GUARDED_BY(statistics_crit_);
StreamDataCounters rtp_stats_ GUARDED_BY(statistics_crit_);
@@ -463,6 +471,7 @@ class RTPSender : public RTPSenderInterface {
FrameCountObserver* const frame_count_observer_;
SendSideDelayObserver* const send_side_delay_observer_;
RtcEventLog* const event_log_;
+ SendPacketObserver* const send_packet_observer_;
// RTP variables
bool start_timestamp_forced_ GUARDED_BY(send_critsect_);
@@ -489,7 +498,7 @@ class RTPSender : public RTPSenderInterface {
// SetTargetBitrateKbps or GetTargetBitrateKbps. Also remember
// that by the time the function returns there is no guarantee
// that the target bitrate is still valid.
- rtc::scoped_ptr<CriticalSectionWrapper> target_bitrate_critsect_;
+ rtc::CriticalSection target_bitrate_critsect_;
uint32_t target_bitrate_ GUARDED_BY(target_bitrate_critsect_);
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RTPSender);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
index 804294ac540..4236e1f37d4 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
@@ -12,10 +12,11 @@
#include <string.h>
+#include "webrtc/base/logging.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -24,7 +25,6 @@ static const int kDtmfFrequencyHz = 8000;
RTPSenderAudio::RTPSenderAudio(Clock* clock, RTPSender* rtpSender)
: _clock(clock),
_rtpSender(rtpSender),
- _sendAudioCritsect(CriticalSectionWrapper::CreateCriticalSection()),
_packetSizeSamples(160),
_dtmfEventIsOn(false),
_dtmfEventFirstPacketSent(false),
@@ -53,7 +53,7 @@ int RTPSenderAudio::AudioFrequency() const {
// set audio packet size, used to determine when it's time to send a DTMF packet
// in silence (CNG)
int32_t RTPSenderAudio::SetAudioPacketSize(uint16_t packetSizeSamples) {
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
_packetSizeSamples = packetSizeSamples;
return 0;
@@ -67,7 +67,7 @@ int32_t RTPSenderAudio::RegisterAudioPayload(
const uint32_t rate,
RtpUtility::Payload** payload) {
if (RtpUtility::StringCompare(payloadName, "cn", 2)) {
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
// we can have multiple CNG payload types
switch (frequency) {
case 8000:
@@ -86,7 +86,7 @@ int32_t RTPSenderAudio::RegisterAudioPayload(
return -1;
}
} else if (RtpUtility::StringCompare(payloadName, "telephone-event", 15)) {
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
// Don't add it to the list
// we dont want to allow send with a DTMF payloadtype
_dtmfPayloadType = payloadType;
@@ -104,7 +104,7 @@ int32_t RTPSenderAudio::RegisterAudioPayload(
}
bool RTPSenderAudio::MarkerBit(FrameType frameType, int8_t payload_type) {
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
// for audio true for first packet in a speech burst
bool markerBit = false;
if (_lastPayloadType != payload_type) {
@@ -162,7 +162,7 @@ int32_t RTPSenderAudio::SendAudio(FrameType frameType,
int8_t dtmf_payload_type;
uint16_t packet_size_samples;
{
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
red_payload_type = _REDPayloadType;
audio_level_dbov = _audioLevel_dBov;
dtmf_payload_type = _dtmfPayloadType;
@@ -333,8 +333,9 @@ int32_t RTPSenderAudio::SendAudio(FrameType frameType,
memcpy(dataBuffer + rtpHeaderLength, payloadData, payloadSize);
}
}
+
{
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
_lastPayloadType = payloadType;
}
// Update audio level extension, if included.
@@ -348,10 +349,14 @@ int32_t RTPSenderAudio::SendAudio(FrameType frameType,
TRACE_EVENT_ASYNC_END2("webrtc", "Audio", captureTimeStamp, "timestamp",
_rtpSender->Timestamp(), "seqnum",
_rtpSender->SequenceNumber());
- return _rtpSender->SendToNetwork(dataBuffer, payloadSize, rtpHeaderLength,
- TickTime::MillisecondTimestamp(),
- kAllowRetransmission,
- RtpPacketSender::kHighPriority);
+ int32_t send_result = _rtpSender->SendToNetwork(
+ dataBuffer, payloadSize, rtpHeaderLength,
+ rtc::TimeMillis(), kAllowRetransmission,
+ RtpPacketSender::kHighPriority);
+ if (first_packet_sent_()) {
+ LOG(LS_INFO) << "First audio RTP packet sent to pacer";
+ }
+ return send_result;
}
// Audio level magnitude and voice activity flag are set for each RTP packet
@@ -359,7 +364,7 @@ int32_t RTPSenderAudio::SetAudioLevel(uint8_t level_dBov) {
if (level_dBov > 127) {
return -1;
}
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
_audioLevel_dBov = level_dBov;
return 0;
}
@@ -369,14 +374,14 @@ int32_t RTPSenderAudio::SetRED(int8_t payloadType) {
if (payloadType < -1) {
return -1;
}
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
_REDPayloadType = payloadType;
return 0;
}
// Get payload type for Redundant Audio Data RFC 2198
int32_t RTPSenderAudio::RED(int8_t* payloadType) const {
- CriticalSectionScoped cs(_sendAudioCritsect.get());
+ rtc::CritScope cs(&_sendAudioCritsect);
if (_REDPayloadType == -1) {
// not configured
return -1;
@@ -390,7 +395,7 @@ int32_t RTPSenderAudio::SendTelephoneEvent(uint8_t key,
uint16_t time_ms,
uint8_t level) {
{
- CriticalSectionScoped lock(_sendAudioCritsect.get());
+ rtc::CritScope lock(&_sendAudioCritsect);
if (_dtmfPayloadType < 0) {
// TelephoneEvent payloadtype not configured
return -1;
@@ -445,7 +450,7 @@ int32_t RTPSenderAudio::SendTelephoneEventPacket(bool ended,
"Audio::SendTelephoneEvent", "timestamp",
dtmfTimeStamp, "seqnum", _rtpSender->SequenceNumber());
retVal = _rtpSender->SendToNetwork(
- dtmfbuffer, 4, 12, TickTime::MillisecondTimestamp(),
+ dtmfbuffer, 4, 12, rtc::TimeMillis(),
kAllowRetransmission, RtpPacketSender::kHighPriority);
sendCount--;
} while (sendCount > 0 && retVal == 0);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
index 25c5e4dd88a..4bc0266b7d2 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
@@ -12,6 +12,8 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_AUDIO_H_
#include "webrtc/common_types.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/onetimeevent.h"
#include "webrtc/modules/rtp_rtcp/source/dtmf_queue.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"
@@ -72,7 +74,7 @@ class RTPSenderAudio : public DTMFqueue {
Clock* const _clock;
RTPSender* const _rtpSender;
- rtc::scoped_ptr<CriticalSectionWrapper> _sendAudioCritsect;
+ rtc::CriticalSection _sendAudioCritsect;
uint16_t _packetSizeSamples GUARDED_BY(_sendAudioCritsect);
@@ -100,6 +102,7 @@ class RTPSenderAudio : public DTMFqueue {
// Audio level indication
// (https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/)
uint8_t _audioLevel_dBov GUARDED_BY(_sendAudioCritsect);
+ OneTimeEvent first_packet_sent_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
index b7238d26a22..d04ff4d200a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
@@ -8,17 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-/*
- * This file includes unit tests for the RTPSender.
- */
-
#include <list>
+#include <memory>
#include <vector>
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/buffer.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call/mock/mock_rtc_event_log.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
@@ -126,6 +122,11 @@ class MockTransportSequenceNumberAllocator
MOCK_METHOD0(AllocateSequenceNumber, uint16_t());
};
+class MockSendPacketObserver : public SendPacketObserver {
+ public:
+ MOCK_METHOD3(OnSendPacket, void(uint16_t, int64_t, uint32_t));
+};
+
class RtpSenderTest : public ::testing::Test {
protected:
RtpSenderTest()
@@ -141,10 +142,10 @@ class RtpSenderTest : public ::testing::Test {
void SetUp() override { SetUpRtpSender(true); }
void SetUpRtpSender(bool pacer) {
- rtp_sender_.reset(new RTPSender(false, &fake_clock_, &transport_,
- pacer ? &mock_paced_sender_ : nullptr,
- &seq_num_allocator_, nullptr, nullptr,
- nullptr, nullptr, &mock_rtc_event_log_));
+ rtp_sender_.reset(new RTPSender(
+ false, &fake_clock_, &transport_, pacer ? &mock_paced_sender_ : nullptr,
+ &seq_num_allocator_, nullptr, nullptr, nullptr, nullptr,
+ &mock_rtc_event_log_, &send_packet_observer_));
rtp_sender_->SetSequenceNumber(kSeqNum);
}
@@ -152,7 +153,8 @@ class RtpSenderTest : public ::testing::Test {
MockRtcEventLog mock_rtc_event_log_;
MockRtpPacketSender mock_paced_sender_;
MockTransportSequenceNumberAllocator seq_num_allocator_;
- rtc::scoped_ptr<RTPSender> rtp_sender_;
+ MockSendPacketObserver send_packet_observer_;
+ std::unique_ptr<RTPSender> rtp_sender_;
int payload_;
LoopbackTransportTest transport_;
const bool kMarkerBit;
@@ -189,6 +191,20 @@ class RtpSenderTest : public ::testing::Test {
packet_, payload_length, rtp_length, capture_time_ms,
kAllowRetransmission, RtpPacketSender::kNormalPriority));
}
+
+ void SendGenericPayload() {
+ const uint8_t kPayload[] = {47, 11, 32, 93, 89};
+ const uint32_t kTimestamp = 1234;
+ const uint8_t kPayloadType = 127;
+ const int64_t kCaptureTimeMs = fake_clock_.TimeInMilliseconds();
+ char payload_name[RTP_PAYLOAD_NAME_SIZE] = "GENERIC";
+ EXPECT_EQ(0, rtp_sender_->RegisterPayload(payload_name, kPayloadType, 90000,
+ 0, 1500));
+
+ EXPECT_EQ(0, rtp_sender_->SendOutgoingData(
+ kVideoFrameKey, kPayloadType, kTimestamp, kCaptureTimeMs,
+ kPayload, sizeof(kPayload), nullptr));
+ }
};
// TODO(pbos): Move tests over from WithoutPacer to RtpSenderTest as this is our
@@ -206,7 +222,7 @@ class RtpSenderVideoTest : public RtpSenderTest {
rtp_sender_video_.reset(
new RTPSenderVideo(&fake_clock_, rtp_sender_.get()));
}
- rtc::scoped_ptr<RTPSenderVideo> rtp_sender_video_;
+ std::unique_ptr<RTPSenderVideo> rtp_sender_video_;
void VerifyCVOPacket(uint8_t* data,
size_t len,
@@ -483,21 +499,13 @@ TEST_F(RtpSenderTestWithoutPacer, SendsPacketsWithTransportSequenceNumber) {
kRtpExtensionTransportSequenceNumber,
kTransportSequenceNumberExtensionId));
- char payload_name[RTP_PAYLOAD_NAME_SIZE] = "GENERIC";
- const uint8_t payload_type = 127;
- ASSERT_EQ(0, rtp_sender_->RegisterPayload(payload_name, payload_type, 90000,
- 0, 1500));
- // Create a dummy payload of 5 bytes.
- uint8_t payload[] = {47, 11, 32, 93, 89};
-
- const uint16_t kTransportSequenceNumber = 17;
EXPECT_CALL(seq_num_allocator_, AllocateSequenceNumber())
.WillOnce(testing::Return(kTransportSequenceNumber));
- const uint32_t kTimestamp = 1234;
- const int64_t kCaptureTimeMs = 4321;
- ASSERT_EQ(0, rtp_sender_->SendOutgoingData(
- kVideoFrameKey, payload_type, kTimestamp, kCaptureTimeMs,
- payload, sizeof(payload), nullptr));
+ EXPECT_CALL(send_packet_observer_,
+ OnSendPacket(kTransportSequenceNumber, _, _))
+ .Times(1);
+
+ SendGenericPayload();
RtpUtility::RtpHeaderParser rtp_parser(transport_.last_sent_packet_,
transport_.last_sent_packet_len_);
@@ -513,6 +521,19 @@ TEST_F(RtpSenderTestWithoutPacer, SendsPacketsWithTransportSequenceNumber) {
rtp_header.extension.transportSequenceNumber);
}
+TEST_F(RtpSenderTestWithoutPacer, OnSendPacketUpdated) {
+ EXPECT_CALL(mock_rtc_event_log_, // Ignore rtc event calls.
+ LogRtpHeader(PacketDirection::kOutgoingPacket, _, _, _));
+
+ EXPECT_CALL(seq_num_allocator_, AllocateSequenceNumber())
+ .WillOnce(testing::Return(kTransportSequenceNumber));
+ EXPECT_CALL(send_packet_observer_,
+ OnSendPacket(kTransportSequenceNumber, _, _))
+ .Times(1);
+
+ SendGenericPayload();
+}
+
// Test CVO header extension is only set when marker bit is true.
TEST_F(RtpSenderTestWithoutPacer, BuildRTPPacketWithVideoRotation_MarkerBit) {
rtp_sender_->SetVideoRotation(kRotation);
@@ -853,7 +874,7 @@ TEST_F(RtpSenderTest, SendPadding) {
rtp_header_len += 4; // 4 extra bytes common to all extension headers.
// Create and set up parser.
- rtc::scoped_ptr<webrtc::RtpHeaderParser> rtp_parser(
+ std::unique_ptr<webrtc::RtpHeaderParser> rtp_parser(
webrtc::RtpHeaderParser::Create());
ASSERT_TRUE(rtp_parser.get() != nullptr);
rtp_parser->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset,
@@ -951,11 +972,66 @@ TEST_F(RtpSenderTest, SendPadding) {
EXPECT_EQ(expected_send_time, rtp_header.extension.absoluteSendTime);
}
+TEST_F(RtpSenderTest, OnSendPacketUpdated) {
+ EXPECT_CALL(mock_rtc_event_log_, // Ignore rtc event calls.
+ LogRtpHeader(PacketDirection::kOutgoingPacket, _, _, _));
+ rtp_sender_->SetStorePacketsStatus(true, 10);
+
+ EXPECT_CALL(send_packet_observer_,
+ OnSendPacket(kTransportSequenceNumber, _, _))
+ .Times(1);
+ EXPECT_CALL(seq_num_allocator_, AllocateSequenceNumber())
+ .WillOnce(testing::Return(kTransportSequenceNumber));
+ EXPECT_CALL(mock_paced_sender_, InsertPacket(_, _, _, _, _, _)).Times(1);
+
+ SendGenericPayload(); // Packet passed to pacer.
+ const bool kIsRetransmit = false;
+ rtp_sender_->TimeToSendPacket(kSeqNum, fake_clock_.TimeInMilliseconds(),
+ kIsRetransmit);
+ EXPECT_EQ(1, transport_.packets_sent_);
+}
+
+TEST_F(RtpSenderTest, OnSendPacketNotUpdatedForRetransmits) {
+ EXPECT_CALL(mock_rtc_event_log_, // Ignore rtc event calls.
+ LogRtpHeader(PacketDirection::kOutgoingPacket, _, _, _));
+ rtp_sender_->SetStorePacketsStatus(true, 10);
+
+ EXPECT_CALL(send_packet_observer_, OnSendPacket(_, _, _)).Times(0);
+ EXPECT_CALL(seq_num_allocator_, AllocateSequenceNumber())
+ .WillOnce(testing::Return(kTransportSequenceNumber));
+ EXPECT_CALL(mock_paced_sender_, InsertPacket(_, _, _, _, _, _)).Times(1);
+
+ SendGenericPayload(); // Packet passed to pacer.
+ const bool kIsRetransmit = true;
+ rtp_sender_->TimeToSendPacket(kSeqNum, fake_clock_.TimeInMilliseconds(),
+ kIsRetransmit);
+ EXPECT_EQ(1, transport_.packets_sent_);
+}
+
+TEST_F(RtpSenderTest, OnSendPacketNotUpdatedWithoutSeqNumAllocator) {
+ rtp_sender_.reset(new RTPSender(
+ false, &fake_clock_, &transport_, &mock_paced_sender_,
+ nullptr /* TransportSequenceNumberAllocator */, nullptr, nullptr, nullptr,
+ nullptr, nullptr, &send_packet_observer_));
+ rtp_sender_->SetSequenceNumber(kSeqNum);
+ rtp_sender_->SetStorePacketsStatus(true, 10);
+
+ EXPECT_CALL(send_packet_observer_, OnSendPacket(_, _, _)).Times(0);
+ EXPECT_CALL(mock_paced_sender_, InsertPacket(_, _, _, _, _, _)).Times(1);
+
+ SendGenericPayload(); // Packet passed to pacer.
+ const bool kIsRetransmit = false;
+ rtp_sender_->TimeToSendPacket(kSeqNum, fake_clock_.TimeInMilliseconds(),
+ kIsRetransmit);
+ EXPECT_EQ(1, transport_.packets_sent_);
+}
+
TEST_F(RtpSenderTest, SendRedundantPayloads) {
MockTransport transport;
rtp_sender_.reset(new RTPSender(
- false, &fake_clock_, &transport, &mock_paced_sender_, nullptr,
- nullptr, nullptr, nullptr, nullptr, &mock_rtc_event_log_));
+ false, &fake_clock_, &transport, &mock_paced_sender_, nullptr, nullptr,
+ nullptr, nullptr, nullptr, &mock_rtc_event_log_, nullptr));
+
rtp_sender_->SetSequenceNumber(kSeqNum);
rtp_sender_->SetRtxPayloadType(kRtxPayload, kPayload);
@@ -972,7 +1048,7 @@ TEST_F(RtpSenderTest, SendRedundantPayloads) {
rtp_sender_->SetRtxSsrc(1234);
// Create and set up parser.
- rtc::scoped_ptr<webrtc::RtpHeaderParser> rtp_parser(
+ std::unique_ptr<webrtc::RtpHeaderParser> rtp_parser(
webrtc::RtpHeaderParser::Create());
ASSERT_TRUE(rtp_parser.get() != nullptr);
rtp_parser->RegisterRtpHeaderExtension(kRtpExtensionTransmissionTimeOffset,
@@ -1096,9 +1172,9 @@ TEST_F(RtpSenderTest, FrameCountCallbacks) {
FrameCounts frame_counts_;
} callback;
- rtp_sender_.reset(new RTPSender(false, &fake_clock_, &transport_,
- &mock_paced_sender_, nullptr, nullptr,
- nullptr, &callback, nullptr, nullptr));
+ rtp_sender_.reset(new RTPSender(
+ false, &fake_clock_, &transport_, &mock_paced_sender_, nullptr, nullptr,
+ nullptr, &callback, nullptr, nullptr, nullptr));
char payload_name[RTP_PAYLOAD_NAME_SIZE] = "GENERIC";
const uint8_t payload_type = 127;
@@ -1152,8 +1228,8 @@ TEST_F(RtpSenderTest, BitrateCallbacks) {
BitrateStatistics total_stats_;
BitrateStatistics retransmit_stats_;
} callback;
- rtp_sender_.reset(new RTPSender(false, &fake_clock_, &transport_,
- nullptr, nullptr, nullptr, &callback, nullptr,
+ rtp_sender_.reset(new RTPSender(false, &fake_clock_, &transport_, nullptr,
+ nullptr, nullptr, &callback, nullptr, nullptr,
nullptr, nullptr));
// Simulate kNumPackets sent with kPacketInterval ms intervals.
@@ -1205,7 +1281,7 @@ class RtpSenderAudioTest : public RtpSenderTest {
void SetUp() override {
payload_ = kAudioPayload;
- rtp_sender_.reset(new RTPSender(true, &fake_clock_, &transport_,
+ rtp_sender_.reset(new RTPSender(true, &fake_clock_, &transport_, nullptr,
nullptr, nullptr, nullptr, nullptr, nullptr,
nullptr, nullptr));
rtp_sender_->SetSequenceNumber(kSeqNum);
@@ -1297,7 +1373,6 @@ TEST_F(RtpSenderTestWithoutPacer, StreamDataCountersCallbacks) {
fec_params.fec_mask_type = kFecMaskRandom;
fec_params.fec_rate = 1;
fec_params.max_fec_frames = 1;
- fec_params.use_uep_protection = false;
rtp_sender_->SetFecParameters(&fec_params, &fec_params);
ASSERT_EQ(0, rtp_sender_->SendOutgoingData(kVideoFrameDelta, payload_type,
1234, 4321, payload,
@@ -1407,7 +1482,7 @@ TEST_F(RtpSenderAudioTest, CheckMarkerBitForTelephoneEvents) {
ASSERT_EQ(0, rtp_sender_->SendOutgoingData(kEmptyFrame, payload_type,
capture_time_ms + 2000, 0, nullptr,
0, nullptr));
- rtc::scoped_ptr<webrtc::RtpHeaderParser> rtp_parser(
+ std::unique_ptr<webrtc::RtpHeaderParser> rtp_parser(
webrtc::RtpHeaderParser::Create());
ASSERT_TRUE(rtp_parser.get() != nullptr);
webrtc::RTPHeader rtp_header;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
index 32ba26f54b8..e10b5b2edab 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -13,6 +13,7 @@
#include <stdlib.h>
#include <string.h>
+#include <memory>
#include <vector>
#include "webrtc/base/checks.h"
@@ -24,21 +25,19 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
enum { REDForFECHeaderLength = 1 };
RTPSenderVideo::RTPSenderVideo(Clock* clock, RTPSenderInterface* rtpSender)
: _rtpSender(*rtpSender),
- crit_(CriticalSectionWrapper::CreateCriticalSection()),
_videoType(kRtpVideoGeneric),
_retransmissionSettings(kRetransmitBaseLayer),
// Generic FEC
fec_(),
fec_enabled_(false),
- red_payload_type_(-1),
- fec_payload_type_(-1),
+ red_payload_type_(0),
+ fec_payload_type_(0),
delta_fec_params_(),
key_fec_params_(),
producer_fec_(&fec_),
@@ -113,13 +112,13 @@ void RTPSenderVideo::SendVideoPacketAsRed(uint8_t* data_buffer,
int64_t capture_time_ms,
StorageType media_packet_storage,
bool protect) {
- rtc::scoped_ptr<RedPacket> red_packet;
+ std::unique_ptr<RedPacket> red_packet;
std::vector<RedPacket*> fec_packets;
StorageType fec_storage = kDontRetransmit;
uint16_t next_fec_sequence_number = 0;
{
// Only protect while creating RED and FEC packets, not when sending.
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
red_packet.reset(producer_fec_.BuildRedPacket(
data_buffer, payload_length, rtp_header_length, red_payload_type_));
if (protect) {
@@ -170,7 +169,7 @@ void RTPSenderVideo::SendVideoPacketAsRed(uint8_t* data_buffer,
void RTPSenderVideo::SetGenericFECStatus(const bool enable,
const uint8_t payloadTypeRED,
const uint8_t payloadTypeFEC) {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
fec_enabled_ = enable;
red_payload_type_ = payloadTypeRED;
fec_payload_type_ = payloadTypeFEC;
@@ -184,33 +183,38 @@ void RTPSenderVideo::SetGenericFECStatus(const bool enable,
void RTPSenderVideo::GenericFECStatus(bool* enable,
uint8_t* payloadTypeRED,
uint8_t* payloadTypeFEC) const {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
*enable = fec_enabled_;
*payloadTypeRED = red_payload_type_;
*payloadTypeFEC = fec_payload_type_;
}
size_t RTPSenderVideo::FECPacketOverhead() const {
- CriticalSectionScoped cs(crit_.get());
- if (fec_enabled_) {
+ rtc::CritScope cs(&crit_);
+ size_t overhead = 0;
+ if (red_payload_type_ != 0) {
// Overhead is FEC headers plus RED for FEC header plus anything in RTP
// header beyond the 12 bytes base header (CSRC list, extensions...)
// This reason for the header extensions to be included here is that
// from an FEC viewpoint, they are part of the payload to be protected.
// (The base RTP header is already protected by the FEC header.)
- return ForwardErrorCorrection::PacketOverhead() + REDForFECHeaderLength +
- (_rtpSender.RTPHeaderLength() - kRtpHeaderSize);
+ overhead = REDForFECHeaderLength + (_rtpSender.RTPHeaderLength() -
+ kRtpHeaderSize);
}
- return 0;
+ if (fec_enabled_)
+ overhead += ForwardErrorCorrection::PacketOverhead();
+ return overhead;
}
void RTPSenderVideo::SetFecParameters(const FecProtectionParams* delta_params,
const FecProtectionParams* key_params) {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
RTC_DCHECK(delta_params);
RTC_DCHECK(key_params);
- delta_fec_params_ = *delta_params;
- key_fec_params_ = *key_params;
+ if (fec_enabled_) {
+ delta_fec_params_ = *delta_params;
+ key_fec_params_ = *key_params;
+ }
}
int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
@@ -226,19 +230,20 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
return -1;
}
- rtc::scoped_ptr<RtpPacketizer> packetizer(RtpPacketizer::Create(
+ std::unique_ptr<RtpPacketizer> packetizer(RtpPacketizer::Create(
videoType, _rtpSender.MaxDataPayloadLength(),
video_header ? &(video_header->codecHeader) : nullptr, frameType));
StorageType storage;
- bool fec_enabled;
+ int red_payload_type;
+ bool first_frame = first_frame_sent_();
{
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
FecProtectionParams* fec_params =
frameType == kVideoFrameKey ? &key_fec_params_ : &delta_fec_params_;
producer_fec_.SetFecParameters(fec_params, 0);
storage = packetizer->GetStorageType(_retransmissionSettings);
- fec_enabled = fec_enabled_;
+ red_payload_type = red_payload_type_;
}
// Register CVO rtp header extension at the first time when we receive a frame
@@ -260,6 +265,7 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
packetizer->SetPayloadData(data, payload_bytes_to_send, frag);
+ bool first = true;
bool last = false;
while (!last) {
uint8_t dataBuffer[IP_PACKET_SIZE] = {0};
@@ -268,6 +274,7 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
&payload_bytes_in_packet, &last)) {
return -1;
}
+
// Write RTP header.
// Set marker bit true if this is the last packet in frame.
_rtpSender.BuildRTPheader(
@@ -299,7 +306,7 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
_rtpSender.UpdateVideoRotation(dataBuffer, packetSize, rtp_header,
video_header->rotation);
}
- if (fec_enabled) {
+ if (red_payload_type != 0) {
SendVideoPacketAsRed(dataBuffer, payload_bytes_in_packet,
rtp_header_length, _rtpSender.SequenceNumber(),
captureTimeStamp, capture_time_ms, storage,
@@ -309,6 +316,18 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
_rtpSender.SequenceNumber(), captureTimeStamp,
capture_time_ms, storage);
}
+
+ if (first_frame) {
+ if (first) {
+ LOG(LS_INFO)
+ << "Sent first RTP packet of the first video frame (pre-pacer)";
+ }
+ if (last) {
+ LOG(LS_INFO)
+ << "Sent last RTP packet of the first video frame (pre-pacer)";
+ }
+ }
+ first = false;
}
TRACE_EVENT_ASYNC_END1(
@@ -330,12 +349,12 @@ uint32_t RTPSenderVideo::FecOverheadRate() const {
}
int RTPSenderVideo::SelectiveRetransmissions() const {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
return _retransmissionSettings;
}
void RTPSenderVideo::SetSelectiveRetransmissions(uint8_t settings) {
- CriticalSectionScoped cs(crit_.get());
+ rtc::CritScope cs(&crit_);
_retransmissionSettings = settings;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
index dc1088a3f7c..8307b83864d 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
@@ -13,7 +13,8 @@
#include <list>
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/onetimeevent.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@@ -27,7 +28,6 @@
#include "webrtc/typedefs.h"
namespace webrtc {
-class CriticalSectionWrapper;
class RTPSenderVideo {
public:
@@ -97,7 +97,7 @@ class RTPSenderVideo {
RTPSenderInterface& _rtpSender;
// Should never be held when calling out of this class.
- const rtc::scoped_ptr<CriticalSectionWrapper> crit_;
+ const rtc::CriticalSection crit_;
RtpVideoCodecTypes _videoType;
int32_t _retransmissionSettings GUARDED_BY(crit_);
@@ -116,6 +116,7 @@ class RTPSenderVideo {
Bitrate _fecOverheadRate;
// Bitrate used for video payload and RTP headers
Bitrate _videoBitrate;
+ OneTimeEvent first_frame_sent_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
index bdae3c4806f..439cd01a9aa 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
@@ -319,6 +319,13 @@ void RtpHeaderParser::ParseOneByteExtensionHeader(
return;
}
+ if (ptrRTPDataExtensionEnd - ptr < (len + 1)) {
+ LOG(LS_WARNING) << "Incorrect one-byte extension len: " << (len + 1)
+ << ", bytes left in buffer: "
+ << (ptrRTPDataExtensionEnd - ptr);
+ return;
+ }
+
RTPExtensionType type;
if (ptrExtensionMap->GetType(id, &type) != 0) {
// If we encounter an unknown extension, just skip over it.
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h
index 1a242853952..474bc6e04e0 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h
@@ -52,11 +52,6 @@ class RtpHeaderParser {
bool ParseRtcp(RTPHeader* header) const;
bool Parse(RTPHeader* parsedPacket,
RtpHeaderExtensionMap* ptrExtensionMap = nullptr) const;
- RTC_DEPRECATED bool Parse(
- RTPHeader& parsedPacket, // NOLINT(runtime/references)
- RtpHeaderExtensionMap* ptrExtensionMap = nullptr) const {
- return Parse(&parsedPacket, ptrExtensionMap);
- }
private:
void ParseOneByteExtensionHeader(RTPHeader* parsedPacket,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
index f1d1549e279..a96d05db468 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
@@ -9,9 +9,8 @@
*/
#include "webrtc/modules/rtp_rtcp/source/ssrc_database.h"
-
+#include "webrtc/base/timeutils.h"
#include "webrtc/base/checks.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -45,7 +44,7 @@ void SSRCDatabase::ReturnSSRC(uint32_t ssrc) {
ssrcs_.erase(ssrc);
}
-SSRCDatabase::SSRCDatabase() : random_(TickTime::Now().Ticks()) {}
+SSRCDatabase::SSRCDatabase() : random_(rtc::TimeMicros()) {}
SSRCDatabase::~SSRCDatabase() {}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc
index da43204b093..43d3a82ab28 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.cc
@@ -67,10 +67,8 @@ void TMMBRSet::ClearEntry(uint32_t idx) {
}
TMMBRHelp::TMMBRHelp()
- : _criticalSection(CriticalSectionWrapper::CreateCriticalSection()),
- _candidateSet(),
+ : _candidateSet(),
_boundingSet(),
- _boundingSetToSend(),
_ptrIntersectionBoundingSet(NULL),
_ptrMaxPRBoundingSet(NULL) {
}
@@ -80,13 +78,12 @@ TMMBRHelp::~TMMBRHelp() {
delete [] _ptrMaxPRBoundingSet;
_ptrIntersectionBoundingSet = 0;
_ptrMaxPRBoundingSet = 0;
- delete _criticalSection;
}
TMMBRSet*
TMMBRHelp::VerifyAndAllocateBoundingSet(uint32_t minimumSize)
{
- CriticalSectionScoped lock(_criticalSection);
+ rtc::CritScope lock(&_criticalSection);
if(minimumSize > _boundingSet.capacity())
{
@@ -107,43 +104,10 @@ TMMBRSet* TMMBRHelp::BoundingSet() {
return &_boundingSet;
}
-int32_t
-TMMBRHelp::SetTMMBRBoundingSetToSend(const TMMBRSet* boundingSetToSend)
-{
- CriticalSectionScoped lock(_criticalSection);
-
- if (boundingSetToSend == NULL)
- {
- _boundingSetToSend.clearSet();
- return 0;
- }
-
- VerifyAndAllocateBoundingSetToSend(boundingSetToSend->lengthOfSet());
- _boundingSetToSend.clearSet();
- for (uint32_t i = 0; i < boundingSetToSend->lengthOfSet(); i++)
- {
- // cap at our configured max bitrate
- uint32_t bitrate = boundingSetToSend->Tmmbr(i);
- _boundingSetToSend.SetEntry(i, bitrate,
- boundingSetToSend->PacketOH(i),
- boundingSetToSend->Ssrc(i));
- }
- return 0;
-}
-
-int32_t
-TMMBRHelp::VerifyAndAllocateBoundingSetToSend(uint32_t minimumSize)
-{
- CriticalSectionScoped lock(_criticalSection);
-
- _boundingSetToSend.VerifyAndAllocateSet(minimumSize);
- return 0;
-}
-
TMMBRSet*
TMMBRHelp::VerifyAndAllocateCandidateSet(uint32_t minimumSize)
{
- CriticalSectionScoped lock(_criticalSection);
+ rtc::CritScope lock(&_criticalSection);
_candidateSet.VerifyAndAllocateSet(minimumSize);
return &_candidateSet;
@@ -155,16 +119,10 @@ TMMBRHelp::CandidateSet()
return &_candidateSet;
}
-TMMBRSet*
-TMMBRHelp::BoundingSetToSend()
-{
- return &_boundingSetToSend;
-}
-
int32_t
TMMBRHelp::FindTMMBRBoundingSet(TMMBRSet*& boundingSet)
{
- CriticalSectionScoped lock(_criticalSection);
+ rtc::CritScope lock(&_criticalSection);
// Work on local variable, will be modified
TMMBRSet candidateSet;
@@ -207,7 +165,7 @@ TMMBRHelp::FindTMMBRBoundingSet(TMMBRSet*& boundingSet)
int32_t
TMMBRHelp::FindTMMBRBoundingSet(int32_t numCandidates, TMMBRSet& candidateSet)
{
- CriticalSectionScoped lock(_criticalSection);
+ rtc::CritScope lock(&_criticalSection);
uint32_t numBoundingSet = 0;
VerifyAndAllocateBoundingSet(candidateSet.capacity());
@@ -412,7 +370,7 @@ TMMBRHelp::FindTMMBRBoundingSet(int32_t numCandidates, TMMBRSet& candidateSet)
bool TMMBRHelp::IsOwner(const uint32_t ssrc,
const uint32_t length) const {
- CriticalSectionScoped lock(_criticalSection);
+ rtc::CritScope lock(&_criticalSection);
if (length == 0) {
// Empty bounding set.
@@ -428,7 +386,7 @@ bool TMMBRHelp::IsOwner(const uint32_t ssrc,
}
bool TMMBRHelp::CalcMinBitRate( uint32_t* minBitrateKbit) const {
- CriticalSectionScoped lock(_criticalSection);
+ rtc::CritScope lock(&_criticalSection);
if (_candidateSet.size() == 0) {
// Empty bounding set.
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.h
index 6236d5d43bd..ffafb1409b8 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/tmmbr_help.h
@@ -12,8 +12,8 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_TMMBR_HELP_H_
#include <vector>
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -63,11 +63,9 @@ public:
TMMBRSet* BoundingSet(); // used for debuging
TMMBRSet* CandidateSet();
- TMMBRSet* BoundingSetToSend();
TMMBRSet* VerifyAndAllocateCandidateSet(const uint32_t minimumSize);
int32_t FindTMMBRBoundingSet(TMMBRSet*& boundingSet);
- int32_t SetTMMBRBoundingSetToSend(const TMMBRSet* boundingSetToSend);
bool IsOwner(const uint32_t ssrc, const uint32_t length) const;
@@ -75,15 +73,12 @@ public:
protected:
TMMBRSet* VerifyAndAllocateBoundingSet(uint32_t minimumSize);
- int32_t VerifyAndAllocateBoundingSetToSend(uint32_t minimumSize);
-
int32_t FindTMMBRBoundingSet(int32_t numCandidates, TMMBRSet& candidateSet);
private:
- CriticalSectionWrapper* _criticalSection;
+ rtc::CriticalSection _criticalSection;
TMMBRSet _candidateSet;
TMMBRSet _boundingSet;
- TMMBRSet _boundingSetToSend;
float* _ptrIntersectionBoundingSet;
float* _ptrMaxPRBoundingSet;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc
index 67e8a65c4df..89c9cbebeba 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.cc
@@ -11,6 +11,7 @@
#include "webrtc/modules/rtp_rtcp/test/testAPI/test_api.h"
#include <algorithm>
+#include <memory>
#include <vector>
#include "webrtc/test/null_transport.h"
@@ -41,7 +42,7 @@ bool LoopBackTransport::SendRtp(const uint8_t* data,
}
}
RTPHeader header;
- rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ std::unique_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
if (!parser->Parse(static_cast<const uint8_t*>(data), len, &header)) {
return false;
}
@@ -100,9 +101,9 @@ class RtpRtcpAPITest : public ::testing::Test {
&fake_clock_, NULL, NULL, rtp_payload_registry_.get()));
}
- rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver_;
- rtc::scoped_ptr<RtpRtcp> module_;
+ std::unique_ptr<RTPPayloadRegistry> rtp_payload_registry_;
+ std::unique_ptr<RtpReceiver> rtp_receiver_;
+ std::unique_ptr<RtpRtcp> module_;
uint32_t test_ssrc_;
uint32_t test_timestamp_;
uint16_t test_sequence_number_;
@@ -151,10 +152,6 @@ TEST_F(RtpRtcpAPITest, RTCP) {
EXPECT_TRUE(module_->TMMBR());
module_->SetTMMBRStatus(false);
EXPECT_FALSE(module_->TMMBR());
-
- EXPECT_EQ(kNackOff, rtp_receiver_->NACK());
- rtp_receiver_->SetNACKStatus(kNackRtcp);
- EXPECT_EQ(kNackRtcp, rtp_receiver_->NACK());
}
TEST_F(RtpRtcpAPITest, RtxSender) {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.h b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.h
index d8040f79027..44de00a55f9 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api.h
@@ -11,7 +11,6 @@
#define WEBRTC_MODULES_RTP_RTCP_TEST_TESTAPI_TEST_API_H_
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc
index 9b44c4f40db..8069b0950b6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_audio.cc
@@ -9,6 +9,7 @@
*/
#include <algorithm>
+#include <memory>
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
@@ -135,12 +136,12 @@ class RtpRtcpAudioTest : public ::testing::Test {
RtpRtcp* module1;
RtpRtcp* module2;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics1_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics2_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver1_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver2_;
- rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry1_;
- rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry2_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics1_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics2_;
+ std::unique_ptr<RtpReceiver> rtp_receiver1_;
+ std::unique_ptr<RtpReceiver> rtp_receiver2_;
+ std::unique_ptr<RTPPayloadRegistry> rtp_payload_registry1_;
+ std::unique_ptr<RTPPayloadRegistry> rtp_payload_registry2_;
VerifyingAudioReceiver* data_receiver1;
VerifyingAudioReceiver* data_receiver2;
LoopBackTransport* transport1;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
index d4b36412736..c1359df8646 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
@@ -9,6 +9,7 @@
*/
#include <algorithm>
+#include <memory>
#include <vector>
#include "testing/gmock/include/gmock/gmock.h"
@@ -175,14 +176,14 @@ class RtpRtcpRtcpTest : public ::testing::Test {
delete receiver;
}
- rtc::scoped_ptr<TestRtpFeedback> rtp_feedback1_;
- rtc::scoped_ptr<TestRtpFeedback> rtp_feedback2_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics1_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics2_;
- rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry1_;
- rtc::scoped_ptr<RTPPayloadRegistry> rtp_payload_registry2_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver1_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver2_;
+ std::unique_ptr<TestRtpFeedback> rtp_feedback1_;
+ std::unique_ptr<TestRtpFeedback> rtp_feedback2_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics1_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics2_;
+ std::unique_ptr<RTPPayloadRegistry> rtp_payload_registry1_;
+ std::unique_ptr<RTPPayloadRegistry> rtp_payload_registry2_;
+ std::unique_ptr<RtpReceiver> rtp_receiver1_;
+ std::unique_ptr<RtpReceiver> rtp_receiver2_;
RtpRtcp* module1;
RtpRtcp* module2;
TestRtpReceiver* receiver;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc
index 16ea540bd58..d84ff37be7a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testAPI/test_api_video.cc
@@ -11,6 +11,7 @@
#include <stdlib.h>
#include <algorithm>
+#include <memory>
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
@@ -55,7 +56,6 @@ class RtpRtcpVideoTest : public ::testing::Test {
video_module_->SetRTCPStatus(RtcpMode::kCompound);
video_module_->SetSSRC(test_ssrc_);
- rtp_receiver_->SetNACKStatus(kNackRtcp);
video_module_->SetStorePacketsStatus(true, 600);
EXPECT_EQ(0, video_module_->SetSendingStatus(true));
@@ -127,9 +127,9 @@ class RtpRtcpVideoTest : public ::testing::Test {
}
int test_id_;
- rtc::scoped_ptr<ReceiveStatistics> receive_statistics_;
+ std::unique_ptr<ReceiveStatistics> receive_statistics_;
RTPPayloadRegistry rtp_payload_registry_;
- rtc::scoped_ptr<RtpReceiver> rtp_receiver_;
+ std::unique_ptr<RtpReceiver> rtp_receiver_;
RtpRtcp* video_module_;
LoopBackTransport* transport_;
TestRtpReceiver* receiver_;
@@ -170,7 +170,7 @@ TEST_F(RtpRtcpVideoTest, PaddingOnlyFrames) {
kPadSize);
++seq_num;
RTPHeader header;
- rtc::scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ std::unique_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
EXPECT_TRUE(parser->Parse(padding_packet, packet_size, &header));
PayloadUnion payload_specific;
EXPECT_TRUE(rtp_payload_registry_.GetPayloadSpecifics(header.payloadType,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc
index 466214c740e..b7c4ef5506b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc
@@ -45,8 +45,9 @@
#include <math.h>
+#include <memory>
+
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h"
#include "webrtc/modules/rtp_rtcp/test/testFec/average_residual_loss_xor_codes.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -191,7 +192,7 @@ class FecPacketMaskMetricsTest : public ::testing::Test {
int RecoveredMediaPackets(int num_media_packets,
int num_fec_packets,
uint8_t* state) {
- rtc::scoped_ptr<uint8_t[]> state_tmp(
+ std::unique_ptr<uint8_t[]> state_tmp(
new uint8_t[num_media_packets + num_fec_packets]);
memcpy(state_tmp.get(), state, num_media_packets + num_fec_packets);
int num_recovered_packets = 0;
@@ -385,7 +386,7 @@ class FecPacketMaskMetricsTest : public ::testing::Test {
// (which containes the code size parameters/protection length).
void ComputeMetricsForCode(CodeType code_type,
int code_index) {
- rtc::scoped_ptr<double[]> prob_weight(new double[kNumLossModels]);
+ std::unique_ptr<double[]> prob_weight(new double[kNumLossModels]);
memset(prob_weight.get() , 0, sizeof(double) * kNumLossModels);
MetricsFecCode metrics_code;
SetMetricsZero(&metrics_code);
@@ -393,7 +394,7 @@ class FecPacketMaskMetricsTest : public ::testing::Test {
int num_media_packets = code_params_[code_index].num_media_packets;
int num_fec_packets = code_params_[code_index].num_fec_packets;
int tot_num_packets = num_media_packets + num_fec_packets;
- rtc::scoped_ptr<uint8_t[]> state(new uint8_t[tot_num_packets]);
+ std::unique_ptr<uint8_t[]> state(new uint8_t[tot_num_packets]);
memset(state.get() , 0, tot_num_packets);
int num_loss_configurations = static_cast<int>(pow(2.0f, tot_num_packets));
diff --git a/chromium/third_party/webrtc/modules/utility/OWNERS b/chromium/third_party/webrtc/modules/utility/OWNERS
index 65cb70c9b90..9456ae08c20 100644
--- a/chromium/third_party/webrtc/modules/utility/OWNERS
+++ b/chromium/third_party/webrtc/modules/utility/OWNERS
@@ -1,4 +1,3 @@
-asapersson@webrtc.org
perkj@webrtc.org
# These are for the common case of adding or renaming files. If you're doing
diff --git a/chromium/third_party/webrtc/modules/utility/include/file_recorder.h b/chromium/third_party/webrtc/modules/utility/include/file_recorder.h
index 09ed8ae3507..480a4a97996 100644
--- a/chromium/third_party/webrtc/modules/utility/include/file_recorder.h
+++ b/chromium/third_party/webrtc/modules/utility/include/file_recorder.h
@@ -15,7 +15,6 @@
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/media_file/media_file_defines.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_frame.h"
@@ -58,8 +57,7 @@ public:
// Write frame to file. Frame should contain 10ms of un-ecoded audio data.
virtual int32_t RecordAudioToFile(
- const AudioFrame& frame,
- const TickTime* playoutTS = NULL) = 0;
+ const AudioFrame& frame) = 0;
// Open/create the file specified by fileName for writing audio/video data
// (relative path is allowed). audioCodecInst specifies the encoding of the
diff --git a/chromium/third_party/webrtc/modules/utility/include/jvm_android.h b/chromium/third_party/webrtc/modules/utility/include/jvm_android.h
index f527dff6322..574c977cd04 100644
--- a/chromium/third_party/webrtc/modules/utility/include/jvm_android.h
+++ b/chromium/third_party/webrtc/modules/utility/include/jvm_android.h
@@ -12,9 +12,10 @@
#define WEBRTC_MODULES_UTILITY_INCLUDE_JVM_ANDROID_H_
#include <jni.h>
+
+#include <memory>
#include <string>
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/utility/include/helpers_android.h"
@@ -76,7 +77,7 @@ class NativeRegistration : public JavaClass {
NativeRegistration(JNIEnv* jni, jclass clazz);
~NativeRegistration();
- rtc::scoped_ptr<GlobalRef> NewObject(
+ std::unique_ptr<GlobalRef> NewObject(
const char* name, const char* signature, ...);
private:
@@ -96,7 +97,7 @@ class JNIEnvironment {
// Note that the class name must be one of the names in the static
// |loaded_classes| array defined in jvm_android.cc.
// This method must be called on the construction thread.
- rtc::scoped_ptr<NativeRegistration> RegisterNatives(
+ std::unique_ptr<NativeRegistration> RegisterNatives(
const char* name, const JNINativeMethod *methods, int num_methods);
// Converts from Java string to std::string.
@@ -120,9 +121,9 @@ class JNIEnvironment {
// webrtc::JVM::Initialize(jvm, context);
//
// // Header (.h) file of example class called User.
-// rtc::scoped_ptr<JNIEnvironment> env;
-// rtc::scoped_ptr<NativeRegistration> reg;
-// rtc::scoped_ptr<GlobalRef> obj;
+// std::unique_ptr<JNIEnvironment> env;
+// std::unique_ptr<NativeRegistration> reg;
+// std::unique_ptr<GlobalRef> obj;
//
// // Construction (in .cc file) of User class.
// User::User() {
@@ -156,7 +157,7 @@ class JVM {
// Creates a JNIEnvironment object.
// This method returns a NULL pointer if AttachCurrentThread() has not been
// called successfully. Use the AttachCurrentThreadIfNeeded class if needed.
- rtc::scoped_ptr<JNIEnvironment> environment();
+ std::unique_ptr<JNIEnvironment> environment();
// Returns a JavaClass object given class |name|.
// Note that the class name must be one of the names in the static
diff --git a/chromium/third_party/webrtc/modules/utility/include/mock/mock_process_thread.h b/chromium/third_party/webrtc/modules/utility/include/mock/mock_process_thread.h
index 9560e408e87..621fcee8182 100644
--- a/chromium/third_party/webrtc/modules/utility/include/mock/mock_process_thread.h
+++ b/chromium/third_party/webrtc/modules/utility/include/mock/mock_process_thread.h
@@ -21,6 +21,10 @@ namespace webrtc {
class MockProcessThread : public ProcessThread {
public:
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
MOCK_METHOD0(Start, void());
MOCK_METHOD0(Stop, void());
MOCK_METHOD1(WakeUp, void(Module* module));
@@ -31,7 +35,7 @@ class MockProcessThread : public ProcessThread {
// MOCK_METHOD1 gets confused with mocking this method, so we work around it
// by overriding the method from the interface and forwarding the call to a
// mocked, simpler method.
- void PostTask(rtc::scoped_ptr<ProcessTask> task) override {
+ void PostTask(std::unique_ptr<ProcessTask> task) /* override */ {
PostTask(task.get());
}
};
diff --git a/chromium/third_party/webrtc/modules/utility/include/process_thread.h b/chromium/third_party/webrtc/modules/utility/include/process_thread.h
index 285a5ea5876..f6913ea3167 100644
--- a/chromium/third_party/webrtc/modules/utility/include/process_thread.h
+++ b/chromium/third_party/webrtc/modules/utility/include/process_thread.h
@@ -11,8 +11,9 @@
#ifndef WEBRTC_MODULES_UTILITY_INCLUDE_PROCESS_THREAD_H_
#define WEBRTC_MODULES_UTILITY_INCLUDE_PROCESS_THREAD_H_
+#include <memory>
+
#include "webrtc/typedefs.h"
-#include "webrtc/base/scoped_ptr.h"
namespace webrtc {
class Module;
@@ -29,7 +30,7 @@ class ProcessThread {
public:
virtual ~ProcessThread();
- static rtc::scoped_ptr<ProcessThread> Create(const char* thread_name);
+ static std::unique_ptr<ProcessThread> Create(const char* thread_name);
// Starts the worker thread. Must be called from the construction thread.
virtual void Start() = 0;
@@ -50,7 +51,7 @@ class ProcessThread {
// construction thread of the ProcessThread instance, if the task did not
// get a chance to run (e.g. posting the task while shutting down or when
// the thread never runs).
- virtual void PostTask(rtc::scoped_ptr<ProcessTask> task) = 0;
+ virtual void PostTask(std::unique_ptr<ProcessTask> task) = 0;
// Adds a module that will start to receive callbacks on the worker thread.
// Can be called from any thread.
diff --git a/chromium/third_party/webrtc/modules/utility/source/coder.cc b/chromium/third_party/webrtc/modules/utility/source/coder.cc
index 1476e02d9c5..3c065e7c2be 100644
--- a/chromium/third_party/webrtc/modules/utility/source/coder.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/coder.cc
@@ -13,96 +13,101 @@
#include "webrtc/modules/utility/source/coder.h"
namespace webrtc {
-AudioCoder::AudioCoder(uint32_t instanceID)
- : _acm(AudioCodingModule::Create(instanceID)),
- _receiveCodec(),
- _encodeTimestamp(0),
- _encodedData(NULL),
- _encodedLengthInBytes(0),
- _decodeTimestamp(0)
-{
- _acm->InitializeReceiver();
- _acm->RegisterTransportCallback(this);
+namespace {
+AudioCodingModule::Config GetAcmConfig(uint32_t id) {
+ AudioCodingModule::Config config;
+ // This class does not handle muted output.
+ config.neteq_config.enable_muted_state = false;
+ config.id = id;
+ return config;
}
+} // namespace
-AudioCoder::~AudioCoder()
-{
+AudioCoder::AudioCoder(uint32_t instance_id)
+ : acm_(AudioCodingModule::Create(GetAcmConfig(instance_id))),
+ receive_codec_(),
+ encode_timestamp_(0),
+ encoded_data_(nullptr),
+ encoded_length_in_bytes_(0),
+ decode_timestamp_(0) {
+ acm_->InitializeReceiver();
+ acm_->RegisterTransportCallback(this);
}
-int32_t AudioCoder::SetEncodeCodec(const CodecInst& codecInst) {
- const bool success = codec_manager_.RegisterEncoder(codecInst) &&
- codec_manager_.MakeEncoder(&rent_a_codec_, _acm.get());
+AudioCoder::~AudioCoder() {}
+
+int32_t AudioCoder::SetEncodeCodec(const CodecInst& codec_inst) {
+ const bool success = codec_manager_.RegisterEncoder(codec_inst) &&
+ codec_manager_.MakeEncoder(&rent_a_codec_, acm_.get());
return success ? 0 : -1;
}
-int32_t AudioCoder::SetDecodeCodec(const CodecInst& codecInst) {
- if (_acm->RegisterReceiveCodec(
- codecInst, [&] { return rent_a_codec_.RentIsacDecoder(); }) == -1) {
+int32_t AudioCoder::SetDecodeCodec(const CodecInst& codec_inst) {
+ if (acm_->RegisterReceiveCodec(
+ codec_inst, [&] { return rent_a_codec_.RentIsacDecoder(); }) == -1) {
return -1;
}
- memcpy(&_receiveCodec, &codecInst, sizeof(CodecInst));
+ memcpy(&receive_codec_, &codec_inst, sizeof(CodecInst));
return 0;
}
-int32_t AudioCoder::Decode(AudioFrame& decodedAudio,
- uint32_t sampFreqHz,
- const int8_t* incomingPayload,
- size_t payloadLength)
-{
- if (payloadLength > 0)
- {
- const uint8_t payloadType = _receiveCodec.pltype;
- _decodeTimestamp += _receiveCodec.pacsize;
- if(_acm->IncomingPayload((const uint8_t*) incomingPayload,
- payloadLength,
- payloadType,
- _decodeTimestamp) == -1)
- {
- return -1;
- }
+int32_t AudioCoder::Decode(AudioFrame& decoded_audio,
+ uint32_t samp_freq_hz,
+ const int8_t* incoming_payload,
+ size_t payload_length) {
+ if (payload_length > 0) {
+ const uint8_t payload_type = receive_codec_.pltype;
+ decode_timestamp_ += receive_codec_.pacsize;
+ if (acm_->IncomingPayload((const uint8_t*)incoming_payload, payload_length,
+ payload_type, decode_timestamp_) == -1) {
+ return -1;
}
- return _acm->PlayoutData10Ms((uint16_t)sampFreqHz, &decodedAudio);
+ }
+ bool muted;
+ int32_t ret =
+ acm_->PlayoutData10Ms((uint16_t)samp_freq_hz, &decoded_audio, &muted);
+ RTC_DCHECK(!muted);
+ return ret;
}
-int32_t AudioCoder::PlayoutData(AudioFrame& decodedAudio,
- uint16_t& sampFreqHz)
-{
- return _acm->PlayoutData10Ms(sampFreqHz, &decodedAudio);
+int32_t AudioCoder::PlayoutData(AudioFrame& decoded_audio,
+ uint16_t& samp_freq_hz) {
+ bool muted;
+ int32_t ret = acm_->PlayoutData10Ms(samp_freq_hz, &decoded_audio, &muted);
+ RTC_DCHECK(!muted);
+ return ret;
}
int32_t AudioCoder::Encode(const AudioFrame& audio,
- int8_t* encodedData,
- size_t& encodedLengthInBytes)
-{
- // Fake a timestamp in case audio doesn't contain a correct timestamp.
- // Make a local copy of the audio frame since audio is const
- AudioFrame audioFrame;
- audioFrame.CopyFrom(audio);
- audioFrame.timestamp_ = _encodeTimestamp;
- _encodeTimestamp += static_cast<uint32_t>(audioFrame.samples_per_channel_);
+ int8_t* encoded_data,
+ size_t& encoded_length_in_bytes) {
+ // Fake a timestamp in case audio doesn't contain a correct timestamp.
+ // Make a local copy of the audio frame since audio is const
+ AudioFrame audio_frame;
+ audio_frame.CopyFrom(audio);
+ audio_frame.timestamp_ = encode_timestamp_;
+ encode_timestamp_ += static_cast<uint32_t>(audio_frame.samples_per_channel_);
- // For any codec with a frame size that is longer than 10 ms the encoded
- // length in bytes should be zero until a a full frame has been encoded.
- _encodedLengthInBytes = 0;
- if(_acm->Add10MsData((AudioFrame&)audioFrame) == -1)
- {
- return -1;
- }
- _encodedData = encodedData;
- encodedLengthInBytes = _encodedLengthInBytes;
- return 0;
+ // For any codec with a frame size that is longer than 10 ms the encoded
+ // length in bytes should be zero until a a full frame has been encoded.
+ encoded_length_in_bytes_ = 0;
+ if (acm_->Add10MsData((AudioFrame&)audio_frame) == -1) {
+ return -1;
+ }
+ encoded_data_ = encoded_data;
+ encoded_length_in_bytes = encoded_length_in_bytes_;
+ return 0;
}
-int32_t AudioCoder::SendData(
- FrameType /* frameType */,
- uint8_t /* payloadType */,
- uint32_t /* timeStamp */,
- const uint8_t* payloadData,
- size_t payloadSize,
- const RTPFragmentationHeader* /* fragmentation*/)
-{
- memcpy(_encodedData,payloadData,sizeof(uint8_t) * payloadSize);
- _encodedLengthInBytes = payloadSize;
- return 0;
+int32_t AudioCoder::SendData(FrameType /* frame_type */,
+ uint8_t /* payload_type */,
+ uint32_t /* time_stamp */,
+ const uint8_t* payload_data,
+ size_t payload_size,
+ const RTPFragmentationHeader* /* fragmentation*/) {
+ memcpy(encoded_data_, payload_data, sizeof(uint8_t) * payload_size);
+ encoded_length_in_bytes_ = payload_size;
+ return 0;
}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/utility/source/coder.h b/chromium/third_party/webrtc/modules/utility/source/coder.h
index 9536a027d0e..5f441904bee 100644
--- a/chromium/third_party/webrtc/modules/utility/source/coder.h
+++ b/chromium/third_party/webrtc/modules/utility/source/coder.h
@@ -22,45 +22,47 @@
namespace webrtc {
class AudioFrame;
-class AudioCoder : public AudioPacketizationCallback
-{
-public:
- AudioCoder(uint32_t instanceID);
- ~AudioCoder();
+class AudioCoder : public AudioPacketizationCallback {
+ public:
+ AudioCoder(uint32_t instance_id);
+ ~AudioCoder();
- int32_t SetEncodeCodec(const CodecInst& codecInst);
+ int32_t SetEncodeCodec(const CodecInst& codec_inst);
- int32_t SetDecodeCodec(const CodecInst& codecInst);
+ int32_t SetDecodeCodec(const CodecInst& codec_inst);
- int32_t Decode(AudioFrame& decodedAudio, uint32_t sampFreqHz,
- const int8_t* incomingPayload, size_t payloadLength);
+ int32_t Decode(AudioFrame& decoded_audio,
+ uint32_t samp_freq_hz,
+ const int8_t* incoming_payload,
+ size_t payload_length);
- int32_t PlayoutData(AudioFrame& decodedAudio, uint16_t& sampFreqHz);
+ int32_t PlayoutData(AudioFrame& decoded_audio, uint16_t& samp_freq_hz);
- int32_t Encode(const AudioFrame& audio, int8_t* encodedData,
- size_t& encodedLengthInBytes);
+ int32_t Encode(const AudioFrame& audio,
+ int8_t* encoded_data,
+ size_t& encoded_length_in_bytes);
-protected:
- int32_t SendData(FrameType frameType,
- uint8_t payloadType,
- uint32_t timeStamp,
- const uint8_t* payloadData,
- size_t payloadSize,
- const RTPFragmentationHeader* fragmentation) override;
+ protected:
+ int32_t SendData(FrameType frame_type,
+ uint8_t payload_type,
+ uint32_t time_stamp,
+ const uint8_t* payload_data,
+ size_t payload_size,
+ const RTPFragmentationHeader* fragmentation) override;
-private:
- std::unique_ptr<AudioCodingModule> _acm;
- acm2::CodecManager codec_manager_;
- acm2::RentACodec rent_a_codec_;
+ private:
+ std::unique_ptr<AudioCodingModule> acm_;
+ acm2::CodecManager codec_manager_;
+ acm2::RentACodec rent_a_codec_;
- CodecInst _receiveCodec;
+ CodecInst receive_codec_;
- uint32_t _encodeTimestamp;
- int8_t* _encodedData;
- size_t _encodedLengthInBytes;
+ uint32_t encode_timestamp_;
+ int8_t* encoded_data_;
+ size_t encoded_length_in_bytes_;
- uint32_t _decodeTimestamp;
+ uint32_t decode_timestamp_;
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_UTILITY_SOURCE_CODER_H_
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_CODER_H_
diff --git a/chromium/third_party/webrtc/modules/utility/source/file_player_impl.h b/chromium/third_party/webrtc/modules/utility/source/file_player_impl.h
index beb6379ff0c..62887da13b8 100644
--- a/chromium/third_party/webrtc/modules/utility/source/file_player_impl.h
+++ b/chromium/third_party/webrtc/modules/utility/source/file_player_impl.h
@@ -19,7 +19,6 @@
#include "webrtc/modules/utility/include/file_player.h"
#include "webrtc/modules/utility/source/coder.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc
index 88b20eeac2a..b0a766f22e1 100644
--- a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc
@@ -130,8 +130,7 @@ bool FileRecorderImpl::IsRecording() const
}
int32_t FileRecorderImpl::RecordAudioToFile(
- const AudioFrame& incomingAudioFrame,
- const TickTime* playoutTS)
+ const AudioFrame& incomingAudioFrame)
{
if (codec_info_.plfreq == 0)
{
diff --git a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h
index 697d7593757..96f811d49ea 100644
--- a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h
+++ b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h
@@ -27,7 +27,6 @@
#include "webrtc/modules/utility/include/file_recorder.h"
#include "webrtc/modules/utility/source/coder.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -45,23 +44,21 @@ public:
virtual ~FileRecorderImpl();
// FileRecorder functions.
- virtual int32_t RegisterModuleFileCallback(FileCallback* callback);
- virtual FileFormats RecordingFileFormat() const;
- virtual int32_t StartRecordingAudioFile(
+ int32_t RegisterModuleFileCallback(FileCallback* callback) override;
+ FileFormats RecordingFileFormat() const override;
+ int32_t StartRecordingAudioFile(
const char* fileName,
const CodecInst& codecInst,
uint32_t notificationTimeMs) override;
- virtual int32_t StartRecordingAudioFile(
+ int32_t StartRecordingAudioFile(
OutStream& destStream,
const CodecInst& codecInst,
uint32_t notificationTimeMs) override;
- virtual int32_t StopRecording();
- virtual bool IsRecording() const;
- virtual int32_t codec_info(CodecInst& codecInst) const;
- virtual int32_t RecordAudioToFile(
- const AudioFrame& frame,
- const TickTime* playoutTS = NULL);
- virtual int32_t StartRecordingVideoFile(
+ int32_t StopRecording() override;
+ bool IsRecording() const override;
+ int32_t codec_info(CodecInst& codecInst) const override;
+ int32_t RecordAudioToFile(const AudioFrame& frame) override;
+ int32_t StartRecordingVideoFile(
const char* fileName,
const CodecInst& audioCodecInst,
const VideoCodec& videoCodecInst,
@@ -69,7 +66,7 @@ public:
{
return -1;
}
- virtual int32_t RecordVideoToFile(const VideoFrame& videoFrame) {
+ int32_t RecordVideoToFile(const VideoFrame& videoFrame) override {
return -1;
}
diff --git a/chromium/third_party/webrtc/modules/utility/source/jvm_android.cc b/chromium/third_party/webrtc/modules/utility/source/jvm_android.cc
index eb37fda0405..d53d1b5eadf 100644
--- a/chromium/third_party/webrtc/modules/utility/source/jvm_android.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/jvm_android.cc
@@ -10,6 +10,8 @@
#include <android/log.h>
+#include <memory>
+
#include "webrtc/modules/utility/include/jvm_android.h"
#include "webrtc/base/checks.h"
@@ -139,7 +141,7 @@ NativeRegistration::~NativeRegistration() {
CHECK_EXCEPTION(jni_) << "Error during UnregisterNatives";
}
-rtc::scoped_ptr<GlobalRef> NativeRegistration::NewObject(
+std::unique_ptr<GlobalRef> NativeRegistration::NewObject(
const char* name, const char* signature, ...) {
ALOGD("NativeRegistration::NewObject%s", GetThreadInfo().c_str());
va_list args;
@@ -149,7 +151,7 @@ rtc::scoped_ptr<GlobalRef> NativeRegistration::NewObject(
args);
CHECK_EXCEPTION(jni_) << "Error during NewObjectV";
va_end(args);
- return rtc::scoped_ptr<GlobalRef>(new GlobalRef(jni_, obj));
+ return std::unique_ptr<GlobalRef>(new GlobalRef(jni_, obj));
}
// JavaClass implementation.
@@ -181,14 +183,14 @@ JNIEnvironment::~JNIEnvironment() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
}
-rtc::scoped_ptr<NativeRegistration> JNIEnvironment::RegisterNatives(
+std::unique_ptr<NativeRegistration> JNIEnvironment::RegisterNatives(
const char* name, const JNINativeMethod *methods, int num_methods) {
ALOGD("JNIEnvironment::RegisterNatives(%s)", name);
RTC_DCHECK(thread_checker_.CalledOnValidThread());
jclass clazz = LookUpClass(name);
jni_->RegisterNatives(clazz, methods, num_methods);
CHECK_EXCEPTION(jni_) << "Error during RegisterNatives";
- return rtc::scoped_ptr<NativeRegistration>(
+ return std::unique_ptr<NativeRegistration>(
new NativeRegistration(jni_, clazz));
}
@@ -240,7 +242,7 @@ JVM::~JVM() {
DeleteGlobalRef(jni(), context_);
}
-rtc::scoped_ptr<JNIEnvironment> JVM::environment() {
+std::unique_ptr<JNIEnvironment> JVM::environment() {
ALOGD("JVM::environment%s", GetThreadInfo().c_str());
// The JNIEnv is used for thread-local storage. For this reason, we cannot
// share a JNIEnv between threads. If a piece of code has no other way to get
@@ -250,9 +252,9 @@ rtc::scoped_ptr<JNIEnvironment> JVM::environment() {
JNIEnv* jni = GetEnv(jvm_);
if (!jni) {
ALOGE("AttachCurrentThread() has not been called on this thread.");
- return rtc::scoped_ptr<JNIEnvironment>();
+ return std::unique_ptr<JNIEnvironment>();
}
- return rtc::scoped_ptr<JNIEnvironment>(new JNIEnvironment(jni));
+ return std::unique_ptr<JNIEnvironment>(new JNIEnvironment(jni));
}
JavaClass JVM::GetClass(const char* name) {
diff --git a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc
index 8cdf01634cb..4e3606ca08f 100644
--- a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc
@@ -11,9 +11,9 @@
#include "webrtc/modules/utility/source/process_thread_impl.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/include/module.h"
#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
namespace {
@@ -36,9 +36,9 @@ int64_t GetNextCallbackTime(Module* module, int64_t time_now) {
ProcessThread::~ProcessThread() {}
// static
-rtc::scoped_ptr<ProcessThread> ProcessThread::Create(
+std::unique_ptr<ProcessThread> ProcessThread::Create(
const char* thread_name) {
- return rtc::scoped_ptr<ProcessThread>(new ProcessThreadImpl(thread_name));
+ return std::unique_ptr<ProcessThread>(new ProcessThreadImpl(thread_name));
}
ProcessThreadImpl::ProcessThreadImpl(const char* thread_name)
@@ -119,7 +119,7 @@ void ProcessThreadImpl::WakeUp(Module* module) {
wake_up_->Set();
}
-void ProcessThreadImpl::PostTask(rtc::scoped_ptr<ProcessTask> task) {
+void ProcessThreadImpl::PostTask(std::unique_ptr<ProcessTask> task) {
// Allowed to be called on any thread.
{
rtc::CritScope lock(&lock_);
@@ -188,7 +188,7 @@ bool ProcessThreadImpl::Run(void* obj) {
}
bool ProcessThreadImpl::Process() {
- int64_t now = TickTime::MillisecondTimestamp();
+ int64_t now = rtc::TimeMillis();
int64_t next_checkpoint = now + (1000 * 60);
{
@@ -209,7 +209,7 @@ bool ProcessThreadImpl::Process() {
// Use a new 'now' reference to calculate when the next callback
// should occur. We'll continue to use 'now' above for the baseline
// of calculating how long we should wait, to reduce variance.
- int64_t new_now = TickTime::MillisecondTimestamp();
+ int64_t new_now = rtc::TimeMillis();
m.next_callback = GetNextCallbackTime(m.module, new_now);
}
@@ -227,7 +227,7 @@ bool ProcessThreadImpl::Process() {
}
}
- int64_t time_to_wait = next_checkpoint - TickTime::MillisecondTimestamp();
+ int64_t time_to_wait = next_checkpoint - rtc::TimeMillis();
if (time_to_wait > 0)
wake_up_->Wait(static_cast<unsigned long>(time_to_wait));
diff --git a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h
index 2855ed9d850..330aec946c9 100644
--- a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h
+++ b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h
@@ -33,7 +33,7 @@ class ProcessThreadImpl : public ProcessThread {
void Stop() override;
void WakeUp(Module* module) override;
- void PostTask(rtc::scoped_ptr<ProcessTask> task) override;
+ void PostTask(std::unique_ptr<ProcessTask> task) override;
void RegisterModule(Module* module) override;
void DeRegisterModule(Module* module) override;
diff --git a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc
index 9fa9edfa24a..5b31870ac41 100644
--- a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl_unittest.cc
@@ -13,9 +13,9 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/include/module.h"
#include "webrtc/modules/utility/source/process_thread_impl.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -51,7 +51,7 @@ ACTION_P(Increment, counter) {
}
ACTION_P(SetTimestamp, ptr) {
- *ptr = TickTime::MillisecondTimestamp();
+ *ptr = rtc::TimeMillis();
}
TEST(ProcessThreadImpl, StartStop) {
@@ -297,7 +297,7 @@ TEST(ProcessThreadImpl, PostTask) {
std::unique_ptr<EventWrapper> task_ran(EventWrapper::Create());
std::unique_ptr<RaiseEventTask> task(new RaiseEventTask(task_ran.get()));
thread.Start();
- thread.PostTask(rtc::UniqueToScoped(std::move(task)));
+ thread.PostTask(std::move(task));
EXPECT_EQ(kEventSignaled, task_ran->Wait(100));
thread.Stop();
}
diff --git a/chromium/third_party/webrtc/modules/video_capture/test/video_capture_unittest.cc b/chromium/third_party/webrtc/modules/video_capture/test/video_capture_unittest.cc
index 7ab33ffeaba..839ab804ad9 100644
--- a/chromium/third_party/webrtc/modules/video_capture/test/video_capture_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/test/video_capture_unittest.cc
@@ -16,19 +16,19 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/modules/video_capture/video_capture.h"
#include "webrtc/modules/video_capture/video_capture_factory.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/test/frame_utils.h"
#include "webrtc/video_frame.h"
using webrtc::CriticalSectionWrapper;
using webrtc::CriticalSectionScoped;
using webrtc::SleepMs;
-using webrtc::TickTime;
using webrtc::VideoCaptureAlarm;
using webrtc::VideoCaptureCapability;
using webrtc::VideoCaptureDataCallback;
@@ -40,8 +40,8 @@ using webrtc::VideoCaptureModule;
#define WAIT_(ex, timeout, res) \
do { \
res = (ex); \
- int64_t start = TickTime::MillisecondTimestamp(); \
- while (!res && TickTime::MillisecondTimestamp() < start + timeout) { \
+ int64_t start = rtc::TimeMillis(); \
+ while (!res && rtc::TimeMillis() < start + timeout) { \
SleepMs(5); \
res = (ex); \
} \
@@ -60,32 +60,6 @@ static const int kTestHeight = 288;
static const int kTestWidth = 352;
static const int kTestFramerate = 30;
-// Compares the content of two video frames.
-static bool CompareFrames(const webrtc::VideoFrame& frame1,
- const webrtc::VideoFrame& frame2) {
- bool result =
- (frame1.stride(webrtc::kYPlane) == frame2.stride(webrtc::kYPlane)) &&
- (frame1.stride(webrtc::kUPlane) == frame2.stride(webrtc::kUPlane)) &&
- (frame1.stride(webrtc::kVPlane) == frame2.stride(webrtc::kVPlane)) &&
- (frame1.width() == frame2.width()) &&
- (frame1.height() == frame2.height());
-
- if (!result)
- return false;
- for (int plane = 0; plane < webrtc::kNumOfPlanes; plane ++) {
- webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
- int allocated_size1 = frame1.allocated_size(plane_type);
- int allocated_size2 = frame2.allocated_size(plane_type);
- if (allocated_size1 != allocated_size2)
- return false;
- const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
- const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
- if (memcmp(plane_buffer1, plane_buffer2, allocated_size1))
- return false;
- }
- return true;
-}
-
class TestVideoCaptureCallback : public VideoCaptureDataCallback {
public:
TestVideoCaptureCallback()
@@ -118,8 +92,8 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
#endif
// RenderTimstamp should be the time now.
EXPECT_TRUE(
- videoFrame.render_time_ms() >= TickTime::MillisecondTimestamp()-30 &&
- videoFrame.render_time_ms() <= TickTime::MillisecondTimestamp());
+ videoFrame.render_time_ms() >= rtc::TimeMillis()-30 &&
+ videoFrame.render_time_ms() <= rtc::TimeMillis());
if ((videoFrame.render_time_ms() >
last_render_time_ms_ + (1000 * 1.1) / capability_.maxFPS &&
@@ -132,7 +106,7 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
incoming_frames_++;
last_render_time_ms_ = videoFrame.render_time_ms();
- last_frame_.CopyFrame(videoFrame);
+ last_frame_ = videoFrame.video_frame_buffer();
}
virtual void OnCaptureDelayChanged(const int32_t id,
@@ -168,7 +142,8 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
bool CompareLastFrame(const webrtc::VideoFrame& frame) {
CriticalSectionScoped cs(capture_cs_.get());
- return CompareFrames(last_frame_, frame);
+ return webrtc::test::FrameBufsEqual(last_frame_,
+ frame.video_frame_buffer());
}
void SetExpectedCaptureRotation(webrtc::VideoRotation rotation) {
@@ -183,7 +158,7 @@ class TestVideoCaptureCallback : public VideoCaptureDataCallback {
int64_t last_render_time_ms_;
int incoming_frames_;
int timing_warnings_;
- webrtc::VideoFrame last_frame_;
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> last_frame_;
webrtc::VideoRotation rotate_frame_;
};
@@ -277,7 +252,7 @@ class VideoCaptureTest : public testing::Test {
#endif
TEST_F(VideoCaptureTest, MAYBE_CreateDelete) {
for (int i = 0; i < 5; ++i) {
- int64_t start_time = TickTime::MillisecondTimestamp();
+ int64_t start_time = rtc::TimeMillis();
TestVideoCaptureCallback capture_observer;
rtc::scoped_refptr<VideoCaptureModule> module(
OpenVideoCaptureDevice(0, &capture_observer));
@@ -296,19 +271,19 @@ TEST_F(VideoCaptureTest, MAYBE_CreateDelete) {
ASSERT_NO_FATAL_FAILURE(StartCapture(module.get(), capability));
// Less than 4s to start the camera.
- EXPECT_LE(TickTime::MillisecondTimestamp() - start_time, 4000);
+ EXPECT_LE(rtc::TimeMillis() - start_time, 4000);
// Make sure 5 frames are captured.
EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 5, kTimeOut);
EXPECT_GE(capture_observer.capture_delay(), 0);
- int64_t stop_time = TickTime::MillisecondTimestamp();
+ int64_t stop_time = rtc::TimeMillis();
EXPECT_EQ(0, module->StopCapture());
EXPECT_FALSE(module->CaptureStarted());
// Less than 3s to stop the camera.
- EXPECT_LE(TickTime::MillisecondTimestamp() - stop_time, 3000);
+ EXPECT_LE(rtc::TimeMillis() - stop_time, 3000);
}
}
@@ -434,8 +409,7 @@ class VideoCaptureExternalTest : public testing::Test {
public:
void SetUp() {
capture_module_ = VideoCaptureFactory::Create(0, capture_input_interface_);
- process_module_ =
- rtc::ScopedToUnique(webrtc::ProcessThread::Create("ProcessThread"));
+ process_module_ = webrtc::ProcessThread::Create("ProcessThread");
process_module_->Start();
process_module_->RegisterModule(capture_module_);
@@ -449,10 +423,11 @@ class VideoCaptureExternalTest : public testing::Test {
test_frame_.CreateEmptyFrame(kTestWidth, kTestHeight, kTestWidth,
((kTestWidth + 1) / 2), (kTestWidth + 1) / 2);
SleepMs(1); // Wait 1ms so that two tests can't have the same timestamp.
- memset(test_frame_.buffer(webrtc::kYPlane), 127, kTestWidth * kTestHeight);
- memset(test_frame_.buffer(webrtc::kUPlane), 127,
+ memset(test_frame_.video_frame_buffer()->MutableDataY(), 127,
+ kTestWidth * kTestHeight);
+ memset(test_frame_.video_frame_buffer()->MutableDataU(), 127,
((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
- memset(test_frame_.buffer(webrtc::kVPlane), 127,
+ memset(test_frame_.video_frame_buffer()->MutableDataV(), 127,
((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
capture_module_->RegisterCaptureDataCallback(capture_callback_);
@@ -493,10 +468,10 @@ TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
#define MAYBE_FrameRate FrameRate
#endif
TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
- int64_t testTime = 3;
- TickTime startTime = TickTime::Now();
+ uint64_t testTime = 3 * rtc::kNumNanosecsPerSec;
+ uint64_t startTime = rtc::TimeNanos();
- while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
+ while ((rtc::TimeNanos() - startTime) < testTime) {
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
@@ -511,8 +486,8 @@ TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
SleepMs(500);
EXPECT_EQ(webrtc::Raised, capture_feedback_.alarm());
- startTime = TickTime::Now();
- while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
+ startTime = rtc::TimeNanos();
+ while ((rtc::TimeNanos() - startTime) < testTime) {
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
test_frame_.width(),
test_frame_.height());
diff --git a/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi b/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi
index c80f2bf5b56..a2b2f58c9f2 100644
--- a/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi
+++ b/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi
@@ -172,6 +172,7 @@
'video_capture_module_internal_impl',
'webrtc_utility',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/test/test.gyp:video_test_common',
'<(DEPTH)/testing/gtest.gyp:gtest',
],
'sources': [
diff --git a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc
index b1e697edc22..c3d5f370911 100644
--- a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc
@@ -13,6 +13,7 @@
#include <stdlib.h>
#include "webrtc/base/refcount.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/include/module_common_types.h"
@@ -20,7 +21,6 @@
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/logging.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
namespace videocapturemodule {
@@ -85,7 +85,8 @@ int64_t VideoCaptureImpl::TimeUntilNextProcess()
CriticalSectionScoped cs(&_callBackCs);
const int64_t kProcessIntervalMs = 300;
return kProcessIntervalMs -
- (TickTime::Now() - _lastProcessTime).Milliseconds();
+ (rtc::TimeNanos() - _lastProcessTimeNanos) /
+ rtc::kNumNanosecsPerMillisec;
}
// Process any pending tasks such as timeouts
@@ -93,12 +94,12 @@ void VideoCaptureImpl::Process()
{
CriticalSectionScoped cs(&_callBackCs);
- const TickTime now = TickTime::Now();
- _lastProcessTime = TickTime::Now();
+ const int64_t now_ns = rtc::TimeNanos();
+ _lastProcessTimeNanos = rtc::TimeNanos();
// Handle No picture alarm
- if (_lastProcessFrameCount.Ticks() == _incomingFrameTimes[0].Ticks() &&
+ if (_lastProcessFrameTimeNanos == _incomingFrameTimesNanos[0] &&
_captureAlarm != Raised)
{
if (_noPictureAlarmCallBack && _captureCallBack)
@@ -107,7 +108,7 @@ void VideoCaptureImpl::Process()
_captureCallBack->OnNoPictureAlarm(_id, _captureAlarm);
}
}
- else if (_lastProcessFrameCount.Ticks() != _incomingFrameTimes[0].Ticks() &&
+ else if (_lastProcessFrameTimeNanos != _incomingFrameTimesNanos[0] &&
_captureAlarm != Cleared)
{
if (_noPictureAlarmCallBack && _captureCallBack)
@@ -119,19 +120,21 @@ void VideoCaptureImpl::Process()
}
// Handle frame rate callback
- if ((now - _lastFrameRateCallbackTime).Milliseconds()
+ if ((now_ns - _lastFrameRateCallbackTimeNanos) /
+ rtc::kNumNanosecsPerMillisec
> kFrameRateCallbackInterval)
{
if (_frameRateCallBack && _captureCallBack)
{
- const uint32_t frameRate = CalculateFrameRate(now);
+ const uint32_t frameRate = CalculateFrameRate(now_ns);
_captureCallBack->OnCaptureFrameRate(_id, frameRate);
}
- _lastFrameRateCallbackTime = now; // Can be set by EnableFrameRateCallback
+ // Can be set by EnableFrameRateCallback
+ _lastFrameRateCallbackTimeNanos = now_ns;
}
- _lastProcessFrameCount = _incomingFrameTimes[0];
+ _lastProcessFrameTimeNanos = _incomingFrameTimesNanos[0];
}
VideoCaptureImpl::VideoCaptureImpl(const int32_t id)
@@ -141,15 +144,15 @@ VideoCaptureImpl::VideoCaptureImpl(const int32_t id)
_captureDelay(0),
_requestedCapability(),
_callBackCs(*CriticalSectionWrapper::CreateCriticalSection()),
- _lastProcessTime(TickTime::Now()),
- _lastFrameRateCallbackTime(TickTime::Now()),
+ _lastProcessTimeNanos(rtc::TimeNanos()),
+ _lastFrameRateCallbackTimeNanos(rtc::TimeNanos()),
_frameRateCallBack(false),
_noPictureAlarmCallBack(false),
_captureAlarm(Cleared),
_setCaptureDelay(0),
_dataCallBack(NULL),
_captureCallBack(NULL),
- _lastProcessFrameCount(TickTime::Now()),
+ _lastProcessFrameTimeNanos(rtc::TimeNanos()),
_rotateFrame(kVideoRotation_0),
apply_rotation_(false) {
_requestedCapability.width = kDefaultWidth;
@@ -157,7 +160,7 @@ VideoCaptureImpl::VideoCaptureImpl(const int32_t id)
_requestedCapability.maxFPS = 30;
_requestedCapability.rawType = kVideoI420;
_requestedCapability.codecType = kVideoCodecUnknown;
- memset(_incomingFrameTimes, 0, sizeof(_incomingFrameTimes));
+ memset(_incomingFrameTimesNanos, 0, sizeof(_incomingFrameTimesNanos));
}
VideoCaptureImpl::~VideoCaptureImpl()
@@ -295,7 +298,7 @@ int32_t VideoCaptureImpl::IncomingFrame(
_captureFrame.set_rotation(kVideoRotation_0);
}
_captureFrame.set_ntp_time_ms(captureTime);
- _captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp());
+ _captureFrame.set_render_time_ms(rtc::TimeMillis());
DeliverCapturedFrame(_captureFrame);
}
@@ -321,7 +324,7 @@ void VideoCaptureImpl::EnableFrameRateCallback(const bool enable) {
_frameRateCallBack = enable;
if (enable)
{
- _lastFrameRateCallbackTime = TickTime::Now();
+ _lastFrameRateCallbackTimeNanos = rtc::TimeNanos();
}
}
@@ -341,7 +344,7 @@ void VideoCaptureImpl::EnableNoPictureAlarm(const bool enable) {
void VideoCaptureImpl::UpdateFrameCount()
{
- if (_incomingFrameTimes[0].MicrosecondTimestamp() == 0)
+ if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0)
{
// first no shift
}
@@ -350,20 +353,22 @@ void VideoCaptureImpl::UpdateFrameCount()
// shift
for (int i = (kFrameRateCountHistorySize - 2); i >= 0; i--)
{
- _incomingFrameTimes[i + 1] = _incomingFrameTimes[i];
+ _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i];
}
}
- _incomingFrameTimes[0] = TickTime::Now();
+ _incomingFrameTimesNanos[0] = rtc::TimeNanos();
}
-uint32_t VideoCaptureImpl::CalculateFrameRate(const TickTime& now)
+uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns)
{
int32_t num = 0;
int32_t nrOfFrames = 0;
for (num = 1; num < (kFrameRateCountHistorySize - 1); num++)
{
- if (_incomingFrameTimes[num].Ticks() <= 0
- || (now - _incomingFrameTimes[num]).Milliseconds() > kFrameRateHistoryWindowMs) // don't use data older than 2sec
+ if (_incomingFrameTimesNanos[num] <= 0 ||
+ (now_ns - _incomingFrameTimesNanos[num]) /
+ rtc::kNumNanosecsPerMillisec >
+ kFrameRateHistoryWindowMs) // don't use data older than 2sec
{
break;
}
@@ -374,7 +379,8 @@ uint32_t VideoCaptureImpl::CalculateFrameRate(const TickTime& now)
}
if (num > 1)
{
- int64_t diff = (now - _incomingFrameTimes[num - 1]).Milliseconds();
+ int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) /
+ rtc::kNumNanosecsPerMillisec;
if (diff > 0)
{
return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
diff --git a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h
index 9c2cad7c95c..7d785c3a908 100644
--- a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h
+++ b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h
@@ -20,7 +20,6 @@
#include "webrtc/common_video/rotation.h"
#include "webrtc/modules/video_capture/video_capture.h"
#include "webrtc/modules/video_capture/video_capture_config.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/video_frame.h"
namespace webrtc
@@ -116,12 +115,14 @@ protected:
VideoCaptureCapability _requestedCapability; // Should be set by platform dependent code in StartCapture.
private:
void UpdateFrameCount();
- uint32_t CalculateFrameRate(const TickTime& now);
+ uint32_t CalculateFrameRate(int64_t now_ns);
CriticalSectionWrapper& _callBackCs;
- TickTime _lastProcessTime; // last time the module process function was called.
- TickTime _lastFrameRateCallbackTime; // last time the frame rate callback function was called.
+ // last time the module process function was called.
+ int64_t _lastProcessTimeNanos;
+ // last time the frame rate callback function was called.
+ int64_t _lastFrameRateCallbackTimeNanos;
bool _frameRateCallBack; // true if EnableFrameRateCallback
bool _noPictureAlarmCallBack; //true if EnableNoPictureAlarm
VideoCaptureAlarm _captureAlarm; // current value of the noPictureAlarm
@@ -130,8 +131,9 @@ private:
VideoCaptureDataCallback* _dataCallBack;
VideoCaptureFeedBack* _captureCallBack;
- TickTime _lastProcessFrameCount;
- TickTime _incomingFrameTimes[kFrameRateCountHistorySize];// timestamp for local captured frames
+ int64_t _lastProcessFrameTimeNanos;
+ // timestamp for local captured frames
+ int64_t _incomingFrameTimesNanos[kFrameRateCountHistorySize];
VideoRotation _rotateFrame; // Set if the frame should be rotated by the
// capture module.
diff --git a/chromium/third_party/webrtc/modules/video_coding/BUILD.gn b/chromium/third_party/webrtc/modules/video_coding/BUILD.gn
index bc6f595b51d..755e6efa9da 100644
--- a/chromium/third_party/webrtc/modules/video_coding/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/video_coding/BUILD.gn
@@ -10,14 +10,10 @@ import("../../build/webrtc.gni")
source_set("video_coding") {
sources = [
- "bitrate_adjuster.cc",
- "bitrate_adjuster.h",
"codec_database.cc",
"codec_database.h",
"codec_timer.cc",
"codec_timer.h",
- "content_metrics_processing.cc",
- "content_metrics_processing.h",
"decoding_state.cc",
"decoding_state.h",
"encoded_frame.cc",
@@ -25,6 +21,8 @@ source_set("video_coding") {
"fec_tables_xor.h",
"frame_buffer.cc",
"frame_buffer.h",
+ "frame_buffer2.cc",
+ "frame_buffer2.h",
"frame_object.cc",
"frame_object.h",
"generic_decoder.cc",
@@ -56,11 +54,10 @@ source_set("video_coding") {
"packet_buffer.h",
"percentile_filter.cc",
"percentile_filter.h",
- "qm_select.cc",
- "qm_select.h",
- "qm_select_data.h",
"receiver.cc",
"receiver.h",
+ "rtp_frame_reference_finder.cc",
+ "rtp_frame_reference_finder.h",
"rtt_filter.cc",
"rtt_filter.h",
"session_info.cc",
@@ -107,6 +104,8 @@ source_set("video_coding_utility") {
sources = [
"utility/frame_dropper.cc",
"utility/frame_dropper.h",
+ "utility/ivf_file_writer.cc",
+ "utility/ivf_file_writer.h",
"utility/moving_average.h",
"utility/qp_parser.cc",
"utility/qp_parser.h",
diff --git a/chromium/third_party/webrtc/modules/video_coding/OWNERS b/chromium/third_party/webrtc/modules/video_coding/OWNERS
index 389d632dfdf..acf09399ac7 100644
--- a/chromium/third_party/webrtc/modules/video_coding/OWNERS
+++ b/chromium/third_party/webrtc/modules/video_coding/OWNERS
@@ -1,5 +1,6 @@
-stefan@webrtc.org
marpan@webrtc.org
+pbos@webrtc.org
+stefan@webrtc.org
# These are for the common case of adding or renaming files. If you're doing
# structural changes, please get a review from a reviewer in this file.
diff --git a/chromium/third_party/webrtc/modules/video_coding/codec_database.cc b/chromium/third_party/webrtc/modules/video_coding/codec_database.cc
index a5a7c1ea999..1baa414bce7 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codec_database.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codec_database.cc
@@ -238,7 +238,6 @@ bool VCMCodecDataBase::SetSendCodec(const VideoCodec* send_codec,
memcpy(&send_codec_, &new_send_codec, sizeof(send_codec_));
if (!reset_required) {
- encoded_frame_callback_->SetPayloadType(send_codec_.plType);
return true;
}
@@ -249,7 +248,6 @@ bool VCMCodecDataBase::SetSendCodec(const VideoCodec* send_codec,
ptr_encoder_.reset(
new VCMGenericEncoder(external_encoder_, encoder_rate_observer_,
encoded_frame_callback_, internal_source_));
- encoded_frame_callback_->SetPayloadType(send_codec_.plType);
encoded_frame_callback_->SetInternalSource(internal_source_);
if (ptr_encoder_->InitEncode(&send_codec_, number_of_cores_,
max_payload_size_) < 0) {
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264.gypi b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264.gypi
index 9cb58a85986..92489c39665 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264.gypi
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264.gypi
@@ -64,7 +64,7 @@
'type': 'static_library',
'includes': [ '../../../../build/objc_common.gypi' ],
'dependencies': [
- '<(webrtc_root)/base/base.gyp:rtc_base_objc',
+ '<(webrtc_root)/sdk/sdk.gyp:rtc_sdk_common_objc',
],
'link_settings': {
'xcode_settings': {
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
index e98666d0736..f560a37d0ec 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
@@ -129,10 +129,12 @@ int H264DecoderImpl::AVGetBuffer2(
video_frame->set_video_frame_buffer(
decoder->pool_.CreateBuffer(width, height));
// DCHECK that we have a continuous buffer as is required.
- RTC_DCHECK_EQ(video_frame->buffer(kUPlane),
- video_frame->buffer(kYPlane) + video_frame->allocated_size(kYPlane));
- RTC_DCHECK_EQ(video_frame->buffer(kVPlane),
- video_frame->buffer(kUPlane) + video_frame->allocated_size(kUPlane));
+ RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataU(),
+ video_frame->video_frame_buffer()->DataY() +
+ video_frame->allocated_size(kYPlane));
+ RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataV(),
+ video_frame->video_frame_buffer()->DataU() +
+ video_frame->allocated_size(kUPlane));
int total_size = video_frame->allocated_size(kYPlane) +
video_frame->allocated_size(kUPlane) +
video_frame->allocated_size(kVPlane);
@@ -141,12 +143,18 @@ int H264DecoderImpl::AVGetBuffer2(
av_frame->reordered_opaque = context->reordered_opaque;
// Set |av_frame| members as required by FFmpeg.
- av_frame->data[kYPlaneIndex] = video_frame->buffer(kYPlane);
- av_frame->linesize[kYPlaneIndex] = video_frame->stride(kYPlane);
- av_frame->data[kUPlaneIndex] = video_frame->buffer(kUPlane);
- av_frame->linesize[kUPlaneIndex] = video_frame->stride(kUPlane);
- av_frame->data[kVPlaneIndex] = video_frame->buffer(kVPlane);
- av_frame->linesize[kVPlaneIndex] = video_frame->stride(kVPlane);
+ av_frame->data[kYPlaneIndex] =
+ video_frame->video_frame_buffer()->MutableDataY();
+ av_frame->linesize[kYPlaneIndex] =
+ video_frame->video_frame_buffer()->StrideY();
+ av_frame->data[kUPlaneIndex] =
+ video_frame->video_frame_buffer()->MutableDataU();
+ av_frame->linesize[kUPlaneIndex] =
+ video_frame->video_frame_buffer()->StrideU();
+ av_frame->data[kVPlaneIndex] =
+ video_frame->video_frame_buffer()->MutableDataV();
+ av_frame->linesize[kVPlaneIndex] =
+ video_frame->video_frame_buffer()->StrideV();
RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data);
av_frame->buf[0] = av_buffer_create(av_frame->data[kYPlaneIndex],
@@ -339,9 +347,12 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
VideoFrame* video_frame = static_cast<VideoFrame*>(
av_buffer_get_opaque(av_frame_->buf[0]));
RTC_DCHECK(video_frame);
- RTC_CHECK_EQ(av_frame_->data[kYPlane], video_frame->buffer(kYPlane));
- RTC_CHECK_EQ(av_frame_->data[kUPlane], video_frame->buffer(kUPlane));
- RTC_CHECK_EQ(av_frame_->data[kVPlane], video_frame->buffer(kVPlane));
+ RTC_CHECK_EQ(av_frame_->data[kYPlane],
+ video_frame->video_frame_buffer()->DataY());
+ RTC_CHECK_EQ(av_frame_->data[kUPlane],
+ video_frame->video_frame_buffer()->DataU());
+ RTC_CHECK_EQ(av_frame_->data[kVPlane],
+ video_frame->video_frame_buffer()->DataV());
video_frame->set_timestamp(input_image._timeStamp);
// The decoded image may be larger than what is supposed to be visible, see
@@ -352,9 +363,9 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
video_frame->set_video_frame_buffer(
new rtc::RefCountedObject<WrappedI420Buffer>(
av_frame_->width, av_frame_->height,
- buf->data(kYPlane), buf->stride(kYPlane),
- buf->data(kUPlane), buf->stride(kUPlane),
- buf->data(kVPlane), buf->stride(kVPlane),
+ buf->DataY(), buf->StrideY(),
+ buf->DataU(), buf->StrideU(),
+ buf->DataV(), buf->StrideV(),
rtc::KeepRefUntilDone(buf)));
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
index 0e065c5e497..4d85858a162 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_encoder_impl.cc
@@ -50,12 +50,8 @@ int NumberOfThreads(int width, int height, int number_of_cores) {
return 1;
}
-} // namespace
-
-static FrameType EVideoFrameType_to_FrameType(EVideoFrameType type) {
+FrameType ConvertToVideoFrameType(EVideoFrameType type) {
switch (type) {
- case videoFrameTypeInvalid:
- return kEmptyFrame;
case videoFrameTypeIDR:
return kVideoFrameKey;
case videoFrameTypeSkip:
@@ -63,12 +59,15 @@ static FrameType EVideoFrameType_to_FrameType(EVideoFrameType type) {
case videoFrameTypeP:
case videoFrameTypeIPMixed:
return kVideoFrameDelta;
- default:
- LOG(LS_WARNING) << "Unknown EVideoFrameType: " << type;
- return kVideoFrameDelta;
+ case videoFrameTypeInvalid:
+ break;
}
+ RTC_NOTREACHED() << "Unexpected/invalid frame type: " << type;
+ return kEmptyFrame;
}
+} // namespace
+
// Helper method used by H264EncoderImpl::Encode.
// Copies the encoded bytes from |info| to |encoded_image| and updates the
// fragmentation information of |frag_header|. The |encoded_image->_buffer| may
@@ -368,12 +367,12 @@ int32_t H264EncoderImpl::Encode(
picture.iPicHeight = frame.height();
picture.iColorFormat = EVideoFormatType::videoFormatI420;
picture.uiTimeStamp = frame.ntp_time_ms();
- picture.iStride[0] = frame.stride(kYPlane);
- picture.iStride[1] = frame.stride(kUPlane);
- picture.iStride[2] = frame.stride(kVPlane);
- picture.pData[0] = const_cast<uint8_t*>(frame.buffer(kYPlane));
- picture.pData[1] = const_cast<uint8_t*>(frame.buffer(kUPlane));
- picture.pData[2] = const_cast<uint8_t*>(frame.buffer(kVPlane));
+ picture.iStride[0] = frame.video_frame_buffer()->StrideY();
+ picture.iStride[1] = frame.video_frame_buffer()->StrideU();
+ picture.iStride[2] = frame.video_frame_buffer()->StrideV();
+ picture.pData[0] = const_cast<uint8_t*>(frame.video_frame_buffer()->DataY());
+ picture.pData[1] = const_cast<uint8_t*>(frame.video_frame_buffer()->DataU());
+ picture.pData[2] = const_cast<uint8_t*>(frame.video_frame_buffer()->DataV());
// EncodeFrame output.
SFrameBSInfo info;
@@ -393,7 +392,8 @@ int32_t H264EncoderImpl::Encode(
encoded_image_._timeStamp = frame.timestamp();
encoded_image_.ntp_time_ms_ = frame.ntp_time_ms();
encoded_image_.capture_time_ms_ = frame.render_time_ms();
- encoded_image_._frameType = EVideoFrameType_to_FrameType(info.eFrameType);
+ encoded_image_.rotation_ = frame.rotation();
+ encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType);
// Split encoded image up into fragments. This also updates |encoded_image_|.
RTPFragmentationHeader frag_header;
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
index 0ea2600197e..18820d3ded7 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
@@ -15,12 +15,12 @@
#include <memory>
+#if defined(WEBRTC_IOS)
+#include "RTCUIApplication.h"
+#endif
#include "libyuv/convert.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
-#if defined(WEBRTC_IOS)
-#include "webrtc/base/objc/RTCUIApplication.h"
-#endif
#include "webrtc/common_video/include/corevideo_frame_buffer.h"
#include "webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h"
#include "webrtc/video_frame.h"
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc
index 8cfe63dbb78..5f6a231288d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.cc
@@ -17,12 +17,12 @@
#include <string>
#include <vector>
+#if defined(WEBRTC_IOS)
+#include "RTCUIApplication.h"
+#endif
#include "libyuv/convert_from.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
-#if defined(WEBRTC_IOS)
-#include "webrtc/base/objc/RTCUIApplication.h"
-#endif
#include "webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h"
#include "webrtc/system_wrappers/include/clock.h"
@@ -118,8 +118,14 @@ struct FrameEncodeParams {
int32_t w,
int32_t h,
int64_t rtms,
- uint32_t ts)
- : encoder(e), width(w), height(h), render_time_ms(rtms), timestamp(ts) {
+ uint32_t ts,
+ webrtc::VideoRotation r)
+ : encoder(e),
+ width(w),
+ height(h),
+ render_time_ms(rtms),
+ timestamp(ts),
+ rotation(r) {
if (csi) {
codec_specific_info = *csi;
} else {
@@ -133,6 +139,7 @@ struct FrameEncodeParams {
int32_t height;
int64_t render_time_ms;
uint32_t timestamp;
+ webrtc::VideoRotation rotation;
};
// We receive I420Frames as input, but we need to feed CVPixelBuffers into the
@@ -161,10 +168,14 @@ bool CopyVideoFrameToPixelBuffer(const webrtc::VideoFrame& frame,
int dst_stride_uv = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1);
// Convert I420 to NV12.
int ret = libyuv::I420ToNV12(
- frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
- frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
- frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane), dst_y,
- dst_stride_y, dst_uv, dst_stride_uv, frame.width(), frame.height());
+ frame.video_frame_buffer()->DataY(),
+ frame.video_frame_buffer()->StrideY(),
+ frame.video_frame_buffer()->DataU(),
+ frame.video_frame_buffer()->StrideU(),
+ frame.video_frame_buffer()->DataV(),
+ frame.video_frame_buffer()->StrideV(),
+ dst_y, dst_stride_y, dst_uv, dst_stride_uv,
+ frame.width(), frame.height());
CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
if (ret) {
LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12 :" << ret;
@@ -185,7 +196,8 @@ void VTCompressionOutputCallback(void* encoder,
encode_params->encoder->OnEncodedFrame(
status, info_flags, sample_buffer, encode_params->codec_specific_info,
encode_params->width, encode_params->height,
- encode_params->render_time_ms, encode_params->timestamp);
+ encode_params->render_time_ms, encode_params->timestamp,
+ encode_params->rotation);
}
} // namespace internal
@@ -248,6 +260,7 @@ int H264VideoToolboxEncoder::Encode(
return WEBRTC_VIDEO_CODEC_OK;
}
#endif
+ bool is_keyframe_required = false;
// Get a pixel buffer from the pool and copy frame data over.
CVPixelBufferPoolRef pixel_buffer_pool =
VTCompressionSessionGetPixelBufferPool(compression_session_);
@@ -257,9 +270,11 @@ int H264VideoToolboxEncoder::Encode(
// invalidated, which causes this pool call to fail when the application
// is foregrounded and frames are being sent for encoding again.
// Resetting the session when this happens fixes the issue.
+ // In addition we request a keyframe so video can recover quickly.
ResetCompressionSession();
pixel_buffer_pool =
VTCompressionSessionGetPixelBufferPool(compression_session_);
+ is_keyframe_required = true;
}
#endif
if (!pixel_buffer_pool) {
@@ -283,8 +298,7 @@ int H264VideoToolboxEncoder::Encode(
}
// Check if we need a keyframe.
- bool is_keyframe_required = false;
- if (frame_types) {
+ if (!is_keyframe_required && frame_types) {
for (auto frame_type : *frame_types) {
if (frame_type == kVideoFrameKey) {
is_keyframe_required = true;
@@ -304,7 +318,7 @@ int H264VideoToolboxEncoder::Encode(
std::unique_ptr<internal::FrameEncodeParams> encode_params;
encode_params.reset(new internal::FrameEncodeParams(
this, codec_specific_info, width_, height_, input_image.render_time_ms(),
- input_image.timestamp()));
+ input_image.timestamp(), input_image.rotation()));
// Update the bitrate if needed.
SetBitrateBps(bitrate_adjuster_.GetAdjustedBitrateBps());
@@ -469,7 +483,8 @@ void H264VideoToolboxEncoder::OnEncodedFrame(
int32_t width,
int32_t height,
int64_t render_time_ms,
- uint32_t timestamp) {
+ uint32_t timestamp,
+ VideoRotation rotation) {
if (status != noErr) {
LOG(LS_ERROR) << "H264 encode failed.";
return;
@@ -509,6 +524,7 @@ void H264VideoToolboxEncoder::OnEncodedFrame(
is_keyframe ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta;
frame.capture_time_ms_ = render_time_ms;
frame._timeStamp = timestamp;
+ frame.rotation_ = rotation;
int result = callback_->Encoded(frame, &codec_specific_info, header.get());
if (result != 0) {
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h
index 779889d43cc..d54fa612c3d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_encoder.h
@@ -12,8 +12,9 @@
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_ENCODER_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_ENCODER_H_
+#include "webrtc/common_video/include/bitrate_adjuster.h"
+#include "webrtc/common_video/rotation.h"
#include "webrtc/modules/video_coding/codecs/h264/include/h264.h"
-#include "webrtc/modules/video_coding/include/bitrate_adjuster.h"
#if defined(WEBRTC_VIDEO_TOOLBOX_SUPPORTED)
@@ -58,7 +59,8 @@ class H264VideoToolboxEncoder : public H264Encoder {
int32_t width,
int32_t height,
int64_t render_time_ms,
- uint32_t timestamp);
+ uint32_t timestamp,
+ VideoRotation rotation);
private:
int ResetCompressionSession();
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
index e64babd599f..9a9a0ddf165 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
@@ -17,6 +17,7 @@
#include <memory>
#include <vector>
+#include "webrtc/base/timeutils.h"
#include "webrtc/system_wrappers/include/cpu_info.h"
namespace webrtc {
@@ -198,7 +199,7 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) {
// Ensure we have a new statistics data object we can fill:
FrameStatistic& stat = stats_->NewFrame(frame_number);
- encode_start_ = TickTime::Now();
+ encode_start_ns_ = rtc::TimeNanos();
// Use the frame number as "timestamp" to identify frames
source_frame_.set_timestamp(frame_number);
@@ -248,11 +249,11 @@ void VideoProcessorImpl::FrameEncoded(
encoded_frame_type_ = encoded_image._frameType;
- TickTime encode_stop = TickTime::Now();
+ int64_t encode_stop_ns = rtc::TimeNanos();
int frame_number = encoded_image._timeStamp;
FrameStatistic& stat = stats_->stats_[frame_number];
stat.encode_time_in_us =
- GetElapsedTimeMicroseconds(encode_start_, encode_stop);
+ GetElapsedTimeMicroseconds(encode_start_ns_, encode_stop_ns);
stat.encoding_successful = true;
stat.encoded_frame_length_in_bytes = encoded_image._length;
stat.frame_number = encoded_image._timeStamp;
@@ -299,7 +300,7 @@ void VideoProcessorImpl::FrameEncoded(
// Keep track of if frames are lost due to packet loss so we can tell
// this to the encoder (this is handled by the RTP logic in the full stack)
- decode_start_ = TickTime::Now();
+ decode_start_ns_ = rtc::TimeNanos();
// TODO(kjellander): Pass fragmentation header to the decoder when
// CL 172001 has been submitted and PacketManipulator supports this.
int32_t decode_result =
@@ -315,12 +316,12 @@ void VideoProcessorImpl::FrameEncoded(
}
void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
- TickTime decode_stop = TickTime::Now();
+ int64_t decode_stop_ns = rtc::TimeNanos();
int frame_number = image.timestamp();
// Report stats
FrameStatistic& stat = stats_->stats_[frame_number];
stat.decode_time_in_us =
- GetElapsedTimeMicroseconds(decode_start_, decode_stop);
+ GetElapsedTimeMicroseconds(decode_start_ns_, decode_stop_ns);
stat.decoding_successful = true;
// Check for resize action (either down or up):
@@ -378,10 +379,9 @@ void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
}
}
-int VideoProcessorImpl::GetElapsedTimeMicroseconds(
- const webrtc::TickTime& start,
- const webrtc::TickTime& stop) {
- uint64_t encode_time = (stop - start).Microseconds();
+int VideoProcessorImpl::GetElapsedTimeMicroseconds(int64_t start,
+ int64_t stop) {
+ uint64_t encode_time = (stop - start) / rtc::kNumNanosecsPerMicrosec;
assert(encode_time <
static_cast<unsigned int>(std::numeric_limits<int>::max()));
return static_cast<int>(encode_time);
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.h b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.h
index cd1c7b9d62c..f0322dd67b4 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor.h
@@ -19,7 +19,6 @@
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/codecs/test/packet_manipulator.h"
#include "webrtc/modules/video_coding/codecs/test/stats.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/frame_reader.h"
#include "webrtc/test/testsupport/frame_writer.h"
#include "webrtc/video_frame.h"
@@ -179,8 +178,7 @@ class VideoProcessorImpl : public VideoProcessor {
void FrameDecoded(const webrtc::VideoFrame& image);
// Used for getting a 32-bit integer representing time
// (checks the size is within signed 32-bit bounds before casting it)
- int GetElapsedTimeMicroseconds(const webrtc::TickTime& start,
- const webrtc::TickTime& stop);
+ int GetElapsedTimeMicroseconds(int64_t start, int64_t stop);
// Updates the encoder with the target bit rate and the frame rate.
void SetRates(int bit_rate, int frame_rate) override;
// Return the size of the encoded frame in bytes.
@@ -225,8 +223,8 @@ class VideoProcessorImpl : public VideoProcessor {
// Statistics
double bit_rate_factor_; // multiply frame length with this to get bit rate
- webrtc::TickTime encode_start_;
- webrtc::TickTime decode_start_;
+ int64_t encode_start_ns_;
+ int64_t decode_start_ns_;
// Callback class required to implement according to the VideoEncoder API.
class VideoProcessorEncodeCompleteCallback
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
index 897870a2717..9f361dc6261 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
@@ -626,6 +626,7 @@ TEST_F(VideoProcessorIntegrationTest, Process0PercentPacketLossH264) {
// Fails on iOS. See webrtc:4755.
#if !defined(WEBRTC_IOS)
+#if !defined(RTC_DISABLE_VP9)
// VP9: Run with no packet loss and fixed bitrate. Quality should be very high.
// One key frame (first frame only) in sequence. Setting |key_frame_interval|
// to -1 below means no periodic key frames in test.
@@ -780,6 +781,8 @@ TEST_F(VideoProcessorIntegrationTest, ProcessNoLossSpatialResizeFrameDropVP9) {
// TODO(marpan): Add temporal layer test for VP9, once changes are in
// vp9 wrapper for this.
+#endif // !defined(RTC_DISABLE_VP9)
+
// VP8: Run with no packet loss and fixed bitrate. Quality should be very high.
// One key frame (first frame only) in sequence. Setting |key_frame_interval|
// to -1 below means no periodic key frames in test.
@@ -961,7 +964,7 @@ TEST_F(VideoProcessorIntegrationTest,
SetQualityMetrics(&quality_metrics, 25.0, 15.0, 0.70, 0.40);
// Metrics for rate control.
RateControlMetrics rc_metrics[1];
- SetRateControlMetrics(rc_metrics, 0, 160, 60, 120, 20, 70, 1, 2);
+ SetRateControlMetrics(rc_metrics, 0, 160, 80, 120, 20, 70, 1, 2);
ProcessFramesAndVerify(quality_metrics, rate_profile, process_settings,
rc_metrics);
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc
index d22601358f2..b9721cde1bc 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/realtime_temporal_layers.cc
@@ -101,10 +101,10 @@ class RealTimeTemporalLayers : public TemporalLayers {
virtual ~RealTimeTemporalLayers() {}
- virtual bool ConfigureBitrates(int bitrate_kbit,
- int max_bitrate_kbit,
- int framerate,
- vpx_codec_enc_cfg_t* cfg) {
+ bool ConfigureBitrates(int bitrate_kbit,
+ int max_bitrate_kbit,
+ int framerate,
+ vpx_codec_enc_cfg_t* cfg) override {
temporal_layers_ =
CalculateNumberOfTemporalLayers(temporal_layers_, framerate);
temporal_layers_ = std::min(temporal_layers_, max_temporal_layers_);
@@ -184,7 +184,7 @@ class RealTimeTemporalLayers : public TemporalLayers {
return true;
}
- virtual int EncodeFlags(uint32_t timestamp) {
+ int EncodeFlags(uint32_t timestamp) override {
frame_counter_++;
return CurrentEncodeFlags();
}
@@ -196,16 +196,16 @@ class RealTimeTemporalLayers : public TemporalLayers {
return encode_flags_[index];
}
- virtual int CurrentLayerId() const {
+ int CurrentLayerId() const override {
assert(layer_ids_length_ > 0 && layer_ids_ != NULL);
int index = frame_counter_ % layer_ids_length_;
assert(index >= 0 && index < layer_ids_length_);
return layer_ids_[index];
}
- virtual void PopulateCodecSpecific(bool base_layer_sync,
- CodecSpecificInfoVP8* vp8_info,
- uint32_t timestamp) {
+ void PopulateCodecSpecific(bool base_layer_sync,
+ CodecSpecificInfoVP8* vp8_info,
+ uint32_t timestamp) override {
assert(temporal_layers_ > 0);
if (temporal_layers_ == 1) {
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
index 55a4402cbe4..be55133dd6b 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.cc
@@ -301,14 +301,21 @@ int SimulcastEncoderAdapter::Encode(
// Aligning stride values based on width.
dst_frame.CreateEmptyFrame(dst_width, dst_height, dst_width,
(dst_width + 1) / 2, (dst_width + 1) / 2);
- libyuv::I420Scale(
- input_image.buffer(kYPlane), input_image.stride(kYPlane),
- input_image.buffer(kUPlane), input_image.stride(kUPlane),
- input_image.buffer(kVPlane), input_image.stride(kVPlane), src_width,
- src_height, dst_frame.buffer(kYPlane), dst_frame.stride(kYPlane),
- dst_frame.buffer(kUPlane), dst_frame.stride(kUPlane),
- dst_frame.buffer(kVPlane), dst_frame.stride(kVPlane), dst_width,
- dst_height, libyuv::kFilterBilinear);
+ libyuv::I420Scale(input_image.video_frame_buffer()->DataY(),
+ input_image.video_frame_buffer()->StrideY(),
+ input_image.video_frame_buffer()->DataU(),
+ input_image.video_frame_buffer()->StrideU(),
+ input_image.video_frame_buffer()->DataV(),
+ input_image.video_frame_buffer()->StrideV(),
+ src_width, src_height,
+ dst_frame.video_frame_buffer()->MutableDataY(),
+ dst_frame.video_frame_buffer()->StrideY(),
+ dst_frame.video_frame_buffer()->MutableDataU(),
+ dst_frame.video_frame_buffer()->StrideU(),
+ dst_frame.video_frame_buffer()->MutableDataV(),
+ dst_frame.video_frame_buffer()->StrideV(),
+ dst_width, dst_height,
+ libyuv::kFilterBilinear);
dst_frame.set_timestamp(input_image.timestamp());
dst_frame.set_render_time_ms(input_image.render_time_ms());
streaminfos_[stream_idx].encoder->Encode(dst_frame, codec_specific_info,
@@ -494,10 +501,6 @@ void SimulcastEncoderAdapter::OnDroppedFrame() {
streaminfos_[0].encoder->OnDroppedFrame();
}
-int SimulcastEncoderAdapter::GetTargetFramerate() {
- return streaminfos_[0].encoder->GetTargetFramerate();
-}
-
bool SimulcastEncoderAdapter::SupportsNativeHandle() const {
// We should not be calling this method before streaminfos_ are configured.
RTC_DCHECK(!streaminfos_.empty());
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h
index 777ac1ba368..fca16df6fad 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h
@@ -58,7 +58,6 @@ class SimulcastEncoderAdapter : public VP8Encoder {
void OnDroppedFrame() override;
- int GetTargetFramerate() override;
bool SupportsNativeHandle() const override;
const char* ImplementationName() const override;
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
index 9a7e1b2e7ca..aafcd797ac5 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter_unittest.cc
@@ -107,34 +107,40 @@ TEST_F(TestSimulcastEncoderAdapter, DISABLED_TestRPSIEncoder) {
class MockVideoEncoder : public VideoEncoder {
public:
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
int32_t InitEncode(const VideoCodec* codecSettings,
int32_t numberOfCores,
- size_t maxPayloadSize) override {
+ size_t maxPayloadSize) /* override */ {
codec_ = *codecSettings;
return 0;
}
int32_t Encode(const VideoFrame& inputImage,
const CodecSpecificInfo* codecSpecificInfo,
- const std::vector<FrameType>* frame_types) override {
+ const std::vector<FrameType>* frame_types) /* override */ {
return 0;
}
int32_t RegisterEncodeCompleteCallback(
- EncodedImageCallback* callback) override {
+ EncodedImageCallback* callback) /* override */ {
callback_ = callback;
return 0;
}
- int32_t Release() override { return 0; }
+ int32_t Release() /* override */ { return 0; }
- int32_t SetRates(uint32_t newBitRate, uint32_t frameRate) override {
+ int32_t SetRates(uint32_t newBitRate, uint32_t frameRate) /* override */ {
return 0;
}
MOCK_METHOD2(SetChannelParameters, int32_t(uint32_t packetLoss, int64_t rtt));
- bool SupportsNativeHandle() const override { return supports_native_handle_; }
+ bool SupportsNativeHandle() const /* override */ {
+ return supports_native_handle_;
+ }
virtual ~MockVideoEncoder() {}
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
index 2b2aa5de69f..b277ad2ee48 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/simulcast_unittest.h
@@ -119,13 +119,13 @@ class Vp8TestDecodedImageCallback : public DecodedImageCallback {
Vp8TestDecodedImageCallback() : decoded_frames_(0) {}
int32_t Decoded(VideoFrame& decoded_image) override {
for (int i = 0; i < decoded_image.width(); ++i) {
- EXPECT_NEAR(kColorY, decoded_image.buffer(kYPlane)[i], 1);
+ EXPECT_NEAR(kColorY, decoded_image.video_frame_buffer()->DataY()[i], 1);
}
// TODO(mikhal): Verify the difference between U,V and the original.
for (int i = 0; i < ((decoded_image.width() + 1) / 2); ++i) {
- EXPECT_NEAR(kColorU, decoded_image.buffer(kUPlane)[i], 4);
- EXPECT_NEAR(kColorV, decoded_image.buffer(kVPlane)[i], 4);
+ EXPECT_NEAR(kColorU, decoded_image.video_frame_buffer()->DataU()[i], 4);
+ EXPECT_NEAR(kColorV, decoded_image.video_frame_buffer()->DataV()[i], 4);
}
decoded_frames_++;
return 0;
@@ -168,7 +168,7 @@ class SkipEncodingUnusedStreamsTest {
virtual ~SpyingTemporalLayers() { delete layers_; }
- virtual int EncodeFlags(uint32_t timestamp) {
+ int EncodeFlags(uint32_t timestamp) override {
return layers_->EncodeFlags(timestamp);
}
@@ -222,26 +222,40 @@ class TestVp8Simulcast : public ::testing::Test {
TestVp8Simulcast(VP8Encoder* encoder, VP8Decoder* decoder)
: encoder_(encoder), decoder_(decoder) {}
- // Creates an VideoFrame from |plane_colors|.
- static void CreateImage(VideoFrame* frame, int plane_colors[kNumOfPlanes]) {
- for (int plane_num = 0; plane_num < kNumOfPlanes; ++plane_num) {
- int width =
- (plane_num != kYPlane ? (frame->width() + 1) / 2 : frame->width());
- int height =
- (plane_num != kYPlane ? (frame->height() + 1) / 2 : frame->height());
- PlaneType plane_type = static_cast<PlaneType>(plane_num);
- uint8_t* data = frame->buffer(plane_type);
+ static void SetPlane(uint8_t* data,
+ uint8_t value,
+ int width,
+ int height,
+ int stride) {
+ for (int i = 0; i < height; i++, data += stride) {
// Setting allocated area to zero - setting only image size to
// requested values - will make it easier to distinguish between image
// size and frame size (accounting for stride).
- memset(frame->buffer(plane_type), 0, frame->allocated_size(plane_type));
- for (int i = 0; i < height; i++) {
- memset(data, plane_colors[plane_num], width);
- data += frame->stride(plane_type);
- }
+ memset(data, value, width);
+ memset(data + width, 0, stride - width);
}
}
+ // Fills in an VideoFrameBuffer from |plane_colors|.
+ static void CreateImage(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
+ int plane_colors[kNumOfPlanes]) {
+ int width = buffer->width();
+ int height = buffer->height();
+ int chroma_width = (width + 1) / 2;
+ int chroma_height = (height + 1) / 2;
+
+ SetPlane(buffer->MutableDataY(), plane_colors[0],
+ width, height, buffer->StrideY());
+
+ SetPlane(buffer->MutableDataU(), plane_colors[1],
+ chroma_width, chroma_height,
+ buffer->StrideU());
+
+ SetPlane(buffer->MutableDataV(), plane_colors[2],
+ chroma_width, chroma_height,
+ buffer->StrideV());
+ }
+
static void DefaultSettings(VideoCodec* settings,
const int* temporal_layer_profile) {
assert(settings);
@@ -305,11 +319,11 @@ class TestVp8Simulcast : public ::testing::Test {
int half_width = (kDefaultWidth + 1) / 2;
input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth,
half_width, half_width);
- memset(input_frame_.buffer(kYPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
input_frame_.allocated_size(kYPlane));
- memset(input_frame_.buffer(kUPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
input_frame_.allocated_size(kUPlane));
- memset(input_frame_.buffer(kVPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
input_frame_.allocated_size(kVPlane));
}
@@ -555,11 +569,11 @@ class TestVp8Simulcast : public ::testing::Test {
int half_width = (settings_.width + 1) / 2;
input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
settings_.width, half_width, half_width);
- memset(input_frame_.buffer(kYPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
input_frame_.allocated_size(kYPlane));
- memset(input_frame_.buffer(kUPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
input_frame_.allocated_size(kUPlane));
- memset(input_frame_.buffer(kVPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
input_frame_.allocated_size(kVPlane));
// The for loop above did not set the bitrate of the highest layer.
@@ -596,11 +610,11 @@ class TestVp8Simulcast : public ::testing::Test {
half_width = (settings_.width + 1) / 2;
input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
settings_.width, half_width, half_width);
- memset(input_frame_.buffer(kYPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
input_frame_.allocated_size(kYPlane));
- memset(input_frame_.buffer(kUPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
input_frame_.allocated_size(kUPlane));
- memset(input_frame_.buffer(kVPlane), 0,
+ memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
input_frame_.allocated_size(kVPlane));
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
}
@@ -691,7 +705,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV;
- CreateImage(&input_frame_, plane_offset);
+ CreateImage(input_frame_.video_frame_buffer(), plane_offset);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
int picture_id = -1;
@@ -707,7 +721,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
- CreateImage(&input_frame_, plane_offset);
+ CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
@@ -715,7 +729,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
- CreateImage(&input_frame_, plane_offset);
+ CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
@@ -724,7 +738,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
- CreateImage(&input_frame_, plane_offset);
+ CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
@@ -739,7 +753,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV;
- CreateImage(&input_frame_, plane_offset);
+ CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
@@ -898,7 +912,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV;
- CreateImage(&input_frame_, plane_offset);
+ CreateImage(input_frame_.video_frame_buffer(), plane_offset);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
@@ -906,7 +920,7 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
- CreateImage(&input_frame_, plane_offset);
+ CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
index f3ebfa1f766..d562dd4bb97 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc
@@ -14,9 +14,9 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
namespace webrtc {
@@ -159,8 +159,8 @@ class TestVp8Impl : public ::testing::Test {
}
size_t WaitForEncodedFrame() const {
- int64_t startTime = TickTime::MillisecondTimestamp();
- while (TickTime::MillisecondTimestamp() - startTime < kMaxWaitEncTimeMs) {
+ int64_t startTime = rtc::TimeMillis();
+ while (rtc::TimeMillis() - startTime < kMaxWaitEncTimeMs) {
if (encode_complete_callback_->EncodeComplete()) {
return encoded_frame_._length;
}
@@ -169,8 +169,8 @@ class TestVp8Impl : public ::testing::Test {
}
size_t WaitForDecodedFrame() const {
- int64_t startTime = TickTime::MillisecondTimestamp();
- while (TickTime::MillisecondTimestamp() - startTime < kMaxWaitDecTimeMs) {
+ int64_t startTime = rtc::TimeMillis();
+ while (rtc::TimeMillis() - startTime < kMaxWaitDecTimeMs) {
if (decode_complete_callback_->DecodeComplete()) {
return CalcBufferSize(kI420, decoded_frame_.width(),
decoded_frame_.height());
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index b34288632cd..f035568355d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -20,6 +20,7 @@
#include "libyuv/convert.h" // NOLINT
#include "webrtc/base/checks.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/common_types.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
@@ -29,7 +30,6 @@
#include "webrtc/modules/video_coding/codecs/vp8/screenshare_layers.h"
#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
namespace {
@@ -166,7 +166,7 @@ VP8EncoderImpl::VP8EncoderImpl()
tl1_frame_dropper_(kTl1MaxTimeToDropFrames),
key_frame_request_(kMaxSimulcastStreams, false),
quality_scaler_enabled_(false) {
- uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
+ uint32_t seed = rtc::Time32();
srand(seed);
picture_id_.reserve(kMaxSimulcastStreams);
@@ -598,14 +598,9 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
}
rps_.Init();
- // Disable both high-QP limits and framedropping. Both are handled by libvpx
- // internally.
- const int kDisabledBadQpThreshold = 64;
- // TODO(glaznev/sprang): consider passing codec initial bitrate to quality
- // scaler to avoid starting with HD for low initial bitrates.
- quality_scaler_.Init(codec_.qpMax / QualityScaler::kDefaultLowQpDenominator,
- kDisabledBadQpThreshold, false, 0, 0, 0,
- codec_.maxFramerate);
+ quality_scaler_.Init(QualityScaler::kLowVp8QpThreshold,
+ QualityScaler::kBadVp8QpThreshold, codec_.startBitrate,
+ codec_.width, codec_.height, codec_.maxFramerate);
// Only apply scaling to improve for single-layer streams. The scaling metrics
// use frame drops as a signal and is only applicable when we drop frames.
@@ -751,15 +746,18 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
// Image in vpx_image_t format.
// Input image is const. VP8's raw image is not defined as const.
raw_images_[0].planes[VPX_PLANE_Y] =
- const_cast<uint8_t*>(input_image.buffer(kYPlane));
+ const_cast<uint8_t*>(input_image.video_frame_buffer()->DataY());
raw_images_[0].planes[VPX_PLANE_U] =
- const_cast<uint8_t*>(input_image.buffer(kUPlane));
+ const_cast<uint8_t*>(input_image.video_frame_buffer()->DataU());
raw_images_[0].planes[VPX_PLANE_V] =
- const_cast<uint8_t*>(input_image.buffer(kVPlane));
+ const_cast<uint8_t*>(input_image.video_frame_buffer()->DataV());
- raw_images_[0].stride[VPX_PLANE_Y] = input_image.stride(kYPlane);
- raw_images_[0].stride[VPX_PLANE_U] = input_image.stride(kUPlane);
- raw_images_[0].stride[VPX_PLANE_V] = input_image.stride(kVPlane);
+ raw_images_[0].stride[VPX_PLANE_Y] =
+ input_image.video_frame_buffer()->StrideY();
+ raw_images_[0].stride[VPX_PLANE_U] =
+ input_image.video_frame_buffer()->StrideU();
+ raw_images_[0].stride[VPX_PLANE_V] =
+ input_image.video_frame_buffer()->StrideV();
for (size_t i = 1; i < encoders_.size(); ++i) {
// Scale the image down a number of times by downsampling factor
@@ -1020,6 +1018,7 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
encoded_images_[encoder_idx]._timeStamp = input_image.timestamp();
encoded_images_[encoder_idx].capture_time_ms_ =
input_image.render_time_ms();
+ encoded_images_[encoder_idx].rotation_ = input_image.rotation();
int qp = -1;
vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp);
@@ -1053,9 +1052,9 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
}
if (encoders_.size() == 1 && send_stream_[0]) {
if (encoded_images_[0]._length > 0) {
- int qp;
- vpx_codec_control(&encoders_[0], VP8E_GET_LAST_QUANTIZER_64, &qp);
- quality_scaler_.ReportQP(qp);
+ int qp_128;
+ vpx_codec_control(&encoders_[0], VP8E_GET_LAST_QUANTIZER, &qp_128);
+ quality_scaler_.ReportQP(qp_128);
} else {
quality_scaler_.ReportDroppedFrame();
}
@@ -1355,9 +1354,12 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
- decoded_image.buffer(kYPlane), decoded_image.stride(kYPlane),
- decoded_image.buffer(kUPlane), decoded_image.stride(kUPlane),
- decoded_image.buffer(kVPlane), decoded_image.stride(kVPlane),
+ decoded_image.video_frame_buffer()->MutableDataY(),
+ decoded_image.video_frame_buffer()->StrideY(),
+ decoded_image.video_frame_buffer()->MutableDataU(),
+ decoded_image.video_frame_buffer()->StrideU(),
+ decoded_image.video_frame_buffer()->MutableDataV(),
+ decoded_image.video_frame_buffer()->StrideV(),
img->d_w, img->d_h);
decoded_image.set_ntp_time_ms(ntp_time_ms);
int ret = decode_complete_callback_->Decoded(decoded_image);
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
index 6906a322afc..f8af6422538 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
@@ -40,21 +40,21 @@ class VP8EncoderImpl : public VP8Encoder {
virtual ~VP8EncoderImpl();
- virtual int Release();
+ int Release() override;
- virtual int InitEncode(const VideoCodec* codec_settings,
- int number_of_cores,
- size_t max_payload_size);
+ int InitEncode(const VideoCodec* codec_settings,
+ int number_of_cores,
+ size_t max_payload_size) override;
- virtual int Encode(const VideoFrame& input_image,
- const CodecSpecificInfo* codec_specific_info,
- const std::vector<FrameType>* frame_types);
+ int Encode(const VideoFrame& input_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const std::vector<FrameType>* frame_types) override;
- virtual int RegisterEncodeCompleteCallback(EncodedImageCallback* callback);
+ int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;
- virtual int SetChannelParameters(uint32_t packet_loss, int64_t rtt);
+ int SetChannelParameters(uint32_t packet_loss, int64_t rtt) override;
- virtual int SetRates(uint32_t new_bitrate_kbit, uint32_t frame_rate);
+ int SetRates(uint32_t new_bitrate_kbit, uint32_t frame_rate) override;
void OnDroppedFrame() override {}
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
index 33dae8d8e4c..28027009537 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
@@ -12,10 +12,10 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/include/video_image.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/metrics/video_metrics.h"
#include "webrtc/tools/simple_command_line_parser.h"
@@ -158,7 +158,7 @@ int SequenceCoder(webrtc::test::CommandLineParser* parser) {
decoder->RegisterDecodeCompleteCallback(&decoder_callback);
// Read->Encode->Decode sequence.
// num_frames = -1 implies unlimited encoding (entire sequence).
- int64_t starttime = webrtc::TickTime::MillisecondTimestamp();
+ int64_t starttime = rtc::TimeMillis();
int frame_cnt = 1;
int frames_processed = 0;
input_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
@@ -176,7 +176,7 @@ int SequenceCoder(webrtc::test::CommandLineParser* parser) {
++frame_cnt;
}
printf("\nProcessed %d frames\n", frames_processed);
- int64_t endtime = webrtc::TickTime::MillisecondTimestamp();
+ int64_t endtime = rtc::TimeMillis();
int64_t totalExecutionTime = endtime - starttime;
printf("Total execution time: %.2lf ms\n",
static_cast<double>(totalExecutionTime));
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
index 3bcbe46b3a8..3b726a0cc5d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/include/vp9.h
@@ -18,6 +18,7 @@ namespace webrtc {
class VP9Encoder : public VideoEncoder {
public:
+ static bool IsSupported();
static VP9Encoder* Create();
virtual ~VP9Encoder() {}
@@ -25,6 +26,7 @@ class VP9Encoder : public VideoEncoder {
class VP9Decoder : public VideoDecoder {
public:
+ static bool IsSupported();
static VP9Decoder* Create();
virtual ~VP9Decoder() {}
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.gyp b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.gyp
index cd5201f8aac..9124e5fad30 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.gyp
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9.gyp
@@ -20,6 +20,21 @@
'<(libvpx_dir)/libvpx.gyp:libvpx',
],
}],
+ ['libvpx_build_vp9==1', {
+ 'sources': [
+ 'screenshare_layers.cc',
+ 'screenshare_layers.h',
+ 'vp9_frame_buffer_pool.cc',
+ 'vp9_frame_buffer_pool.h',
+ 'vp9_impl.cc',
+ 'vp9_impl.h',
+ ],
+ }, {
+ 'sources': [
+ 'vp9_noop.cc',
+ ],
+ }
+ ],
],
'dependencies': [
'<(webrtc_root)/common_video/common_video.gyp:common_video',
@@ -28,12 +43,6 @@
],
'sources': [
'include/vp9.h',
- 'screenshare_layers.cc',
- 'screenshare_layers.h',
- 'vp9_frame_buffer_pool.cc',
- 'vp9_frame_buffer_pool.h',
- 'vp9_impl.cc',
- 'vp9_impl.h',
],
},
],
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
index fcefdb89fa2..750f7427cdd 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -22,13 +22,13 @@
#include "vpx/vp8dx.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/base/keep_ref_until_done.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_coding/codecs/vp9/screenshare_layers.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -47,6 +47,10 @@ int GetCpuSpeed(int width, int height) {
#endif
}
+bool VP9Encoder::IsSupported() {
+ return true;
+}
+
VP9Encoder* VP9Encoder::Create() {
return new VP9EncoderImpl();
}
@@ -77,7 +81,7 @@ VP9EncoderImpl::VP9EncoderImpl()
// Use two spatial when screensharing with flexible mode.
spatial_layer_(new ScreenshareLayersVP9(2)) {
memset(&codec_, 0, sizeof(codec_));
- uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
+ uint32_t seed = rtc::Time32();
srand(seed);
}
@@ -500,12 +504,15 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
// Image in vpx_image_t format.
// Input image is const. VPX's raw image is not defined as const.
- raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(input_image.buffer(kYPlane));
- raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(input_image.buffer(kUPlane));
- raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(input_image.buffer(kVPlane));
- raw_->stride[VPX_PLANE_Y] = input_image.stride(kYPlane);
- raw_->stride[VPX_PLANE_U] = input_image.stride(kUPlane);
- raw_->stride[VPX_PLANE_V] = input_image.stride(kVPlane);
+ raw_->planes[VPX_PLANE_Y] =
+ const_cast<uint8_t*>(input_image.video_frame_buffer()->DataY());
+ raw_->planes[VPX_PLANE_U] =
+ const_cast<uint8_t*>(input_image.video_frame_buffer()->DataU());
+ raw_->planes[VPX_PLANE_V] =
+ const_cast<uint8_t*>(input_image.video_frame_buffer()->DataV());
+ raw_->stride[VPX_PLANE_Y] = input_image.video_frame_buffer()->StrideY();
+ raw_->stride[VPX_PLANE_U] = input_image.video_frame_buffer()->StrideU();
+ raw_->stride[VPX_PLANE_V] = input_image.video_frame_buffer()->StrideV();
vpx_enc_frame_flags_t flags = 0;
bool send_keyframe = (frame_type == kVideoFrameKey);
@@ -692,8 +699,12 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_._length);
encoded_image_._timeStamp = input_image_->timestamp();
encoded_image_.capture_time_ms_ = input_image_->render_time_ms();
+ encoded_image_.rotation_ = input_image_->rotation();
encoded_image_._encodedHeight = raw_->d_h;
encoded_image_._encodedWidth = raw_->d_w;
+ int qp = -1;
+ vpx_codec_control(encoder_, VP8E_GET_LAST_QUANTIZER, &qp);
+ encoded_image_.qp_ = qp;
encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,
&frag_info);
}
@@ -816,6 +827,10 @@ const char* VP9EncoderImpl::ImplementationName() const {
return "libvpx";
}
+bool VP9Decoder::IsSupported() {
+ return true;
+}
+
VP9Decoder* VP9Decoder::Create() {
return new VP9DecoderImpl();
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_noop.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_noop.cc
new file mode 100644
index 00000000000..cc37e1adbbd
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp9/vp9_noop.cc
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#if !defined(RTC_DISABLE_VP9)
+#error
+#endif // !defined(RTC_DISABLE_VP9)
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
+
+namespace webrtc {
+
+bool VP9Encoder::IsSupported() {
+ return false;
+}
+
+VP9Encoder* VP9Encoder::Create() {
+ RTC_NOTREACHED();
+ return nullptr;
+}
+
+bool VP9Decoder::IsSupported() {
+ return false;
+}
+
+VP9Decoder* VP9Decoder::Create() {
+ RTC_NOTREACHED();
+ return nullptr;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.cc b/chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.cc
deleted file mode 100644
index b2586fce3fb..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.cc
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/content_metrics_processing.h"
-
-#include <math.h>
-
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/video_coding/include/video_coding_defines.h"
-
-namespace webrtc {
-//////////////////////////////////
-/// VCMContentMetricsProcessing //
-//////////////////////////////////
-
-VCMContentMetricsProcessing::VCMContentMetricsProcessing()
- : recursive_avg_factor_(1 / 150.0f), // matched to 30fps.
- frame_cnt_uniform_avg_(0),
- avg_motion_level_(0.0f),
- avg_spatial_level_(0.0f) {
- recursive_avg_ = new VideoContentMetrics();
- uniform_avg_ = new VideoContentMetrics();
-}
-
-VCMContentMetricsProcessing::~VCMContentMetricsProcessing() {
- delete recursive_avg_;
- delete uniform_avg_;
-}
-
-int VCMContentMetricsProcessing::Reset() {
- recursive_avg_->Reset();
- uniform_avg_->Reset();
- frame_cnt_uniform_avg_ = 0;
- avg_motion_level_ = 0.0f;
- avg_spatial_level_ = 0.0f;
- return VCM_OK;
-}
-
-void VCMContentMetricsProcessing::UpdateFrameRate(uint32_t frameRate) {
- if (frameRate == 0)
- frameRate = 1;
- // Update factor for recursive averaging.
- recursive_avg_factor_ = static_cast<float>(1000.0f) /
- static_cast<float>(frameRate * kQmMinIntervalMs);
-}
-
-VideoContentMetrics* VCMContentMetricsProcessing::LongTermAvgData() {
- return recursive_avg_;
-}
-
-VideoContentMetrics* VCMContentMetricsProcessing::ShortTermAvgData() {
- if (frame_cnt_uniform_avg_ == 0) {
- return NULL;
- }
- // Two metrics are used: motion and spatial level.
- uniform_avg_->motion_magnitude =
- avg_motion_level_ / static_cast<float>(frame_cnt_uniform_avg_);
- uniform_avg_->spatial_pred_err =
- avg_spatial_level_ / static_cast<float>(frame_cnt_uniform_avg_);
- return uniform_avg_;
-}
-
-void VCMContentMetricsProcessing::ResetShortTermAvgData() {
- // Reset.
- avg_motion_level_ = 0.0f;
- avg_spatial_level_ = 0.0f;
- frame_cnt_uniform_avg_ = 0;
-}
-
-int VCMContentMetricsProcessing::UpdateContentData(
- const VideoContentMetrics* contentMetrics) {
- if (contentMetrics == NULL) {
- return VCM_OK;
- }
- return ProcessContent(contentMetrics);
-}
-
-int VCMContentMetricsProcessing::ProcessContent(
- const VideoContentMetrics* contentMetrics) {
- // Update the recursive averaged metrics: average is over longer window
- // of time: over QmMinIntervalMs ms.
- UpdateRecursiveAvg(contentMetrics);
- // Update the uniform averaged metrics: average is over shorter window
- // of time: based on ~RTCP reports.
- UpdateUniformAvg(contentMetrics);
- return VCM_OK;
-}
-
-void VCMContentMetricsProcessing::UpdateUniformAvg(
- const VideoContentMetrics* contentMetrics) {
- // Update frame counter.
- frame_cnt_uniform_avg_ += 1;
- // Update averaged metrics: motion and spatial level are used.
- avg_motion_level_ += contentMetrics->motion_magnitude;
- avg_spatial_level_ += contentMetrics->spatial_pred_err;
- return;
-}
-
-void VCMContentMetricsProcessing::UpdateRecursiveAvg(
- const VideoContentMetrics* contentMetrics) {
- // Spatial metrics: 2x2, 1x2(H), 2x1(V).
- recursive_avg_->spatial_pred_err =
- (1 - recursive_avg_factor_) * recursive_avg_->spatial_pred_err +
- recursive_avg_factor_ * contentMetrics->spatial_pred_err;
-
- recursive_avg_->spatial_pred_err_h =
- (1 - recursive_avg_factor_) * recursive_avg_->spatial_pred_err_h +
- recursive_avg_factor_ * contentMetrics->spatial_pred_err_h;
-
- recursive_avg_->spatial_pred_err_v =
- (1 - recursive_avg_factor_) * recursive_avg_->spatial_pred_err_v +
- recursive_avg_factor_ * contentMetrics->spatial_pred_err_v;
-
- // Motion metric: Derived from NFD (normalized frame difference).
- recursive_avg_->motion_magnitude =
- (1 - recursive_avg_factor_) * recursive_avg_->motion_magnitude +
- recursive_avg_factor_ * contentMetrics->motion_magnitude;
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.h b/chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.h
deleted file mode 100644
index 3f67ec19c98..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/content_metrics_processing.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
-#define WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-struct VideoContentMetrics;
-
-// QM interval time (in ms)
-enum { kQmMinIntervalMs = 10000 };
-
-// Flag for NFD metric vs motion metric
-enum { kNfdMetric = 1 };
-
-/**********************************/
-/* Content Metrics Processing */
-/**********************************/
-class VCMContentMetricsProcessing {
- public:
- VCMContentMetricsProcessing();
- ~VCMContentMetricsProcessing();
-
- // Update class with latest metrics.
- int UpdateContentData(const VideoContentMetrics* contentMetrics);
-
- // Reset the short-term averaged content data.
- void ResetShortTermAvgData();
-
- // Initialize.
- int Reset();
-
- // Inform class of current frame rate.
- void UpdateFrameRate(uint32_t frameRate);
-
- // Returns the long-term averaged content data: recursive average over longer
- // time scale.
- VideoContentMetrics* LongTermAvgData();
-
- // Returns the short-term averaged content data: uniform average over
- // shorter time scalE.
- VideoContentMetrics* ShortTermAvgData();
-
- private:
- // Compute working average.
- int ProcessContent(const VideoContentMetrics* contentMetrics);
-
- // Update the recursive averaged metrics: longer time average (~5/10 secs).
- void UpdateRecursiveAvg(const VideoContentMetrics* contentMetrics);
-
- // Update the uniform averaged metrics: shorter time average (~RTCP report).
- void UpdateUniformAvg(const VideoContentMetrics* contentMetrics);
-
- VideoContentMetrics* recursive_avg_;
- VideoContentMetrics* uniform_avg_;
- float recursive_avg_factor_;
- uint32_t frame_cnt_uniform_avg_;
- float avg_motion_level_;
- float avg_spatial_level_;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.cc b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.cc
new file mode 100644
index 00000000000..c6a1a06e756
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.cc
@@ -0,0 +1,154 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/frame_buffer2.h"
+
+#include <algorithm>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/modules/video_coding/frame_object.h"
+#include "webrtc/modules/video_coding/jitter_estimator.h"
+#include "webrtc/modules/video_coding/sequence_number_util.h"
+#include "webrtc/modules/video_coding/timing.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace video_coding {
+
+namespace {
+// The maximum age of decoded frames tracked by frame buffer, compared to
+// |newest_picture_id_|.
+constexpr int kMaxFrameAge = 4096;
+
+// The maximum number of decoded frames being tracked by the frame buffer.
+constexpr int kMaxNumHistoryFrames = 256;
+} // namespace
+
+bool FrameBuffer::FrameComp::operator()(const FrameKey& f1,
+ const FrameKey& f2) const {
+ // first = picture id
+ // second = spatial layer
+ if (f1.first == f2.first)
+ return f1.second < f2.second;
+ return AheadOf(f2.first, f1.first);
+}
+
+FrameBuffer::FrameBuffer(Clock* clock,
+ VCMJitterEstimator* jitter_estimator,
+ const VCMTiming* timing)
+ : clock_(clock),
+ frame_inserted_event_(false, false),
+ jitter_estimator_(jitter_estimator),
+ timing_(timing),
+ newest_picture_id_(-1) {}
+
+std::unique_ptr<FrameObject> FrameBuffer::NextFrame(int64_t max_wait_time_ms) {
+ int64_t latest_return_time = clock_->TimeInMilliseconds() + max_wait_time_ms;
+ while (true) {
+ int64_t now = clock_->TimeInMilliseconds();
+ int64_t wait_ms = max_wait_time_ms;
+
+ crit_.Enter();
+ frame_inserted_event_.Reset();
+ auto next_frame = frames_.end();
+ for (auto frame_it = frames_.begin(); frame_it != frames_.end();
+ ++frame_it) {
+ const FrameObject& frame = *frame_it->second;
+ if (IsContinuous(frame)) {
+ next_frame = frame_it;
+ int64_t render_time = timing_->RenderTimeMs(frame.timestamp, now);
+ wait_ms = timing_->MaxWaitingTime(render_time, now);
+
+ // This will cause the frame buffer to prefer high framerate rather
+ // than high resolution in the case of the decoder not decoding fast
+ // enough and the stream has multiple spatial and temporal layers.
+ if (wait_ms == 0)
+ continue;
+
+ break;
+ }
+ }
+ crit_.Leave();
+
+ // If the timout occures, return. Otherwise a new frame has been inserted
+ // and the best frame to decode next will be selected again.
+ wait_ms = std::min<int64_t>(wait_ms, latest_return_time - now);
+ wait_ms = std::max<int64_t>(wait_ms, 0);
+ if (!frame_inserted_event_.Wait(wait_ms)) {
+ crit_.Enter();
+ if (next_frame != frames_.end()) {
+ // TODO(philipel): update jitter estimator with correct values.
+ jitter_estimator_->UpdateEstimate(100, 100);
+
+ decoded_frames_.insert(next_frame->first);
+ std::unique_ptr<FrameObject> frame = std::move(next_frame->second);
+ frames_.erase(frames_.begin(), ++next_frame);
+ crit_.Leave();
+ return frame;
+ } else {
+ crit_.Leave();
+ return std::unique_ptr<FrameObject>();
+ }
+ }
+ }
+}
+
+void FrameBuffer::InsertFrame(std::unique_ptr<FrameObject> frame) {
+ rtc::CritScope lock(&crit_);
+ if (newest_picture_id_ == -1)
+ newest_picture_id_ = frame->picture_id;
+
+ if (AheadOf<uint16_t>(frame->picture_id, newest_picture_id_))
+ newest_picture_id_ = frame->picture_id;
+
+ // Remove frames as long as we have too many, |kMaxNumHistoryFrames|.
+ while (decoded_frames_.size() > kMaxNumHistoryFrames)
+ decoded_frames_.erase(decoded_frames_.begin());
+
+ // Remove frames that are too old, |kMaxNumHistoryFrames|.
+ uint16_t old_picture_id = Subtract<1 << 16>(newest_picture_id_, kMaxFrameAge);
+ auto old_decoded_it =
+ decoded_frames_.lower_bound(FrameKey(old_picture_id, 0));
+ decoded_frames_.erase(decoded_frames_.begin(), old_decoded_it);
+
+ FrameKey key(frame->picture_id, frame->spatial_layer);
+ frames_[key] = std::move(frame);
+ frame_inserted_event_.Set();
+}
+
+bool FrameBuffer::IsContinuous(const FrameObject& frame) const {
+ // If a frame with an earlier picture id was inserted compared to the last
+ // decoded frames picture id then that frame arrived too late.
+ if (!decoded_frames_.empty() &&
+ AheadOf(decoded_frames_.rbegin()->first, frame.picture_id)) {
+ return false;
+ }
+
+ // Have we decoded all frames that this frame depend on?
+ for (size_t r = 0; r < frame.num_references; ++r) {
+ FrameKey ref_key(frame.references[r], frame.spatial_layer);
+ if (decoded_frames_.find(ref_key) == decoded_frames_.end())
+ return false;
+ }
+
+ // If this is a layer frame, have we decoded the lower layer of this
+ // super frame.
+ if (frame.inter_layer_predicted) {
+ RTC_DCHECK_GT(frame.spatial_layer, 0);
+ FrameKey ref_key(frame.picture_id, frame.spatial_layer - 1);
+ if (decoded_frames_.find(ref_key) == decoded_frames_.end())
+ return false;
+ }
+
+ return true;
+}
+
+} // namespace video_coding
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.h b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.h
new file mode 100644
index 00000000000..10cae426f62
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER2_H_
+#define WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER2_H_
+
+#include <array>
+#include <map>
+#include <memory>
+#include <set>
+#include <utility>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/event.h"
+#include "webrtc/base/thread_annotations.h"
+
+namespace webrtc {
+
+class Clock;
+class VCMJitterEstimator;
+class VCMTiming;
+
+namespace video_coding {
+
+class FrameObject;
+
+class FrameBuffer {
+ public:
+ FrameBuffer(Clock* clock,
+ VCMJitterEstimator* jitter_estimator,
+ const VCMTiming* timing);
+
+ // Insert a frame into the frame buffer.
+ void InsertFrame(std::unique_ptr<FrameObject> frame);
+
+ // Get the next frame for decoding. Will return at latest after
+ // |max_wait_time_ms|, with either a managed FrameObject or an empty
+ // unique ptr if there is no available frame for decoding.
+ std::unique_ptr<FrameObject> NextFrame(int64_t max_wait_time_ms);
+
+ private:
+ // FrameKey is a pair of (picture id, spatial layer).
+ using FrameKey = std::pair<uint16_t, uint8_t>;
+
+ // Comparator used to sort frames, first on their picture id, and second
+ // on their spatial layer.
+ struct FrameComp {
+ bool operator()(const FrameKey& f1, const FrameKey& f2) const;
+ };
+
+ // Determines whether a frame is continuous.
+ bool IsContinuous(const FrameObject& frame) const
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Keep track of decoded frames.
+ std::set<FrameKey, FrameComp> decoded_frames_ GUARDED_BY(crit_);
+
+ // The actual buffer that holds the FrameObjects.
+ std::map<FrameKey, std::unique_ptr<FrameObject>, FrameComp> frames_
+ GUARDED_BY(crit_);
+
+ rtc::CriticalSection crit_;
+ Clock* const clock_;
+ rtc::Event frame_inserted_event_;
+ VCMJitterEstimator* const jitter_estimator_;
+ const VCMTiming* const timing_;
+ int newest_picture_id_ GUARDED_BY(crit_);
+
+ RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(FrameBuffer);
+};
+
+} // namespace video_coding
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER2_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_buffer2_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2_unittest.cc
new file mode 100644
index 00000000000..67706ce0581
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_buffer2_unittest.cc
@@ -0,0 +1,329 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/frame_buffer2.h"
+
+#include <algorithm>
+#include <cstring>
+#include <limits>
+#include <vector>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/random.h"
+#include "webrtc/modules/video_coding/frame_object.h"
+#include "webrtc/modules/video_coding/jitter_estimator.h"
+#include "webrtc/modules/video_coding/sequence_number_util.h"
+#include "webrtc/modules/video_coding/timing.h"
+#include "webrtc/system_wrappers/include/clock.h"
+
+namespace webrtc {
+namespace video_coding {
+
+class VCMTimingFake : public VCMTiming {
+ public:
+ explicit VCMTimingFake(Clock* clock) : VCMTiming(clock) {}
+
+ int64_t RenderTimeMs(uint32_t frame_timestamp,
+ int64_t now_ms) const override {
+ if (last_ms_ == -1) {
+ last_ms_ = now_ms + kDelayMs;
+ last_timestamp_ = frame_timestamp;
+ }
+
+ uint32_t diff = MinDiff(frame_timestamp, last_timestamp_);
+ if (AheadOf(frame_timestamp, last_timestamp_))
+ last_ms_ += diff / 90;
+ else
+ last_ms_ -= diff / 90;
+
+ last_timestamp_ = frame_timestamp;
+ return last_ms_;
+ }
+
+ uint32_t MaxWaitingTime(int64_t render_time_ms,
+ int64_t now_ms) const override {
+ return std::max<int>(0, render_time_ms - now_ms - kDecodeTime);
+ }
+
+ private:
+ static constexpr int kDelayMs = 50;
+ static constexpr int kDecodeTime = kDelayMs / 2;
+ mutable uint32_t last_timestamp_ = 0;
+ mutable int64_t last_ms_ = -1;
+};
+
+class VCMJitterEstimatorMock : public VCMJitterEstimator {
+ public:
+ explicit VCMJitterEstimatorMock(Clock* clock) : VCMJitterEstimator(clock) {}
+
+ MOCK_METHOD1(UpdateRtt, void(int64_t rttMs));
+ MOCK_METHOD3(UpdateEstimate,
+ void(int64_t frameDelayMs,
+ uint32_t frameSizeBytes,
+ bool incompleteFrame));
+};
+
+class FrameObjectMock : public FrameObject {
+ public:
+ MOCK_CONST_METHOD1(GetBitstream, bool(uint8_t* destination));
+};
+
+class TestFrameBuffer2 : public ::testing::Test {
+ protected:
+ static constexpr int kMaxReferences = 5;
+ static constexpr int kFps1 = 1000;
+ static constexpr int kFps10 = kFps1 / 10;
+ static constexpr int kFps20 = kFps1 / 20;
+
+ TestFrameBuffer2()
+ : clock_(0),
+ timing_(&clock_),
+ jitter_estimator_(&clock_),
+ buffer_(&clock_, &jitter_estimator_, &timing_),
+ rand_(0x34678213),
+ tear_down_(false),
+ extract_thread_(&ExtractLoop, this, "Extract Thread"),
+ trigger_extract_event_(false, false),
+ crit_acquired_event_(false, false) {}
+
+ void SetUp() override { extract_thread_.Start(); }
+
+ void TearDown() override {
+ tear_down_ = true;
+ trigger_extract_event_.Set();
+ extract_thread_.Stop();
+ }
+
+ template <typename... T>
+ void InsertFrame(uint16_t picture_id,
+ uint8_t spatial_layer,
+ int64_t ts_ms,
+ bool inter_layer_predicted,
+ T... refs) {
+ static_assert(sizeof...(refs) <= kMaxReferences,
+ "To many references specified for FrameObject.");
+ std::array<uint16_t, sizeof...(refs)> references = {{refs...}};
+
+ std::unique_ptr<FrameObjectMock> frame(new FrameObjectMock());
+ frame->picture_id = picture_id;
+ frame->spatial_layer = spatial_layer;
+ frame->timestamp = ts_ms * 90;
+ frame->num_references = references.size();
+ frame->inter_layer_predicted = inter_layer_predicted;
+ for (size_t r = 0; r < references.size(); ++r)
+ frame->references[r] = references[r];
+
+ buffer_.InsertFrame(std::move(frame));
+ }
+
+ void ExtractFrame(int64_t max_wait_time = 0) {
+ crit_.Enter();
+ if (max_wait_time == 0) {
+ frames_.emplace_back(buffer_.NextFrame(0));
+ crit_.Leave();
+ } else {
+ max_wait_time_ = max_wait_time;
+ trigger_extract_event_.Set();
+ crit_.Leave();
+ // Make sure |crit_| is aquired by |extract_thread_| before returning.
+ crit_acquired_event_.Wait(rtc::Event::kForever);
+ }
+ }
+
+ void CheckFrame(size_t index, int picture_id, int spatial_layer) {
+ rtc::CritScope lock(&crit_);
+ ASSERT_LT(index, frames_.size());
+ ASSERT_TRUE(frames_[index]);
+ ASSERT_EQ(picture_id, frames_[index]->picture_id);
+ ASSERT_EQ(spatial_layer, frames_[index]->spatial_layer);
+ }
+
+ void CheckNoFrame(size_t index) {
+ rtc::CritScope lock(&crit_);
+ ASSERT_LT(index, frames_.size());
+ ASSERT_FALSE(frames_[index]);
+ }
+
+ static bool ExtractLoop(void* obj) {
+ TestFrameBuffer2* tfb = static_cast<TestFrameBuffer2*>(obj);
+ while (true) {
+ tfb->trigger_extract_event_.Wait(rtc::Event::kForever);
+ {
+ rtc::CritScope lock(&tfb->crit_);
+ tfb->crit_acquired_event_.Set();
+ if (tfb->tear_down_)
+ return false;
+
+ tfb->frames_.emplace_back(tfb->buffer_.NextFrame(tfb->max_wait_time_));
+ }
+ }
+ }
+
+ uint32_t Rand() { return rand_.Rand<uint32_t>(); }
+
+ SimulatedClock clock_;
+ VCMTimingFake timing_;
+ VCMJitterEstimatorMock jitter_estimator_;
+ FrameBuffer buffer_;
+ std::vector<std::unique_ptr<FrameObject>> frames_;
+ Random rand_;
+
+ int64_t max_wait_time_;
+ bool tear_down_;
+ rtc::PlatformThread extract_thread_;
+ rtc::Event trigger_extract_event_;
+ rtc::Event crit_acquired_event_;
+ rtc::CriticalSection crit_;
+};
+
+TEST_F(TestFrameBuffer2, ExtractFromEmptyBuffer) {
+ ExtractFrame();
+ CheckNoFrame(0);
+}
+
+TEST_F(TestFrameBuffer2, WaitForFrame) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+
+ ExtractFrame(20);
+ InsertFrame(pid, 0, ts, false);
+ CheckFrame(0, pid, 0);
+}
+
+TEST_F(TestFrameBuffer2, OneSuperFrame) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+
+ ExtractFrame(20);
+ InsertFrame(pid, 1, ts, true);
+ InsertFrame(pid, 0, ts, false);
+ ExtractFrame();
+
+ CheckFrame(0, pid, 0);
+ CheckFrame(1, pid, 1);
+}
+
+TEST_F(TestFrameBuffer2, OneLayerStream) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+
+ InsertFrame(pid, 0, ts, false);
+ ExtractFrame();
+ CheckFrame(0, pid, 0);
+ for (int i = 1; i < 10; ++i) {
+ InsertFrame(pid + i, 0, ts + i * kFps10, false, pid + i - 1);
+ ExtractFrame();
+ clock_.AdvanceTimeMilliseconds(kFps10);
+ CheckFrame(i, pid + i, 0);
+ }
+}
+
+TEST_F(TestFrameBuffer2, OneLayerStreamReordered) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+
+ InsertFrame(pid, 0, ts, false);
+ ExtractFrame();
+ CheckFrame(0, pid, 0);
+ for (int i = 1; i < 10; i += 2) {
+ ExtractFrame(15);
+ InsertFrame(pid + i + 1, 0, ts + (i + 1) * kFps10, false, pid + i);
+ clock_.AdvanceTimeMilliseconds(kFps10);
+ InsertFrame(pid + i, 0, ts + i * kFps10, false, pid + i - 1);
+ clock_.AdvanceTimeMilliseconds(kFps10);
+ ExtractFrame();
+ CheckFrame(i, pid + i, 0);
+ CheckFrame(i + 1, pid + i + 1, 0);
+ }
+}
+
+TEST_F(TestFrameBuffer2, DropTemporalLayerSlowDecoder) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+
+ InsertFrame(pid, 0, ts, false);
+ InsertFrame(pid + 1, 0, ts + kFps20, false);
+ for (int i = 2; i < 10; i += 2) {
+ uint32_t ts_tl0 = ts + i / 2 * kFps10;
+ InsertFrame(pid + i, 0, ts_tl0, false, pid + i - 2);
+ InsertFrame(pid + i + 1, 0, ts_tl0 + kFps20, false, pid + i, pid + i - 1);
+ }
+
+ for (int i = 0; i < 10; ++i) {
+ ExtractFrame();
+ clock_.AdvanceTimeMilliseconds(60);
+ }
+
+ CheckFrame(0, pid, 0);
+ CheckFrame(1, pid + 1, 0);
+ CheckFrame(2, pid + 2, 0);
+ CheckFrame(3, pid + 4, 0);
+ CheckFrame(4, pid + 6, 0);
+ CheckFrame(5, pid + 8, 0);
+ CheckNoFrame(6);
+ CheckNoFrame(7);
+ CheckNoFrame(8);
+ CheckNoFrame(9);
+}
+
+TEST_F(TestFrameBuffer2, DropSpatialLayerSlowDecoder) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+
+ InsertFrame(pid, 0, ts, false);
+ InsertFrame(pid, 1, ts, false);
+ for (int i = 1; i < 6; ++i) {
+ uint32_t ts_tl0 = ts + i * kFps10;
+ InsertFrame(pid + i, 0, ts_tl0, false, pid + i - 1);
+ InsertFrame(pid + i, 1, ts_tl0, false, pid + i - 1);
+ }
+
+ ExtractFrame();
+ ExtractFrame();
+ clock_.AdvanceTimeMilliseconds(55);
+ for (int i = 2; i < 12; ++i) {
+ ExtractFrame();
+ clock_.AdvanceTimeMilliseconds(55);
+ }
+
+ CheckFrame(0, pid, 0);
+ CheckFrame(1, pid, 1);
+ CheckFrame(2, pid + 1, 0);
+ CheckFrame(3, pid + 1, 1);
+ CheckFrame(4, pid + 2, 0);
+ CheckFrame(5, pid + 2, 1);
+ CheckFrame(6, pid + 3, 0);
+ CheckFrame(7, pid + 4, 0);
+ CheckFrame(8, pid + 5, 0);
+ CheckNoFrame(9);
+ CheckNoFrame(10);
+ CheckNoFrame(11);
+}
+
+TEST_F(TestFrameBuffer2, InsertLateFrame) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+
+ InsertFrame(pid, 0, ts, false);
+ ExtractFrame();
+ InsertFrame(pid + 2, 0, ts, false);
+ ExtractFrame();
+ InsertFrame(pid + 1, 0, ts, false, pid);
+ ExtractFrame();
+
+ CheckFrame(0, pid, 0);
+ CheckFrame(1, pid + 2, 0);
+ CheckNoFrame(2);
+}
+
+} // namespace video_coding
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_object.cc b/chromium/third_party/webrtc/modules/video_coding/frame_object.cc
index 363c8a70357..7b9ec0d2835 100644
--- a/chromium/third_party/webrtc/modules/video_coding/frame_object.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_object.cc
@@ -15,33 +15,56 @@
namespace webrtc {
namespace video_coding {
+FrameObject::FrameObject()
+ : picture_id(0),
+ spatial_layer(0),
+ timestamp(0),
+ num_references(0),
+ inter_layer_predicted(false) {}
+
RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer,
- uint16_t picture_id,
- uint16_t first_packet,
- uint16_t last_packet)
+ uint16_t first_seq_num,
+ uint16_t last_seq_num)
: packet_buffer_(packet_buffer),
- first_packet_(first_packet),
- last_packet_(last_packet) {}
+ first_seq_num_(first_seq_num),
+ last_seq_num_(last_seq_num) {
+ VCMPacket* packet = packet_buffer_->GetPacket(first_seq_num);
+ if (packet) {
+ frame_type_ = packet->frameType;
+ codec_type_ = packet->codec;
+ }
+}
RtpFrameObject::~RtpFrameObject() {
packet_buffer_->ReturnFrame(this);
}
-uint16_t RtpFrameObject::first_packet() const {
- return first_packet_;
+uint16_t RtpFrameObject::first_seq_num() const {
+ return first_seq_num_;
+}
+
+uint16_t RtpFrameObject::last_seq_num() const {
+ return last_seq_num_;
}
-uint16_t RtpFrameObject::last_packet() const {
- return last_packet_;
+FrameType RtpFrameObject::frame_type() const {
+ return frame_type_;
}
-uint16_t RtpFrameObject::picture_id() const {
- return picture_id_;
+VideoCodecType RtpFrameObject::codec_type() const {
+ return codec_type_;
}
bool RtpFrameObject::GetBitstream(uint8_t* destination) const {
return packet_buffer_->GetBitstream(*this, destination);
}
+RTPVideoTypeHeader* RtpFrameObject::GetCodecHeader() const {
+ VCMPacket* packet = packet_buffer_->GetPacket(first_seq_num_);
+ if (!packet)
+ return nullptr;
+ return &packet->codecSpecificHeader.codecHeader;
+}
+
} // namespace video_coding
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/frame_object.h b/chromium/third_party/webrtc/modules/video_coding/frame_object.h
index 2a68293d638..e8bb4811e6e 100644
--- a/chromium/third_party/webrtc/modules/video_coding/frame_object.h
+++ b/chromium/third_party/webrtc/modules/video_coding/frame_object.h
@@ -11,16 +11,31 @@
#ifndef WEBRTC_MODULES_VIDEO_CODING_FRAME_OBJECT_H_
#define WEBRTC_MODULES_VIDEO_CODING_FRAME_OBJECT_H_
-#include "webrtc/modules/video_coding/packet.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
namespace webrtc {
namespace video_coding {
class FrameObject {
public:
- virtual uint16_t picture_id() const = 0;
+ static const uint8_t kMaxFrameReferences = 5;
+
+ FrameObject();
+
virtual bool GetBitstream(uint8_t* destination) const = 0;
virtual ~FrameObject() {}
+
+ // The tuple (|picture_id|, |spatial_layer|) uniquely identifies a frame
+ // object. For codec types that don't necessarily have picture ids they
+ // have to be constructed from the header data relevant to that codec.
+ uint16_t picture_id;
+ uint8_t spatial_layer;
+ uint32_t timestamp;
+
+ size_t num_references;
+ uint16_t references[kMaxFrameReferences];
+ bool inter_layer_predicted;
};
class PacketBuffer;
@@ -28,20 +43,23 @@ class PacketBuffer;
class RtpFrameObject : public FrameObject {
public:
RtpFrameObject(PacketBuffer* packet_buffer,
- uint16_t picture_id,
- uint16_t first_packet,
- uint16_t last_packet);
+ uint16_t first_seq_num,
+ uint16_t last_seq_num);
+
~RtpFrameObject();
- uint16_t first_packet() const;
- uint16_t last_packet() const;
- uint16_t picture_id() const override;
+ uint16_t first_seq_num() const;
+ uint16_t last_seq_num() const;
+ FrameType frame_type() const;
+ VideoCodecType codec_type() const;
bool GetBitstream(uint8_t* destination) const override;
+ RTPVideoTypeHeader* GetCodecHeader() const;
private:
PacketBuffer* packet_buffer_;
- uint16_t picture_id_;
- uint16_t first_packet_;
- uint16_t last_packet_;
+ FrameType frame_type_;
+ VideoCodecType codec_type_;
+ uint16_t first_seq_num_;
+ uint16_t last_seq_num_;
};
} // namespace video_coding
diff --git a/chromium/third_party/webrtc/modules/video_coding/generic_encoder.cc b/chromium/third_party/webrtc/modules/video_coding/generic_encoder.cc
index 2463cf5c7a5..abc6369a005 100644
--- a/chromium/third_party/webrtc/modules/video_coding/generic_encoder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/generic_encoder.cc
@@ -1,12 +1,12 @@
/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
+* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+*
+* Use of this source code is governed by a BSD-style license
+* that can be found in the LICENSE file in the root of the source
+* tree. An additional intellectual property rights grant can be found
+* in the file PATENTS. All contributing project authors may
+* be found in the AUTHORS file in the root of the source tree.
+*/
#include "webrtc/modules/video_coding/generic_encoder.h"
@@ -21,89 +21,16 @@
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
-namespace {
-// Map information from info into rtp. If no relevant information is found
-// in info, rtp is set to NULL.
-void CopyCodecSpecific(const CodecSpecificInfo* info, RTPVideoHeader* rtp) {
- RTC_DCHECK(info);
- switch (info->codecType) {
- case kVideoCodecVP8: {
- rtp->codec = kRtpVideoVp8;
- rtp->codecHeader.VP8.InitRTPVideoHeaderVP8();
- rtp->codecHeader.VP8.pictureId = info->codecSpecific.VP8.pictureId;
- rtp->codecHeader.VP8.nonReference = info->codecSpecific.VP8.nonReference;
- rtp->codecHeader.VP8.temporalIdx = info->codecSpecific.VP8.temporalIdx;
- rtp->codecHeader.VP8.layerSync = info->codecSpecific.VP8.layerSync;
- rtp->codecHeader.VP8.tl0PicIdx = info->codecSpecific.VP8.tl0PicIdx;
- rtp->codecHeader.VP8.keyIdx = info->codecSpecific.VP8.keyIdx;
- rtp->simulcastIdx = info->codecSpecific.VP8.simulcastIdx;
- return;
- }
- case kVideoCodecVP9: {
- rtp->codec = kRtpVideoVp9;
- rtp->codecHeader.VP9.InitRTPVideoHeaderVP9();
- rtp->codecHeader.VP9.inter_pic_predicted =
- info->codecSpecific.VP9.inter_pic_predicted;
- rtp->codecHeader.VP9.flexible_mode =
- info->codecSpecific.VP9.flexible_mode;
- rtp->codecHeader.VP9.ss_data_available =
- info->codecSpecific.VP9.ss_data_available;
- rtp->codecHeader.VP9.picture_id = info->codecSpecific.VP9.picture_id;
- rtp->codecHeader.VP9.tl0_pic_idx = info->codecSpecific.VP9.tl0_pic_idx;
- rtp->codecHeader.VP9.temporal_idx = info->codecSpecific.VP9.temporal_idx;
- rtp->codecHeader.VP9.spatial_idx = info->codecSpecific.VP9.spatial_idx;
- rtp->codecHeader.VP9.temporal_up_switch =
- info->codecSpecific.VP9.temporal_up_switch;
- rtp->codecHeader.VP9.inter_layer_predicted =
- info->codecSpecific.VP9.inter_layer_predicted;
- rtp->codecHeader.VP9.gof_idx = info->codecSpecific.VP9.gof_idx;
- rtp->codecHeader.VP9.num_spatial_layers =
- info->codecSpecific.VP9.num_spatial_layers;
-
- if (info->codecSpecific.VP9.ss_data_available) {
- rtp->codecHeader.VP9.spatial_layer_resolution_present =
- info->codecSpecific.VP9.spatial_layer_resolution_present;
- if (info->codecSpecific.VP9.spatial_layer_resolution_present) {
- for (size_t i = 0; i < info->codecSpecific.VP9.num_spatial_layers;
- ++i) {
- rtp->codecHeader.VP9.width[i] = info->codecSpecific.VP9.width[i];
- rtp->codecHeader.VP9.height[i] = info->codecSpecific.VP9.height[i];
- }
- }
- rtp->codecHeader.VP9.gof.CopyGofInfoVP9(info->codecSpecific.VP9.gof);
- }
-
- rtp->codecHeader.VP9.num_ref_pics = info->codecSpecific.VP9.num_ref_pics;
- for (int i = 0; i < info->codecSpecific.VP9.num_ref_pics; ++i)
- rtp->codecHeader.VP9.pid_diff[i] = info->codecSpecific.VP9.p_diff[i];
- return;
- }
- case kVideoCodecH264:
- rtp->codec = kRtpVideoH264;
- return;
- case kVideoCodecGeneric:
- rtp->codec = kRtpVideoGeneric;
- rtp->simulcastIdx = info->codecSpecific.generic.simulcast_idx;
- return;
- default:
- return;
- }
-}
-} // namespace
-
-// #define DEBUG_ENCODER_BIT_STREAM
-
VCMGenericEncoder::VCMGenericEncoder(
VideoEncoder* encoder,
VideoEncoderRateObserver* rate_observer,
VCMEncodedFrameCallback* encoded_frame_callback,
- bool internalSource)
+ bool internal_source)
: encoder_(encoder),
rate_observer_(rate_observer),
vcm_encoded_frame_callback_(encoded_frame_callback),
- internal_source_(internalSource),
+ internal_source_(internal_source),
encoder_params_({0, 0, 0, 0}),
- rotation_(kVideoRotation_0),
is_screenshare_(false) {}
VCMGenericEncoder::~VCMGenericEncoder() {}
@@ -114,8 +41,8 @@ int32_t VCMGenericEncoder::Release() {
}
int32_t VCMGenericEncoder::InitEncode(const VideoCodec* settings,
- int32_t numberOfCores,
- size_t maxPayloadSize) {
+ int32_t number_of_cores,
+ size_t max_payload_size) {
TRACE_EVENT0("webrtc", "VCMGenericEncoder::InitEncode");
{
rtc::CritScope lock(&params_lock_);
@@ -124,7 +51,7 @@ int32_t VCMGenericEncoder::InitEncode(const VideoCodec* settings,
}
is_screenshare_ = settings->mode == VideoCodecMode::kScreensharing;
- if (encoder_->InitEncode(settings, numberOfCores, maxPayloadSize) != 0) {
+ if (encoder_->InitEncode(settings, number_of_cores, max_payload_size) != 0) {
LOG(LS_ERROR) << "Failed to initialize the encoder associated with "
"payload name: "
<< settings->plName;
@@ -134,40 +61,30 @@ int32_t VCMGenericEncoder::InitEncode(const VideoCodec* settings,
return 0;
}
-int32_t VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
- const CodecSpecificInfo* codecSpecificInfo,
- const std::vector<FrameType>& frameTypes) {
+int32_t VCMGenericEncoder::Encode(const VideoFrame& frame,
+ const CodecSpecificInfo* codec_specific,
+ const std::vector<FrameType>& frame_types) {
TRACE_EVENT1("webrtc", "VCMGenericEncoder::Encode", "timestamp",
- inputFrame.timestamp());
+ frame.timestamp());
- for (FrameType frame_type : frameTypes)
+ for (FrameType frame_type : frame_types)
RTC_DCHECK(frame_type == kVideoFrameKey || frame_type == kVideoFrameDelta);
- rotation_ = inputFrame.rotation();
-
- // Keep track of the current frame rotation and apply to the output of the
- // encoder. There might not be exact as the encoder could have one frame delay
- // but it should be close enough.
- // TODO(pbos): Map from timestamp, this is racy (even if rotation_ is locked
- // properly, which it isn't). More than one frame may be in the pipeline.
- vcm_encoded_frame_callback_->SetRotation(rotation_);
-
- int32_t result = encoder_->Encode(inputFrame, codecSpecificInfo, &frameTypes);
-
- if (vcm_encoded_frame_callback_) {
- vcm_encoded_frame_callback_->SignalLastEncoderImplementationUsed(
- encoder_->ImplementationName());
- }
+ int32_t result = encoder_->Encode(frame, codec_specific, &frame_types);
if (is_screenshare_ &&
result == WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT) {
// Target bitrate exceeded, encoder state has been reset - try again.
- return encoder_->Encode(inputFrame, codecSpecificInfo, &frameTypes);
+ return encoder_->Encode(frame, codec_specific, &frame_types);
}
return result;
}
+const char* VCMGenericEncoder::ImplementationName() const {
+ return encoder_->ImplementationName();
+}
+
void VCMGenericEncoder::SetEncoderParameters(const EncoderParameters& params) {
bool channel_parameters_have_changed;
bool rates_have_changed;
@@ -186,7 +103,7 @@ void VCMGenericEncoder::SetEncoderParameters(const EncoderParameters& params) {
if (rates_have_changed) {
uint32_t target_bitrate_kbps = (params.target_bitrate + 500) / 1000;
encoder_->SetRates(target_bitrate_kbps, params.input_frame_rate);
- if (rate_observer_ != nullptr) {
+ if (rate_observer_) {
rate_observer_->OnSetRates(params.target_bitrate,
params.input_frame_rate);
}
@@ -220,93 +137,32 @@ bool VCMGenericEncoder::SupportsNativeHandle() const {
return encoder_->SupportsNativeHandle();
}
-int VCMGenericEncoder::GetTargetFramerate() {
- return encoder_->GetTargetFramerate();
-}
-
-/***************************
- * Callback Implementation
- ***************************/
VCMEncodedFrameCallback::VCMEncodedFrameCallback(
- EncodedImageCallback* post_encode_callback)
- : send_callback_(),
- _mediaOpt(NULL),
- _payloadType(0),
- _internalSource(false),
- _rotation(kVideoRotation_0),
- post_encode_callback_(post_encode_callback)
-#ifdef DEBUG_ENCODER_BIT_STREAM
- ,
- _bitStreamAfterEncoder(NULL)
-#endif
-{
-#ifdef DEBUG_ENCODER_BIT_STREAM
- _bitStreamAfterEncoder = fopen("encoderBitStream.bit", "wb");
-#endif
-}
+ EncodedImageCallback* post_encode_callback,
+ media_optimization::MediaOptimization* media_opt)
+ : internal_source_(false),
+ post_encode_callback_(post_encode_callback),
+ media_opt_(media_opt) {}
-VCMEncodedFrameCallback::~VCMEncodedFrameCallback() {
-#ifdef DEBUG_ENCODER_BIT_STREAM
- fclose(_bitStreamAfterEncoder);
-#endif
-}
-
-int32_t VCMEncodedFrameCallback::SetTransportCallback(
- VCMPacketizationCallback* transport) {
- send_callback_ = transport;
- return VCM_OK;
-}
+VCMEncodedFrameCallback::~VCMEncodedFrameCallback() {}
int32_t VCMEncodedFrameCallback::Encoded(
const EncodedImage& encoded_image,
- const CodecSpecificInfo* codecSpecificInfo,
- const RTPFragmentationHeader* fragmentationHeader) {
+ const CodecSpecificInfo* codec_specific,
+ const RTPFragmentationHeader* fragmentation_header) {
TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded",
"timestamp", encoded_image._timeStamp);
- post_encode_callback_->Encoded(encoded_image, NULL, NULL);
-
- if (send_callback_ == NULL) {
- return VCM_UNINITIALIZED;
- }
-
-#ifdef DEBUG_ENCODER_BIT_STREAM
- if (_bitStreamAfterEncoder != NULL) {
- fwrite(encoded_image._buffer, 1, encoded_image._length,
- _bitStreamAfterEncoder);
- }
-#endif
-
- RTPVideoHeader rtpVideoHeader;
- memset(&rtpVideoHeader, 0, sizeof(RTPVideoHeader));
- RTPVideoHeader* rtpVideoHeaderPtr = &rtpVideoHeader;
- if (codecSpecificInfo) {
- CopyCodecSpecific(codecSpecificInfo, rtpVideoHeaderPtr);
- }
- rtpVideoHeader.rotation = _rotation;
-
- int32_t callbackReturn = send_callback_->SendData(
- _payloadType, encoded_image, fragmentationHeader, rtpVideoHeaderPtr);
- if (callbackReturn < 0) {
- return callbackReturn;
- }
-
- if (_mediaOpt != NULL) {
- _mediaOpt->UpdateWithEncodedData(encoded_image);
- if (_internalSource)
- return _mediaOpt->DropFrame(); // Signal to encoder to drop next frame.
+ int ret_val = post_encode_callback_->Encoded(encoded_image, codec_specific,
+ fragmentation_header);
+ if (ret_val < 0)
+ return ret_val;
+
+ if (media_opt_) {
+ media_opt_->UpdateWithEncodedData(encoded_image);
+ if (internal_source_)
+ return media_opt_->DropFrame(); // Signal to encoder to drop next frame.
}
return VCM_OK;
}
-void VCMEncodedFrameCallback::SetMediaOpt(
- media_optimization::MediaOptimization* mediaOpt) {
- _mediaOpt = mediaOpt;
-}
-
-void VCMEncodedFrameCallback::SignalLastEncoderImplementationUsed(
- const char* implementation_name) {
- if (send_callback_)
- send_callback_->OnEncoderImplementationName(implementation_name);
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/generic_encoder.h b/chromium/third_party/webrtc/modules/video_coding/generic_encoder.h
index e96d9957815..469f04ded40 100644
--- a/chromium/third_party/webrtc/modules/video_coding/generic_encoder.h
+++ b/chromium/third_party/webrtc/modules/video_coding/generic_encoder.h
@@ -33,60 +33,26 @@ struct EncoderParameters {
uint32_t input_frame_rate;
};
-/*************************************/
-/* VCMEncodeFrameCallback class */
-/***********************************/
class VCMEncodedFrameCallback : public EncodedImageCallback {
public:
- explicit VCMEncodedFrameCallback(
- EncodedImageCallback* post_encode_callback);
- virtual ~VCMEncodedFrameCallback();
-
- /*
- * Callback implementation - codec encode complete
- */
- int32_t Encoded(
- const EncodedImage& encodedImage,
- const CodecSpecificInfo* codecSpecificInfo = NULL,
- const RTPFragmentationHeader* fragmentationHeader = NULL);
- /*
- * Callback implementation - generic encoder encode complete
- */
- int32_t SetTransportCallback(VCMPacketizationCallback* transport);
- /**
- * Set media Optimization
- */
- void SetMediaOpt(media_optimization::MediaOptimization* mediaOpt);
-
- void SetPayloadType(uint8_t payloadType) {
- _payloadType = payloadType;
- }
-
- void SetInternalSource(bool internalSource) {
- _internalSource = internalSource;
- }
-
- void SetRotation(VideoRotation rotation) { _rotation = rotation; }
- void SignalLastEncoderImplementationUsed(
- const char* encoder_implementation_name);
+ VCMEncodedFrameCallback(EncodedImageCallback* post_encode_callback,
+ media_optimization::MediaOptimization* media_opt);
+ virtual ~VCMEncodedFrameCallback();
+
+ // Implements EncodedImageCallback.
+ int32_t Encoded(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific,
+ const RTPFragmentationHeader* fragmentation_header) override;
+ void SetInternalSource(bool internal_source) {
+ internal_source_ = internal_source;
+ }
private:
- VCMPacketizationCallback* send_callback_;
- media_optimization::MediaOptimization* _mediaOpt;
- uint8_t _payloadType;
- bool _internalSource;
- VideoRotation _rotation;
-
- EncodedImageCallback* post_encode_callback_;
-
-#ifdef DEBUG_ENCODER_BIT_STREAM
- FILE* _bitStreamAfterEncoder;
-#endif
-}; // end of VCMEncodeFrameCallback class
-
-/******************************/
-/* VCMGenericEncoder class */
-/******************************/
+ bool internal_source_;
+ EncodedImageCallback* const post_encode_callback_;
+ media_optimization::MediaOptimization* const media_opt_;
+};
+
class VCMGenericEncoder {
friend class VCMCodecDataBase;
@@ -94,44 +60,27 @@ class VCMGenericEncoder {
VCMGenericEncoder(VideoEncoder* encoder,
VideoEncoderRateObserver* rate_observer,
VCMEncodedFrameCallback* encoded_frame_callback,
- bool internalSource);
+ bool internal_source);
~VCMGenericEncoder();
- /**
- * Free encoder memory
- */
int32_t Release();
- /**
- * Initialize the encoder with the information from the VideoCodec
- */
int32_t InitEncode(const VideoCodec* settings,
- int32_t numberOfCores,
- size_t maxPayloadSize);
- /**
- * Encode raw image
- * inputFrame : Frame containing raw image
- * codecSpecificInfo : Specific codec data
- * cameraFrameRate : Request or information from the remote side
- * frameType : The requested frame type to encode
- */
- int32_t Encode(const VideoFrame& inputFrame,
- const CodecSpecificInfo* codecSpecificInfo,
- const std::vector<FrameType>& frameTypes);
+ int32_t number_of_cores,
+ size_t max_payload_size);
+ int32_t Encode(const VideoFrame& frame,
+ const CodecSpecificInfo* codec_specific,
+ const std::vector<FrameType>& frame_types);
+
+ const char* ImplementationName() const;
void SetEncoderParameters(const EncoderParameters& params);
EncoderParameters GetEncoderParameters() const;
int32_t SetPeriodicKeyFrames(bool enable);
-
int32_t RequestFrame(const std::vector<FrameType>& frame_types);
-
bool InternalSource() const;
-
void OnDroppedFrame();
-
bool SupportsNativeHandle() const;
- int GetTargetFramerate();
-
private:
VideoEncoder* const encoder_;
VideoEncoderRateObserver* const rate_observer_;
@@ -139,9 +88,8 @@ class VCMGenericEncoder {
const bool internal_source_;
rtc::CriticalSection params_lock_;
EncoderParameters encoder_params_ GUARDED_BY(params_lock_);
- VideoRotation rotation_;
bool is_screenshare_;
-}; // end of VCMGenericEncoder class
+};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/include/video_coding.h b/chromium/third_party/webrtc/modules/video_coding/include/video_coding.h
index 0c508b7739a..0f8567963a0 100644
--- a/chromium/third_party/webrtc/modules/video_coding/include/video_coding.h
+++ b/chromium/third_party/webrtc/modules/video_coding/include/video_coding.h
@@ -31,6 +31,10 @@ namespace webrtc {
class Clock;
class EncodedImageCallback;
+// TODO(pbos): Remove VCMQMSettingsCallback completely. This might be done by
+// removing the VCM and use VideoSender/VideoReceiver as a public interface
+// directly.
+class VCMQMSettingsCallback;
class VideoEncoder;
class VideoDecoder;
struct CodecSpecificInfo;
@@ -78,7 +82,8 @@ class VideoCodingModule : public Module {
VideoEncoderRateObserver* encoder_rate_observer,
VCMQMSettingsCallback* qm_settings_callback,
NackSender* nack_sender,
- KeyFrameRequestSender* keyframe_request_sender);
+ KeyFrameRequestSender* keyframe_request_sender,
+ EncodedImageCallback* pre_decode_image_callback);
static VideoCodingModule* Create(Clock* clock, EventFactory* event_factory);
@@ -183,32 +188,6 @@ class VideoCodingModule : public Module {
// < 0, on error.
virtual int32_t SetReceiveChannelParameters(int64_t rtt) = 0;
- // Register a transport callback which will be called to deliver the encoded
- // data and
- // side information.
- //
- // Input:
- // - transport : The callback object to register.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterTransportCallback(
- VCMPacketizationCallback* transport) = 0;
-
- // Register video output information callback which will be called to deliver
- // information
- // about the video stream produced by the encoder, for instance the average
- // frame rate and
- // bit rate.
- //
- // Input:
- // - outputInformation : The callback object to register.
- //
- // Return value : VCM_OK, on success.
- // < 0, on error.
- virtual int32_t RegisterSendStatisticsCallback(
- VCMSendStatisticsCallback* sendStats) = 0;
-
// Register a video protection callback which will be called to deliver
// the requested FEC rate and NACK status (on/off).
//
@@ -248,14 +227,13 @@ class VideoCodingModule : public Module {
// < 0, on error.
virtual int32_t AddVideoFrame(
const VideoFrame& videoFrame,
- const VideoContentMetrics* contentMetrics = NULL,
const CodecSpecificInfo* codecSpecificInfo = NULL) = 0;
// Next frame encoded should be an intra frame (keyframe).
//
// Return value : VCM_OK, on success.
// < 0, on error.
- virtual int32_t IntraFrameRequest(int stream_index) = 0;
+ virtual int32_t IntraFrameRequest(size_t stream_index) = 0;
// Frame Dropper enable. Can be used to disable the frame dropping when the
// encoder
@@ -391,10 +369,6 @@ class VideoCodingModule : public Module {
// < 0, on error.
virtual int32_t Decode(uint16_t maxWaitTimeMs = 200) = 0;
- // Registers a callback which conveys the size of the render buffer.
- virtual int RegisterRenderBufferSizeCallback(
- VCMRenderBufferSizeCallback* callback) = 0;
-
// API to get the codec which is currently used for decoding by the module.
//
// Input:
@@ -511,8 +485,6 @@ class VideoCodingModule : public Module {
// suspended due to bandwidth limitations; otherwise false.
virtual bool VideoSuspended() const = 0;
- virtual void RegisterPreDecodeImageCallback(
- EncodedImageCallback* observer) = 0;
virtual void RegisterPostEncodeImageCallback(
EncodedImageCallback* post_encode_callback) = 0;
// Releases pending decode calls, permitting faster thread shutdown.
diff --git a/chromium/third_party/webrtc/modules/video_coding/include/video_coding_defines.h b/chromium/third_party/webrtc/modules/video_coding/include/video_coding_defines.h
index 4fe8c797933..ba71803c7c9 100644
--- a/chromium/third_party/webrtc/modules/video_coding/include/video_coding_defines.h
+++ b/chromium/third_party/webrtc/modules/video_coding/include/video_coding_defines.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_DEFINES_H_
#define WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_DEFINES_H_
+#include <string>
#include <vector>
#include "webrtc/modules/include/module_common_types.h"
@@ -56,20 +57,6 @@ struct VCMFrameCount {
uint32_t numDeltaFrames;
};
-// Callback class used for sending data ready to be packetized
-class VCMPacketizationCallback {
- public:
- virtual int32_t SendData(uint8_t payloadType,
- const EncodedImage& encoded_image,
- const RTPFragmentationHeader* fragmentationHeader,
- const RTPVideoHeader* rtpVideoHdr) = 0;
-
- virtual void OnEncoderImplementationName(const char* implementation_name) {}
-
- protected:
- virtual ~VCMPacketizationCallback() {}
-};
-
// Callback class used for passing decoded frames which are ready to be
// rendered.
class VCMReceiveCallback {
@@ -86,13 +73,13 @@ class VCMReceiveCallback {
virtual ~VCMReceiveCallback() {}
};
-// Callback class used for informing the user of the bit rate and frame rate
-// produced by the
-// encoder.
+// Callback class used for informing the user of the bit rate and frame rate,
+// and the name of the encoder.
class VCMSendStatisticsCallback {
public:
- virtual int32_t SendStatistics(const uint32_t bitRate,
- const uint32_t frameRate) = 0;
+ virtual void SendStatistics(uint32_t bitRate,
+ uint32_t frameRate,
+ const std::string& encoder_name) = 0;
protected:
virtual ~VCMSendStatisticsCallback() {}
@@ -189,30 +176,6 @@ class KeyFrameRequestSender {
virtual ~KeyFrameRequestSender() {}
};
-// Callback used to inform the user of the the desired resolution
-// as subscribed by Media Optimization (Quality Modes)
-class VCMQMSettingsCallback {
- public:
- virtual int32_t SetVideoQMSettings(const uint32_t frameRate,
- const uint32_t width,
- const uint32_t height) = 0;
-
- virtual void SetTargetFramerate(int frame_rate) = 0;
-
- protected:
- virtual ~VCMQMSettingsCallback() {}
-};
-
-// Callback class used for telling the user about the size (in time) of the
-// render buffer, that is the size in time of the complete continuous frames.
-class VCMRenderBufferSizeCallback {
- public:
- virtual void RenderBufferSizeMs(int buffer_size_ms) = 0;
-
- protected:
- virtual ~VCMRenderBufferSizeCallback() {}
-};
-
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_DEFINES_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.cc b/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.cc
index f048b0a883c..9c50a945191 100644
--- a/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.cc
@@ -601,7 +601,8 @@ VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) {
// Frame pulled out from jitter buffer, update the jitter estimate.
const bool retransmitted = (frame->GetNackCount() > 0);
if (retransmitted) {
- jitter_estimate_.FrameNacked();
+ if (WaitForRetransmissions())
+ jitter_estimate_.FrameNacked();
} else if (frame->Length() > 0) {
// Ignore retransmitted and empty frames.
if (waiting_for_completion_.latest_packet_time >= 0) {
@@ -958,6 +959,8 @@ void VCMJitterBuffer::UpdateRtt(int64_t rtt_ms) {
jitter_estimate_.UpdateRtt(rtt_ms);
if (nack_module_)
nack_module_->UpdateRtt(rtt_ms);
+ if (!WaitForRetransmissions())
+ jitter_estimate_.ResetNackCount();
}
void VCMJitterBuffer::SetNackMode(VCMNackMode mode,
@@ -1194,19 +1197,6 @@ int64_t VCMJitterBuffer::LastDecodedTimestamp() const {
return last_decoded_state_.time_stamp();
}
-void VCMJitterBuffer::RenderBufferSize(uint32_t* timestamp_start,
- uint32_t* timestamp_end) {
- CriticalSectionScoped cs(crit_sect_);
- CleanUpOldOrEmptyFrames();
- *timestamp_start = 0;
- *timestamp_end = 0;
- if (decodable_frames_.empty()) {
- return;
- }
- *timestamp_start = decodable_frames_.Front()->TimeStamp();
- *timestamp_end = decodable_frames_.Back()->TimeStamp();
-}
-
void VCMJitterBuffer::RegisterStatsCallback(
VCMReceiveStatisticsCallback* callback) {
CriticalSectionScoped cs(crit_sect_);
@@ -1282,9 +1272,13 @@ void VCMJitterBuffer::CountFrame(const VCMFrameBuffer& frame) {
if (frame.IsSessionComplete()) {
if (frame.FrameType() == kVideoFrameKey) {
++receive_statistics_.key_frames;
+ if (receive_statistics_.key_frames == 1) {
+ LOG(LS_INFO) << "Received first complete key frame";
+ }
} else {
++receive_statistics_.delta_frames;
}
+
if (stats_callback_ != NULL)
stats_callback_->OnFrameCountsUpdated(receive_statistics_);
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.h b/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.h
index 0cc03dd8107..e36f2cd6d52 100644
--- a/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.h
+++ b/chromium/third_party/webrtc/modules/video_coding/jitter_buffer.h
@@ -212,10 +212,6 @@ class VCMJitterBuffer {
int64_t LastDecodedTimestamp() const;
VCMDecodeErrorMode decode_error_mode() const { return decode_error_mode_; }
- // Used to compute time of complete continuous frames. Returns the timestamps
- // corresponding to the start and end of the continuous complete buffer.
- void RenderBufferSize(uint32_t* timestamp_start, uint32_t* timestamp_end);
-
void RegisterStatsCallback(VCMReceiveStatisticsCallback* callback);
int64_t TimeUntilNextProcess();
diff --git a/chromium/third_party/webrtc/modules/video_coding/jitter_buffer_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/jitter_buffer_unittest.cc
index df70ea98261..af9c20aaefe 100644
--- a/chromium/third_party/webrtc/modules/video_coding/jitter_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/jitter_buffer_unittest.cc
@@ -195,7 +195,7 @@ class ProcessThreadMock : public ProcessThread {
MOCK_METHOD1(WakeUp, void(Module* module));
MOCK_METHOD1(RegisterModule, void(Module* module));
MOCK_METHOD1(DeRegisterModule, void(Module* module));
- void PostTask(rtc::scoped_ptr<ProcessTask> task) {}
+ void PostTask(std::unique_ptr<ProcessTask> task) {}
};
class TestBasicJitterBuffer : public ::testing::TestWithParam<std::string>,
@@ -215,7 +215,7 @@ class TestBasicJitterBuffer : public ::testing::TestWithParam<std::string>,
protected:
TestBasicJitterBuffer() : scoped_field_trial_(GetParam()) {}
- virtual void SetUp() {
+ void SetUp() override {
clock_.reset(new SimulatedClock(0));
jitter_buffer_.reset(new VCMJitterBuffer(
clock_.get(),
diff --git a/chromium/third_party/webrtc/modules/video_coding/media_opt_util.cc b/chromium/third_party/webrtc/modules/video_coding/media_opt_util.cc
index 69cf757f2b4..42db2facf1c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/media_opt_util.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/media_opt_util.cc
@@ -34,19 +34,10 @@ VCMProtectionMethod::VCMProtectionMethod()
_protectionFactorD(0),
_scaleProtKey(2.0f),
_maxPayloadSize(1460),
- _qmRobustness(new VCMQmRobustness()),
- _useUepProtectionK(false),
- _useUepProtectionD(true),
_corrFecCost(1.0),
_type(kNone) {}
-VCMProtectionMethod::~VCMProtectionMethod() {
- delete _qmRobustness;
-}
-void VCMProtectionMethod::UpdateContentMetrics(
- const VideoContentMetrics* contentMetrics) {
- _qmRobustness->UpdateContent(contentMetrics);
-}
+VCMProtectionMethod::~VCMProtectionMethod() {}
VCMNackFecMethod::VCMNackFecMethod(int64_t lowRttNackThresholdMs,
int64_t highRttNackThresholdMs)
@@ -333,17 +324,6 @@ bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) {
codeRateDelta = kPacketLossMax - 1;
}
- float adjustFec = 1.0f;
- // Avoid additional adjustments when layers are active.
- // TODO(mikhal/marco): Update adjusmtent based on layer info.
- if (parameters->numLayers == 1) {
- adjustFec = _qmRobustness->AdjustFecFactor(
- codeRateDelta, parameters->bitRate, parameters->frameRate,
- parameters->rtt, packetLoss);
- }
-
- codeRateDelta = static_cast<uint8_t>(codeRateDelta * adjustFec);
-
// For Key frame:
// Effectively at a higher rate, so we scale/boost the rate
// The boost factor may depend on several factors: ratio of packet
@@ -411,13 +391,6 @@ bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) {
_corrFecCost = 0.0f;
}
- // TODO(marpan): Set the UEP protection on/off for Key and Delta frames
- _useUepProtectionK = _qmRobustness->SetUepProtection(
- codeRateKey, parameters->bitRate, packetLoss, 0);
-
- _useUepProtectionD = _qmRobustness->SetUepProtection(
- codeRateDelta, parameters->bitRate, packetLoss, 1);
-
// DONE WITH FEC PROTECTION SETTINGS
return true;
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/media_opt_util.h b/chromium/third_party/webrtc/modules/video_coding/media_opt_util.h
index 6b47e3b2d99..ad314aca8c2 100644
--- a/chromium/third_party/webrtc/modules/video_coding/media_opt_util.h
+++ b/chromium/third_party/webrtc/modules/video_coding/media_opt_util.h
@@ -18,7 +18,6 @@
#include "webrtc/base/exp_filter.h"
#include "webrtc/modules/video_coding/internal_defines.h"
-#include "webrtc/modules/video_coding/qm_select.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/typedefs.h"
@@ -45,6 +44,10 @@ enum FilterPacketLossMode {
// common to media optimization and the jitter buffer.
const int64_t kLowRttNackMs = 20;
+// If the RTT is higher than this an extra RTT wont be added to to the jitter
+// buffer delay.
+const int kMaxRttDelayThreshold = 500;
+
struct VCMProtectionParameters {
VCMProtectionParameters()
: rtt(0),
@@ -138,9 +141,6 @@ class VCMProtectionMethod {
virtual int MaxFramesFec() const { return 1; }
- // Updates content metrics
- void UpdateContentMetrics(const VideoContentMetrics* contentMetrics);
-
protected:
uint8_t _effectivePacketLoss;
uint8_t _protectionFactorK;
@@ -149,7 +149,6 @@ class VCMProtectionMethod {
float _scaleProtKey;
int32_t _maxPayloadSize;
- VCMQmRobustness* _qmRobustness;
bool _useUepProtectionK;
bool _useUepProtectionD;
float _corrFecCost;
diff --git a/chromium/third_party/webrtc/modules/video_coding/media_optimization.cc b/chromium/third_party/webrtc/modules/video_coding/media_optimization.cc
index a234a06f9b4..d5fbadc122f 100644
--- a/chromium/third_party/webrtc/modules/video_coding/media_optimization.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/media_optimization.cc
@@ -11,8 +11,6 @@
#include "webrtc/modules/video_coding/media_optimization.h"
#include "webrtc/base/logging.h"
-#include "webrtc/modules/video_coding/content_metrics_processing.h"
-#include "webrtc/modules/video_coding/qm_select.h"
#include "webrtc/modules/video_coding/utility/frame_dropper.h"
#include "webrtc/system_wrappers/include/clock.h"
@@ -33,13 +31,6 @@ void UpdateProtectionCallback(
// Get the FEC code rate for Delta frames (set to 0 when NA).
delta_fec_params.fec_rate = selected_method->RequiredProtectionFactorD();
- // Get the FEC-UEP protection status for Key frames: UEP on/off.
- key_fec_params.use_uep_protection = selected_method->RequiredUepProtectionK();
-
- // Get the FEC-UEP protection status for Delta frames: UEP on/off.
- delta_fec_params.use_uep_protection =
- selected_method->RequiredUepProtectionD();
-
// The RTP module currently requires the same |max_fec_frames| for both
// key and delta frames.
delta_fec_params.max_fec_frames = selected_method->MaxFramesFec();
@@ -88,16 +79,11 @@ MediaOptimization::MediaOptimization(Clock* clock)
max_payload_size_(1460),
video_target_bitrate_(0),
incoming_frame_rate_(0),
- enable_qm_(false),
encoded_frame_samples_(),
avg_sent_bit_rate_bps_(0),
avg_sent_framerate_(0),
key_frame_cnt_(0),
delta_frame_cnt_(0),
- content_(new VCMContentMetricsProcessing()),
- qm_resolution_(new VCMQmResolution()),
- last_qm_update_time_(0),
- last_change_time_(0),
num_layers_(0),
suspension_enabled_(false),
video_suspended_(false),
@@ -120,8 +106,6 @@ void MediaOptimization::Reset() {
frame_dropper_->Reset();
loss_prot_logic_->Reset(clock_->TimeInMilliseconds());
frame_dropper_->SetRates(0, 0);
- content_->Reset();
- qm_resolution_->Reset();
loss_prot_logic_->UpdateFrameRate(incoming_frame_rate_);
loss_prot_logic_->Reset(clock_->TimeInMilliseconds());
send_statistics_zero_encode_ = 0;
@@ -131,8 +115,6 @@ void MediaOptimization::Reset() {
user_frame_rate_ = 0;
key_frame_cnt_ = 0;
delta_frame_cnt_ = 0;
- last_qm_update_time_ = 0;
- last_change_time_ = 0;
encoded_frame_samples_.clear();
avg_sent_bit_rate_bps_ = 0;
num_layers_ = 1;
@@ -160,12 +142,7 @@ void MediaOptimization::SetEncodingDataInternal(VideoCodecType send_codec_type,
int num_layers,
int32_t mtu) {
// Everything codec specific should be reset here since this means the codec
- // has changed. If native dimension values have changed, then either user
- // initiated change, or QM initiated change. Will be able to determine only
- // after the processing of the first frame.
- last_change_time_ = clock_->TimeInMilliseconds();
- content_->Reset();
- content_->UpdateFrameRate(frame_rate);
+ // has changed.
max_bit_rate_ = max_bit_rate;
send_codec_type_ = send_codec_type;
@@ -182,16 +159,13 @@ void MediaOptimization::SetEncodingDataInternal(VideoCodecType send_codec_type,
codec_height_ = height;
num_layers_ = (num_layers <= 1) ? 1 : num_layers; // Can also be zero.
max_payload_size_ = mtu;
- qm_resolution_->Initialize(target_bitrate_kbps, user_frame_rate_,
- codec_width_, codec_height_, num_layers_);
}
uint32_t MediaOptimization::SetTargetRates(
uint32_t target_bitrate,
uint8_t fraction_lost,
int64_t round_trip_time_ms,
- VCMProtectionCallback* protection_callback,
- VCMQMSettingsCallback* qmsettings_callback) {
+ VCMProtectionCallback* protection_callback) {
CriticalSectionScoped lock(crit_sect_.get());
VCMProtectionMethod* selected_method = loss_prot_logic_->SelectedMethod();
float target_bitrate_kbps = static_cast<float>(target_bitrate) / 1000.0f;
@@ -227,11 +201,7 @@ uint32_t MediaOptimization::SetTargetRates(
float protection_overhead_rate = 0.0f;
// Update protection settings, when applicable.
- float sent_video_rate_kbps = 0.0f;
if (loss_prot_logic_->SelectedType() != kNone) {
- // Update protection method with content metrics.
- selected_method->UpdateContentMetrics(content_->ShortTermAvgData());
-
// Update method will compute the robustness settings for the given
// protection method and the overhead cost
// the protection method is set by the user via SetVideoProtection.
@@ -265,7 +235,6 @@ uint32_t MediaOptimization::SetTargetRates(
// Get the effective packet loss for encoder ER when applicable. Should be
// passed to encoder via fraction_lost.
packet_loss_enc = selected_method->RequiredPacketLossER();
- sent_video_rate_kbps = static_cast<float>(sent_video_rate_bps) / 1000.0f;
}
// Source coding rate: total rate - protection overhead.
@@ -281,19 +250,6 @@ uint32_t MediaOptimization::SetTargetRates(
static_cast<float>(video_target_bitrate_) / 1000.0f;
frame_dropper_->SetRates(target_video_bitrate_kbps, incoming_frame_rate_);
- if (enable_qm_ && qmsettings_callback) {
- // Update QM with rates.
- qm_resolution_->UpdateRates(target_video_bitrate_kbps, sent_video_rate_kbps,
- incoming_frame_rate_, fraction_lost_);
- // Check for QM selection.
- bool select_qm = CheckStatusForQMchange();
- if (select_qm) {
- SelectQuality(qmsettings_callback);
- }
- // Reset the short-term averaged content data.
- content_->ResetShortTermAvgData();
- }
-
CheckSuspendConditions();
return video_target_bitrate_;
@@ -367,11 +323,6 @@ int32_t MediaOptimization::UpdateWithEncodedData(
loss_prot_logic_->UpdatePacketsPerFrameKey(
min_packets_per_frame, clock_->TimeInMilliseconds());
}
-
- if (enable_qm_) {
- // Update quality select with encoded length.
- qm_resolution_->UpdateEncodedSize(encoded_length);
- }
}
if (!delta_frame && encoded_length > 0) {
loss_prot_logic_->UpdateKeyFrameSize(static_cast<float>(encoded_length));
@@ -388,11 +339,6 @@ int32_t MediaOptimization::UpdateWithEncodedData(
return VCM_OK;
}
-void MediaOptimization::EnableQM(bool enable) {
- CriticalSectionScoped lock(crit_sect_.get());
- enable_qm_ = enable;
-}
-
void MediaOptimization::EnableFrameDropper(bool enable) {
CriticalSectionScoped lock(crit_sect_.get());
frame_dropper_->Enable(enable);
@@ -424,19 +370,6 @@ bool MediaOptimization::DropFrame() {
return frame_dropper_->DropFrame();
}
-void MediaOptimization::UpdateContentData(
- const VideoContentMetrics* content_metrics) {
- CriticalSectionScoped lock(crit_sect_.get());
- // Updating content metrics.
- if (content_metrics == NULL) {
- // Disable QM if metrics are NULL.
- enable_qm_ = false;
- qm_resolution_->Reset();
- } else {
- content_->UpdateContentData(content_metrics);
- }
-}
-
void MediaOptimization::UpdateIncomingFrameRate() {
int64_t now = clock_->TimeInMilliseconds();
if (incoming_frame_times_[0] == 0) {
@@ -451,36 +384,6 @@ void MediaOptimization::UpdateIncomingFrameRate() {
ProcessIncomingFrameRate(now);
}
-int32_t MediaOptimization::SelectQuality(
- VCMQMSettingsCallback* video_qmsettings_callback) {
- // Reset quantities for QM select.
- qm_resolution_->ResetQM();
-
- // Update QM will long-term averaged content metrics.
- qm_resolution_->UpdateContent(content_->LongTermAvgData());
-
- // Select quality mode.
- VCMResolutionScale* qm = NULL;
- int32_t ret = qm_resolution_->SelectResolution(&qm);
- if (ret < 0) {
- return ret;
- }
-
- // Check for updates to spatial/temporal modes.
- QMUpdate(qm, video_qmsettings_callback);
-
- // Reset all the rate and related frame counters quantities.
- qm_resolution_->ResetRates();
-
- // Reset counters.
- last_qm_update_time_ = clock_->TimeInMilliseconds();
-
- // Reset content metrics.
- content_->Reset();
-
- return VCM_OK;
-}
-
void MediaOptimization::PurgeOldFrameSamples(int64_t now_ms) {
while (!encoded_frame_samples_.empty()) {
if (now_ms - encoded_frame_samples_.front().time_complete_ms >
@@ -527,65 +430,6 @@ void MediaOptimization::UpdateSentFramerate() {
}
}
-bool MediaOptimization::QMUpdate(
- VCMResolutionScale* qm,
- VCMQMSettingsCallback* video_qmsettings_callback) {
- // Check for no change.
- if (!qm->change_resolution_spatial && !qm->change_resolution_temporal) {
- return false;
- }
-
- // Check for change in frame rate.
- if (qm->change_resolution_temporal) {
- incoming_frame_rate_ = qm->frame_rate;
- // Reset frame rate estimate.
- memset(incoming_frame_times_, -1, sizeof(incoming_frame_times_));
- }
-
- // Check for change in frame size.
- if (qm->change_resolution_spatial) {
- codec_width_ = qm->codec_width;
- codec_height_ = qm->codec_height;
- }
-
- LOG(LS_INFO) << "Media optimizer requests the video resolution to be changed "
- "to "
- << qm->codec_width << "x" << qm->codec_height << "@"
- << qm->frame_rate;
-
- // Update VPM with new target frame rate and frame size.
- // Note: use |qm->frame_rate| instead of |_incoming_frame_rate| for updating
- // target frame rate in VPM frame dropper. The quantity |_incoming_frame_rate|
- // will vary/fluctuate, and since we don't want to change the state of the
- // VPM frame dropper, unless a temporal action was selected, we use the
- // quantity |qm->frame_rate| for updating.
- video_qmsettings_callback->SetVideoQMSettings(qm->frame_rate, codec_width_,
- codec_height_);
- content_->UpdateFrameRate(qm->frame_rate);
- qm_resolution_->UpdateCodecParameters(qm->frame_rate, codec_width_,
- codec_height_);
- return true;
-}
-
-// Check timing constraints and look for significant change in:
-// (1) scene content,
-// (2) target bit rate.
-bool MediaOptimization::CheckStatusForQMchange() {
- bool status = true;
-
- // Check that we do not call QMSelect too often, and that we waited some time
- // (to sample the metrics) from the event last_change_time
- // last_change_time is the time where user changed the size/rate/frame rate
- // (via SetEncodingData).
- int64_t now = clock_->TimeInMilliseconds();
- if ((now - last_qm_update_time_) < kQmMinIntervalMs ||
- (now - last_change_time_) < kQmMinIntervalMs) {
- status = false;
- }
-
- return status;
-}
-
// Allowing VCM to keep track of incoming frame rate.
void MediaOptimization::ProcessIncomingFrameRate(int64_t now) {
int32_t num = 0;
diff --git a/chromium/third_party/webrtc/modules/video_coding/media_optimization.h b/chromium/third_party/webrtc/modules/video_coding/media_optimization.h
index 060cd893ffe..081b2a900a2 100644
--- a/chromium/third_party/webrtc/modules/video_coding/media_optimization.h
+++ b/chromium/third_party/webrtc/modules/video_coding/media_optimization.h
@@ -17,7 +17,6 @@
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_coding/include/video_coding.h"
#include "webrtc/modules/video_coding/media_opt_util.h"
-#include "webrtc/modules/video_coding/qm_select.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -59,11 +58,9 @@ class MediaOptimization {
uint32_t SetTargetRates(uint32_t target_bitrate,
uint8_t fraction_lost,
int64_t round_trip_time_ms,
- VCMProtectionCallback* protection_callback,
- VCMQMSettingsCallback* qmsettings_callback);
+ VCMProtectionCallback* protection_callback);
void SetProtectionMethod(VCMProtectionMethodEnum method);
- void EnableQM(bool enable);
void EnableFrameDropper(bool enable);
// Lets the sender suspend video when the rate drops below
@@ -74,8 +71,6 @@ class MediaOptimization {
bool DropFrame();
- void UpdateContentData(const VideoContentMetrics* content_metrics);
-
// Informs Media Optimization of encoded output.
int32_t UpdateWithEncodedData(const EncodedImage& encoded_image);
@@ -98,19 +93,6 @@ class MediaOptimization {
void UpdateSentBitrate(int64_t now_ms) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
void UpdateSentFramerate() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
- // Computes new Quality Mode.
- int32_t SelectQuality(VCMQMSettingsCallback* qmsettings_callback)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Verifies if QM settings differ from default, i.e. if an update is required.
- // Computes actual values, as will be sent to the encoder.
- bool QMUpdate(VCMResolutionScale* qm,
- VCMQMSettingsCallback* qmsettings_callback)
- EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
- // Checks if we should make a QM change. Return true if yes, false otherwise.
- bool CheckStatusForQMchange() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
-
void ProcessIncomingFrameRate(int64_t now)
EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
@@ -152,16 +134,11 @@ class MediaOptimization {
int video_target_bitrate_ GUARDED_BY(crit_sect_);
float incoming_frame_rate_ GUARDED_BY(crit_sect_);
int64_t incoming_frame_times_[kFrameCountHistorySize] GUARDED_BY(crit_sect_);
- bool enable_qm_ GUARDED_BY(crit_sect_);
std::list<EncodedFrameSample> encoded_frame_samples_ GUARDED_BY(crit_sect_);
uint32_t avg_sent_bit_rate_bps_ GUARDED_BY(crit_sect_);
uint32_t avg_sent_framerate_ GUARDED_BY(crit_sect_);
uint32_t key_frame_cnt_ GUARDED_BY(crit_sect_);
uint32_t delta_frame_cnt_ GUARDED_BY(crit_sect_);
- std::unique_ptr<VCMContentMetricsProcessing> content_ GUARDED_BY(crit_sect_);
- std::unique_ptr<VCMQmResolution> qm_resolution_ GUARDED_BY(crit_sect_);
- int64_t last_qm_update_time_ GUARDED_BY(crit_sect_);
- int64_t last_change_time_ GUARDED_BY(crit_sect_); // Content/user triggered.
int num_layers_ GUARDED_BY(crit_sect_);
bool suspension_enabled_ GUARDED_BY(crit_sect_);
bool video_suspended_ GUARDED_BY(crit_sect_);
diff --git a/chromium/third_party/webrtc/modules/video_coding/media_optimization_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/media_optimization_unittest.cc
index 3f8ac5d0752..e6a1bcccd96 100644
--- a/chromium/third_party/webrtc/modules/video_coding/media_optimization_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/media_optimization_unittest.cc
@@ -66,7 +66,7 @@ TEST_F(TestMediaOptimization, VerifyMuting) {
media_opt_.SetTargetRates(target_bitrate_kbps * 1000,
0, // Lossrate.
100, // RTT in ms.
- nullptr, nullptr);
+ nullptr);
media_opt_.EnableFrameDropper(true);
for (int time = 0; time < 2000; time += frame_time_ms_) {
ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, false));
@@ -76,7 +76,7 @@ TEST_F(TestMediaOptimization, VerifyMuting) {
media_opt_.SetTargetRates(kThresholdBps - 1000,
0, // Lossrate.
100, // RTT in ms.
- nullptr, nullptr);
+ nullptr);
// Expect the muter to engage immediately and stay muted.
// Test during 2 seconds.
for (int time = 0; time < 2000; time += frame_time_ms_) {
@@ -89,7 +89,7 @@ TEST_F(TestMediaOptimization, VerifyMuting) {
media_opt_.SetTargetRates(kThresholdBps + 1000,
0, // Lossrate.
100, // RTT in ms.
- nullptr, nullptr);
+ nullptr);
// Expect the muter to stay muted.
// Test during 2 seconds.
for (int time = 0; time < 2000; time += frame_time_ms_) {
@@ -101,7 +101,7 @@ TEST_F(TestMediaOptimization, VerifyMuting) {
media_opt_.SetTargetRates(kThresholdBps + kWindowBps + 1000,
0, // Lossrate.
100, // RTT in ms.
- nullptr, nullptr);
+ nullptr);
// Expect the muter to disengage immediately.
// Test during 2 seconds.
for (int time = 0; time < 2000; time += frame_time_ms_) {
@@ -138,7 +138,7 @@ TEST_F(TestMediaOptimization, ProtectsUsingFecBitrateAboveCodecMax) {
// Using 10% of codec bitrate for FEC, should still be able to use all of it.
protection_callback.fec_rate_bps_ = kCodecBitrateBps / 10;
uint32_t target_bitrate = media_opt_.SetTargetRates(
- kMaxBitrateBps, 0, 0, &protection_callback, nullptr);
+ kMaxBitrateBps, 0, 0, &protection_callback);
EXPECT_EQ(kCodecBitrateBps, static_cast<int>(target_bitrate));
@@ -146,7 +146,7 @@ TEST_F(TestMediaOptimization, ProtectsUsingFecBitrateAboveCodecMax) {
// both equally, but only be half of max (since that ceiling should be hit).
protection_callback.fec_rate_bps_ = kCodecBitrateBps;
target_bitrate = media_opt_.SetTargetRates(kMaxBitrateBps, 128, 100,
- &protection_callback, nullptr);
+ &protection_callback);
EXPECT_EQ(kMaxBitrateBps / 2, static_cast<int>(target_bitrate));
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/nack_module.cc b/chromium/third_party/webrtc/modules/video_coding/nack_module.cc
index 1b12afe0f0d..43244321ea0 100644
--- a/chromium/third_party/webrtc/modules/video_coding/nack_module.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/nack_module.cc
@@ -49,17 +49,17 @@ NackModule::NackModule(Clock* clock,
running_(true),
initialized_(false),
rtt_ms_(kDefaultRttMs),
- last_seq_num_(0),
+ newest_seq_num_(0),
next_process_time_ms_(-1) {
RTC_DCHECK(clock_);
RTC_DCHECK(nack_sender_);
RTC_DCHECK(keyframe_request_sender_);
}
-void NackModule::OnReceivedPacket(const VCMPacket& packet) {
+int NackModule::OnReceivedPacket(const VCMPacket& packet) {
rtc::CritScope lock(&crit_);
if (!running_)
- return;
+ return -1;
uint16_t seq_num = packet.seqNum;
// TODO(philipel): When the packet includes information whether it is
// retransmitted or not, use that value instead. For
@@ -69,40 +69,48 @@ void NackModule::OnReceivedPacket(const VCMPacket& packet) {
bool is_keyframe = packet.isFirstPacket && packet.frameType == kVideoFrameKey;
if (!initialized_) {
- last_seq_num_ = seq_num;
+ newest_seq_num_ = seq_num;
if (is_keyframe)
keyframe_list_.insert(seq_num);
initialized_ = true;
- return;
+ return 0;
}
- if (seq_num == last_seq_num_)
- return;
+ // Since the |newest_seq_num_| is a packet we have actually received we know
+ // that packet has never been Nacked.
+ if (seq_num == newest_seq_num_)
+ return 0;
- if (AheadOf(last_seq_num_, seq_num)) {
+ if (AheadOf(newest_seq_num_, seq_num)) {
// An out of order packet has been received.
- nack_list_.erase(seq_num);
+ auto nack_list_it = nack_list_.find(seq_num);
+ int nacks_sent_for_packet = 0;
+ if (nack_list_it != nack_list_.end()) {
+ nacks_sent_for_packet = nack_list_it->second.retries;
+ nack_list_.erase(nack_list_it);
+ }
if (!is_retransmitted)
UpdateReorderingStatistics(seq_num);
- return;
- } else {
- AddPacketsToNack(last_seq_num_ + 1, seq_num);
- last_seq_num_ = seq_num;
+ return nacks_sent_for_packet;
+ }
+ AddPacketsToNack(newest_seq_num_ + 1, seq_num);
+ newest_seq_num_ = seq_num;
- // Keep track of new keyframes.
- if (is_keyframe)
- keyframe_list_.insert(seq_num);
+ // Keep track of new keyframes.
+ if (is_keyframe)
+ keyframe_list_.insert(seq_num);
- // And remove old ones so we don't accumulate keyframes.
- auto it = keyframe_list_.lower_bound(seq_num - kMaxPacketAge);
- if (it != keyframe_list_.begin())
- keyframe_list_.erase(keyframe_list_.begin(), it);
+ // And remove old ones so we don't accumulate keyframes.
+ auto it = keyframe_list_.lower_bound(seq_num - kMaxPacketAge);
+ if (it != keyframe_list_.begin())
+ keyframe_list_.erase(keyframe_list_.begin(), it);
- // Are there any nacks that are waiting for this seq_num.
- std::vector<uint16_t> nack_batch = GetNackBatch(kSeqNumOnly);
- if (!nack_batch.empty())
- nack_sender_->SendNack(nack_batch);
- }
+ // Are there any nacks that are waiting for this seq_num.
+ std::vector<uint16_t> nack_batch = GetNackBatch(kSeqNumOnly);
+ if (!nack_batch.empty())
+ nack_sender_->SendNack(nack_batch);
+
+ return 0;
}
void NackModule::ClearUpTo(uint16_t seq_num) {
@@ -215,7 +223,7 @@ std::vector<uint16_t> NackModule::GetNackBatch(NackFilterOptions options) {
auto it = nack_list_.begin();
while (it != nack_list_.end()) {
if (consider_seq_num && it->second.sent_at_time == -1 &&
- AheadOrAt(last_seq_num_, it->second.send_at_seq_num)) {
+ AheadOrAt(newest_seq_num_, it->second.send_at_seq_num)) {
nack_batch.emplace_back(it->second.seq_num);
++it->second.retries;
it->second.sent_at_time = now_ms;
@@ -248,8 +256,8 @@ std::vector<uint16_t> NackModule::GetNackBatch(NackFilterOptions options) {
}
void NackModule::UpdateReorderingStatistics(uint16_t seq_num) {
- RTC_DCHECK(AheadOf(last_seq_num_, seq_num));
- uint16_t diff = ReverseDiff(last_seq_num_, seq_num);
+ RTC_DCHECK(AheadOf(newest_seq_num_, seq_num));
+ uint16_t diff = ReverseDiff(newest_seq_num_, seq_num);
reordering_histogram_.Add(diff);
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/nack_module.h b/chromium/third_party/webrtc/modules/video_coding/nack_module.h
index 7163a8e9054..58d6cfa985e 100644
--- a/chromium/third_party/webrtc/modules/video_coding/nack_module.h
+++ b/chromium/third_party/webrtc/modules/video_coding/nack_module.h
@@ -32,7 +32,7 @@ class NackModule : public Module {
NackSender* nack_sender,
KeyFrameRequestSender* keyframe_request_sender);
- void OnReceivedPacket(const VCMPacket& packet);
+ int OnReceivedPacket(const VCMPacket& packet);
void ClearUpTo(uint16_t seq_num);
void UpdateRtt(int64_t rtt_ms);
void Clear();
@@ -59,11 +59,6 @@ class NackModule : public Module {
int64_t sent_at_time;
int retries;
};
-
- struct SeqNumComparator {
- bool operator()(uint16_t s1, uint16_t s2) const { return AheadOf(s2, s1); }
- };
-
void AddPacketsToNack(uint16_t seq_num_start, uint16_t seq_num_end)
EXCLUSIVE_LOCKS_REQUIRED(crit_);
@@ -87,13 +82,15 @@ class NackModule : public Module {
NackSender* const nack_sender_;
KeyFrameRequestSender* const keyframe_request_sender_;
- std::map<uint16_t, NackInfo, SeqNumComparator> nack_list_ GUARDED_BY(crit_);
- std::set<uint16_t, SeqNumComparator> keyframe_list_ GUARDED_BY(crit_);
+ std::map<uint16_t, NackInfo, DescendingSeqNumComp<uint16_t>> nack_list_
+ GUARDED_BY(crit_);
+ std::set<uint16_t, DescendingSeqNumComp<uint16_t>> keyframe_list_
+ GUARDED_BY(crit_);
video_coding::Histogram reordering_histogram_ GUARDED_BY(crit_);
bool running_ GUARDED_BY(crit_);
bool initialized_ GUARDED_BY(crit_);
int64_t rtt_ms_ GUARDED_BY(crit_);
- uint16_t last_seq_num_ GUARDED_BY(crit_);
+ uint16_t newest_seq_num_ GUARDED_BY(crit_);
int64_t next_process_time_ms_ GUARDED_BY(crit_);
};
diff --git a/chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc
index 3870742016a..9c2eb4ac0c3 100644
--- a/chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/nack_module_unittest.cc
@@ -290,4 +290,28 @@ TEST_F(TestNackModule, ClearUpToWrap) {
EXPECT_EQ(0, sent_nacks_[0]);
}
+TEST_F(TestNackModule, PacketNackCount) {
+ VCMPacket packet;
+ packet.seqNum = 0;
+ EXPECT_EQ(0, nack_module_.OnReceivedPacket(packet));
+ packet.seqNum = 2;
+ EXPECT_EQ(0, nack_module_.OnReceivedPacket(packet));
+ packet.seqNum = 1;
+ EXPECT_EQ(1, nack_module_.OnReceivedPacket(packet));
+
+ sent_nacks_.clear();
+ nack_module_.UpdateRtt(100);
+ packet.seqNum = 5;
+ EXPECT_EQ(0, nack_module_.OnReceivedPacket(packet));
+ clock_->AdvanceTimeMilliseconds(100);
+ nack_module_.Process();
+ clock_->AdvanceTimeMilliseconds(100);
+ nack_module_.Process();
+ packet.seqNum = 3;
+ EXPECT_EQ(3, nack_module_.OnReceivedPacket(packet));
+ packet.seqNum = 4;
+ EXPECT_EQ(3, nack_module_.OnReceivedPacket(packet));
+ EXPECT_EQ(0, nack_module_.OnReceivedPacket(packet));
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc b/chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc
index 0a05baa16ab..09fb2499074 100644
--- a/chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/packet_buffer.cc
@@ -14,8 +14,8 @@
#include <limits>
#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
#include "webrtc/modules/video_coding/frame_object.h"
-#include "webrtc/modules/video_coding/sequence_number_util.h"
namespace webrtc {
namespace video_coding {
@@ -25,12 +25,12 @@ PacketBuffer::PacketBuffer(size_t start_buffer_size,
OnCompleteFrameCallback* frame_callback)
: size_(start_buffer_size),
max_size_(max_buffer_size),
- last_seq_num_(0),
first_seq_num_(0),
- initialized_(false),
+ last_seq_num_(0),
+ first_packet_received_(false),
data_buffer_(start_buffer_size),
sequence_buffer_(start_buffer_size),
- frame_callback_(frame_callback) {
+ reference_finder_(frame_callback) {
RTC_DCHECK_LE(start_buffer_size, max_buffer_size);
// Buffer size must always be a power of 2.
RTC_DCHECK((start_buffer_size & (start_buffer_size - 1)) == 0);
@@ -40,12 +40,12 @@ PacketBuffer::PacketBuffer(size_t start_buffer_size,
bool PacketBuffer::InsertPacket(const VCMPacket& packet) {
rtc::CritScope lock(&crit_);
uint16_t seq_num = packet.seqNum;
- int index = seq_num % size_;
+ size_t index = seq_num % size_;
- if (!initialized_) {
+ if (!first_packet_received_) {
first_seq_num_ = seq_num - 1;
last_seq_num_ = seq_num;
- initialized_ = true;
+ first_packet_received_ = true;
}
if (sequence_buffer_[index].used) {
@@ -70,16 +70,17 @@ bool PacketBuffer::InsertPacket(const VCMPacket& packet) {
sequence_buffer_[index].frame_end = packet.markerBit;
sequence_buffer_[index].seq_num = packet.seqNum;
sequence_buffer_[index].continuous = false;
+ sequence_buffer_[index].frame_created = false;
sequence_buffer_[index].used = true;
data_buffer_[index] = packet;
- FindCompleteFrames(seq_num);
+ FindFrames(seq_num);
return true;
}
void PacketBuffer::ClearTo(uint16_t seq_num) {
rtc::CritScope lock(&crit_);
- int index = first_seq_num_ % size_;
+ size_t index = first_seq_num_ % size_;
while (AheadOf<uint16_t>(seq_num, first_seq_num_ + 1)) {
index = (index + 1) % size_;
first_seq_num_ = Add<1 << 16>(first_seq_num_, 1);
@@ -96,7 +97,7 @@ bool PacketBuffer::ExpandBufferSize() {
std::vector<ContinuityInfo> new_sequence_buffer(new_size);
for (size_t i = 0; i < size_; ++i) {
if (sequence_buffer_[i].used) {
- int index = sequence_buffer_[i].seq_num % new_size;
+ size_t index = sequence_buffer_[i].seq_num % new_size;
new_sequence_buffer[index] = sequence_buffer_[i];
new_data_buffer[index] = data_buffer_[i];
}
@@ -108,38 +109,47 @@ bool PacketBuffer::ExpandBufferSize() {
}
bool PacketBuffer::IsContinuous(uint16_t seq_num) const {
- int index = seq_num % size_;
+ size_t index = seq_num % size_;
int prev_index = index > 0 ? index - 1 : size_ - 1;
+
if (!sequence_buffer_[index].used)
return false;
+ if (sequence_buffer_[index].frame_created)
+ return false;
if (sequence_buffer_[index].frame_begin)
return true;
if (!sequence_buffer_[prev_index].used)
return false;
+ if (sequence_buffer_[prev_index].seq_num !=
+ static_cast<uint16_t>(seq_num - 1))
+ return false;
if (sequence_buffer_[prev_index].continuous)
return true;
return false;
}
-void PacketBuffer::FindCompleteFrames(uint16_t seq_num) {
- int index = seq_num % size_;
+void PacketBuffer::FindFrames(uint16_t seq_num) {
+ size_t index = seq_num % size_;
while (IsContinuous(seq_num)) {
sequence_buffer_[index].continuous = true;
- // If the frame is complete, find the first packet of the frame and
- // create a FrameObject.
+ // If all packets of the frame is continuous, find the first packet of the
+ // frame and create an RtpFrameObject.
if (sequence_buffer_[index].frame_end) {
- int rindex = index;
+ int start_index = index;
uint16_t start_seq_num = seq_num;
- while (!sequence_buffer_[rindex].frame_begin) {
- rindex = rindex > 0 ? rindex - 1 : size_ - 1;
+
+ while (!sequence_buffer_[start_index].frame_begin) {
+ sequence_buffer_[start_index].frame_created = true;
+ start_index = start_index > 0 ? start_index - 1 : size_ - 1;
start_seq_num--;
}
+ sequence_buffer_[start_index].frame_created = true;
- std::unique_ptr<FrameObject> frame(
- new RtpFrameObject(this, 1, start_seq_num, seq_num));
- frame_callback_->OnCompleteFrame(std::move(frame));
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(this, start_seq_num, seq_num));
+ reference_finder_.ManageFrame(std::move(frame));
}
index = (index + 1) % size_;
@@ -149,14 +159,13 @@ void PacketBuffer::FindCompleteFrames(uint16_t seq_num) {
void PacketBuffer::ReturnFrame(RtpFrameObject* frame) {
rtc::CritScope lock(&crit_);
- int index = frame->first_packet() % size_;
- int end = (frame->last_packet() + 1) % size_;
- uint16_t seq_num = frame->first_packet();
+ size_t index = frame->first_seq_num() % size_;
+ size_t end = (frame->last_seq_num() + 1) % size_;
+ uint16_t seq_num = frame->first_seq_num();
while (index != end) {
- if (sequence_buffer_[index].seq_num == seq_num) {
+ if (sequence_buffer_[index].seq_num == seq_num)
sequence_buffer_[index].used = false;
- sequence_buffer_[index].continuous = false;
- }
+
index = (index + 1) % size_;
++seq_num;
}
@@ -173,9 +182,9 @@ bool PacketBuffer::GetBitstream(const RtpFrameObject& frame,
uint8_t* destination) {
rtc::CritScope lock(&crit_);
- int index = frame.first_packet() % size_;
- int end = (frame.last_packet() + 1) % size_;
- uint16_t seq_num = frame.first_packet();
+ size_t index = frame.first_seq_num() % size_;
+ size_t end = (frame.last_seq_num() + 1) % size_;
+ uint16_t seq_num = frame.first_seq_num();
while (index != end) {
if (!sequence_buffer_[index].used ||
sequence_buffer_[index].seq_num != seq_num) {
@@ -192,12 +201,22 @@ bool PacketBuffer::GetBitstream(const RtpFrameObject& frame,
return true;
}
-void PacketBuffer::Flush() {
+VCMPacket* PacketBuffer::GetPacket(uint16_t seq_num) {
rtc::CritScope lock(&crit_);
- for (size_t i = 0; i < size_; ++i) {
- sequence_buffer_[i].used = false;
- sequence_buffer_[i].continuous = false;
+ size_t index = seq_num % size_;
+ if (!sequence_buffer_[index].used ||
+ seq_num != sequence_buffer_[index].seq_num) {
+ return nullptr;
}
+ return &data_buffer_[index];
+}
+
+void PacketBuffer::Clear() {
+ rtc::CritScope lock(&crit_);
+ for (size_t i = 0; i < size_; ++i)
+ sequence_buffer_[i].used = false;
+
+ first_packet_received_ = false;
}
} // namespace video_coding
diff --git a/chromium/third_party/webrtc/modules/video_coding/packet_buffer.h b/chromium/third_party/webrtc/modules/video_coding/packet_buffer.h
index 6ca514536ef..ae0916a75f3 100644
--- a/chromium/third_party/webrtc/modules/video_coding/packet_buffer.h
+++ b/chromium/third_party/webrtc/modules/video_coding/packet_buffer.h
@@ -14,9 +14,11 @@
#include <vector>
#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
+#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_coding/packet.h"
+#include "webrtc/modules/video_coding/rtp_frame_reference_finder.h"
+#include "webrtc/modules/video_coding/sequence_number_util.h"
namespace webrtc {
namespace video_coding {
@@ -39,25 +41,50 @@ class PacketBuffer {
bool InsertPacket(const VCMPacket& packet);
void ClearTo(uint16_t seq_num);
- void Flush();
+ void Clear();
private:
friend RtpFrameObject;
// Since we want the packet buffer to be as packet type agnostic
// as possible we extract only the information needed in order
- // to determin whether a sequence of packets is continuous or not.
+ // to determine whether a sequence of packets is continuous or not.
struct ContinuityInfo {
+ // The sequence number of the packet.
uint16_t seq_num = 0;
+
+ // If this is the first packet of the frame.
bool frame_begin = false;
+
+ // If this is the last packet of the frame.
bool frame_end = false;
+
+ // If this slot is currently used.
bool used = false;
+
+ // If all its previous packets have been inserted into the packet buffer.
bool continuous = false;
+
+ // If this packet has been used to create a frame already.
+ bool frame_created = false;
};
+ // Tries to expand the buffer.
bool ExpandBufferSize() EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Test if all previous packets has arrived for the given sequence number.
bool IsContinuous(uint16_t seq_num) const EXCLUSIVE_LOCKS_REQUIRED(crit_);
- void FindCompleteFrames(uint16_t seq_num) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Test if all packets of a frame has arrived, and if so, creates a frame.
+ // May create multiple frames per invocation.
+ void FindFrames(uint16_t seq_num) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Copy the bitstream for |frame| to |destination|.
bool GetBitstream(const RtpFrameObject& frame, uint8_t* destination);
+
+ // Get the packet with sequence number |seq_num|.
+ VCMPacket* GetPacket(uint16_t seq_num);
+
+ // Mark all slots used by |frame| as not used.
void ReturnFrame(RtpFrameObject* frame);
rtc::CriticalSection crit_;
@@ -66,13 +93,25 @@ class PacketBuffer {
size_t size_ GUARDED_BY(crit_);
const size_t max_size_;
- uint16_t last_seq_num_ GUARDED_BY(crit_);
+ // The fist sequence number currently in the buffer.
uint16_t first_seq_num_ GUARDED_BY(crit_);
- bool initialized_ GUARDED_BY(crit_);
+
+ // The last sequence number currently in the buffer.
+ uint16_t last_seq_num_ GUARDED_BY(crit_);
+
+ // If the packet buffer has received its first packet.
+ bool first_packet_received_ GUARDED_BY(crit_);
+
+ // Buffer that holds the inserted packets.
std::vector<VCMPacket> data_buffer_ GUARDED_BY(crit_);
+
+ // Buffer that holds the information about which slot that is currently in use
+ // and information needed to determine the continuity between packets.
std::vector<ContinuityInfo> sequence_buffer_ GUARDED_BY(crit_);
- OnCompleteFrameCallback* const frame_callback_;
+ // Frames that have received all their packets are handed off to the
+ // |reference_finder_| which finds the dependencies between the frames.
+ RtpFrameReferenceFinder reference_finder_;
};
} // namespace video_coding
diff --git a/chromium/third_party/webrtc/modules/video_coding/packet_buffer_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/packet_buffer_unittest.cc
index bc06940391c..b50074d8f60 100644
--- a/chromium/third_party/webrtc/modules/video_coding/packet_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/packet_buffer_unittest.cc
@@ -10,6 +10,9 @@
#include <cstring>
#include <limits>
+#include <map>
+#include <set>
+#include <utility>
#include "webrtc/modules/video_coding/frame_object.h"
#include "webrtc/modules/video_coding/packet_buffer.h"
@@ -24,142 +27,301 @@ class TestPacketBuffer : public ::testing::Test,
public OnCompleteFrameCallback {
protected:
TestPacketBuffer()
- : rand_(0x8739211), packet_buffer_(kStartSize, kMaxSize, this) {}
+ : rand_(0x8739211),
+ packet_buffer_(new PacketBuffer(kStartSize, kMaxSize, this)),
+ frames_from_callback_(FrameComp()) {}
uint16_t Rand() { return rand_.Rand(std::numeric_limits<uint16_t>::max()); }
void OnCompleteFrame(std::unique_ptr<FrameObject> frame) override {
- frames_from_callback_.emplace_back(std::move(frame));
+ uint16_t pid = frame->picture_id;
+ uint16_t sidx = frame->spatial_layer;
+ auto frame_it = frames_from_callback_.find(std::make_pair(pid, sidx));
+ if (frame_it != frames_from_callback_.end()) {
+ ADD_FAILURE() << "Already received frame with (pid:sidx): ("
+ << pid << ":" << sidx << ")";
+ return;
+ }
+
+ frames_from_callback_.insert(
+ std::make_pair(std::make_pair(pid, sidx), std::move(frame)));
}
void TearDown() override {
- // All FrameObjects must be destroyed before the PacketBuffer since
- // a FrameObject will try to remove itself from the packet buffer
+ // All frame objects must be destroyed before the packet buffer since
+ // a frame object will try to remove itself from the packet buffer
// upon destruction.
frames_from_callback_.clear();
}
+ // Short version of true and false.
+ enum {
+ kT = true,
+ kF = false
+ };
+
+ // Insert a generic packet into the packet buffer.
+ void InsertGeneric(uint16_t seq_num, // packet sequence number
+ bool keyframe, // is keyframe
+ bool first, // is first packet of frame
+ bool last, // is last packet of frame
+ size_t data_size = 0, // size of data
+ uint8_t* data = nullptr) { // data pointer
+ VCMPacket packet;
+ packet.codec = kVideoCodecGeneric;
+ packet.seqNum = seq_num;
+ packet.frameType = keyframe ? kVideoFrameKey : kVideoFrameDelta;
+ packet.isFirstPacket = first;
+ packet.markerBit = last;
+ packet.sizeBytes = data_size;
+ packet.dataPtr = data;
+
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
+ }
+
+ // Insert a Vp8 packet into the packet buffer.
+ void InsertVp8(uint16_t seq_num, // packet sequence number
+ bool keyframe, // is keyframe
+ bool first, // is first packet of frame
+ bool last, // is last packet of frame
+ bool sync = false, // is sync frame
+ int32_t pid = kNoPictureId, // picture id
+ uint8_t tid = kNoTemporalIdx, // temporal id
+ int32_t tl0 = kNoTl0PicIdx, // tl0 pic index
+ size_t data_size = 0, // size of data
+ uint8_t* data = nullptr) { // data pointer
+ VCMPacket packet;
+ packet.codec = kVideoCodecVP8;
+ packet.seqNum = seq_num;
+ packet.frameType = keyframe ? kVideoFrameKey : kVideoFrameDelta;
+ packet.isFirstPacket = first;
+ packet.markerBit = last;
+ packet.sizeBytes = data_size;
+ packet.dataPtr = data;
+ packet.codecSpecificHeader.codecHeader.VP8.pictureId = pid % (1 << 15);
+ packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = tid;
+ packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = tl0;
+ packet.codecSpecificHeader.codecHeader.VP8.layerSync = sync;
+
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
+ }
+
+ // Insert a Vp9 packet into the packet buffer.
+ void InsertVp9Gof(uint16_t seq_num, // packet sequence number
+ bool keyframe, // is keyframe
+ bool first, // is first packet of frame
+ bool last, // is last packet of frame
+ bool up = false, // frame is up-switch point
+ int32_t pid = kNoPictureId, // picture id
+ uint8_t sid = kNoSpatialIdx, // spatial id
+ uint8_t tid = kNoTemporalIdx, // temporal id
+ int32_t tl0 = kNoTl0PicIdx, // tl0 pic index
+ GofInfoVP9* ss = nullptr, // scalability structure
+ size_t data_size = 0, // size of data
+ uint8_t* data = nullptr) { // data pointer
+ VCMPacket packet;
+ packet.codec = kVideoCodecVP9;
+ packet.seqNum = seq_num;
+ packet.frameType = keyframe ? kVideoFrameKey : kVideoFrameDelta;
+ packet.isFirstPacket = first;
+ packet.markerBit = last;
+ packet.sizeBytes = data_size;
+ packet.dataPtr = data;
+ packet.codecSpecificHeader.codecHeader.VP9.flexible_mode = false;
+ packet.codecSpecificHeader.codecHeader.VP9.picture_id = pid % (1 << 15);
+ packet.codecSpecificHeader.codecHeader.VP9.temporal_idx = tid;
+ packet.codecSpecificHeader.codecHeader.VP9.spatial_idx = sid;
+ packet.codecSpecificHeader.codecHeader.VP9.tl0_pic_idx = tl0;
+ packet.codecSpecificHeader.codecHeader.VP9.temporal_up_switch = up;
+ if (ss != nullptr) {
+ packet.codecSpecificHeader.codecHeader.VP9.ss_data_available = true;
+ packet.codecSpecificHeader.codecHeader.VP9.gof = *ss;
+ }
+
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
+ }
+
+ // Insert a Vp9 packet into the packet buffer.
+ void InsertVp9Flex(uint16_t seq_num, // packet sequence number
+ bool keyframe, // is keyframe
+ bool first, // is first packet of frame
+ bool last, // is last packet of frame
+ bool inter, // depends on S-1 layer
+ int32_t pid = kNoPictureId, // picture id
+ uint8_t sid = kNoSpatialIdx, // spatial id
+ uint8_t tid = kNoTemporalIdx, // temporal id
+ int32_t tl0 = kNoTl0PicIdx, // tl0 pic index
+ std::vector<uint8_t> refs =
+ std::vector<uint8_t>(), // frame references
+ size_t data_size = 0, // size of data
+ uint8_t* data = nullptr) { // data pointer
+ VCMPacket packet;
+ packet.codec = kVideoCodecVP9;
+ packet.seqNum = seq_num;
+ packet.frameType = keyframe ? kVideoFrameKey : kVideoFrameDelta;
+ packet.isFirstPacket = first;
+ packet.markerBit = last;
+ packet.sizeBytes = data_size;
+ packet.dataPtr = data;
+ packet.codecSpecificHeader.codecHeader.VP9.inter_layer_predicted = inter;
+ packet.codecSpecificHeader.codecHeader.VP9.flexible_mode = true;
+ packet.codecSpecificHeader.codecHeader.VP9.picture_id = pid % (1 << 15);
+ packet.codecSpecificHeader.codecHeader.VP9.temporal_idx = tid;
+ packet.codecSpecificHeader.codecHeader.VP9.spatial_idx = sid;
+ packet.codecSpecificHeader.codecHeader.VP9.tl0_pic_idx = tl0;
+ packet.codecSpecificHeader.codecHeader.VP9.num_ref_pics = refs.size();
+ for (size_t i = 0; i < refs.size(); ++i)
+ packet.codecSpecificHeader.codecHeader.VP9.pid_diff[i] = refs[i];
+
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
+ }
+
+ // Check if a frame with picture id |pid| and spatial index |sidx| has been
+ // delivered from the packet buffer, and if so, if it has the references
+ // specified by |refs|.
+ template <typename... T>
+ void CheckReferences(uint16_t pid, uint16_t sidx, T... refs) const {
+ auto frame_it = frames_from_callback_.find(std::make_pair(pid, sidx));
+ if (frame_it == frames_from_callback_.end()) {
+ ADD_FAILURE() << "Could not find frame with (pid:sidx): ("
+ << pid << ":" << sidx << ")";
+ return;
+ }
+
+ std::set<uint16_t> actual_refs;
+ for (uint8_t r = 0; r < frame_it->second->num_references; ++r) {
+ actual_refs.insert(frame_it->second->references[r]);
+ }
+
+ std::set<uint16_t> expected_refs;
+ RefsToSet(&expected_refs, refs...);
+
+ ASSERT_EQ(expected_refs, actual_refs);
+ }
+
+ template <typename... T>
+ void CheckReferencesGeneric(uint16_t pid, T... refs) const {
+ CheckReferences(pid, 0, refs...);
+ }
+
+ template <typename... T>
+ void CheckReferencesVp8(uint16_t pid, T... refs) const {
+ CheckReferences(pid, 0, refs...);
+ }
+
+ template <typename... T>
+ void CheckReferencesVp9(uint16_t pid, uint8_t sidx, T... refs) const {
+ CheckReferences(pid, sidx, refs...);
+ }
+
+ template <typename... T>
+ void RefsToSet(std::set<uint16_t>* m, uint16_t ref, T... refs) const {
+ m->insert(ref);
+ RefsToSet(m, refs...);
+ }
+
+ void RefsToSet(std::set<uint16_t>* m) const {}
+
const int kStartSize = 16;
const int kMaxSize = 64;
Random rand_;
- PacketBuffer packet_buffer_;
- std::vector<std::unique_ptr<FrameObject>> frames_from_callback_;
+ std::unique_ptr<PacketBuffer> packet_buffer_;
+ struct FrameComp {
+ bool operator()(const std::pair<uint16_t, uint8_t> f1,
+ const std::pair<uint16_t, uint8_t> f2) const {
+ if (f1.first == f2.first)
+ return f1.second < f2.second;
+ return f1.first < f2.first;
+ }
+ };
+ std::map<std::pair<uint16_t, uint8_t>,
+ std::unique_ptr<FrameObject>,
+ FrameComp> frames_from_callback_;
};
TEST_F(TestPacketBuffer, InsertOnePacket) {
VCMPacket packet;
packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
}
TEST_F(TestPacketBuffer, InsertMultiplePackets) {
VCMPacket packet;
packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
}
TEST_F(TestPacketBuffer, InsertDuplicatePacket) {
VCMPacket packet;
packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
}
TEST_F(TestPacketBuffer, ExpandBuffer) {
- VCMPacket packet;
- packet.seqNum = Rand();
+ uint16_t seq_num = Rand();
for (int i = 0; i < kStartSize + 1; ++i) {
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- ++packet.seqNum;
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num + i, kT , kT, kT);
}
}
TEST_F(TestPacketBuffer, ExpandBufferOverflow) {
- VCMPacket packet;
- packet.seqNum = Rand();
+ uint16_t seq_num = Rand();
for (int i = 0; i < kMaxSize; ++i) {
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- ++packet.seqNum;
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num + i, kT, kT , kT);
}
- EXPECT_FALSE(packet_buffer_.InsertPacket(packet));
+ VCMPacket packet;
+ packet.seqNum = seq_num + kMaxSize + 1;
+ packet.sizeBytes = 1;
+ EXPECT_FALSE(packet_buffer_->InsertPacket(packet));
}
-TEST_F(TestPacketBuffer, OnePacketOneFrame) {
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.markerBit = true;
- packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(1UL, frames_from_callback_.size());
+TEST_F(TestPacketBuffer, GenericOnePacketOneFrame) {
+ // seq_num, kf, frst, lst
+ InsertGeneric(Rand() , kT, kT , kT);
+ ASSERT_EQ(1UL, frames_from_callback_.size());
}
-TEST_F(TestPacketBuffer, TwoPacketsTwoFrames) {
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.markerBit = true;
- packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+TEST_F(TestPacketBuffer, GenericTwoPacketsTwoFrames) {
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num , kT, kT , kT);
+ InsertGeneric(seq_num + 1, kT, kT , kT);
+
EXPECT_EQ(2UL, frames_from_callback_.size());
}
-TEST_F(TestPacketBuffer, TwoPacketsOneFrames) {
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- packet.markerBit = true;
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(1UL, frames_from_callback_.size());
-}
+TEST_F(TestPacketBuffer, GenericTwoPacketsOneFrames) {
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num , kT, kT , kF);
+ InsertGeneric(seq_num + 1, kT, kF , kT);
-TEST_F(TestPacketBuffer, ThreePacketReorderingOneFrame) {
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- packet.isFirstPacket = false;
- packet.markerBit = true;
- packet.seqNum += 2;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- packet.markerBit = false;
- packet.seqNum -= 1;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
EXPECT_EQ(1UL, frames_from_callback_.size());
}
-TEST_F(TestPacketBuffer, IndexWrapOneFrame) {
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.seqNum = kStartSize - 1;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- packet.isFirstPacket = false;
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- packet.markerBit = true;
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+TEST_F(TestPacketBuffer, GenericThreePacketReorderingOneFrame) {
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num , kT, kT , kF);
+ InsertGeneric(seq_num + 2, kT, kF , kT);
+ InsertGeneric(seq_num + 1, kT, kF , kF);
+
EXPECT_EQ(1UL, frames_from_callback_.size());
}
@@ -167,45 +329,77 @@ TEST_F(TestPacketBuffer, DiscardOldPacket) {
uint16_t seq_num = Rand();
VCMPacket packet;
packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
packet.seqNum += 2;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
for (int i = 3; i < kMaxSize; ++i) {
++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
}
++packet.seqNum;
- EXPECT_FALSE(packet_buffer_.InsertPacket(packet));
- packet_buffer_.ClearTo(seq_num + 1);
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_FALSE(packet_buffer_->InsertPacket(packet));
+ packet_buffer_->ClearTo(seq_num + 1);
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
}
TEST_F(TestPacketBuffer, DiscardMultipleOldPackets) {
uint16_t seq_num = Rand();
VCMPacket packet;
packet.seqNum = seq_num;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
packet.seqNum += 2;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
for (int i = 3; i < kMaxSize; ++i) {
++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
}
- packet_buffer_.ClearTo(seq_num + 15);
+ packet_buffer_->ClearTo(seq_num + 15);
for (int i = 0; i < 15; ++i) {
++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
}
for (int i = 15; i < kMaxSize; ++i) {
++packet.seqNum;
- EXPECT_FALSE(packet_buffer_.InsertPacket(packet));
+ EXPECT_FALSE(packet_buffer_->InsertPacket(packet));
}
}
+TEST_F(TestPacketBuffer, GenericFrames) {
+ uint16_t seq_num = Rand();
+
+ // seq_num , keyf , first, last
+ InsertGeneric(seq_num , true , true , true);
+ InsertGeneric(seq_num + 1, false, true , true);
+ InsertGeneric(seq_num + 2, false, true , true);
+ InsertGeneric(seq_num + 3, false, true , true);
+
+ ASSERT_EQ(4UL, frames_from_callback_.size());
+ CheckReferencesGeneric(seq_num);
+ CheckReferencesGeneric(seq_num + 1, seq_num);
+ CheckReferencesGeneric(seq_num + 2, seq_num + 1);
+ CheckReferencesGeneric(seq_num + 3, seq_num + 2);
+}
+
+TEST_F(TestPacketBuffer, GenericFramesReordered) {
+ uint16_t seq_num = Rand();
+
+ // seq_num , keyf , first, last
+ InsertGeneric(seq_num + 1, false, true , true);
+ InsertGeneric(seq_num , true , true , true);
+ InsertGeneric(seq_num + 3, false, true , true);
+ InsertGeneric(seq_num + 2, false, true , true);
+
+ ASSERT_EQ(4UL, frames_from_callback_.size());
+ CheckReferencesGeneric(seq_num);
+ CheckReferencesGeneric(seq_num + 1, seq_num);
+ CheckReferencesGeneric(seq_num + 2, seq_num + 1);
+ CheckReferencesGeneric(seq_num + 3, seq_num + 2);
+}
+
TEST_F(TestPacketBuffer, GetBitstreamFromFrame) {
// "many bitstream, such data" with null termination.
uint8_t many[] = {0x6d, 0x61, 0x6e, 0x79, 0x20};
@@ -216,89 +410,997 @@ TEST_F(TestPacketBuffer, GetBitstreamFromFrame) {
uint8_t
result[sizeof(many) + sizeof(bitstream) + sizeof(such) + sizeof(data)];
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.seqNum = 0xfffe;
- packet.dataPtr = many;
- packet.sizeBytes = sizeof(many);
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- packet.isFirstPacket = false;
- ++packet.seqNum;
- packet.dataPtr = bitstream;
- packet.sizeBytes = sizeof(bitstream);
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- ++packet.seqNum;
- packet.dataPtr = such;
- packet.sizeBytes = sizeof(such);
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- packet.markerBit = true;
- ++packet.seqNum;
- packet.dataPtr = data;
- packet.sizeBytes = sizeof(data);
- EXPECT_EQ(0UL, frames_from_callback_.size());
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- ASSERT_EQ(1UL, frames_from_callback_.size());
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst, data_size , data
+ InsertGeneric(seq_num , kT, kT , kF , sizeof(many) , many);
+ InsertGeneric(seq_num + 1, kF, kF , kF , sizeof(bitstream), bitstream);
+ InsertGeneric(seq_num + 2, kF, kF , kF , sizeof(such) , such);
+ InsertGeneric(seq_num + 3, kF, kF , kT , sizeof(data) , data);
- EXPECT_TRUE(frames_from_callback_[0]->GetBitstream(result));
- EXPECT_EQ(
- std::strcmp("many bitstream, such data", reinterpret_cast<char*>(result)),
- 0);
+ ASSERT_EQ(1UL, frames_from_callback_.size());
+ CheckReferencesVp8(seq_num + 3);
+ EXPECT_TRUE(frames_from_callback_[std::make_pair(seq_num + 3, 0)]->
+ GetBitstream(result));
+ EXPECT_EQ(std::strcmp("many bitstream, such data",
+ reinterpret_cast<char*>(result)),
+ 0);
}
TEST_F(TestPacketBuffer, FreeSlotsOnFrameDestruction) {
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- packet.isFirstPacket = false;
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- ++packet.seqNum;
- packet.markerBit = true;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num , kT, kT , kF);
+ InsertGeneric(seq_num + 1, kF, kF , kF);
+ InsertGeneric(seq_num + 2, kF, kF , kT);
EXPECT_EQ(1UL, frames_from_callback_.size());
frames_from_callback_.clear();
- packet.isFirstPacket = true;
- packet.markerBit = false;
- packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- packet.isFirstPacket = false;
- ++packet.seqNum;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_EQ(0UL, frames_from_callback_.size());
- ++packet.seqNum;
- packet.markerBit = true;
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num , kT, kT , kF);
+ InsertGeneric(seq_num + 1, kF, kF , kF);
+ InsertGeneric(seq_num + 2, kF, kF , kT);
EXPECT_EQ(1UL, frames_from_callback_.size());
}
-TEST_F(TestPacketBuffer, Flush) {
- VCMPacket packet;
- packet.isFirstPacket = true;
- packet.markerBit = true;
- packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
- packet_buffer_.Flush();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+TEST_F(TestPacketBuffer, Clear) {
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num , kT, kT , kF);
+ InsertGeneric(seq_num + 1, kF, kF , kF);
+ InsertGeneric(seq_num + 2, kF, kF , kT);
+ EXPECT_EQ(1UL, frames_from_callback_.size());
+
+ packet_buffer_->Clear();
+
+ // seq_num , kf, frst, lst
+ InsertGeneric(seq_num + kStartSize , kT, kT , kF);
+ InsertGeneric(seq_num + kStartSize + 1, kF, kF , kF);
+ InsertGeneric(seq_num + kStartSize + 2, kF, kF , kT);
EXPECT_EQ(2UL, frames_from_callback_.size());
}
-TEST_F(TestPacketBuffer, InvalidateFrameByFlushing) {
+TEST_F(TestPacketBuffer, InvalidateFrameByClearing) {
VCMPacket packet;
- packet.isFirstPacket = true;
- packet.markerBit = true;
+ packet.codec = kVideoCodecGeneric;
+ packet.frameType = kVideoFrameKey;
+ packet.isFirstPacket = kT;
+ packet.markerBit = kT;
packet.seqNum = Rand();
- EXPECT_TRUE(packet_buffer_.InsertPacket(packet));
+ EXPECT_TRUE(packet_buffer_->InsertPacket(packet));
ASSERT_EQ(1UL, frames_from_callback_.size());
- packet_buffer_.Flush();
- EXPECT_FALSE(frames_from_callback_[0]->GetBitstream(nullptr));
+ packet_buffer_->Clear();
+ EXPECT_FALSE(frames_from_callback_.begin()->second->GetBitstream(nullptr));
+}
+
+TEST_F(TestPacketBuffer, Vp8NoPictureId) {
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst
+ InsertVp8(seq_num , kT, kT , kF);
+ InsertVp8(seq_num + 1 , kF, kF , kF);
+ InsertVp8(seq_num + 2 , kF, kF , kT);
+ ASSERT_EQ(1UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 3 , kF, kT , kF);
+ InsertVp8(seq_num + 4 , kF, kF , kT);
+ ASSERT_EQ(2UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 5 , kF, kT , kF);
+ InsertVp8(seq_num + 6 , kF, kF , kF);
+ InsertVp8(seq_num + 7 , kF, kF , kF);
+ InsertVp8(seq_num + 8 , kF, kF , kT);
+ ASSERT_EQ(3UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 9 , kF, kT , kT);
+ ASSERT_EQ(4UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 10, kF, kT , kF);
+ InsertVp8(seq_num + 11, kF, kF , kT);
+ ASSERT_EQ(5UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 12, kT, kT , kT);
+ ASSERT_EQ(6UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 13, kF, kT , kF);
+ InsertVp8(seq_num + 14, kF, kF , kF);
+ InsertVp8(seq_num + 15, kF, kF , kF);
+ InsertVp8(seq_num + 16, kF, kF , kF);
+ InsertVp8(seq_num + 17, kF, kF , kT);
+ ASSERT_EQ(7UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 18, kF, kT , kT);
+ ASSERT_EQ(8UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 19, kF, kT , kF);
+ InsertVp8(seq_num + 20, kF, kF , kT);
+ ASSERT_EQ(9UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 21, kF, kT , kT);
+
+ ASSERT_EQ(10UL, frames_from_callback_.size());
+ CheckReferencesVp8(seq_num + 2);
+ CheckReferencesVp8(seq_num + 4, seq_num + 2);
+ CheckReferencesVp8(seq_num + 8, seq_num + 4);
+ CheckReferencesVp8(seq_num + 9, seq_num + 8);
+ CheckReferencesVp8(seq_num + 11, seq_num + 9);
+ CheckReferencesVp8(seq_num + 12);
+ CheckReferencesVp8(seq_num + 17, seq_num + 12);
+ CheckReferencesVp8(seq_num + 18, seq_num + 17);
+ CheckReferencesVp8(seq_num + 20, seq_num + 18);
+ CheckReferencesVp8(seq_num + 21, seq_num + 20);
+}
+
+TEST_F(TestPacketBuffer, Vp8NoPictureIdReordered) {
+ uint16_t seq_num = 0xfffa;
+
+ // seq_num , kf, frst, lst
+ InsertVp8(seq_num + 1 , kF, kF , kF);
+ InsertVp8(seq_num , kT, kT , kF);
+ InsertVp8(seq_num + 2 , kF, kF , kT);
+ InsertVp8(seq_num + 4 , kF, kF , kT);
+ InsertVp8(seq_num + 6 , kF, kF , kF);
+ InsertVp8(seq_num + 3 , kF, kT , kF);
+ InsertVp8(seq_num + 7 , kF, kF , kF);
+ InsertVp8(seq_num + 5 , kF, kT , kF);
+ InsertVp8(seq_num + 9 , kF, kT , kT);
+ InsertVp8(seq_num + 10, kF, kT , kF);
+ InsertVp8(seq_num + 8 , kF, kF , kT);
+ InsertVp8(seq_num + 13, kF, kT , kF);
+ InsertVp8(seq_num + 14, kF, kF , kF);
+ InsertVp8(seq_num + 12, kT, kT , kT);
+ InsertVp8(seq_num + 11, kF, kF , kT);
+ InsertVp8(seq_num + 16, kF, kF , kF);
+ InsertVp8(seq_num + 19, kF, kT , kF);
+ InsertVp8(seq_num + 15, kF, kF , kF);
+ InsertVp8(seq_num + 17, kF, kF , kT);
+ InsertVp8(seq_num + 20, kF, kF , kT);
+ InsertVp8(seq_num + 21, kF, kT , kT);
+ InsertVp8(seq_num + 18, kF, kT , kT);
+
+ ASSERT_EQ(10UL, frames_from_callback_.size());
+ CheckReferencesVp8(seq_num + 2);
+ CheckReferencesVp8(seq_num + 4, seq_num + 2);
+ CheckReferencesVp8(seq_num + 8, seq_num + 4);
+ CheckReferencesVp8(seq_num + 9, seq_num + 8);
+ CheckReferencesVp8(seq_num + 11, seq_num + 9);
+ CheckReferencesVp8(seq_num + 12);
+ CheckReferencesVp8(seq_num + 17, seq_num + 12);
+ CheckReferencesVp8(seq_num + 18, seq_num + 17);
+ CheckReferencesVp8(seq_num + 20, seq_num + 18);
+ CheckReferencesVp8(seq_num + 21, seq_num + 20);
+}
+
+
+TEST_F(TestPacketBuffer, Vp8KeyFrameReferences) {
+ uint16_t pid = Rand();
+ // seq_num, kf, frst, lst, sync, pid, tid, tl0
+ InsertVp8(Rand() , kT, kT , kT , kF , pid, 0 , 0);
+
+ ASSERT_EQ(1UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+}
+
+// Test with 1 temporal layer.
+TEST_F(TestPacketBuffer, Vp8TemporalLayers_0) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num , kT, kT , kT , kF , pid , 0 , 1);
+ InsertVp8(seq_num + 1, kF, kT , kT , kF , pid + 1, 0 , 2);
+ InsertVp8(seq_num + 2, kF, kT , kT , kF , pid + 2, 0 , 3);
+ InsertVp8(seq_num + 3, kF, kT , kT , kF , pid + 3, 0 , 4);
+
+ ASSERT_EQ(4UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1, pid);
+ CheckReferencesVp8(pid + 2, pid + 1);
+ CheckReferencesVp8(pid + 3, pid + 2);
+}
+
+// Test with 1 temporal layer.
+TEST_F(TestPacketBuffer, Vp8TemporalLayersReordering_0) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num , kT, kT , kT , kF , pid , 0 , 1);
+ InsertVp8(seq_num + 1, kF, kT , kT , kF , pid + 1, 0 , 2);
+ InsertVp8(seq_num + 3, kF, kT , kT , kF , pid + 3, 0 , 4);
+ InsertVp8(seq_num + 2, kF, kT , kT , kF , pid + 2, 0 , 3);
+ InsertVp8(seq_num + 5, kF, kT , kT , kF , pid + 5, 0 , 6);
+ InsertVp8(seq_num + 6, kF, kT , kT , kF , pid + 6, 0 , 7);
+ InsertVp8(seq_num + 4, kF, kT , kT , kF , pid + 4, 0 , 5);
+
+ ASSERT_EQ(7UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1, pid);
+ CheckReferencesVp8(pid + 2, pid + 1);
+ CheckReferencesVp8(pid + 3, pid + 2);
+ CheckReferencesVp8(pid + 4, pid + 3);
+ CheckReferencesVp8(pid + 5, pid + 4);
+ CheckReferencesVp8(pid + 6, pid + 5);
+}
+
+// Test with 2 temporal layers in a 01 pattern.
+TEST_F(TestPacketBuffer, Vp8TemporalLayers_01) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num , kT, kT , kT , kF , pid , 0, 255);
+ InsertVp8(seq_num + 1, kF, kT , kT , kT , pid + 1, 1, 255);
+ InsertVp8(seq_num + 2, kF, kT , kT , kF , pid + 2, 0, 0);
+ InsertVp8(seq_num + 3, kF, kT , kT , kF , pid + 3, 1, 0);
+
+ ASSERT_EQ(4UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1, pid);
+ CheckReferencesVp8(pid + 2, pid);
+ CheckReferencesVp8(pid + 3, pid + 1, pid + 2);
+}
+
+// Test with 2 temporal layers in a 01 pattern.
+TEST_F(TestPacketBuffer, Vp8TemporalLayersReordering_01) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num + 1, kF, kT , kT , kT , pid + 1, 1 , 255);
+ InsertVp8(seq_num , kT, kT , kT , kF , pid , 0 , 255);
+ InsertVp8(seq_num + 3, kF, kT , kT , kF , pid + 3, 1 , 0);
+ InsertVp8(seq_num + 5, kF, kT , kT , kF , pid + 5, 1 , 1);
+ InsertVp8(seq_num + 2, kF, kT , kT , kF , pid + 2, 0 , 0);
+ InsertVp8(seq_num + 4, kF, kT , kT , kF , pid + 4, 0 , 1);
+ InsertVp8(seq_num + 6, kF, kT , kT , kF , pid + 6, 0 , 2);
+ InsertVp8(seq_num + 7, kF, kT , kT , kF , pid + 7, 1 , 2);
+
+ ASSERT_EQ(8UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1, pid);
+ CheckReferencesVp8(pid + 2, pid);
+ CheckReferencesVp8(pid + 3, pid + 1, pid + 2);
+ CheckReferencesVp8(pid + 4, pid + 2);
+ CheckReferencesVp8(pid + 5, pid + 3, pid + 4);
+ CheckReferencesVp8(pid + 6, pid + 4);
+ CheckReferencesVp8(pid + 7, pid + 5, pid + 6);
+}
+
+// Test with 3 temporal layers in a 0212 pattern.
+TEST_F(TestPacketBuffer, Vp8TemporalLayers_0212) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num , kT, kT , kT , kF , pid , 0 , 55);
+ InsertVp8(seq_num + 1 , kF, kT , kT , kT , pid + 1 , 2 , 55);
+ InsertVp8(seq_num + 2 , kF, kT , kT , kT , pid + 2 , 1 , 55);
+ InsertVp8(seq_num + 3 , kF, kT , kT , kF , pid + 3 , 2 , 55);
+ InsertVp8(seq_num + 4 , kF, kT , kT , kF , pid + 4 , 0 , 56);
+ InsertVp8(seq_num + 5 , kF, kT , kT , kF , pid + 5 , 2 , 56);
+ InsertVp8(seq_num + 6 , kF, kT , kT , kF , pid + 6 , 1 , 56);
+ InsertVp8(seq_num + 7 , kF, kT , kT , kF , pid + 7 , 2 , 56);
+ InsertVp8(seq_num + 8 , kF, kT , kT , kF , pid + 8 , 0 , 57);
+ InsertVp8(seq_num + 9 , kF, kT , kT , kT , pid + 9 , 2 , 57);
+ InsertVp8(seq_num + 10, kF, kT , kT , kT , pid + 10, 1 , 57);
+ InsertVp8(seq_num + 11, kF, kT , kT , kF , pid + 11, 2 , 57);
+
+ ASSERT_EQ(12UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1 , pid);
+ CheckReferencesVp8(pid + 2 , pid);
+ CheckReferencesVp8(pid + 3 , pid, pid + 1, pid + 2);
+ CheckReferencesVp8(pid + 4 , pid);
+ CheckReferencesVp8(pid + 5 , pid + 2, pid + 3, pid + 4);
+ CheckReferencesVp8(pid + 6 , pid + 2, pid + 4);
+ CheckReferencesVp8(pid + 7 , pid + 4, pid + 5, pid + 6);
+ CheckReferencesVp8(pid + 8 , pid + 4);
+ CheckReferencesVp8(pid + 9 , pid + 8);
+ CheckReferencesVp8(pid + 10, pid + 8);
+ CheckReferencesVp8(pid + 11, pid + 8, pid + 9, pid + 10);
+}
+
+// Test with 3 temporal layers in a 0212 pattern.
+TEST_F(TestPacketBuffer, Vp8TemporalLayersReordering_0212) {
+ uint16_t pid = 126;
+ uint16_t seq_num = Rand();
+
+ // seq_num , kf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num + 1 , kF, kT , kT , kT , pid + 1 , 2 , 55);
+ InsertVp8(seq_num , kT, kT , kT , kF , pid , 0 , 55);
+ InsertVp8(seq_num + 2 , kF, kT , kT , kT , pid + 2 , 1 , 55);
+ InsertVp8(seq_num + 4 , kF, kT , kT , kF , pid + 4 , 0 , 56);
+ InsertVp8(seq_num + 5 , kF, kT , kT , kF , pid + 5 , 2 , 56);
+ InsertVp8(seq_num + 3 , kF, kT , kT , kF , pid + 3 , 2 , 55);
+ InsertVp8(seq_num + 7 , kF, kT , kT , kF , pid + 7 , 2 , 56);
+ InsertVp8(seq_num + 9 , kF, kT , kT , kT , pid + 9 , 2 , 57);
+ InsertVp8(seq_num + 6 , kF, kT , kT , kF , pid + 6 , 1 , 56);
+ InsertVp8(seq_num + 8 , kF, kT , kT , kF , pid + 8 , 0 , 57);
+ InsertVp8(seq_num + 11, kF, kT , kT , kF , pid + 11, 2 , 57);
+ InsertVp8(seq_num + 10, kF, kT , kT , kT , pid + 10, 1 , 57);
+
+ ASSERT_EQ(12UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1 , pid);
+ CheckReferencesVp8(pid + 2 , pid);
+ CheckReferencesVp8(pid + 3 , pid, pid + 1, pid + 2);
+ CheckReferencesVp8(pid + 4 , pid);
+ CheckReferencesVp8(pid + 5 , pid + 2, pid + 3, pid + 4);
+ CheckReferencesVp8(pid + 6 , pid + 2, pid + 4);
+ CheckReferencesVp8(pid + 7 , pid + 4, pid + 5, pid + 6);
+ CheckReferencesVp8(pid + 8 , pid + 4);
+ CheckReferencesVp8(pid + 9 , pid + 8);
+ CheckReferencesVp8(pid + 10, pid + 8);
+ CheckReferencesVp8(pid + 11, pid + 8, pid + 9, pid + 10);
+}
+
+TEST_F(TestPacketBuffer, Vp8InsertManyFrames_0212) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ const int keyframes_to_insert = 50;
+ const int frames_per_keyframe = 120; // Should be a multiple of 4.
+ uint8_t tl0 = 128;
+
+ for (int k = 0; k < keyframes_to_insert; ++k) {
+ // seq_num , keyf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num , kT , kT , kT , kF , pid , 0 , tl0);
+ InsertVp8(seq_num + 1, kF , kT , kT , kT , pid + 1, 2 , tl0);
+ InsertVp8(seq_num + 2, kF , kT , kT , kT , pid + 2, 1 , tl0);
+ InsertVp8(seq_num + 3, kF , kT , kT , kF , pid + 3, 2 , tl0);
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1, pid);
+ CheckReferencesVp8(pid + 2, pid);
+ CheckReferencesVp8(pid + 3, pid, pid + 1, pid + 2);
+ frames_from_callback_.clear();
+ ++tl0;
+
+ for (int f = 4; f < frames_per_keyframe; f += 4) {
+ uint16_t sf = seq_num + f;
+ uint16_t pidf = pid + f;
+
+ // seq_num, keyf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(sf , kF , kT , kT , kF , pidf , 0 , tl0);
+ InsertVp8(sf + 1 , kF , kT , kT , kF , pidf + 1, 2 , tl0);
+ InsertVp8(sf + 2 , kF , kT , kT , kF , pidf + 2, 1 , tl0);
+ InsertVp8(sf + 3 , kF , kT , kT , kF , pidf + 3, 2 , tl0);
+ CheckReferencesVp8(pidf, pidf - 4);
+ CheckReferencesVp8(pidf + 1, pidf, pidf - 1, pidf - 2);
+ CheckReferencesVp8(pidf + 2, pidf, pidf - 2);
+ CheckReferencesVp8(pidf + 3, pidf, pidf + 1, pidf + 2);
+ frames_from_callback_.clear();
+ ++tl0;
+ }
+
+ pid += frames_per_keyframe;
+ seq_num += frames_per_keyframe;
+ }
+}
+
+TEST_F(TestPacketBuffer, Vp8LayerSync) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ // seq_num , keyf, frst, lst, sync, pid , tid, tl0
+ InsertVp8(seq_num , kT , kT , kT , kF , pid , 0 , 0);
+ InsertVp8(seq_num + 1 , kF , kT , kT , kT , pid + 1 , 1 , 0);
+ InsertVp8(seq_num + 2 , kF , kT , kT , kF , pid + 2 , 0 , 1);
+ ASSERT_EQ(3UL, frames_from_callback_.size());
+
+ InsertVp8(seq_num + 4 , kF , kT , kT , kF , pid + 4 , 0 , 2);
+ InsertVp8(seq_num + 5 , kF , kT , kT , kT , pid + 5 , 1 , 2);
+ InsertVp8(seq_num + 6 , kF , kT , kT , kF , pid + 6 , 0 , 3);
+ InsertVp8(seq_num + 7 , kF , kT , kT , kF , pid + 7 , 1 , 3);
+
+ ASSERT_EQ(7UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1, pid);
+ CheckReferencesVp8(pid + 2, pid);
+ CheckReferencesVp8(pid + 4, pid + 2);
+ CheckReferencesVp8(pid + 5, pid + 4);
+ CheckReferencesVp8(pid + 6, pid + 4);
+ CheckReferencesVp8(pid + 7, pid + 6, pid + 5);
+}
+
+TEST_F(TestPacketBuffer, Vp8InsertLargeFrames) {
+ packet_buffer_.reset(new PacketBuffer(1 << 3, 1 << 12, this));
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+
+ const uint16_t packets_per_frame = 1000;
+ uint16_t current = seq_num;
+ uint16_t end = current + packets_per_frame;
+
+ // seq_num , keyf, frst, lst, sync, pid, tid, tl0
+ InsertVp8(current++, kT , kT , kF , kF , pid, 0 , 0);
+ while (current != end)
+ InsertVp8(current++, kF , kF , kF , kF , pid, 0 , 0);
+ InsertVp8(current++, kF , kF , kT , kF , pid, 0 , 0);
+ end = current + packets_per_frame;
+
+ for (int f = 1; f < 4; ++f) {
+ InsertVp8(current++, kF , kT , kF , kF , pid + f, 0, f);
+ while (current != end)
+ InsertVp8(current++, kF , kF , kF , kF , pid + f, 0, f);
+ InsertVp8(current++, kF , kF , kT , kF , pid + f, 0, f);
+ end = current + packets_per_frame;
+ }
+
+ ASSERT_EQ(4UL, frames_from_callback_.size());
+ CheckReferencesVp8(pid);
+ CheckReferencesVp8(pid + 1, pid);
+ CheckReferencesVp8(pid + 2, pid + 1);
+ CheckReferencesVp8(pid + 3, pid + 2);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofInsertOneFrame) {
+ uint16_t pid = Rand();
+ uint16_t seq_num = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode1);
+
+ // seq_num, keyf, frst, lst, up, pid, sid, tid, tl0, ss
+ InsertVp9Gof(seq_num, kT , kT , kT , kF, pid, 0 , 0 , 0 , &ss);
+
+ CheckReferencesVp9(pid, 0);
+}
+
+TEST_F(TestPacketBuffer, Vp9NoPictureIdReordered) {
+ uint16_t sn = 0xfffa;
+
+ // sn , kf, frst, lst
+ InsertVp9Gof(sn + 1 , kF, kF , kF);
+ InsertVp9Gof(sn , kT, kT , kF);
+ InsertVp9Gof(sn + 2 , kF, kF , kT);
+ InsertVp9Gof(sn + 4 , kF, kF , kT);
+ InsertVp9Gof(sn + 6 , kF, kF , kF);
+ InsertVp9Gof(sn + 3 , kF, kT , kF);
+ InsertVp9Gof(sn + 7 , kF, kF , kF);
+ InsertVp9Gof(sn + 5 , kF, kT , kF);
+ InsertVp9Gof(sn + 9 , kF, kT , kT);
+ InsertVp9Gof(sn + 10, kF, kT , kF);
+ InsertVp9Gof(sn + 8 , kF, kF , kT);
+ InsertVp9Gof(sn + 13, kF, kT , kF);
+ InsertVp9Gof(sn + 14, kF, kF , kF);
+ InsertVp9Gof(sn + 12, kT, kT , kT);
+ InsertVp9Gof(sn + 11, kF, kF , kT);
+ InsertVp9Gof(sn + 16, kF, kF , kF);
+ InsertVp9Gof(sn + 19, kF, kT , kF);
+ InsertVp9Gof(sn + 15, kF, kF , kF);
+ InsertVp9Gof(sn + 17, kF, kF , kT);
+ InsertVp9Gof(sn + 20, kF, kF , kT);
+ InsertVp9Gof(sn + 21, kF, kT , kT);
+ InsertVp9Gof(sn + 18, kF, kT , kT);
+
+ ASSERT_EQ(10UL, frames_from_callback_.size());
+ CheckReferencesVp9(sn + 2 , 0);
+ CheckReferencesVp9(sn + 4 , 0, sn + 2);
+ CheckReferencesVp9(sn + 8 , 0, sn + 4);
+ CheckReferencesVp9(sn + 9 , 0, sn + 8);
+ CheckReferencesVp9(sn + 11, 0, sn + 9);
+ CheckReferencesVp9(sn + 12, 0);
+ CheckReferencesVp9(sn + 17, 0, sn + 12);
+ CheckReferencesVp9(sn + 18, 0, sn + 17);
+ CheckReferencesVp9(sn + 20, 0, sn + 18);
+ CheckReferencesVp9(sn + 21, 0, sn + 20);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayers_0) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode1); // Only 1 spatial layer.
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 0 , 3);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 4);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 0 , 5);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kF, pid + 6 , 0 , 0 , 6);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 0 , 7);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kF, pid + 8 , 0 , 0 , 8);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 0 , 9);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 0 , 10);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kF, pid + 11, 0 , 0 , 11);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 12);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 0 , 13);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 0 , 14);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 0 , 15);
+ InsertVp9Gof(sn + 16, kF, kT , kT , kF, pid + 16, 0 , 0 , 16);
+ InsertVp9Gof(sn + 17, kF, kT , kT , kF, pid + 17, 0 , 0 , 17);
+ InsertVp9Gof(sn + 18, kF, kT , kT , kF, pid + 18, 0 , 0 , 18);
+ InsertVp9Gof(sn + 19, kF, kT , kT , kF, pid + 19, 0 , 0 , 19);
+
+ ASSERT_EQ(20UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid + 1);
+ CheckReferencesVp9(pid + 3 , 0, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid + 3);
+ CheckReferencesVp9(pid + 5 , 0, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 5);
+ CheckReferencesVp9(pid + 7 , 0, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 7);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 9);
+ CheckReferencesVp9(pid + 11, 0, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 11);
+ CheckReferencesVp9(pid + 13, 0, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 13);
+ CheckReferencesVp9(pid + 15, 0, pid + 14);
+ CheckReferencesVp9(pid + 16, 0, pid + 15);
+ CheckReferencesVp9(pid + 17, 0, pid + 16);
+ CheckReferencesVp9(pid + 18, 0, pid + 17);
+ CheckReferencesVp9(pid + 19, 0, pid + 18);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayersReordered_0) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode1); // Only 1 spatial layer.
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 0 , 1);
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 4);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 0 , 3);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 0 , 5);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 0 , 7);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kF, pid + 6 , 0 , 0 , 6);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kF, pid + 8 , 0 , 0 , 8);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 0 , 10);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 0 , 13);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kF, pid + 11, 0 , 0 , 11);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 0 , 9);
+ InsertVp9Gof(sn + 16, kF, kT , kT , kF, pid + 16, 0 , 0 , 16);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 0 , 14);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 0 , 15);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 12);
+ InsertVp9Gof(sn + 17, kF, kT , kT , kF, pid + 17, 0 , 0 , 17);
+ InsertVp9Gof(sn + 19, kF, kT , kT , kF, pid + 19, 0 , 0 , 19);
+ InsertVp9Gof(sn + 18, kF, kT , kT , kF, pid + 18, 0 , 0 , 18);
+
+ ASSERT_EQ(20UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid + 1);
+ CheckReferencesVp9(pid + 3 , 0, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid + 3);
+ CheckReferencesVp9(pid + 5 , 0, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 5);
+ CheckReferencesVp9(pid + 7 , 0, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 7);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 9);
+ CheckReferencesVp9(pid + 11, 0, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 11);
+ CheckReferencesVp9(pid + 13, 0, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 13);
+ CheckReferencesVp9(pid + 15, 0, pid + 14);
+ CheckReferencesVp9(pid + 16, 0, pid + 15);
+ CheckReferencesVp9(pid + 17, 0, pid + 16);
+ CheckReferencesVp9(pid + 18, 0, pid + 17);
+ CheckReferencesVp9(pid + 19, 0, pid + 18);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayers_01) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode2); // 0101 pattern
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 1 , 0);
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 1 , 1);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 1 , 2);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kF, pid + 6 , 0 , 0 , 3);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 1 , 3);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kF, pid + 8 , 0 , 0 , 4);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 1 , 4);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 0 , 5);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kF, pid + 11, 0 , 1 , 5);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 6);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 1 , 6);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 0 , 7);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 1 , 7);
+ InsertVp9Gof(sn + 16, kF, kT , kT , kF, pid + 16, 0 , 0 , 8);
+ InsertVp9Gof(sn + 17, kF, kT , kT , kF, pid + 17, 0 , 1 , 8);
+ InsertVp9Gof(sn + 18, kF, kT , kT , kF, pid + 18, 0 , 0 , 9);
+ InsertVp9Gof(sn + 19, kF, kT , kT , kF, pid + 19, 0 , 1 , 9);
+
+ ASSERT_EQ(20UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid);
+ CheckReferencesVp9(pid + 3 , 0, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid + 2);
+ CheckReferencesVp9(pid + 5 , 0, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 4);
+ CheckReferencesVp9(pid + 7 , 0, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 6);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 8);
+ CheckReferencesVp9(pid + 11, 0, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 10);
+ CheckReferencesVp9(pid + 13, 0, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 12);
+ CheckReferencesVp9(pid + 15, 0, pid + 14);
+ CheckReferencesVp9(pid + 16, 0, pid + 14);
+ CheckReferencesVp9(pid + 17, 0, pid + 16);
+ CheckReferencesVp9(pid + 18, 0, pid + 16);
+ CheckReferencesVp9(pid + 19, 0, pid + 18);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayersReordered_01) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode2); // 01 pattern
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 1 , 0);
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 1 , 1);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 1 , 2);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 1 , 3);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kF, pid + 6 , 0 , 0 , 3);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 0 , 5);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kF, pid + 8 , 0 , 0 , 4);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 1 , 4);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kF, pid + 11, 0 , 1 , 5);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 1 , 6);
+ InsertVp9Gof(sn + 16, kF, kT , kT , kF, pid + 16, 0 , 0 , 8);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 6);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 0 , 7);
+ InsertVp9Gof(sn + 17, kF, kT , kT , kF, pid + 17, 0 , 1 , 8);
+ InsertVp9Gof(sn + 19, kF, kT , kT , kF, pid + 19, 0 , 1 , 9);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 1 , 7);
+ InsertVp9Gof(sn + 18, kF, kT , kT , kF, pid + 18, 0 , 0 , 9);
+
+ ASSERT_EQ(20UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid);
+ CheckReferencesVp9(pid + 3 , 0, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid + 2);
+ CheckReferencesVp9(pid + 5 , 0, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 4);
+ CheckReferencesVp9(pid + 7 , 0, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 6);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 8);
+ CheckReferencesVp9(pid + 11, 0, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 10);
+ CheckReferencesVp9(pid + 13, 0, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 12);
+ CheckReferencesVp9(pid + 15, 0, pid + 14);
+ CheckReferencesVp9(pid + 16, 0, pid + 14);
+ CheckReferencesVp9(pid + 17, 0, pid + 16);
+ CheckReferencesVp9(pid + 18, 0, pid + 16);
+ CheckReferencesVp9(pid + 19, 0, pid + 18);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayers_0212) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode3); // 0212 pattern
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 2 , 0);
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 1 , 0);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 2 , 0);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kF, pid + 6 , 0 , 1 , 1);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kF, pid + 8 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 2 , 2);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 1 , 2);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kF, pid + 11, 0 , 2 , 2);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 3);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 2 , 3);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 1 , 3);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 2 , 3);
+ InsertVp9Gof(sn + 16, kF, kT , kT , kF, pid + 16, 0 , 0 , 4);
+ InsertVp9Gof(sn + 17, kF, kT , kT , kF, pid + 17, 0 , 2 , 4);
+ InsertVp9Gof(sn + 18, kF, kT , kT , kF, pid + 18, 0 , 1 , 4);
+ InsertVp9Gof(sn + 19, kF, kT , kT , kF, pid + 19, 0 , 2 , 4);
+
+ ASSERT_EQ(20UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid);
+ CheckReferencesVp9(pid + 3 , 0, pid + 1, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid);
+ CheckReferencesVp9(pid + 5 , 0, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 4);
+ CheckReferencesVp9(pid + 7 , 0, pid + 5, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 4);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 8);
+ CheckReferencesVp9(pid + 11, 0, pid + 9, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 8);
+ CheckReferencesVp9(pid + 13, 0, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 12);
+ CheckReferencesVp9(pid + 15, 0, pid + 13, pid + 14);
+ CheckReferencesVp9(pid + 16, 0, pid + 12);
+ CheckReferencesVp9(pid + 17, 0, pid + 16);
+ CheckReferencesVp9(pid + 18, 0, pid + 16);
+ CheckReferencesVp9(pid + 19, 0, pid + 17, pid + 18);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayersReordered_0212) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode3); // 0212 pattern
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 1 , 0);
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 2 , 0);
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 2 , 0);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kF, pid + 6 , 0 , 1 , 1);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 2 , 2);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kF, pid + 8 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kF, pid + 11, 0 , 2 , 2);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 1 , 2);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 2 , 3);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 3);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 1 , 3);
+ InsertVp9Gof(sn + 16, kF, kT , kT , kF, pid + 16, 0 , 0 , 4);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 2 , 3);
+ InsertVp9Gof(sn + 17, kF, kT , kT , kF, pid + 17, 0 , 2 , 4);
+ InsertVp9Gof(sn + 19, kF, kT , kT , kF, pid + 19, 0 , 2 , 4);
+ InsertVp9Gof(sn + 18, kF, kT , kT , kF, pid + 18, 0 , 1 , 4);
+
+ ASSERT_EQ(20UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid);
+ CheckReferencesVp9(pid + 3 , 0, pid + 1, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid);
+ CheckReferencesVp9(pid + 5 , 0, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 4);
+ CheckReferencesVp9(pid + 7 , 0, pid + 5, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 4);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 8);
+ CheckReferencesVp9(pid + 11, 0, pid + 9, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 8);
+ CheckReferencesVp9(pid + 13, 0, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 12);
+ CheckReferencesVp9(pid + 15, 0, pid + 13, pid + 14);
+ CheckReferencesVp9(pid + 16, 0, pid + 12);
+ CheckReferencesVp9(pid + 17, 0, pid + 16);
+ CheckReferencesVp9(pid + 18, 0, pid + 16);
+ CheckReferencesVp9(pid + 19, 0, pid + 17, pid + 18);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayersUpSwitch_02120212) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode4); // 02120212 pattern
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 2 , 0);
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 1 , 0);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 2 , 0);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kT, pid + 6 , 0 , 1 , 1);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kT, pid + 8 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 2 , 2);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 1 , 2);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kT, pid + 11, 0 , 2 , 2);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 3);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 2 , 3);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 1 , 3);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 2 , 3);
+
+ ASSERT_EQ(16UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid);
+ CheckReferencesVp9(pid + 3 , 0, pid + 1, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid);
+ CheckReferencesVp9(pid + 5 , 0, pid + 3, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 2, pid + 4);
+ CheckReferencesVp9(pid + 7 , 0, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 4);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 8);
+ CheckReferencesVp9(pid + 11, 0, pid + 9, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 8);
+ CheckReferencesVp9(pid + 13, 0, pid + 11, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 10, pid + 12);
+ CheckReferencesVp9(pid + 15, 0, pid + 13, pid + 14);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayersUpSwitchReordered_02120212) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode4); // 02120212 pattern
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 2 , 0);
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 1 , 0);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 2 , 0);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 2 , 1);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 2 , 2);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kT, pid + 6 , 0 , 1 , 1);
+ InsertVp9Gof(sn + 12, kF, kT , kT , kF, pid + 12, 0 , 0 , 3);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 1 , 2);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kT, pid + 8 , 0 , 0 , 2);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kT, pid + 11, 0 , 2 , 2);
+ InsertVp9Gof(sn + 13, kF, kT , kT , kF, pid + 13, 0 , 2 , 3);
+ InsertVp9Gof(sn + 15, kF, kT , kT , kF, pid + 15, 0 , 2 , 3);
+ InsertVp9Gof(sn + 14, kF, kT , kT , kF, pid + 14, 0 , 1 , 3);
+
+ ASSERT_EQ(16UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid);
+ CheckReferencesVp9(pid + 3 , 0, pid + 1, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid);
+ CheckReferencesVp9(pid + 5 , 0, pid + 3, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 2, pid + 4);
+ CheckReferencesVp9(pid + 7 , 0, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 4);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 8);
+ CheckReferencesVp9(pid + 11, 0, pid + 9, pid + 10);
+ CheckReferencesVp9(pid + 12, 0, pid + 8);
+ CheckReferencesVp9(pid + 13, 0, pid + 11, pid + 12);
+ CheckReferencesVp9(pid + 14, 0, pid + 10, pid + 12);
+ CheckReferencesVp9(pid + 15, 0, pid + 13, pid + 14);
+}
+
+TEST_F(TestPacketBuffer, Vp9GofTemporalLayersReordered_01_0212) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode2); // 01 pattern
+
+ // sn , kf, frst, lst, up, pid , sid, tid, tl0, ss
+ InsertVp9Gof(sn + 1 , kF, kT , kT , kF, pid + 1 , 0 , 1 , 0);
+ InsertVp9Gof(sn , kT, kT , kT , kF, pid , 0 , 0 , 0 , &ss);
+ InsertVp9Gof(sn + 3 , kF, kT , kT , kF, pid + 3 , 0 , 1 , 1);
+ InsertVp9Gof(sn + 6 , kF, kT , kT , kF, pid + 6 , 0 , 1 , 2);
+ ss.SetGofInfoVP9(kTemporalStructureMode3); // 0212 pattern
+ InsertVp9Gof(sn + 4 , kF, kT , kT , kF, pid + 4 , 0 , 0 , 2 , &ss);
+ InsertVp9Gof(sn + 2 , kF, kT , kT , kF, pid + 2 , 0 , 0 , 1);
+ InsertVp9Gof(sn + 5 , kF, kT , kT , kF, pid + 5 , 0 , 2 , 2);
+ InsertVp9Gof(sn + 8 , kF, kT , kT , kF, pid + 8 , 0 , 0 , 3);
+ InsertVp9Gof(sn + 10, kF, kT , kT , kF, pid + 10, 0 , 1 , 3);
+ InsertVp9Gof(sn + 7 , kF, kT , kT , kF, pid + 7 , 0 , 2 , 2);
+ InsertVp9Gof(sn + 11, kF, kT , kT , kF, pid + 11, 0 , 2 , 3);
+ InsertVp9Gof(sn + 9 , kF, kT , kT , kF, pid + 9 , 0 , 2 , 3);
+
+ ASSERT_EQ(12UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+ CheckReferencesVp9(pid + 1 , 0, pid);
+ CheckReferencesVp9(pid + 2 , 0, pid);
+ CheckReferencesVp9(pid + 3 , 0, pid + 2);
+ CheckReferencesVp9(pid + 4 , 0, pid);
+ CheckReferencesVp9(pid + 5 , 0, pid + 4);
+ CheckReferencesVp9(pid + 6 , 0, pid + 4);
+ CheckReferencesVp9(pid + 7 , 0, pid + 5, pid + 6);
+ CheckReferencesVp9(pid + 8 , 0, pid + 4);
+ CheckReferencesVp9(pid + 9 , 0, pid + 8);
+ CheckReferencesVp9(pid + 10, 0, pid + 8);
+ CheckReferencesVp9(pid + 11, 0, pid + 9, pid + 10);
+}
+
+TEST_F(TestPacketBuffer, Vp9FlexibleModeOneFrame) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+
+ // sn, kf, frst, lst, intr, pid, sid, tid, tl0
+ InsertVp9Flex(sn, kT, kT , kT , kF , pid, 0 , 0 , 0);
+
+ ASSERT_EQ(1UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid, 0);
+}
+
+TEST_F(TestPacketBuffer, Vp9FlexibleModeTwoSpatialLayers) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+
+ // sn , kf, frst, lst, intr, pid , sid, tid, tl0, refs
+ InsertVp9Flex(sn , kT, kT , kT , kF , pid , 0 , 0 , 0);
+ InsertVp9Flex(sn + 1 , kT, kT , kT , kT , pid , 1 , 0 , 0);
+ InsertVp9Flex(sn + 2 , kF, kT , kT , kF , pid + 1, 1 , 0 , 0 , {1});
+ InsertVp9Flex(sn + 3 , kF, kT , kT , kF , pid + 2, 0 , 0 , 1 , {2});
+ InsertVp9Flex(sn + 4 , kF, kT , kT , kF , pid + 2, 1 , 0 , 1 , {1});
+ InsertVp9Flex(sn + 5 , kF, kT , kT , kF , pid + 3, 1 , 0 , 1 , {1});
+ InsertVp9Flex(sn + 6 , kF, kT , kT , kF , pid + 4, 0 , 0 , 2 , {2});
+ InsertVp9Flex(sn + 7 , kF, kT , kT , kF , pid + 4, 1 , 0 , 2 , {1});
+ InsertVp9Flex(sn + 8 , kF, kT , kT , kF , pid + 5, 1 , 0 , 2 , {1});
+ InsertVp9Flex(sn + 9 , kF, kT , kT , kF , pid + 6, 0 , 0 , 3 , {2});
+ InsertVp9Flex(sn + 10, kF, kT , kT , kF , pid + 6, 1 , 0 , 3 , {1});
+ InsertVp9Flex(sn + 11, kF, kT , kT , kF , pid + 7, 1 , 0 , 3 , {1});
+ InsertVp9Flex(sn + 12, kF, kT , kT , kF , pid + 8, 0 , 0 , 4 , {2});
+ InsertVp9Flex(sn + 13, kF, kT , kT , kF , pid + 8, 1 , 0 , 4 , {1});
+
+ ASSERT_EQ(14UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid , 0);
+ CheckReferencesVp9(pid , 1);
+ CheckReferencesVp9(pid + 1, 1, pid);
+ CheckReferencesVp9(pid + 2, 0, pid);
+ CheckReferencesVp9(pid + 2, 1, pid + 1);
+ CheckReferencesVp9(pid + 3, 1, pid + 2);
+ CheckReferencesVp9(pid + 4, 0, pid + 2);
+ CheckReferencesVp9(pid + 4, 1, pid + 3);
+ CheckReferencesVp9(pid + 5, 1, pid + 4);
+ CheckReferencesVp9(pid + 6, 0, pid + 4);
+ CheckReferencesVp9(pid + 6, 1, pid + 5);
+ CheckReferencesVp9(pid + 7, 1, pid + 6);
+ CheckReferencesVp9(pid + 8, 0, pid + 6);
+ CheckReferencesVp9(pid + 8, 1, pid + 7);
+}
+
+TEST_F(TestPacketBuffer, Vp9FlexibleModeTwoSpatialLayersReordered) {
+ uint16_t pid = Rand();
+ uint16_t sn = Rand();
+
+ // sn , kf, frst, lst, intr, pid , sid, tid, tl0, refs
+ InsertVp9Flex(sn + 1 , kT, kT , kT , kT , pid , 1 , 0 , 0);
+ InsertVp9Flex(sn + 2 , kF, kT , kT , kF , pid + 1, 1 , 0 , 0 , {1});
+ InsertVp9Flex(sn , kT, kT , kT , kF , pid , 0 , 0 , 0);
+ InsertVp9Flex(sn + 4 , kF, kT , kT , kF , pid + 2, 1 , 0 , 1 , {1});
+ InsertVp9Flex(sn + 5 , kF, kT , kT , kF , pid + 3, 1 , 0 , 1 , {1});
+ InsertVp9Flex(sn + 3 , kF, kT , kT , kF , pid + 2, 0 , 0 , 1 , {2});
+ InsertVp9Flex(sn + 7 , kF, kT , kT , kF , pid + 4, 1 , 0 , 2 , {1});
+ InsertVp9Flex(sn + 6 , kF, kT , kT , kF , pid + 4, 0 , 0 , 2 , {2});
+ InsertVp9Flex(sn + 8 , kF, kT , kT , kF , pid + 5, 1 , 0 , 2 , {1});
+ InsertVp9Flex(sn + 9 , kF, kT , kT , kF , pid + 6, 0 , 0 , 3 , {2});
+ InsertVp9Flex(sn + 11, kF, kT , kT , kF , pid + 7, 1 , 0 , 3 , {1});
+ InsertVp9Flex(sn + 10, kF, kT , kT , kF , pid + 6, 1 , 0 , 3 , {1});
+ InsertVp9Flex(sn + 13, kF, kT , kT , kF , pid + 8, 1 , 0 , 4 , {1});
+ InsertVp9Flex(sn + 12, kF, kT , kT , kF , pid + 8, 0 , 0 , 4 , {2});
+
+ ASSERT_EQ(14UL, frames_from_callback_.size());
+ CheckReferencesVp9(pid , 0);
+ CheckReferencesVp9(pid , 1);
+ CheckReferencesVp9(pid + 1, 1, pid);
+ CheckReferencesVp9(pid + 2, 0, pid);
+ CheckReferencesVp9(pid + 2, 1, pid + 1);
+ CheckReferencesVp9(pid + 3, 1, pid + 2);
+ CheckReferencesVp9(pid + 4, 0, pid + 2);
+ CheckReferencesVp9(pid + 4, 1, pid + 3);
+ CheckReferencesVp9(pid + 5, 1, pid + 4);
+ CheckReferencesVp9(pid + 6, 0, pid + 4);
+ CheckReferencesVp9(pid + 6, 1, pid + 5);
+ CheckReferencesVp9(pid + 7, 1, pid + 6);
+ CheckReferencesVp9(pid + 8, 0, pid + 6);
+ CheckReferencesVp9(pid + 8, 1, pid + 7);
}
} // namespace video_coding
diff --git a/chromium/third_party/webrtc/modules/video_coding/qm_select.cc b/chromium/third_party/webrtc/modules/video_coding/qm_select.cc
deleted file mode 100644
index 9da42bb33c6..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/qm_select.cc
+++ /dev/null
@@ -1,953 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/qm_select.h"
-
-#include <math.h>
-
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/video_coding/include/video_coding_defines.h"
-#include "webrtc/modules/video_coding/internal_defines.h"
-#include "webrtc/modules/video_coding/qm_select_data.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-// QM-METHOD class
-
-VCMQmMethod::VCMQmMethod()
- : content_metrics_(NULL),
- width_(0),
- height_(0),
- user_frame_rate_(0.0f),
- native_width_(0),
- native_height_(0),
- native_frame_rate_(0.0f),
- image_type_(kVGA),
- framerate_level_(kFrameRateHigh),
- init_(false) {
- ResetQM();
-}
-
-VCMQmMethod::~VCMQmMethod() {}
-
-void VCMQmMethod::ResetQM() {
- aspect_ratio_ = 1.0f;
- motion_.Reset();
- spatial_.Reset();
- content_class_ = 0;
-}
-
-uint8_t VCMQmMethod::ComputeContentClass() {
- ComputeMotionNFD();
- ComputeSpatial();
- return content_class_ = 3 * motion_.level + spatial_.level;
-}
-
-void VCMQmMethod::UpdateContent(const VideoContentMetrics* contentMetrics) {
- content_metrics_ = contentMetrics;
-}
-
-void VCMQmMethod::ComputeMotionNFD() {
- if (content_metrics_) {
- motion_.value = content_metrics_->motion_magnitude;
- }
- // Determine motion level.
- if (motion_.value < kLowMotionNfd) {
- motion_.level = kLow;
- } else if (motion_.value > kHighMotionNfd) {
- motion_.level = kHigh;
- } else {
- motion_.level = kDefault;
- }
-}
-
-void VCMQmMethod::ComputeSpatial() {
- float spatial_err = 0.0;
- float spatial_err_h = 0.0;
- float spatial_err_v = 0.0;
- if (content_metrics_) {
- spatial_err = content_metrics_->spatial_pred_err;
- spatial_err_h = content_metrics_->spatial_pred_err_h;
- spatial_err_v = content_metrics_->spatial_pred_err_v;
- }
- // Spatial measure: take average of 3 prediction errors.
- spatial_.value = (spatial_err + spatial_err_h + spatial_err_v) / 3.0f;
-
- // Reduce thresholds for large scenes/higher pixel correlation.
- float scale2 = image_type_ > kVGA ? kScaleTexture : 1.0;
-
- if (spatial_.value > scale2 * kHighTexture) {
- spatial_.level = kHigh;
- } else if (spatial_.value < scale2 * kLowTexture) {
- spatial_.level = kLow;
- } else {
- spatial_.level = kDefault;
- }
-}
-
-ImageType VCMQmMethod::GetImageType(uint16_t width, uint16_t height) {
- // Get the image type for the encoder frame size.
- uint32_t image_size = width * height;
- if (image_size == kSizeOfImageType[kQCIF]) {
- return kQCIF;
- } else if (image_size == kSizeOfImageType[kHCIF]) {
- return kHCIF;
- } else if (image_size == kSizeOfImageType[kQVGA]) {
- return kQVGA;
- } else if (image_size == kSizeOfImageType[kCIF]) {
- return kCIF;
- } else if (image_size == kSizeOfImageType[kHVGA]) {
- return kHVGA;
- } else if (image_size == kSizeOfImageType[kVGA]) {
- return kVGA;
- } else if (image_size == kSizeOfImageType[kQFULLHD]) {
- return kQFULLHD;
- } else if (image_size == kSizeOfImageType[kWHD]) {
- return kWHD;
- } else if (image_size == kSizeOfImageType[kFULLHD]) {
- return kFULLHD;
- } else {
- // No exact match, find closet one.
- return FindClosestImageType(width, height);
- }
-}
-
-ImageType VCMQmMethod::FindClosestImageType(uint16_t width, uint16_t height) {
- float size = static_cast<float>(width * height);
- float min = size;
- int isel = 0;
- for (int i = 0; i < kNumImageTypes; ++i) {
- float dist = fabs(size - kSizeOfImageType[i]);
- if (dist < min) {
- min = dist;
- isel = i;
- }
- }
- return static_cast<ImageType>(isel);
-}
-
-FrameRateLevelClass VCMQmMethod::FrameRateLevel(float avg_framerate) {
- if (avg_framerate <= kLowFrameRate) {
- return kFrameRateLow;
- } else if (avg_framerate <= kMiddleFrameRate) {
- return kFrameRateMiddle1;
- } else if (avg_framerate <= kHighFrameRate) {
- return kFrameRateMiddle2;
- } else {
- return kFrameRateHigh;
- }
-}
-
-// RESOLUTION CLASS
-
-VCMQmResolution::VCMQmResolution() : qm_(new VCMResolutionScale()) {
- Reset();
-}
-
-VCMQmResolution::~VCMQmResolution() {
- delete qm_;
-}
-
-void VCMQmResolution::ResetRates() {
- sum_target_rate_ = 0.0f;
- sum_incoming_framerate_ = 0.0f;
- sum_rate_MM_ = 0.0f;
- sum_rate_MM_sgn_ = 0.0f;
- sum_packet_loss_ = 0.0f;
- buffer_level_ = kInitBufferLevel * target_bitrate_;
- frame_cnt_ = 0;
- frame_cnt_delta_ = 0;
- low_buffer_cnt_ = 0;
- update_rate_cnt_ = 0;
-}
-
-void VCMQmResolution::ResetDownSamplingState() {
- state_dec_factor_spatial_ = 1.0;
- state_dec_factor_temporal_ = 1.0;
- for (int i = 0; i < kDownActionHistorySize; i++) {
- down_action_history_[i].spatial = kNoChangeSpatial;
- down_action_history_[i].temporal = kNoChangeTemporal;
- }
-}
-
-void VCMQmResolution::Reset() {
- target_bitrate_ = 0.0f;
- incoming_framerate_ = 0.0f;
- buffer_level_ = 0.0f;
- per_frame_bandwidth_ = 0.0f;
- avg_target_rate_ = 0.0f;
- avg_incoming_framerate_ = 0.0f;
- avg_ratio_buffer_low_ = 0.0f;
- avg_rate_mismatch_ = 0.0f;
- avg_rate_mismatch_sgn_ = 0.0f;
- avg_packet_loss_ = 0.0f;
- encoder_state_ = kStableEncoding;
- num_layers_ = 1;
- ResetRates();
- ResetDownSamplingState();
- ResetQM();
-}
-
-EncoderState VCMQmResolution::GetEncoderState() {
- return encoder_state_;
-}
-
-// Initialize state after re-initializing the encoder,
-// i.e., after SetEncodingData() in mediaOpt.
-int VCMQmResolution::Initialize(float bitrate,
- float user_framerate,
- uint16_t width,
- uint16_t height,
- int num_layers) {
- if (user_framerate == 0.0f || width == 0 || height == 0) {
- return VCM_PARAMETER_ERROR;
- }
- Reset();
- target_bitrate_ = bitrate;
- incoming_framerate_ = user_framerate;
- UpdateCodecParameters(user_framerate, width, height);
- native_width_ = width;
- native_height_ = height;
- native_frame_rate_ = user_framerate;
- num_layers_ = num_layers;
- // Initial buffer level.
- buffer_level_ = kInitBufferLevel * target_bitrate_;
- // Per-frame bandwidth.
- per_frame_bandwidth_ = target_bitrate_ / user_framerate;
- init_ = true;
- return VCM_OK;
-}
-
-void VCMQmResolution::UpdateCodecParameters(float frame_rate,
- uint16_t width,
- uint16_t height) {
- width_ = width;
- height_ = height;
- // |user_frame_rate| is the target frame rate for VPM frame dropper.
- user_frame_rate_ = frame_rate;
- image_type_ = GetImageType(width, height);
-}
-
-// Update rate data after every encoded frame.
-void VCMQmResolution::UpdateEncodedSize(size_t encoded_size) {
- frame_cnt_++;
- // Convert to Kbps.
- float encoded_size_kbits = 8.0f * static_cast<float>(encoded_size) / 1000.0f;
-
- // Update the buffer level:
- // Note this is not the actual encoder buffer level.
- // |buffer_level_| is reset to an initial value after SelectResolution is
- // called, and does not account for frame dropping by encoder or VCM.
- buffer_level_ += per_frame_bandwidth_ - encoded_size_kbits;
-
- // Counter for occurrences of low buffer level:
- // low/negative values means encoder is likely dropping frames.
- if (buffer_level_ <= kPercBufferThr * kInitBufferLevel * target_bitrate_) {
- low_buffer_cnt_++;
- }
-}
-
-// Update various quantities after SetTargetRates in MediaOpt.
-void VCMQmResolution::UpdateRates(float target_bitrate,
- float encoder_sent_rate,
- float incoming_framerate,
- uint8_t packet_loss) {
- // Sum the target bitrate: this is the encoder rate from previous update
- // (~1sec), i.e, before the update for next ~1sec.
- sum_target_rate_ += target_bitrate_;
- update_rate_cnt_++;
-
- // Sum the received (from RTCP reports) packet loss rates.
- sum_packet_loss_ += static_cast<float>(packet_loss / 255.0);
-
- // Sum the sequence rate mismatch:
- // Mismatch here is based on the difference between the target rate
- // used (in previous ~1sec) and the average actual encoding rate measured
- // at previous ~1sec.
- float diff = target_bitrate_ - encoder_sent_rate;
- if (target_bitrate_ > 0.0)
- sum_rate_MM_ += fabs(diff) / target_bitrate_;
- int sgnDiff = diff > 0 ? 1 : (diff < 0 ? -1 : 0);
- // To check for consistent under(+)/over_shooting(-) of target rate.
- sum_rate_MM_sgn_ += sgnDiff;
-
- // Update with the current new target and frame rate:
- // these values are ones the encoder will use for the current/next ~1sec.
- target_bitrate_ = target_bitrate;
- incoming_framerate_ = incoming_framerate;
- sum_incoming_framerate_ += incoming_framerate_;
- // Update the per_frame_bandwidth:
- // this is the per_frame_bw for the current/next ~1sec.
- per_frame_bandwidth_ = 0.0f;
- if (incoming_framerate_ > 0.0f) {
- per_frame_bandwidth_ = target_bitrate_ / incoming_framerate_;
- }
-}
-
-// Select the resolution factors: frame size and frame rate change (qm scales).
-// Selection is for going down in resolution, or for going back up
-// (if a previous down-sampling action was taken).
-
-// In the current version the following constraints are imposed:
-// 1) We only allow for one action, either down or up, at a given time.
-// 2) The possible down-sampling actions are: spatial by 1/2x1/2, 3/4x3/4;
-// temporal/frame rate reduction by 1/2 and 2/3.
-// 3) The action for going back up is the reverse of last (spatial or temporal)
-// down-sampling action. The list of down-sampling actions from the
-// Initialize() state are kept in |down_action_history_|.
-// 4) The total amount of down-sampling (spatial and/or temporal) from the
-// Initialize() state (native resolution) is limited by various factors.
-int VCMQmResolution::SelectResolution(VCMResolutionScale** qm) {
- if (!init_) {
- return VCM_UNINITIALIZED;
- }
- if (content_metrics_ == NULL) {
- Reset();
- *qm = qm_;
- return VCM_OK;
- }
-
- // Check conditions on down-sampling state.
- assert(state_dec_factor_spatial_ >= 1.0f);
- assert(state_dec_factor_temporal_ >= 1.0f);
- assert(state_dec_factor_spatial_ <= kMaxSpatialDown);
- assert(state_dec_factor_temporal_ <= kMaxTempDown);
- assert(state_dec_factor_temporal_ * state_dec_factor_spatial_ <=
- kMaxTotalDown);
-
- // Compute content class for selection.
- content_class_ = ComputeContentClass();
- // Compute various rate quantities for selection.
- ComputeRatesForSelection();
-
- // Get the encoder state.
- ComputeEncoderState();
-
- // Default settings: no action.
- SetDefaultAction();
- *qm = qm_;
-
- // Check for going back up in resolution, if we have had some down-sampling
- // relative to native state in Initialize().
- if (down_action_history_[0].spatial != kNoChangeSpatial ||
- down_action_history_[0].temporal != kNoChangeTemporal) {
- if (GoingUpResolution()) {
- *qm = qm_;
- return VCM_OK;
- }
- }
-
- // Check for going down in resolution.
- if (GoingDownResolution()) {
- *qm = qm_;
- return VCM_OK;
- }
- return VCM_OK;
-}
-
-void VCMQmResolution::SetDefaultAction() {
- qm_->codec_width = width_;
- qm_->codec_height = height_;
- qm_->frame_rate = user_frame_rate_;
- qm_->change_resolution_spatial = false;
- qm_->change_resolution_temporal = false;
- qm_->spatial_width_fact = 1.0f;
- qm_->spatial_height_fact = 1.0f;
- qm_->temporal_fact = 1.0f;
- action_.spatial = kNoChangeSpatial;
- action_.temporal = kNoChangeTemporal;
-}
-
-void VCMQmResolution::ComputeRatesForSelection() {
- avg_target_rate_ = 0.0f;
- avg_incoming_framerate_ = 0.0f;
- avg_ratio_buffer_low_ = 0.0f;
- avg_rate_mismatch_ = 0.0f;
- avg_rate_mismatch_sgn_ = 0.0f;
- avg_packet_loss_ = 0.0f;
- if (frame_cnt_ > 0) {
- avg_ratio_buffer_low_ =
- static_cast<float>(low_buffer_cnt_) / static_cast<float>(frame_cnt_);
- }
- if (update_rate_cnt_ > 0) {
- avg_rate_mismatch_ =
- static_cast<float>(sum_rate_MM_) / static_cast<float>(update_rate_cnt_);
- avg_rate_mismatch_sgn_ = static_cast<float>(sum_rate_MM_sgn_) /
- static_cast<float>(update_rate_cnt_);
- avg_target_rate_ = static_cast<float>(sum_target_rate_) /
- static_cast<float>(update_rate_cnt_);
- avg_incoming_framerate_ = static_cast<float>(sum_incoming_framerate_) /
- static_cast<float>(update_rate_cnt_);
- avg_packet_loss_ = static_cast<float>(sum_packet_loss_) /
- static_cast<float>(update_rate_cnt_);
- }
- // For selection we may want to weight some quantities more heavily
- // with the current (i.e., next ~1sec) rate values.
- avg_target_rate_ =
- kWeightRate * avg_target_rate_ + (1.0 - kWeightRate) * target_bitrate_;
- avg_incoming_framerate_ = kWeightRate * avg_incoming_framerate_ +
- (1.0 - kWeightRate) * incoming_framerate_;
- // Use base layer frame rate for temporal layers: this will favor spatial.
- assert(num_layers_ > 0);
- framerate_level_ = FrameRateLevel(avg_incoming_framerate_ /
- static_cast<float>(1 << (num_layers_ - 1)));
-}
-
-void VCMQmResolution::ComputeEncoderState() {
- // Default.
- encoder_state_ = kStableEncoding;
-
- // Assign stressed state if:
- // 1) occurrences of low buffer levels is high, or
- // 2) rate mis-match is high, and consistent over-shooting by encoder.
- if ((avg_ratio_buffer_low_ > kMaxBufferLow) ||
- ((avg_rate_mismatch_ > kMaxRateMisMatch) &&
- (avg_rate_mismatch_sgn_ < -kRateOverShoot))) {
- encoder_state_ = kStressedEncoding;
- }
- // Assign easy state if:
- // 1) rate mis-match is high, and
- // 2) consistent under-shooting by encoder.
- if ((avg_rate_mismatch_ > kMaxRateMisMatch) &&
- (avg_rate_mismatch_sgn_ > kRateUnderShoot)) {
- encoder_state_ = kEasyEncoding;
- }
-}
-
-bool VCMQmResolution::GoingUpResolution() {
- // For going up, we check for undoing the previous down-sampling action.
-
- float fac_width = kFactorWidthSpatial[down_action_history_[0].spatial];
- float fac_height = kFactorHeightSpatial[down_action_history_[0].spatial];
- float fac_temp = kFactorTemporal[down_action_history_[0].temporal];
- // For going up spatially, we allow for going up by 3/4x3/4 at each stage.
- // So if the last spatial action was 1/2x1/2 it would be undone in 2 stages.
- // Modify the fac_width/height for this case.
- if (down_action_history_[0].spatial == kOneQuarterSpatialUniform) {
- fac_width = kFactorWidthSpatial[kOneQuarterSpatialUniform] /
- kFactorWidthSpatial[kOneHalfSpatialUniform];
- fac_height = kFactorHeightSpatial[kOneQuarterSpatialUniform] /
- kFactorHeightSpatial[kOneHalfSpatialUniform];
- }
-
- // Check if we should go up both spatially and temporally.
- if (down_action_history_[0].spatial != kNoChangeSpatial &&
- down_action_history_[0].temporal != kNoChangeTemporal) {
- if (ConditionForGoingUp(fac_width, fac_height, fac_temp,
- kTransRateScaleUpSpatialTemp)) {
- action_.spatial = down_action_history_[0].spatial;
- action_.temporal = down_action_history_[0].temporal;
- UpdateDownsamplingState(kUpResolution);
- return true;
- }
- }
- // Check if we should go up either spatially or temporally.
- bool selected_up_spatial = false;
- bool selected_up_temporal = false;
- if (down_action_history_[0].spatial != kNoChangeSpatial) {
- selected_up_spatial = ConditionForGoingUp(fac_width, fac_height, 1.0f,
- kTransRateScaleUpSpatial);
- }
- if (down_action_history_[0].temporal != kNoChangeTemporal) {
- selected_up_temporal =
- ConditionForGoingUp(1.0f, 1.0f, fac_temp, kTransRateScaleUpTemp);
- }
- if (selected_up_spatial && !selected_up_temporal) {
- action_.spatial = down_action_history_[0].spatial;
- action_.temporal = kNoChangeTemporal;
- UpdateDownsamplingState(kUpResolution);
- return true;
- } else if (!selected_up_spatial && selected_up_temporal) {
- action_.spatial = kNoChangeSpatial;
- action_.temporal = down_action_history_[0].temporal;
- UpdateDownsamplingState(kUpResolution);
- return true;
- } else if (selected_up_spatial && selected_up_temporal) {
- PickSpatialOrTemporal();
- UpdateDownsamplingState(kUpResolution);
- return true;
- }
- return false;
-}
-
-bool VCMQmResolution::ConditionForGoingUp(float fac_width,
- float fac_height,
- float fac_temp,
- float scale_fac) {
- float estimated_transition_rate_up =
- GetTransitionRate(fac_width, fac_height, fac_temp, scale_fac);
- // Go back up if:
- // 1) target rate is above threshold and current encoder state is stable, or
- // 2) encoder state is easy (encoder is significantly under-shooting target).
- if (((avg_target_rate_ > estimated_transition_rate_up) &&
- (encoder_state_ == kStableEncoding)) ||
- (encoder_state_ == kEasyEncoding)) {
- return true;
- } else {
- return false;
- }
-}
-
-bool VCMQmResolution::GoingDownResolution() {
- float estimated_transition_rate_down =
- GetTransitionRate(1.0f, 1.0f, 1.0f, 1.0f);
- float max_rate = kFrameRateFac[framerate_level_] * kMaxRateQm[image_type_];
- // Resolution reduction if:
- // (1) target rate is below transition rate, or
- // (2) encoder is in stressed state and target rate below a max threshold.
- if ((avg_target_rate_ < estimated_transition_rate_down) ||
- (encoder_state_ == kStressedEncoding && avg_target_rate_ < max_rate)) {
- // Get the down-sampling action: based on content class, and how low
- // average target rate is relative to transition rate.
- uint8_t spatial_fact =
- kSpatialAction[content_class_ +
- 9 * RateClass(estimated_transition_rate_down)];
- uint8_t temp_fact =
- kTemporalAction[content_class_ +
- 9 * RateClass(estimated_transition_rate_down)];
-
- switch (spatial_fact) {
- case 4: {
- action_.spatial = kOneQuarterSpatialUniform;
- break;
- }
- case 2: {
- action_.spatial = kOneHalfSpatialUniform;
- break;
- }
- case 1: {
- action_.spatial = kNoChangeSpatial;
- break;
- }
- default: { assert(false); }
- }
- switch (temp_fact) {
- case 3: {
- action_.temporal = kTwoThirdsTemporal;
- break;
- }
- case 2: {
- action_.temporal = kOneHalfTemporal;
- break;
- }
- case 1: {
- action_.temporal = kNoChangeTemporal;
- break;
- }
- default: { assert(false); }
- }
- // Only allow for one action (spatial or temporal) at a given time.
- assert(action_.temporal == kNoChangeTemporal ||
- action_.spatial == kNoChangeSpatial);
-
- // Adjust cases not captured in tables, mainly based on frame rate, and
- // also check for odd frame sizes.
- AdjustAction();
-
- // Update down-sampling state.
- if (action_.spatial != kNoChangeSpatial ||
- action_.temporal != kNoChangeTemporal) {
- UpdateDownsamplingState(kDownResolution);
- return true;
- }
- }
- return false;
-}
-
-float VCMQmResolution::GetTransitionRate(float fac_width,
- float fac_height,
- float fac_temp,
- float scale_fac) {
- ImageType image_type =
- GetImageType(static_cast<uint16_t>(fac_width * width_),
- static_cast<uint16_t>(fac_height * height_));
-
- FrameRateLevelClass framerate_level =
- FrameRateLevel(fac_temp * avg_incoming_framerate_);
- // If we are checking for going up temporally, and this is the last
- // temporal action, then use native frame rate.
- if (down_action_history_[1].temporal == kNoChangeTemporal &&
- fac_temp > 1.0f) {
- framerate_level = FrameRateLevel(native_frame_rate_);
- }
-
- // The maximum allowed rate below which down-sampling is allowed:
- // Nominal values based on image format (frame size and frame rate).
- float max_rate = kFrameRateFac[framerate_level] * kMaxRateQm[image_type];
-
- uint8_t image_class = image_type > kVGA ? 1 : 0;
- uint8_t table_index = image_class * 9 + content_class_;
- // Scale factor for down-sampling transition threshold:
- // factor based on the content class and the image size.
- float scaleTransRate = kScaleTransRateQm[table_index];
- // Threshold bitrate for resolution action.
- return static_cast<float>(scale_fac * scaleTransRate * max_rate);
-}
-
-void VCMQmResolution::UpdateDownsamplingState(UpDownAction up_down) {
- if (up_down == kUpResolution) {
- qm_->spatial_width_fact = 1.0f / kFactorWidthSpatial[action_.spatial];
- qm_->spatial_height_fact = 1.0f / kFactorHeightSpatial[action_.spatial];
- // If last spatial action was 1/2x1/2, we undo it in two steps, so the
- // spatial scale factor in this first step is modified as (4.0/3.0 / 2.0).
- if (action_.spatial == kOneQuarterSpatialUniform) {
- qm_->spatial_width_fact = 1.0f *
- kFactorWidthSpatial[kOneHalfSpatialUniform] /
- kFactorWidthSpatial[kOneQuarterSpatialUniform];
- qm_->spatial_height_fact =
- 1.0f * kFactorHeightSpatial[kOneHalfSpatialUniform] /
- kFactorHeightSpatial[kOneQuarterSpatialUniform];
- }
- qm_->temporal_fact = 1.0f / kFactorTemporal[action_.temporal];
- RemoveLastDownAction();
- } else if (up_down == kDownResolution) {
- ConstrainAmountOfDownSampling();
- ConvertSpatialFractionalToWhole();
- qm_->spatial_width_fact = kFactorWidthSpatial[action_.spatial];
- qm_->spatial_height_fact = kFactorHeightSpatial[action_.spatial];
- qm_->temporal_fact = kFactorTemporal[action_.temporal];
- InsertLatestDownAction();
- } else {
- // This function should only be called if either the Up or Down action
- // has been selected.
- assert(false);
- }
- UpdateCodecResolution();
- state_dec_factor_spatial_ = state_dec_factor_spatial_ *
- qm_->spatial_width_fact *
- qm_->spatial_height_fact;
- state_dec_factor_temporal_ = state_dec_factor_temporal_ * qm_->temporal_fact;
-}
-
-void VCMQmResolution::UpdateCodecResolution() {
- if (action_.spatial != kNoChangeSpatial) {
- qm_->change_resolution_spatial = true;
- qm_->codec_width =
- static_cast<uint16_t>(width_ / qm_->spatial_width_fact + 0.5f);
- qm_->codec_height =
- static_cast<uint16_t>(height_ / qm_->spatial_height_fact + 0.5f);
- // Size should not exceed native sizes.
- assert(qm_->codec_width <= native_width_);
- assert(qm_->codec_height <= native_height_);
- // New sizes should be multiple of 2, otherwise spatial should not have
- // been selected.
- assert(qm_->codec_width % 2 == 0);
- assert(qm_->codec_height % 2 == 0);
- }
- if (action_.temporal != kNoChangeTemporal) {
- qm_->change_resolution_temporal = true;
- // Update the frame rate based on the average incoming frame rate.
- qm_->frame_rate = avg_incoming_framerate_ / qm_->temporal_fact + 0.5f;
- if (down_action_history_[0].temporal == 0) {
- // When we undo the last temporal-down action, make sure we go back up
- // to the native frame rate. Since the incoming frame rate may
- // fluctuate over time, |avg_incoming_framerate_| scaled back up may
- // be smaller than |native_frame rate_|.
- qm_->frame_rate = native_frame_rate_;
- }
- }
-}
-
-uint8_t VCMQmResolution::RateClass(float transition_rate) {
- return avg_target_rate_ < (kFacLowRate * transition_rate)
- ? 0
- : (avg_target_rate_ >= transition_rate ? 2 : 1);
-}
-
-// TODO(marpan): Would be better to capture these frame rate adjustments by
-// extending the table data (qm_select_data.h).
-void VCMQmResolution::AdjustAction() {
- // If the spatial level is default state (neither low or high), motion level
- // is not high, and spatial action was selected, switch to 2/3 frame rate
- // reduction if the average incoming frame rate is high.
- if (spatial_.level == kDefault && motion_.level != kHigh &&
- action_.spatial != kNoChangeSpatial &&
- framerate_level_ == kFrameRateHigh) {
- action_.spatial = kNoChangeSpatial;
- action_.temporal = kTwoThirdsTemporal;
- }
- // If both motion and spatial level are low, and temporal down action was
- // selected, switch to spatial 3/4x3/4 if the frame rate is not above the
- // lower middle level (|kFrameRateMiddle1|).
- if (motion_.level == kLow && spatial_.level == kLow &&
- framerate_level_ <= kFrameRateMiddle1 &&
- action_.temporal != kNoChangeTemporal) {
- action_.spatial = kOneHalfSpatialUniform;
- action_.temporal = kNoChangeTemporal;
- }
- // If spatial action is selected, and there has been too much spatial
- // reduction already (i.e., 1/4), then switch to temporal action if the
- // average frame rate is not low.
- if (action_.spatial != kNoChangeSpatial &&
- down_action_history_[0].spatial == kOneQuarterSpatialUniform &&
- framerate_level_ != kFrameRateLow) {
- action_.spatial = kNoChangeSpatial;
- action_.temporal = kTwoThirdsTemporal;
- }
- // Never use temporal action if number of temporal layers is above 2.
- if (num_layers_ > 2) {
- if (action_.temporal != kNoChangeTemporal) {
- action_.spatial = kOneHalfSpatialUniform;
- }
- action_.temporal = kNoChangeTemporal;
- }
- // If spatial action was selected, we need to make sure the frame sizes
- // are multiples of two. Otherwise switch to 2/3 temporal.
- if (action_.spatial != kNoChangeSpatial && !EvenFrameSize()) {
- action_.spatial = kNoChangeSpatial;
- // Only one action (spatial or temporal) is allowed at a given time, so need
- // to check whether temporal action is currently selected.
- action_.temporal = kTwoThirdsTemporal;
- }
-}
-
-void VCMQmResolution::ConvertSpatialFractionalToWhole() {
- // If 3/4 spatial is selected, check if there has been another 3/4,
- // and if so, combine them into 1/2. 1/2 scaling is more efficient than 9/16.
- // Note we define 3/4x3/4 spatial as kOneHalfSpatialUniform.
- if (action_.spatial == kOneHalfSpatialUniform) {
- bool found = false;
- int isel = kDownActionHistorySize;
- for (int i = 0; i < kDownActionHistorySize; ++i) {
- if (down_action_history_[i].spatial == kOneHalfSpatialUniform) {
- isel = i;
- found = true;
- break;
- }
- }
- if (found) {
- action_.spatial = kOneQuarterSpatialUniform;
- state_dec_factor_spatial_ =
- state_dec_factor_spatial_ /
- (kFactorWidthSpatial[kOneHalfSpatialUniform] *
- kFactorHeightSpatial[kOneHalfSpatialUniform]);
- // Check if switching to 1/2x1/2 (=1/4) spatial is allowed.
- ConstrainAmountOfDownSampling();
- if (action_.spatial == kNoChangeSpatial) {
- // Not allowed. Go back to 3/4x3/4 spatial.
- action_.spatial = kOneHalfSpatialUniform;
- state_dec_factor_spatial_ =
- state_dec_factor_spatial_ *
- kFactorWidthSpatial[kOneHalfSpatialUniform] *
- kFactorHeightSpatial[kOneHalfSpatialUniform];
- } else {
- // Switching is allowed. Remove 3/4x3/4 from the history, and update
- // the frame size.
- for (int i = isel; i < kDownActionHistorySize - 1; ++i) {
- down_action_history_[i].spatial = down_action_history_[i + 1].spatial;
- }
- width_ = width_ * kFactorWidthSpatial[kOneHalfSpatialUniform];
- height_ = height_ * kFactorHeightSpatial[kOneHalfSpatialUniform];
- }
- }
- }
-}
-
-// Returns false if the new frame sizes, under the current spatial action,
-// are not multiples of two.
-bool VCMQmResolution::EvenFrameSize() {
- if (action_.spatial == kOneHalfSpatialUniform) {
- if ((width_ * 3 / 4) % 2 != 0 || (height_ * 3 / 4) % 2 != 0) {
- return false;
- }
- } else if (action_.spatial == kOneQuarterSpatialUniform) {
- if ((width_ * 1 / 2) % 2 != 0 || (height_ * 1 / 2) % 2 != 0) {
- return false;
- }
- }
- return true;
-}
-
-void VCMQmResolution::InsertLatestDownAction() {
- if (action_.spatial != kNoChangeSpatial) {
- for (int i = kDownActionHistorySize - 1; i > 0; --i) {
- down_action_history_[i].spatial = down_action_history_[i - 1].spatial;
- }
- down_action_history_[0].spatial = action_.spatial;
- }
- if (action_.temporal != kNoChangeTemporal) {
- for (int i = kDownActionHistorySize - 1; i > 0; --i) {
- down_action_history_[i].temporal = down_action_history_[i - 1].temporal;
- }
- down_action_history_[0].temporal = action_.temporal;
- }
-}
-
-void VCMQmResolution::RemoveLastDownAction() {
- if (action_.spatial != kNoChangeSpatial) {
- // If the last spatial action was 1/2x1/2 we replace it with 3/4x3/4.
- if (action_.spatial == kOneQuarterSpatialUniform) {
- down_action_history_[0].spatial = kOneHalfSpatialUniform;
- } else {
- for (int i = 0; i < kDownActionHistorySize - 1; ++i) {
- down_action_history_[i].spatial = down_action_history_[i + 1].spatial;
- }
- down_action_history_[kDownActionHistorySize - 1].spatial =
- kNoChangeSpatial;
- }
- }
- if (action_.temporal != kNoChangeTemporal) {
- for (int i = 0; i < kDownActionHistorySize - 1; ++i) {
- down_action_history_[i].temporal = down_action_history_[i + 1].temporal;
- }
- down_action_history_[kDownActionHistorySize - 1].temporal =
- kNoChangeTemporal;
- }
-}
-
-void VCMQmResolution::ConstrainAmountOfDownSampling() {
- // Sanity checks on down-sampling selection:
- // override the settings for too small image size and/or frame rate.
- // Also check the limit on current down-sampling states.
-
- float spatial_width_fact = kFactorWidthSpatial[action_.spatial];
- float spatial_height_fact = kFactorHeightSpatial[action_.spatial];
- float temporal_fact = kFactorTemporal[action_.temporal];
- float new_dec_factor_spatial =
- state_dec_factor_spatial_ * spatial_width_fact * spatial_height_fact;
- float new_dec_factor_temp = state_dec_factor_temporal_ * temporal_fact;
-
- // No spatial sampling if current frame size is too small, or if the
- // amount of spatial down-sampling is above maximum spatial down-action.
- if ((width_ * height_) <= kMinImageSize ||
- new_dec_factor_spatial > kMaxSpatialDown) {
- action_.spatial = kNoChangeSpatial;
- new_dec_factor_spatial = state_dec_factor_spatial_;
- }
- // No frame rate reduction if average frame rate is below some point, or if
- // the amount of temporal down-sampling is above maximum temporal down-action.
- if (avg_incoming_framerate_ <= kMinFrameRate ||
- new_dec_factor_temp > kMaxTempDown) {
- action_.temporal = kNoChangeTemporal;
- new_dec_factor_temp = state_dec_factor_temporal_;
- }
- // Check if the total (spatial-temporal) down-action is above maximum allowed,
- // if so, disallow the current selected down-action.
- if (new_dec_factor_spatial * new_dec_factor_temp > kMaxTotalDown) {
- if (action_.spatial != kNoChangeSpatial) {
- action_.spatial = kNoChangeSpatial;
- } else if (action_.temporal != kNoChangeTemporal) {
- action_.temporal = kNoChangeTemporal;
- } else {
- // We only allow for one action (spatial or temporal) at a given time, so
- // either spatial or temporal action is selected when this function is
- // called. If the selected action is disallowed from one of the above
- // 2 prior conditions (on spatial & temporal max down-action), then this
- // condition "total down-action > |kMaxTotalDown|" would not be entered.
- assert(false);
- }
- }
-}
-
-void VCMQmResolution::PickSpatialOrTemporal() {
- // Pick the one that has had the most down-sampling thus far.
- if (state_dec_factor_spatial_ > state_dec_factor_temporal_) {
- action_.spatial = down_action_history_[0].spatial;
- action_.temporal = kNoChangeTemporal;
- } else {
- action_.spatial = kNoChangeSpatial;
- action_.temporal = down_action_history_[0].temporal;
- }
-}
-
-// TODO(marpan): Update when we allow for directional spatial down-sampling.
-void VCMQmResolution::SelectSpatialDirectionMode(float transition_rate) {
- // Default is 4/3x4/3
- // For bit rates well below transitional rate, we select 2x2.
- if (avg_target_rate_ < transition_rate * kRateRedSpatial2X2) {
- qm_->spatial_width_fact = 2.0f;
- qm_->spatial_height_fact = 2.0f;
- }
- // Otherwise check prediction errors and aspect ratio.
- float spatial_err = 0.0f;
- float spatial_err_h = 0.0f;
- float spatial_err_v = 0.0f;
- if (content_metrics_) {
- spatial_err = content_metrics_->spatial_pred_err;
- spatial_err_h = content_metrics_->spatial_pred_err_h;
- spatial_err_v = content_metrics_->spatial_pred_err_v;
- }
-
- // Favor 1x2 if aspect_ratio is 16:9.
- if (aspect_ratio_ >= 16.0f / 9.0f) {
- // Check if 1x2 has lowest prediction error.
- if (spatial_err_h < spatial_err && spatial_err_h < spatial_err_v) {
- qm_->spatial_width_fact = 2.0f;
- qm_->spatial_height_fact = 1.0f;
- }
- }
- // Check for 4/3x4/3 selection: favor 2x2 over 1x2 and 2x1.
- if (spatial_err < spatial_err_h * (1.0f + kSpatialErr2x2VsHoriz) &&
- spatial_err < spatial_err_v * (1.0f + kSpatialErr2X2VsVert)) {
- qm_->spatial_width_fact = 4.0f / 3.0f;
- qm_->spatial_height_fact = 4.0f / 3.0f;
- }
- // Check for 2x1 selection.
- if (spatial_err_v < spatial_err_h * (1.0f - kSpatialErrVertVsHoriz) &&
- spatial_err_v < spatial_err * (1.0f - kSpatialErr2X2VsVert)) {
- qm_->spatial_width_fact = 1.0f;
- qm_->spatial_height_fact = 2.0f;
- }
-}
-
-// ROBUSTNESS CLASS
-
-VCMQmRobustness::VCMQmRobustness() {
- Reset();
-}
-
-VCMQmRobustness::~VCMQmRobustness() {}
-
-void VCMQmRobustness::Reset() {
- prev_total_rate_ = 0.0f;
- prev_rtt_time_ = 0;
- prev_packet_loss_ = 0;
- prev_code_rate_delta_ = 0;
- ResetQM();
-}
-
-// Adjust the FEC rate based on the content and the network state
-// (packet loss rate, total rate/bandwidth, round trip time).
-// Note that packetLoss here is the filtered loss value.
-float VCMQmRobustness::AdjustFecFactor(uint8_t code_rate_delta,
- float total_rate,
- float framerate,
- int64_t rtt_time,
- uint8_t packet_loss) {
- // Default: no adjustment
- float adjust_fec = 1.0f;
- if (content_metrics_ == NULL) {
- return adjust_fec;
- }
- // Compute class state of the content.
- ComputeMotionNFD();
- ComputeSpatial();
-
- // TODO(marpan): Set FEC adjustment factor.
-
- // Keep track of previous values of network state:
- // adjustment may be also based on pattern of changes in network state.
- prev_total_rate_ = total_rate;
- prev_rtt_time_ = rtt_time;
- prev_packet_loss_ = packet_loss;
- prev_code_rate_delta_ = code_rate_delta;
- return adjust_fec;
-}
-
-// Set the UEP (unequal-protection across packets) on/off for the FEC.
-bool VCMQmRobustness::SetUepProtection(uint8_t code_rate_delta,
- float total_rate,
- uint8_t packet_loss,
- bool frame_type) {
- // Default.
- return false;
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/qm_select.h b/chromium/third_party/webrtc/modules/video_coding/qm_select.h
deleted file mode 100644
index 764b5ed8e37..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/qm_select.h
+++ /dev/null
@@ -1,356 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
-#define WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/typedefs.h"
-
-/******************************************************/
-/* Quality Modes: Resolution and Robustness settings */
-/******************************************************/
-
-namespace webrtc {
-struct VideoContentMetrics;
-
-struct VCMResolutionScale {
- VCMResolutionScale()
- : codec_width(640),
- codec_height(480),
- frame_rate(30.0f),
- spatial_width_fact(1.0f),
- spatial_height_fact(1.0f),
- temporal_fact(1.0f),
- change_resolution_spatial(false),
- change_resolution_temporal(false) {}
- uint16_t codec_width;
- uint16_t codec_height;
- float frame_rate;
- float spatial_width_fact;
- float spatial_height_fact;
- float temporal_fact;
- bool change_resolution_spatial;
- bool change_resolution_temporal;
-};
-
-enum ImageType {
- kQCIF = 0, // 176x144
- kHCIF, // 264x216 = half(~3/4x3/4) CIF.
- kQVGA, // 320x240 = quarter VGA.
- kCIF, // 352x288
- kHVGA, // 480x360 = half(~3/4x3/4) VGA.
- kVGA, // 640x480
- kQFULLHD, // 960x540 = quarter FULLHD, and half(~3/4x3/4) WHD.
- kWHD, // 1280x720
- kFULLHD, // 1920x1080
- kNumImageTypes
-};
-
-const uint32_t kSizeOfImageType[kNumImageTypes] = {
- 25344, 57024, 76800, 101376, 172800, 307200, 518400, 921600, 2073600};
-
-enum FrameRateLevelClass {
- kFrameRateLow,
- kFrameRateMiddle1,
- kFrameRateMiddle2,
- kFrameRateHigh
-};
-
-enum ContentLevelClass { kLow, kHigh, kDefault };
-
-struct VCMContFeature {
- VCMContFeature() : value(0.0f), level(kDefault) {}
- void Reset() {
- value = 0.0f;
- level = kDefault;
- }
- float value;
- ContentLevelClass level;
-};
-
-enum UpDownAction { kUpResolution, kDownResolution };
-
-enum SpatialAction {
- kNoChangeSpatial,
- kOneHalfSpatialUniform, // 3/4 x 3/4: 9/6 ~1/2 pixel reduction.
- kOneQuarterSpatialUniform, // 1/2 x 1/2: 1/4 pixel reduction.
- kNumModesSpatial
-};
-
-enum TemporalAction {
- kNoChangeTemporal,
- kTwoThirdsTemporal, // 2/3 frame rate reduction
- kOneHalfTemporal, // 1/2 frame rate reduction
- kNumModesTemporal
-};
-
-struct ResolutionAction {
- ResolutionAction() : spatial(kNoChangeSpatial), temporal(kNoChangeTemporal) {}
- SpatialAction spatial;
- TemporalAction temporal;
-};
-
-// Down-sampling factors for spatial (width and height), and temporal.
-const float kFactorWidthSpatial[kNumModesSpatial] = {1.0f, 4.0f / 3.0f, 2.0f};
-
-const float kFactorHeightSpatial[kNumModesSpatial] = {1.0f, 4.0f / 3.0f, 2.0f};
-
-const float kFactorTemporal[kNumModesTemporal] = {1.0f, 1.5f, 2.0f};
-
-enum EncoderState {
- kStableEncoding, // Low rate mis-match, stable buffer levels.
- kStressedEncoding, // Significant over-shooting of target rate,
- // Buffer under-flow, etc.
- kEasyEncoding // Significant under-shooting of target rate.
-};
-
-// QmMethod class: main class for resolution and robustness settings
-
-class VCMQmMethod {
- public:
- VCMQmMethod();
- virtual ~VCMQmMethod();
-
- // Reset values
- void ResetQM();
- virtual void Reset() = 0;
-
- // Compute content class.
- uint8_t ComputeContentClass();
-
- // Update with the content metrics.
- void UpdateContent(const VideoContentMetrics* content_metrics);
-
- // Compute spatial texture magnitude and level.
- // Spatial texture is a spatial prediction error measure.
- void ComputeSpatial();
-
- // Compute motion magnitude and level for NFD metric.
- // NFD is normalized frame difference (normalized by spatial variance).
- void ComputeMotionNFD();
-
- // Get the imageType (CIF, VGA, HD, etc) for the system width/height.
- ImageType GetImageType(uint16_t width, uint16_t height);
-
- // Return the closest image type.
- ImageType FindClosestImageType(uint16_t width, uint16_t height);
-
- // Get the frame rate level.
- FrameRateLevelClass FrameRateLevel(float frame_rate);
-
- protected:
- // Content Data.
- const VideoContentMetrics* content_metrics_;
-
- // Encoder frame sizes and native frame sizes.
- uint16_t width_;
- uint16_t height_;
- float user_frame_rate_;
- uint16_t native_width_;
- uint16_t native_height_;
- float native_frame_rate_;
- float aspect_ratio_;
- // Image type and frame rate leve, for the current encoder resolution.
- ImageType image_type_;
- FrameRateLevelClass framerate_level_;
- // Content class data.
- VCMContFeature motion_;
- VCMContFeature spatial_;
- uint8_t content_class_;
- bool init_;
-};
-
-// Resolution settings class
-
-class VCMQmResolution : public VCMQmMethod {
- public:
- VCMQmResolution();
- virtual ~VCMQmResolution();
-
- // Reset all quantities.
- virtual void Reset();
-
- // Reset rate quantities and counters after every SelectResolution() call.
- void ResetRates();
-
- // Reset down-sampling state.
- void ResetDownSamplingState();
-
- // Get the encoder state.
- EncoderState GetEncoderState();
-
- // Initialize after SetEncodingData in media_opt.
- int Initialize(float bitrate,
- float user_framerate,
- uint16_t width,
- uint16_t height,
- int num_layers);
-
- // Update the encoder frame size.
- void UpdateCodecParameters(float frame_rate, uint16_t width, uint16_t height);
-
- // Update with actual bit rate (size of the latest encoded frame)
- // and frame type, after every encoded frame.
- void UpdateEncodedSize(size_t encoded_size);
-
- // Update with new target bitrate, actual encoder sent rate, frame_rate,
- // loss rate: every ~1 sec from SetTargetRates in media_opt.
- void UpdateRates(float target_bitrate,
- float encoder_sent_rate,
- float incoming_framerate,
- uint8_t packet_loss);
-
- // Extract ST (spatio-temporal) resolution action.
- // Inputs: qm: Reference to the quality modes pointer.
- // Output: the spatial and/or temporal scale change.
- int SelectResolution(VCMResolutionScale** qm);
-
- private:
- // Set the default resolution action.
- void SetDefaultAction();
-
- // Compute rates for the selection of down-sampling action.
- void ComputeRatesForSelection();
-
- // Compute the encoder state.
- void ComputeEncoderState();
-
- // Return true if the action is to go back up in resolution.
- bool GoingUpResolution();
-
- // Return true if the action is to go down in resolution.
- bool GoingDownResolution();
-
- // Check the condition for going up in resolution by the scale factors:
- // |facWidth|, |facHeight|, |facTemp|.
- // |scaleFac| is a scale factor for the transition rate.
- bool ConditionForGoingUp(float fac_width,
- float fac_height,
- float fac_temp,
- float scale_fac);
-
- // Get the bitrate threshold for the resolution action.
- // The case |facWidth|=|facHeight|=|facTemp|==1 is for down-sampling action.
- // |scaleFac| is a scale factor for the transition rate.
- float GetTransitionRate(float fac_width,
- float fac_height,
- float fac_temp,
- float scale_fac);
-
- // Update the down-sampling state.
- void UpdateDownsamplingState(UpDownAction up_down);
-
- // Update the codec frame size and frame rate.
- void UpdateCodecResolution();
-
- // Return a state based on average target rate relative transition rate.
- uint8_t RateClass(float transition_rate);
-
- // Adjust the action selected from the table.
- void AdjustAction();
-
- // Covert 2 stages of 3/4 (=9/16) spatial decimation to 1/2.
- void ConvertSpatialFractionalToWhole();
-
- // Returns true if the new frame sizes, under the selected spatial action,
- // are of even size.
- bool EvenFrameSize();
-
- // Insert latest down-sampling action into the history list.
- void InsertLatestDownAction();
-
- // Remove the last (first element) down-sampling action from the list.
- void RemoveLastDownAction();
-
- // Check constraints on the amount of down-sampling allowed.
- void ConstrainAmountOfDownSampling();
-
- // For going up in resolution: pick spatial or temporal action,
- // if both actions were separately selected.
- void PickSpatialOrTemporal();
-
- // Select the directional (1x2 or 2x1) spatial down-sampling action.
- void SelectSpatialDirectionMode(float transition_rate);
-
- enum { kDownActionHistorySize = 10 };
-
- VCMResolutionScale* qm_;
- // Encoder rate control parameters.
- float target_bitrate_;
- float incoming_framerate_;
- float per_frame_bandwidth_;
- float buffer_level_;
-
- // Data accumulated every ~1sec from MediaOpt.
- float sum_target_rate_;
- float sum_incoming_framerate_;
- float sum_rate_MM_;
- float sum_rate_MM_sgn_;
- float sum_packet_loss_;
- // Counters.
- uint32_t frame_cnt_;
- uint32_t frame_cnt_delta_;
- uint32_t update_rate_cnt_;
- uint32_t low_buffer_cnt_;
-
- // Resolution state parameters.
- float state_dec_factor_spatial_;
- float state_dec_factor_temporal_;
-
- // Quantities used for selection.
- float avg_target_rate_;
- float avg_incoming_framerate_;
- float avg_ratio_buffer_low_;
- float avg_rate_mismatch_;
- float avg_rate_mismatch_sgn_;
- float avg_packet_loss_;
- EncoderState encoder_state_;
- ResolutionAction action_;
- // Short history of the down-sampling actions from the Initialize() state.
- // This is needed for going up in resolution. Since the total amount of
- // down-sampling actions are constrained, the length of the list need not be
- // large: i.e., (4/3) ^{kDownActionHistorySize} <= kMaxDownSample.
- ResolutionAction down_action_history_[kDownActionHistorySize];
- int num_layers_;
-};
-
-// Robustness settings class.
-
-class VCMQmRobustness : public VCMQmMethod {
- public:
- VCMQmRobustness();
- ~VCMQmRobustness();
-
- virtual void Reset();
-
- // Adjust FEC rate based on content: every ~1 sec from SetTargetRates.
- // Returns an adjustment factor.
- float AdjustFecFactor(uint8_t code_rate_delta,
- float total_rate,
- float framerate,
- int64_t rtt_time,
- uint8_t packet_loss);
-
- // Set the UEP protection on/off.
- bool SetUepProtection(uint8_t code_rate_delta,
- float total_rate,
- uint8_t packet_loss,
- bool frame_type);
-
- private:
- // Previous state of network parameters.
- float prev_total_rate_;
- int64_t prev_rtt_time_;
- uint8_t prev_packet_loss_;
- uint8_t prev_code_rate_delta_;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/qm_select_data.h b/chromium/third_party/webrtc/modules/video_coding/qm_select_data.h
deleted file mode 100644
index 49190ef53b9..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/qm_select_data.h
+++ /dev/null
@@ -1,227 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_DATA_H_
-#define WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_DATA_H_
-
-/***************************************************************
-*QMSelectData.h
-* This file includes parameters for content-aware media optimization
-****************************************************************/
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-//
-// PARAMETERS FOR RESOLUTION ADAPTATION
-//
-
-// Initial level of buffer in secs.
-const float kInitBufferLevel = 0.5f;
-
-// Threshold of (max) buffer size below which we consider too low (underflow).
-const float kPercBufferThr = 0.10f;
-
-// Threshold on the occurrences of low buffer levels.
-const float kMaxBufferLow = 0.30f;
-
-// Threshold on rate mismatch.
-const float kMaxRateMisMatch = 0.5f;
-
-// Threshold on amount of under/over encoder shooting.
-const float kRateOverShoot = 0.75f;
-const float kRateUnderShoot = 0.75f;
-
-// Factor to favor weighting the average rates with the current/last data.
-const float kWeightRate = 0.70f;
-
-// Factor for transitional rate for going back up in resolution.
-const float kTransRateScaleUpSpatial = 1.25f;
-const float kTransRateScaleUpTemp = 1.25f;
-const float kTransRateScaleUpSpatialTemp = 1.25f;
-
-// Threshold on packet loss rate, above which favor resolution reduction.
-const float kPacketLossThr = 0.1f;
-
-// Factor for reducing transitional bitrate under packet loss.
-const float kPacketLossRateFac = 1.0f;
-
-// Maximum possible transitional rate for down-sampling:
-// (units in kbps), for 30fps.
-const uint16_t kMaxRateQm[9] = {
- 0, // QCIF
- 50, // kHCIF
- 125, // kQVGA
- 200, // CIF
- 280, // HVGA
- 400, // VGA
- 700, // QFULLHD
- 1000, // WHD
- 1500 // FULLHD
-};
-
-// Frame rate scale for maximum transition rate.
-const float kFrameRateFac[4] = {
- 0.5f, // Low
- 0.7f, // Middle level 1
- 0.85f, // Middle level 2
- 1.0f, // High
-};
-
-// Scale for transitional rate: based on content class
-// motion=L/H/D,spatial==L/H/D: for low, high, middle levels
-const float kScaleTransRateQm[18] = {
- // VGA and lower
- 0.40f, // L, L
- 0.50f, // L, H
- 0.40f, // L, D
- 0.60f, // H ,L
- 0.60f, // H, H
- 0.60f, // H, D
- 0.50f, // D, L
- 0.50f, // D, D
- 0.50f, // D, H
-
- // over VGA
- 0.40f, // L, L
- 0.50f, // L, H
- 0.40f, // L, D
- 0.60f, // H ,L
- 0.60f, // H, H
- 0.60f, // H, D
- 0.50f, // D, L
- 0.50f, // D, D
- 0.50f, // D, H
-};
-
-// Threshold on the target rate relative to transitional rate.
-const float kFacLowRate = 0.5f;
-
-// Action for down-sampling:
-// motion=L/H/D,spatial==L/H/D, for low, high, middle levels;
-// rate = 0/1/2, for target rate state relative to transition rate.
-const uint8_t kSpatialAction[27] = {
- // rateClass = 0:
- 1, // L, L
- 1, // L, H
- 1, // L, D
- 4, // H ,L
- 1, // H, H
- 4, // H, D
- 4, // D, L
- 1, // D, H
- 2, // D, D
-
- // rateClass = 1:
- 1, // L, L
- 1, // L, H
- 1, // L, D
- 2, // H ,L
- 1, // H, H
- 2, // H, D
- 2, // D, L
- 1, // D, H
- 2, // D, D
-
- // rateClass = 2:
- 1, // L, L
- 1, // L, H
- 1, // L, D
- 2, // H ,L
- 1, // H, H
- 2, // H, D
- 2, // D, L
- 1, // D, H
- 2, // D, D
-};
-
-const uint8_t kTemporalAction[27] = {
- // rateClass = 0:
- 3, // L, L
- 2, // L, H
- 2, // L, D
- 1, // H ,L
- 3, // H, H
- 1, // H, D
- 1, // D, L
- 2, // D, H
- 1, // D, D
-
- // rateClass = 1:
- 3, // L, L
- 3, // L, H
- 3, // L, D
- 1, // H ,L
- 3, // H, H
- 1, // H, D
- 1, // D, L
- 3, // D, H
- 1, // D, D
-
- // rateClass = 2:
- 1, // L, L
- 3, // L, H
- 3, // L, D
- 1, // H ,L
- 3, // H, H
- 1, // H, D
- 1, // D, L
- 3, // D, H
- 1, // D, D
-};
-
-// Control the total amount of down-sampling allowed.
-const float kMaxSpatialDown = 8.0f;
-const float kMaxTempDown = 3.0f;
-const float kMaxTotalDown = 9.0f;
-
-// Minimum image size for a spatial down-sampling.
-const int kMinImageSize = 176 * 144;
-
-// Minimum frame rate for temporal down-sampling:
-// no frame rate reduction if incomingFrameRate <= MIN_FRAME_RATE.
-const int kMinFrameRate = 8;
-
-//
-// PARAMETERS FOR FEC ADJUSTMENT: TODO (marpan)
-//
-
-//
-// PARAMETETS FOR SETTING LOW/HIGH STATES OF CONTENT METRICS:
-//
-
-// Thresholds for frame rate:
-const int kLowFrameRate = 10;
-const int kMiddleFrameRate = 15;
-const int kHighFrameRate = 25;
-
-// Thresholds for motion: motion level is from NFD.
-const float kHighMotionNfd = 0.075f;
-const float kLowMotionNfd = 0.03f;
-
-// Thresholds for spatial prediction error:
-// this is applied on the average of (2x2,1x2,2x1).
-const float kHighTexture = 0.035f;
-const float kLowTexture = 0.020f;
-
-// Used to reduce thresholds for larger/HD scenes: correction factor since
-// higher correlation in HD scenes means lower spatial prediction error.
-const float kScaleTexture = 0.9f;
-
-// Percentage reduction in transitional bitrate for 2x2 selected over 1x2/2x1.
-const float kRateRedSpatial2X2 = 0.6f;
-
-const float kSpatialErr2x2VsHoriz = 0.1f; // percentage to favor 2x2 over H
-const float kSpatialErr2X2VsVert = 0.1f; // percentage to favor 2x2 over V
-const float kSpatialErrVertVsHoriz = 0.1f; // percentage to favor H over V
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_DATA_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/qm_select_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/qm_select_unittest.cc
deleted file mode 100644
index f8542ec6763..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/qm_select_unittest.cc
+++ /dev/null
@@ -1,1307 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file includes unit tests the QmResolution class
- * In particular, for the selection of spatial and/or temporal down-sampling.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/video_coding/qm_select.h"
-
-namespace webrtc {
-
-// Representative values of content metrics for: low/high/medium(default) state,
-// based on parameters settings in qm_select_data.h.
-const float kSpatialLow = 0.01f;
-const float kSpatialMedium = 0.03f;
-const float kSpatialHigh = 0.1f;
-const float kTemporalLow = 0.01f;
-const float kTemporalMedium = 0.06f;
-const float kTemporalHigh = 0.1f;
-
-class QmSelectTest : public ::testing::Test {
- protected:
- QmSelectTest()
- : qm_resolution_(new VCMQmResolution()),
- content_metrics_(new VideoContentMetrics()),
- qm_scale_(NULL) {}
- VCMQmResolution* qm_resolution_;
- VideoContentMetrics* content_metrics_;
- VCMResolutionScale* qm_scale_;
-
- void InitQmNativeData(float initial_bit_rate,
- int user_frame_rate,
- int native_width,
- int native_height,
- int num_layers);
-
- void UpdateQmEncodedFrame(size_t* encoded_size, size_t num_updates);
-
- void UpdateQmRateData(int* target_rate,
- int* encoder_sent_rate,
- int* incoming_frame_rate,
- uint8_t* fraction_lost,
- int num_updates);
-
- void UpdateQmContentData(float motion_metric,
- float spatial_metric,
- float spatial_metric_horiz,
- float spatial_metric_vert);
-
- bool IsSelectedActionCorrect(VCMResolutionScale* qm_scale,
- float fac_width,
- float fac_height,
- float fac_temp,
- uint16_t new_width,
- uint16_t new_height,
- float new_frame_rate);
-
- void TearDown() {
- delete qm_resolution_;
- delete content_metrics_;
- }
-};
-
-TEST_F(QmSelectTest, HandleInputs) {
- // Expect parameter error. Initialize with invalid inputs.
- EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 0, 640, 480, 1));
- EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 30, 640, 0, 1));
- EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 30, 0, 480, 1));
-
- // Expect uninitialized error.: No valid initialization before selection.
- EXPECT_EQ(-7, qm_resolution_->SelectResolution(&qm_scale_));
-
- VideoContentMetrics* content_metrics = NULL;
- EXPECT_EQ(0, qm_resolution_->Initialize(1000, 30, 640, 480, 1));
- qm_resolution_->UpdateContent(content_metrics);
- // Content metrics are NULL: Expect success and no down-sampling action.
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0, 1.0, 1.0, 640, 480, 30.0f));
-}
-
-// TODO(marpan): Add a test for number of temporal layers > 1.
-
-// No down-sampling action at high rates.
-TEST_F(QmSelectTest, NoActionHighRate) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(800, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {800, 800, 800};
- int encoder_sent_rate[] = {800, 800, 800};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- UpdateQmContentData(kTemporalLow, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(0, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
-}
-
-// Rate is well below transition, down-sampling action is taken,
-// depending on the content state.
-TEST_F(QmSelectTest, DownActionLowRate) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial: 2x2 spatial expected.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, low spatial: 2/3 temporal is expected.
- UpdateQmContentData(kTemporalLow, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(0, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // Medium motion, low spatial: 2x2 spatial expected.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // High motion, high spatial: 2/3 temporal expected.
- UpdateQmContentData(kTemporalHigh, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(4, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial: 1/2 temporal expected.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // Medium motion, high spatial: 1/2 temporal expected.
- UpdateQmContentData(kTemporalMedium, kSpatialHigh, kSpatialHigh,
- kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // High motion, medium spatial: 2x2 spatial expected.
- UpdateQmContentData(kTemporalHigh, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
- // Target frame rate for frame dropper should be the same as previous == 15.
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, medium spatial: high frame rate, so 1/2 temporal expected.
- UpdateQmContentData(kTemporalLow, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(2, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
-
- qm_resolution_->ResetDownSamplingState();
- // Medium motion, medium spatial: high frame rate, so 2/3 temporal expected.
- UpdateQmContentData(kTemporalMedium, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(8, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
-}
-
-// Rate mis-match is high, and we have over-shooting.
-// since target rate is below max for down-sampling, down-sampling is selected.
-TEST_F(QmSelectTest, DownActionHighRateMMOvershoot) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(300, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {300, 300, 300};
- int encoder_sent_rate[] = {900, 900, 900};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
- 480, 360, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
-}
-
-// Rate mis-match is high, target rate is below max for down-sampling,
-// but since we have consistent under-shooting, no down-sampling action.
-TEST_F(QmSelectTest, NoActionHighRateMMUndershoot) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(300, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {300, 300, 300};
- int encoder_sent_rate[] = {100, 100, 100};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
-}
-
-// Buffer is underflowing, and target rate is below max for down-sampling,
-// so action is taken.
-TEST_F(QmSelectTest, DownActionBufferUnderflow) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(300, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update with encoded size over a number of frames.
- // per-frame bandwidth = 15 = 450/30: simulate (decoder) buffer underflow:
- size_t encoded_size[] = {200, 100, 50, 30, 60, 40, 20, 30, 20, 40};
- UpdateQmEncodedFrame(encoded_size, GTEST_ARRAY_SIZE_(encoded_size));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {300, 300, 300};
- int encoder_sent_rate[] = {450, 450, 450};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
- 480, 360, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480, 20.5f));
-}
-
-// Target rate is below max for down-sampling, but buffer level is stable,
-// so no action is taken.
-TEST_F(QmSelectTest, NoActionBufferStable) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(350, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update with encoded size over a number of frames.
- // per-frame bandwidth = 15 = 450/30: simulate stable (decoder) buffer levels.
- size_t encoded_size[] = {40, 10, 10, 16, 18, 20, 17, 20, 16, 15};
- UpdateQmEncodedFrame(encoded_size, GTEST_ARRAY_SIZE_(encoded_size));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {350, 350, 350};
- int encoder_sent_rate[] = {350, 450, 450};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
-
- qm_resolution_->ResetDownSamplingState();
- // Low motion, high spatial
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 30.0f));
-}
-
-// Very low rate, but no spatial down-sampling below some size (QCIF).
-TEST_F(QmSelectTest, LimitDownSpatialAction) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(10, 30, 176, 144, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 176;
- uint16_t codec_height = 144;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(0, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {10, 10, 10};
- int encoder_sent_rate[] = {10, 10, 10};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 176, 144, 30.0f));
-}
-
-// Very low rate, but no frame reduction below some frame_rate (8fps).
-TEST_F(QmSelectTest, LimitDownTemporalAction) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(10, 8, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(8.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {10, 10, 10};
- int encoder_sent_rate[] = {10, 10, 10};
- int incoming_frame_rate[] = {8, 8, 8};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, medium spatial.
- UpdateQmContentData(kTemporalLow, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(2, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 8.0f));
-}
-
-// Two stages: spatial down-sample and then back up spatially,
-// as rate as increased.
-TEST_F(QmSelectTest, 2StageDownSpatialUpSpatial) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- // Reset and go up in rate: expected to go back up, in 2 stages of 3/4.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- // Update rates for a sequence of intervals.
- int target_rate2[] = {400, 400, 400, 400, 400};
- int encoder_sent_rate2[] = {400, 400, 400, 400, 400};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, scale, scale, 1.0f, 480, 360, 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// Two stages: spatial down-sample and then back up spatially, since encoder
-// is under-shooting target even though rate has not increased much.
-TEST_F(QmSelectTest, 2StageDownSpatialUpSpatialUndershoot) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- // Reset rates and simulate under-shooting scenario.: expect to go back up.
- // Goes up spatially in two stages for 1/2x1/2 down-sampling.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- // Update rates for a sequence of intervals.
- int target_rate2[] = {200, 200, 200, 200, 200};
- int encoder_sent_rate2[] = {50, 50, 50, 50, 50};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, scale, scale, 1.0f, 480, 360, 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// Two stages: spatial down-sample and then no action to go up,
-// as encoding rate mis-match is too high.
-TEST_F(QmSelectTest, 2StageDownSpatialNoActionUp) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- // Reset and simulate large rate mis-match: expect no action to go back up.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- // Update rates for a sequence of intervals.
- int target_rate2[] = {400, 400, 400, 400, 400};
- int encoder_sent_rate2[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 320, 240, 30.0f));
-}
-
-// Two stages: temporally down-sample and then back up temporally,
-// as rate as increased.
-TEST_F(QmSelectTest, 2StatgeDownTemporalUpTemporal) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
-
- // Reset rates and go up in rate: expect to go back up.
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate2[] = {400, 400, 400, 400, 400};
- int encoder_sent_rate2[] = {400, 400, 400, 400, 400};
- int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f, 640, 480, 30.0f));
-}
-
-// Two stages: temporal down-sample and then back up temporally, since encoder
-// is under-shooting target even though rate has not increased much.
-TEST_F(QmSelectTest, 2StatgeDownTemporalUpTemporalUndershoot) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480, 15.5f));
-
- // Reset rates and simulate under-shooting scenario.: expect to go back up.
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate2[] = {150, 150, 150, 150, 150};
- int encoder_sent_rate2[] = {50, 50, 50, 50, 50};
- int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f, 640, 480, 30.0f));
-}
-
-// Two stages: temporal down-sample and then no action to go up,
-// as encoding rate mis-match is too high.
-TEST_F(QmSelectTest, 2StageDownTemporalNoActionUp) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(50, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {50, 50, 50};
- int encoder_sent_rate[] = {50, 50, 50};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2, 640, 480, 15.5f));
-
- // Reset and simulate large rate mis-match: expect no action to go back up.
- qm_resolution_->UpdateCodecParameters(15.0f, codec_width, codec_height);
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate2[] = {600, 600, 600, 600, 600};
- int encoder_sent_rate2[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480, 15.0f));
-}
-// 3 stages: spatial down-sample, followed by temporal down-sample,
-// and then go up to full state, as encoding rate has increased.
-TEST_F(QmSelectTest, 3StageDownSpatialTemporlaUpSpatialTemporal) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(80, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {80, 80, 80};
- int encoder_sent_rate[] = {80, 80, 80};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- // Change content data: expect temporal down-sample.
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
-
- // Reset rates and go lower in rate.
- qm_resolution_->ResetRates();
- int target_rate2[] = {40, 40, 40, 40, 40};
- int encoder_sent_rate2[] = {40, 40, 40, 40, 40};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240, 20.5f));
-
- // Reset rates and go high up in rate: expect to go back up both spatial
- // and temporally. The 1/2x1/2 spatial is undone in two stages.
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate3[] = {1000, 1000, 1000, 1000, 1000};
- int encoder_sent_rate3[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f, 480,
- 360, 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// No down-sampling below some total amount.
-TEST_F(QmSelectTest, NoActionTooMuchDownSampling) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(150, 30, 1280, 720, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 1280;
- uint16_t codec_height = 720;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(7, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Update rates for a sequence of intervals.
- int target_rate[] = {150, 150, 150};
- int encoder_sent_rate[] = {150, 150, 150};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 640, 360, 30.0f));
-
- // Reset and lower rates to get another spatial action (3/4x3/4).
- // Lower the frame rate for spatial to be selected again.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(10.0f, 640, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(640, 360));
- // Update rates for a sequence of intervals.
- int target_rate2[] = {70, 70, 70, 70, 70};
- int encoder_sent_rate2[] = {70, 70, 70, 70, 70};
- int incoming_frame_rate2[] = {10, 10, 10, 10, 10};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, medium spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialMedium, kSpatialMedium,
- kSpatialMedium);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
- 480, 270, 10.0f));
-
- // Reset and go to very low rate: no action should be taken,
- // we went down too much already.
- qm_resolution_->ResetRates();
- qm_resolution_->UpdateCodecParameters(10.0f, 480, 270);
- EXPECT_EQ(3, qm_resolution_->GetImageType(480, 270));
- // Update rates for a sequence of intervals.
- int target_rate3[] = {10, 10, 10, 10, 10};
- int encoder_sent_rate3[] = {10, 10, 10, 10, 10};
- int incoming_frame_rate3[] = {10, 10, 10, 10, 10};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 480, 270, 10.0f));
-}
-
-// Multiple down-sampling stages and then undo all of them.
-// Spatial down-sample 3/4x3/4, followed by temporal down-sample 2/3,
-// followed by spatial 3/4x3/4. Then go up to full state,
-// as encoding rate has increased.
-TEST_F(QmSelectTest, MultipleStagesCheckActionHistory1) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(150, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Go down spatial 3/4x3/4.
- // Update rates for a sequence of intervals.
- int target_rate[] = {150, 150, 150};
- int encoder_sent_rate[] = {150, 150, 150};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
- 480, 360, 30.0f));
- // Go down 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- qm_resolution_->ResetRates();
- int target_rate2[] = {100, 100, 100, 100, 100};
- int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 480, 360, 20.5f));
-
- // Go down 3/4x3/4 spatial:
- qm_resolution_->UpdateCodecParameters(20.0f, 480, 360);
- qm_resolution_->ResetRates();
- int target_rate3[] = {80, 80, 80, 80, 80};
- int encoder_sent_rate3[] = {80, 80, 80, 80, 80};
- int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // High motion, low spatial.
- UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- // The two spatial actions of 3/4x3/4 are converted to 1/2x1/2,
- // so scale factor is 2.0.
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 20.0f));
-
- // Reset rates and go high up in rate: expect to go up:
- // 1/2x1x2 spatial and 1/2 temporally.
-
- // Go up 1/2x1/2 spatially and 1/2 temporally. Spatial is done in 2 stages.
- qm_resolution_->UpdateCodecParameters(15.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate4[] = {1000, 1000, 1000, 1000, 1000};
- int encoder_sent_rate4[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate4[] = {15, 15, 15, 15, 15};
- uint8_t fraction_lost4[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
- fraction_lost4, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f, 480,
- 360, 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// Multiple down-sampling and up-sample stages, with partial undoing.
-// Spatial down-sample 1/2x1/2, followed by temporal down-sample 2/3, undo the
-// temporal, then another temporal, and then undo both spatial and temporal.
-TEST_F(QmSelectTest, MultipleStagesCheckActionHistory2) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(80, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Go down 1/2x1/2 spatial.
- // Update rates for a sequence of intervals.
- int target_rate[] = {80, 80, 80};
- int encoder_sent_rate[] = {80, 80, 80};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-
- // Go down 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- qm_resolution_->ResetRates();
- int target_rate2[] = {40, 40, 40, 40, 40};
- int encoder_sent_rate2[] = {40, 40, 40, 40, 40};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, high spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialHigh, kSpatialHigh,
- kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240, 20.5f));
-
- // Go up 2/3 temporally.
- qm_resolution_->UpdateCodecParameters(20.0f, 320, 240);
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate3[] = {150, 150, 150, 150, 150};
- int encoder_sent_rate3[] = {150, 150, 150, 150, 150};
- int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f / 3.0f, 320,
- 240, 30.0f));
-
- // Go down 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
- EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
- qm_resolution_->ResetRates();
- int target_rate4[] = {40, 40, 40, 40, 40};
- int encoder_sent_rate4[] = {40, 40, 40, 40, 40};
- int incoming_frame_rate4[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost4[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
- fraction_lost4, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240, 20.5f));
-
- // Go up spatial and temporal. Spatial undoing is done in 2 stages.
- qm_resolution_->UpdateCodecParameters(20.5f, 320, 240);
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate5[] = {1000, 1000, 1000, 1000, 1000};
- int encoder_sent_rate5[] = {1000, 1000, 1000, 1000, 1000};
- int incoming_frame_rate5[] = {20, 20, 20, 20, 20};
- uint8_t fraction_lost5[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate5, encoder_sent_rate5, incoming_frame_rate5,
- fraction_lost5, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- float scale = (4.0f / 3.0f) / 2.0f;
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f, 480,
- 360, 30.0f));
-
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// Multiple down-sampling and up-sample stages, with partial undoing.
-// Spatial down-sample 3/4x3/4, followed by temporal down-sample 2/3,
-// undo the temporal 2/3, and then undo the spatial.
-TEST_F(QmSelectTest, MultipleStagesCheckActionHistory3) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(100, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Go down 3/4x3/4 spatial.
- // Update rates for a sequence of intervals.
- int target_rate[] = {100, 100, 100};
- int encoder_sent_rate[] = {100, 100, 100};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
- 480, 360, 30.0f));
-
- // Go down 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- qm_resolution_->ResetRates();
- int target_rate2[] = {100, 100, 100, 100, 100};
- int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Low motion, high spatial.
- UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 480, 360, 20.5f));
-
- // Go up 2/3 temporal.
- qm_resolution_->UpdateCodecParameters(20.5f, 480, 360);
- qm_resolution_->ResetRates();
- // Update rates for a sequence of intervals.
- int target_rate3[] = {250, 250, 250, 250, 250};
- int encoder_sent_rate3[] = {250, 250, 250, 250, 250};
- int incoming_frame_rate3[] = {20, 20, 20, 20, 120};
- uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
- fraction_lost3, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f / 3.0f, 480,
- 360, 30.0f));
-
- // Go up spatial.
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- qm_resolution_->ResetRates();
- int target_rate4[] = {500, 500, 500, 500, 500};
- int encoder_sent_rate4[] = {500, 500, 500, 500, 500};
- int incoming_frame_rate4[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost4[] = {30, 30, 30, 30, 30};
- UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
- fraction_lost4, 5);
-
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
- 640, 480, 30.0f));
-}
-
-// Two stages of 3/4x3/4 converted to one stage of 1/2x1/2.
-TEST_F(QmSelectTest, ConvertThreeQuartersToOneHalf) {
- // Initialize with bitrate, frame rate, native system width/height, and
- // number of temporal layers.
- InitQmNativeData(150, 30, 640, 480, 1);
-
- // Update with encoder frame size.
- uint16_t codec_width = 640;
- uint16_t codec_height = 480;
- qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
- EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
-
- // Go down 3/4x3/4 spatial.
- // Update rates for a sequence of intervals.
- int target_rate[] = {150, 150, 150};
- int encoder_sent_rate[] = {150, 150, 150};
- int incoming_frame_rate[] = {30, 30, 30};
- uint8_t fraction_lost[] = {10, 10, 10};
- UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
- fraction_lost, 3);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f, 1.0f,
- 480, 360, 30.0f));
-
- // Set rates to go down another 3/4 spatial. Should be converted ton 1/2.
- qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
- EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
- qm_resolution_->ResetRates();
- int target_rate2[] = {100, 100, 100, 100, 100};
- int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
- int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
- uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
- UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
- fraction_lost2, 5);
-
- // Update content: motion level, and 3 spatial prediction errors.
- // Medium motion, low spatial.
- UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
- EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
- EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
- EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
- EXPECT_TRUE(
- IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240, 30.0f));
-}
-
-void QmSelectTest::InitQmNativeData(float initial_bit_rate,
- int user_frame_rate,
- int native_width,
- int native_height,
- int num_layers) {
- EXPECT_EQ(
- 0, qm_resolution_->Initialize(initial_bit_rate, user_frame_rate,
- native_width, native_height, num_layers));
-}
-
-void QmSelectTest::UpdateQmContentData(float motion_metric,
- float spatial_metric,
- float spatial_metric_horiz,
- float spatial_metric_vert) {
- content_metrics_->motion_magnitude = motion_metric;
- content_metrics_->spatial_pred_err = spatial_metric;
- content_metrics_->spatial_pred_err_h = spatial_metric_horiz;
- content_metrics_->spatial_pred_err_v = spatial_metric_vert;
- qm_resolution_->UpdateContent(content_metrics_);
-}
-
-void QmSelectTest::UpdateQmEncodedFrame(size_t* encoded_size,
- size_t num_updates) {
- for (size_t i = 0; i < num_updates; ++i) {
- // Convert to bytes.
- size_t encoded_size_update = 1000 * encoded_size[i] / 8;
- qm_resolution_->UpdateEncodedSize(encoded_size_update);
- }
-}
-
-void QmSelectTest::UpdateQmRateData(int* target_rate,
- int* encoder_sent_rate,
- int* incoming_frame_rate,
- uint8_t* fraction_lost,
- int num_updates) {
- for (int i = 0; i < num_updates; ++i) {
- float target_rate_update = target_rate[i];
- float encoder_sent_rate_update = encoder_sent_rate[i];
- float incoming_frame_rate_update = incoming_frame_rate[i];
- uint8_t fraction_lost_update = fraction_lost[i];
- qm_resolution_->UpdateRates(target_rate_update, encoder_sent_rate_update,
- incoming_frame_rate_update,
- fraction_lost_update);
- }
-}
-
-// Check is the selected action from the QmResolution class is the same
-// as the expected scales from |fac_width|, |fac_height|, |fac_temp|.
-bool QmSelectTest::IsSelectedActionCorrect(VCMResolutionScale* qm_scale,
- float fac_width,
- float fac_height,
- float fac_temp,
- uint16_t new_width,
- uint16_t new_height,
- float new_frame_rate) {
- if (qm_scale->spatial_width_fact == fac_width &&
- qm_scale->spatial_height_fact == fac_height &&
- qm_scale->temporal_fact == fac_temp &&
- qm_scale->codec_width == new_width &&
- qm_scale->codec_height == new_height &&
- qm_scale->frame_rate == new_frame_rate) {
- return true;
- } else {
- return false;
- }
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/receiver.cc b/chromium/third_party/webrtc/modules/video_coding/receiver.cc
index a02fd01de6a..1954df94e73 100644
--- a/chromium/third_party/webrtc/modules/video_coding/receiver.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/receiver.cc
@@ -42,12 +42,17 @@ VCMReceiver::VCMReceiver(VCMTiming* timing,
EventFactory* event_factory,
NackSender* nack_sender,
KeyFrameRequestSender* keyframe_request_sender)
- : VCMReceiver(timing,
- clock,
- std::unique_ptr<EventWrapper>(event_factory->CreateEvent()),
- std::unique_ptr<EventWrapper>(event_factory->CreateEvent()),
- nack_sender,
- keyframe_request_sender) {}
+ : VCMReceiver(
+ timing,
+ clock,
+ std::unique_ptr<EventWrapper>(event_factory
+ ? event_factory->CreateEvent()
+ : EventWrapper::Create()),
+ std::unique_ptr<EventWrapper>(event_factory
+ ? event_factory->CreateEvent()
+ : EventWrapper::Create()),
+ nack_sender,
+ keyframe_request_sender) {}
VCMReceiver::VCMReceiver(VCMTiming* timing,
Clock* clock,
@@ -281,24 +286,6 @@ int VCMReceiver::SetMinReceiverDelay(int desired_delay_ms) {
return 0;
}
-int VCMReceiver::RenderBufferSizeMs() {
- uint32_t timestamp_start = 0u;
- uint32_t timestamp_end = 0u;
- // Render timestamps are computed just prior to decoding. Therefore this is
- // only an estimate based on frames' timestamps and current timing state.
- jitter_buffer_.RenderBufferSize(&timestamp_start, &timestamp_end);
- if (timestamp_start == timestamp_end) {
- return 0;
- }
- // Update timing.
- const int64_t now_ms = clock_->TimeInMilliseconds();
- timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs());
- // Get render timestamps.
- uint32_t render_start = timing_->RenderTimeMs(timestamp_start, now_ms);
- uint32_t render_end = timing_->RenderTimeMs(timestamp_end, now_ms);
- return render_end - render_start;
-}
-
void VCMReceiver::RegisterStatsCallback(
VCMReceiveStatisticsCallback* callback) {
jitter_buffer_.RegisterStatsCallback(callback);
diff --git a/chromium/third_party/webrtc/modules/video_coding/receiver.h b/chromium/third_party/webrtc/modules/video_coding/receiver.h
index a4c55e967cb..dbef62a716c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/receiver.h
+++ b/chromium/third_party/webrtc/modules/video_coding/receiver.h
@@ -90,11 +90,6 @@ class VCMReceiver {
void SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode);
VCMDecodeErrorMode DecodeErrorMode() const;
- // Returns size in time (milliseconds) of complete continuous frames in the
- // jitter buffer. The render time is estimated based on the render delay at
- // the time this function is called.
- int RenderBufferSizeMs();
-
void RegisterStatsCallback(VCMReceiveStatisticsCallback* callback);
void TriggerDecoderShutdown();
diff --git a/chromium/third_party/webrtc/modules/video_coding/receiver_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/receiver_unittest.cc
index 42cc9ac0a88..d05957f6f0d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/receiver_unittest.cc
@@ -92,66 +92,6 @@ class TestVCMReceiver : public ::testing::Test {
std::unique_ptr<StreamGenerator> stream_generator_;
};
-TEST_F(TestVCMReceiver, RenderBufferSize_AllComplete) {
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- int num_of_frames = 10;
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- EXPECT_EQ(num_of_frames * kDefaultFramePeriodMs,
- receiver_.RenderBufferSizeMs());
-}
-
-TEST_F(TestVCMReceiver, RenderBufferSize_SkipToKeyFrame) {
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
- const int kNumOfNonDecodableFrames = 2;
- for (int i = 0; i < kNumOfNonDecodableFrames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- const int kNumOfFrames = 10;
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- for (int i = 0; i < kNumOfFrames - 1; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- EXPECT_EQ((kNumOfFrames - 1) * kDefaultFramePeriodMs,
- receiver_.RenderBufferSizeMs());
-}
-
-TEST_F(TestVCMReceiver, RenderBufferSize_NotAllComplete) {
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
- EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
- int num_of_frames = 10;
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- num_of_frames++;
- EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- EXPECT_EQ((num_of_frames - 1) * kDefaultFramePeriodMs,
- receiver_.RenderBufferSizeMs());
-}
-
-TEST_F(TestVCMReceiver, RenderBufferSize_NoKeyFrame) {
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
- int num_of_frames = 10;
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- int64_t next_render_time_ms = 0;
- VCMEncodedFrame* frame =
- receiver_.FrameForDecoding(10, &next_render_time_ms, false);
- EXPECT_TRUE(frame == NULL);
- receiver_.ReleaseFrame(frame);
- EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
- for (int i = 0; i < num_of_frames; ++i) {
- EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
- }
- EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
-}
-
TEST_F(TestVCMReceiver, NonDecodableDuration_Empty) {
// Enable NACK and with no RTT thresholds for disabling retransmission delay.
receiver_.SetNackMode(kNack, -1, -1);
diff --git a/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.cc b/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.cc
new file mode 100644
index 00000000000..2ddfada74e5
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.cc
@@ -0,0 +1,486 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/rtp_frame_reference_finder.h"
+
+#include <algorithm>
+#include <limits>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/video_coding/frame_object.h"
+#include "webrtc/modules/video_coding/packet_buffer.h"
+
+namespace webrtc {
+namespace video_coding {
+
+RtpFrameReferenceFinder::RtpFrameReferenceFinder(
+ OnCompleteFrameCallback* frame_callback)
+ : last_picture_id_(-1),
+ last_unwrap_(-1),
+ current_ss_idx_(0),
+ frame_callback_(frame_callback) {}
+
+void RtpFrameReferenceFinder::ManageFrame(
+ std::unique_ptr<RtpFrameObject> frame) {
+ rtc::CritScope lock(&crit_);
+ switch (frame->codec_type()) {
+ case kVideoCodecULPFEC:
+ case kVideoCodecRED:
+ case kVideoCodecUnknown:
+ RTC_NOTREACHED();
+ break;
+ case kVideoCodecVP8:
+ ManageFrameVp8(std::move(frame));
+ break;
+ case kVideoCodecVP9:
+ ManageFrameVp9(std::move(frame));
+ break;
+ case kVideoCodecH264:
+ case kVideoCodecI420:
+ case kVideoCodecGeneric:
+ ManageFrameGeneric(std::move(frame));
+ break;
+ }
+}
+
+void RtpFrameReferenceFinder::RetryStashedFrames() {
+ size_t num_stashed_frames = stashed_frames_.size();
+
+ // Clean up stashed frames if there are too many.
+ while (stashed_frames_.size() > kMaxStashedFrames)
+ stashed_frames_.pop();
+
+ // Since frames are stashed if there is not enough data to determine their
+ // frame references we should at most check |stashed_frames_.size()| in
+ // order to not pop and push frames in and endless loop.
+ for (size_t i = 0; i < num_stashed_frames && !stashed_frames_.empty(); ++i) {
+ std::unique_ptr<RtpFrameObject> frame = std::move(stashed_frames_.front());
+ stashed_frames_.pop();
+ ManageFrame(std::move(frame));
+ }
+}
+
+void RtpFrameReferenceFinder::ManageFrameGeneric(
+ std::unique_ptr<RtpFrameObject> frame) {
+ if (frame->frame_type() == kVideoFrameKey)
+ last_seq_num_gop_[frame->last_seq_num()] = frame->last_seq_num();
+
+ // We have received a frame but not yet a keyframe, stash this frame.
+ if (last_seq_num_gop_.empty()) {
+ stashed_frames_.emplace(std::move(frame));
+ return;
+ }
+
+ // Clean up info for old keyframes but make sure to keep info
+ // for the last keyframe.
+ auto clean_to = last_seq_num_gop_.lower_bound(frame->last_seq_num() - 100);
+ if (clean_to != last_seq_num_gop_.end())
+ last_seq_num_gop_.erase(last_seq_num_gop_.begin(), clean_to);
+
+ // Find the last sequence number of the last frame for the keyframe
+ // that this frame indirectly references.
+ auto seq_num_it = last_seq_num_gop_.upper_bound(frame->last_seq_num());
+ seq_num_it--;
+
+ // Make sure the packet sequence numbers are continuous, otherwise stash
+ // this frame.
+ if (frame->frame_type() == kVideoFrameDelta) {
+ if (seq_num_it->second !=
+ static_cast<uint16_t>(frame->first_seq_num() - 1)) {
+ stashed_frames_.emplace(std::move(frame));
+ return;
+ }
+ }
+
+ RTC_DCHECK(AheadOrAt(frame->last_seq_num(), seq_num_it->first));
+
+ // Since keyframes can cause reordering we can't simply assign the
+ // picture id according to some incrementing counter.
+ frame->picture_id = frame->last_seq_num();
+ frame->num_references = frame->frame_type() == kVideoFrameDelta;
+ frame->references[0] = seq_num_it->second;
+ seq_num_it->second = frame->picture_id;
+
+ last_picture_id_ = frame->picture_id;
+ frame_callback_->OnCompleteFrame(std::move(frame));
+ RetryStashedFrames();
+}
+
+void RtpFrameReferenceFinder::ManageFrameVp8(
+ std::unique_ptr<RtpFrameObject> frame) {
+ RTPVideoTypeHeader* rtp_codec_header = frame->GetCodecHeader();
+ if (!rtp_codec_header)
+ return;
+
+ const RTPVideoHeaderVP8& codec_header = rtp_codec_header->VP8;
+
+ if (codec_header.pictureId == kNoPictureId ||
+ codec_header.temporalIdx == kNoTemporalIdx ||
+ codec_header.tl0PicIdx == kNoTl0PicIdx) {
+ ManageFrameGeneric(std::move(frame));
+ return;
+ }
+
+ frame->picture_id = codec_header.pictureId % kPicIdLength;
+
+ if (last_unwrap_ == -1)
+ last_unwrap_ = codec_header.pictureId;
+
+ if (last_picture_id_ == -1)
+ last_picture_id_ = frame->picture_id;
+
+ // Find if there has been a gap in fully received frames and save the picture
+ // id of those frames in |not_yet_received_frames_|.
+ if (AheadOf<uint16_t, kPicIdLength>(frame->picture_id, last_picture_id_)) {
+ last_picture_id_ = Add<kPicIdLength>(last_picture_id_, 1);
+ while (last_picture_id_ != frame->picture_id) {
+ not_yet_received_frames_.insert(last_picture_id_);
+ last_picture_id_ = Add<kPicIdLength>(last_picture_id_, 1);
+ }
+ }
+
+ // Clean up info for base layers that are too old.
+ uint8_t old_tl0_pic_idx = codec_header.tl0PicIdx - kMaxLayerInfo;
+ auto clean_layer_info_to = layer_info_.lower_bound(old_tl0_pic_idx);
+ layer_info_.erase(layer_info_.begin(), clean_layer_info_to);
+
+ // Clean up info about not yet received frames that are too old.
+ uint16_t old_picture_id =
+ Subtract<kPicIdLength>(frame->picture_id, kMaxNotYetReceivedFrames);
+ auto clean_frames_to = not_yet_received_frames_.lower_bound(old_picture_id);
+ not_yet_received_frames_.erase(not_yet_received_frames_.begin(),
+ clean_frames_to);
+
+ if (frame->frame_type() == kVideoFrameKey) {
+ frame->num_references = 0;
+ layer_info_[codec_header.tl0PicIdx].fill(-1);
+ CompletedFrameVp8(std::move(frame));
+ return;
+ }
+
+ auto layer_info_it = layer_info_.find(codec_header.temporalIdx == 0
+ ? codec_header.tl0PicIdx - 1
+ : codec_header.tl0PicIdx);
+
+ // If we don't have the base layer frame yet, stash this frame.
+ if (layer_info_it == layer_info_.end()) {
+ stashed_frames_.emplace(std::move(frame));
+ return;
+ }
+
+ // A non keyframe base layer frame has been received, copy the layer info
+ // from the previous base layer frame and set a reference to the previous
+ // base layer frame.
+ if (codec_header.temporalIdx == 0) {
+ layer_info_it =
+ layer_info_
+ .insert(make_pair(codec_header.tl0PicIdx, layer_info_it->second))
+ .first;
+ frame->num_references = 1;
+ frame->references[0] = layer_info_it->second[0];
+ CompletedFrameVp8(std::move(frame));
+ return;
+ }
+
+ // Layer sync frame, this frame only references its base layer frame.
+ if (codec_header.layerSync) {
+ frame->num_references = 1;
+ frame->references[0] = layer_info_it->second[0];
+
+ CompletedFrameVp8(std::move(frame));
+ return;
+ }
+
+ // Find all references for this frame.
+ frame->num_references = 0;
+ for (uint8_t layer = 0; layer <= codec_header.temporalIdx; ++layer) {
+ RTC_DCHECK_NE(-1, layer_info_it->second[layer]);
+
+ // If we have not yet received a frame between this frame and the referenced
+ // frame then we have to wait for that frame to be completed first.
+ auto not_received_frame_it =
+ not_yet_received_frames_.upper_bound(layer_info_it->second[layer]);
+ if (not_received_frame_it != not_yet_received_frames_.end() &&
+ AheadOf<uint16_t, kPicIdLength>(frame->picture_id,
+ *not_received_frame_it)) {
+ stashed_frames_.emplace(std::move(frame));
+ return;
+ }
+
+ ++frame->num_references;
+ frame->references[layer] = layer_info_it->second[layer];
+ }
+
+ CompletedFrameVp8(std::move(frame));
+}
+
+void RtpFrameReferenceFinder::CompletedFrameVp8(
+ std::unique_ptr<RtpFrameObject> frame) {
+ RTPVideoTypeHeader* rtp_codec_header = frame->GetCodecHeader();
+ if (!rtp_codec_header)
+ return;
+
+ const RTPVideoHeaderVP8& codec_header = rtp_codec_header->VP8;
+
+ uint8_t tl0_pic_idx = codec_header.tl0PicIdx;
+ uint8_t temporal_index = codec_header.temporalIdx;
+ auto layer_info_it = layer_info_.find(tl0_pic_idx);
+
+ // Update this layer info and newer.
+ while (layer_info_it != layer_info_.end()) {
+ if (layer_info_it->second[temporal_index] != -1 &&
+ AheadOf<uint16_t, kPicIdLength>(layer_info_it->second[temporal_index],
+ frame->picture_id)) {
+ // The frame was not newer, then no subsequent layer info have to be
+ // update.
+ break;
+ }
+
+ layer_info_it->second[codec_header.temporalIdx] = frame->picture_id;
+ ++tl0_pic_idx;
+ layer_info_it = layer_info_.find(tl0_pic_idx);
+ }
+ not_yet_received_frames_.erase(frame->picture_id);
+
+ for (size_t i = 0; i < frame->num_references; ++i)
+ frame->references[i] = UnwrapPictureId(frame->references[i]);
+ frame->picture_id = UnwrapPictureId(frame->picture_id);
+
+ frame_callback_->OnCompleteFrame(std::move(frame));
+ RetryStashedFrames();
+}
+
+void RtpFrameReferenceFinder::ManageFrameVp9(
+ std::unique_ptr<RtpFrameObject> frame) {
+ RTPVideoTypeHeader* rtp_codec_header = frame->GetCodecHeader();
+ if (!rtp_codec_header)
+ return;
+
+ const RTPVideoHeaderVP9& codec_header = rtp_codec_header->VP9;
+
+ if (codec_header.picture_id == kNoPictureId) {
+ ManageFrameGeneric(std::move(frame));
+ return;
+ }
+
+ frame->spatial_layer = codec_header.spatial_idx;
+ frame->inter_layer_predicted = codec_header.inter_layer_predicted;
+ frame->picture_id = codec_header.picture_id % kPicIdLength;
+
+ if (last_unwrap_ == -1)
+ last_unwrap_ = codec_header.picture_id;
+
+ if (last_picture_id_ == -1)
+ last_picture_id_ = frame->picture_id;
+
+ if (codec_header.flexible_mode) {
+ frame->num_references = codec_header.num_ref_pics;
+ for (size_t i = 0; i < frame->num_references; ++i) {
+ frame->references[i] =
+ Subtract<1 << 16>(frame->picture_id, codec_header.pid_diff[i]);
+ }
+
+ CompletedFrameVp9(std::move(frame));
+ return;
+ }
+
+ if (codec_header.ss_data_available) {
+ // Scalability structures can only be sent with tl0 frames.
+ if (codec_header.temporal_idx != 0) {
+ LOG(LS_WARNING) << "Received scalability structure on a non base layer"
+ " frame. Scalability structure ignored.";
+ } else {
+ current_ss_idx_ = Add<kMaxGofSaved>(current_ss_idx_, 1);
+ scalability_structures_[current_ss_idx_] = codec_header.gof;
+ scalability_structures_[current_ss_idx_].pid_start = frame->picture_id;
+
+ auto pid_and_gof = std::make_pair(
+ frame->picture_id, &scalability_structures_[current_ss_idx_]);
+ gof_info_.insert(std::make_pair(codec_header.tl0_pic_idx, pid_and_gof));
+ }
+ }
+
+ // Clean up info for base layers that are too old.
+ uint8_t old_tl0_pic_idx = codec_header.tl0_pic_idx - kMaxGofSaved;
+ auto clean_gof_info_to = gof_info_.lower_bound(old_tl0_pic_idx);
+ gof_info_.erase(gof_info_.begin(), clean_gof_info_to);
+
+ if (frame->frame_type() == kVideoFrameKey) {
+ // When using GOF all keyframes must include the scalability structure.
+ if (!codec_header.ss_data_available)
+ LOG(LS_WARNING) << "Received keyframe without scalability structure";
+
+ frame->num_references = 0;
+ GofInfoVP9* gof = gof_info_.find(codec_header.tl0_pic_idx)->second.second;
+ FrameReceivedVp9(frame->picture_id, *gof);
+ CompletedFrameVp9(std::move(frame));
+ return;
+ }
+
+ auto gof_info_it = gof_info_.find(
+ (codec_header.temporal_idx == 0 && !codec_header.ss_data_available)
+ ? codec_header.tl0_pic_idx - 1
+ : codec_header.tl0_pic_idx);
+
+ // Gof info for this frame is not available yet, stash this frame.
+ if (gof_info_it == gof_info_.end()) {
+ stashed_frames_.emplace(std::move(frame));
+ return;
+ }
+
+ GofInfoVP9* gof = gof_info_it->second.second;
+ uint16_t picture_id_tl0 = gof_info_it->second.first;
+
+ FrameReceivedVp9(frame->picture_id, *gof);
+
+ // Make sure we don't miss any frame that could potentially have the
+ // up switch flag set.
+ if (MissingRequiredFrameVp9(frame->picture_id, *gof)) {
+ stashed_frames_.emplace(std::move(frame));
+ return;
+ }
+
+ if (codec_header.temporal_up_switch) {
+ auto pid_tidx =
+ std::make_pair(frame->picture_id, codec_header.temporal_idx);
+ up_switch_.insert(pid_tidx);
+ }
+
+ // If this is a base layer frame that contains a scalability structure
+ // then gof info has already been inserted earlier, so we only want to
+ // insert if we haven't done so already.
+ if (codec_header.temporal_idx == 0 && !codec_header.ss_data_available) {
+ auto pid_and_gof = std::make_pair(frame->picture_id, gof);
+ gof_info_.insert(std::make_pair(codec_header.tl0_pic_idx, pid_and_gof));
+ }
+
+ // Clean out old info about up switch frames.
+ uint16_t old_picture_id = Subtract<kPicIdLength>(last_picture_id_, 50);
+ auto up_switch_erase_to = up_switch_.lower_bound(old_picture_id);
+ up_switch_.erase(up_switch_.begin(), up_switch_erase_to);
+
+ RTC_DCHECK(
+ (AheadOrAt<uint16_t, kPicIdLength>(frame->picture_id, picture_id_tl0)));
+
+ size_t diff =
+ ForwardDiff<uint16_t, kPicIdLength>(gof->pid_start, frame->picture_id);
+ size_t gof_idx = diff % gof->num_frames_in_gof;
+
+ // Populate references according to the scalability structure.
+ frame->num_references = gof->num_ref_pics[gof_idx];
+ for (size_t i = 0; i < frame->num_references; ++i) {
+ frame->references[i] =
+ Subtract<kPicIdLength>(frame->picture_id, gof->pid_diff[gof_idx][i]);
+
+ // If this is a reference to a frame earlier than the last up switch point,
+ // then ignore this reference.
+ if (UpSwitchInIntervalVp9(frame->picture_id, codec_header.temporal_idx,
+ frame->references[i])) {
+ --frame->num_references;
+ }
+ }
+
+ CompletedFrameVp9(std::move(frame));
+}
+
+bool RtpFrameReferenceFinder::MissingRequiredFrameVp9(uint16_t picture_id,
+ const GofInfoVP9& gof) {
+ size_t diff = ForwardDiff<uint16_t, kPicIdLength>(gof.pid_start, picture_id);
+ size_t gof_idx = diff % gof.num_frames_in_gof;
+ size_t temporal_idx = gof.temporal_idx[gof_idx];
+
+ // For every reference this frame has, check if there is a frame missing in
+ // the interval (|ref_pid|, |picture_id|) in any of the lower temporal
+ // layers. If so, we are missing a required frame.
+ uint8_t num_references = gof.num_ref_pics[gof_idx];
+ for (size_t i = 0; i < num_references; ++i) {
+ uint16_t ref_pid =
+ Subtract<kPicIdLength>(picture_id, gof.pid_diff[gof_idx][i]);
+ for (size_t l = 0; l < temporal_idx; ++l) {
+ auto missing_frame_it = missing_frames_for_layer_[l].lower_bound(ref_pid);
+ if (missing_frame_it != missing_frames_for_layer_[l].end() &&
+ AheadOf<uint16_t, kPicIdLength>(picture_id, *missing_frame_it)) {
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
+void RtpFrameReferenceFinder::FrameReceivedVp9(uint16_t picture_id,
+ const GofInfoVP9& gof) {
+ RTC_DCHECK_NE(-1, last_picture_id_);
+
+ // If there is a gap, find which temporal layer the missing frames
+ // belong to and add the frame as missing for that temporal layer.
+ // Otherwise, remove this frame from the set of missing frames.
+ if (AheadOf<uint16_t, kPicIdLength>(picture_id, last_picture_id_)) {
+ size_t diff =
+ ForwardDiff<uint16_t, kPicIdLength>(gof.pid_start, last_picture_id_);
+ size_t gof_idx = diff % gof.num_frames_in_gof;
+
+ last_picture_id_ = Add<kPicIdLength>(last_picture_id_, 1);
+ while (last_picture_id_ != picture_id) {
+ ++gof_idx;
+ RTC_DCHECK_NE(0ul, gof_idx % gof.num_frames_in_gof);
+ size_t temporal_idx = gof.temporal_idx[gof_idx];
+ missing_frames_for_layer_[temporal_idx].insert(last_picture_id_);
+ last_picture_id_ = Add<kPicIdLength>(last_picture_id_, 1);
+ }
+ } else {
+ size_t diff =
+ ForwardDiff<uint16_t, kPicIdLength>(gof.pid_start, picture_id);
+ size_t gof_idx = diff % gof.num_frames_in_gof;
+ size_t temporal_idx = gof.temporal_idx[gof_idx];
+ missing_frames_for_layer_[temporal_idx].erase(picture_id);
+ }
+}
+
+bool RtpFrameReferenceFinder::UpSwitchInIntervalVp9(uint16_t picture_id,
+ uint8_t temporal_idx,
+ uint16_t pid_ref) {
+ for (auto up_switch_it = up_switch_.upper_bound(pid_ref);
+ up_switch_it != up_switch_.end() &&
+ AheadOf<uint16_t, kPicIdLength>(picture_id, up_switch_it->first);
+ ++up_switch_it) {
+ if (up_switch_it->second < temporal_idx)
+ return true;
+ }
+
+ return false;
+}
+
+void RtpFrameReferenceFinder::CompletedFrameVp9(
+ std::unique_ptr<RtpFrameObject> frame) {
+ for (size_t i = 0; i < frame->num_references; ++i)
+ frame->references[i] = UnwrapPictureId(frame->references[i]);
+ frame->picture_id = UnwrapPictureId(frame->picture_id);
+
+ frame_callback_->OnCompleteFrame(std::move(frame));
+ RetryStashedFrames();
+}
+
+uint16_t RtpFrameReferenceFinder::UnwrapPictureId(uint16_t picture_id) {
+ RTC_DCHECK_NE(-1, last_unwrap_);
+
+ uint16_t unwrap_truncated = last_unwrap_ % kPicIdLength;
+ uint16_t diff = MinDiff<uint16_t, kPicIdLength>(unwrap_truncated, picture_id);
+
+ if (AheadOf<uint16_t, kPicIdLength>(picture_id, unwrap_truncated))
+ last_unwrap_ = Add<1 << 16>(last_unwrap_, diff);
+ else
+ last_unwrap_ = Subtract<1 << 16>(last_unwrap_, diff);
+
+ return last_unwrap_;
+}
+
+} // namespace video_coding
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.h b/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.h
new file mode 100644
index 00000000000..7289b803bd0
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/rtp_frame_reference_finder.h
@@ -0,0 +1,152 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_RTP_FRAME_REFERENCE_FINDER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_RTP_FRAME_REFERENCE_FINDER_H_
+
+#include <array>
+#include <map>
+#include <memory>
+#include <queue>
+#include <set>
+#include <utility>
+
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_coding/sequence_number_util.h"
+
+namespace webrtc {
+namespace video_coding {
+
+class RtpFrameObject;
+class OnCompleteFrameCallback;
+
+class RtpFrameReferenceFinder {
+ public:
+ explicit RtpFrameReferenceFinder(OnCompleteFrameCallback* frame_callback);
+ void ManageFrame(std::unique_ptr<RtpFrameObject> frame);
+
+ private:
+ static const uint16_t kPicIdLength = 1 << 7;
+ static const uint8_t kMaxTemporalLayers = 5;
+ static const int kMaxLayerInfo = 10;
+ static const int kMaxStashedFrames = 10;
+ static const int kMaxNotYetReceivedFrames = 20;
+ static const int kMaxGofSaved = 15;
+
+ rtc::CriticalSection crit_;
+
+ // Retry finding references for all frames that previously didn't have
+ // all information needed.
+ void RetryStashedFrames() EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Find references for generic frames.
+ void ManageFrameGeneric(std::unique_ptr<RtpFrameObject> frame)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Find references for Vp8 frames
+ void ManageFrameVp8(std::unique_ptr<RtpFrameObject> frame)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Updates all necessary state used to determine frame references
+ // for Vp8 and then calls the |frame_callback| callback with the
+ // completed frame.
+ void CompletedFrameVp8(std::unique_ptr<RtpFrameObject> frame)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Find references for Vp9 frames
+ void ManageFrameVp9(std::unique_ptr<RtpFrameObject> frame)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Unwrap the picture id and the frame references and then call the
+ // |frame_callback| callback with the completed frame.
+ void CompletedFrameVp9(std::unique_ptr<RtpFrameObject> frame)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Check if we are missing a frame necessary to determine the references
+ // for this frame.
+ bool MissingRequiredFrameVp9(uint16_t picture_id, const GofInfoVP9& gof)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Updates which frames that have been received. If there is a gap,
+ // missing frames will be added to |missing_frames_for_layer_| or
+ // if this is an already missing frame then it will be removed.
+ void FrameReceivedVp9(uint16_t picture_id, const GofInfoVP9& gof)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Check if there is a frame with the up-switch flag set in the interval
+ // (|pid_ref|, |picture_id|) with temporal layer smaller than |temporal_idx|.
+ bool UpSwitchInIntervalVp9(uint16_t picture_id,
+ uint8_t temporal_idx,
+ uint16_t pid_ref) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // All picture ids are unwrapped to 16 bits.
+ uint16_t UnwrapPictureId(uint16_t picture_id) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // Holds the last sequence number of the last frame that has been created
+ // given the last sequence number of a given keyframe.
+ std::map<uint16_t, uint16_t, DescendingSeqNumComp<uint16_t>> last_seq_num_gop_
+ GUARDED_BY(crit_);
+
+ // Save the last picture id in order to detect when there is a gap in frames
+ // that have not yet been fully received.
+ int last_picture_id_ GUARDED_BY(crit_);
+
+ // The last unwrapped picture id. Used to unwrap the picture id from a length
+ // of |kPicIdLength| to 16 bits.
+ int last_unwrap_ GUARDED_BY(crit_);
+
+ // Frames earlier than the last received frame that have not yet been
+ // fully received.
+ std::set<uint16_t, DescendingSeqNumComp<uint16_t, kPicIdLength>>
+ not_yet_received_frames_ GUARDED_BY(crit_);
+
+ // Frames that have been fully received but didn't have all the information
+ // needed to determine their references.
+ std::queue<std::unique_ptr<RtpFrameObject>> stashed_frames_ GUARDED_BY(crit_);
+
+ // Holds the information about the last completed frame for a given temporal
+ // layer given a Tl0 picture index.
+ std::map<uint8_t,
+ std::array<int16_t, kMaxTemporalLayers>,
+ DescendingSeqNumComp<uint8_t>>
+ layer_info_ GUARDED_BY(crit_);
+
+ // Where the current scalability structure is in the
+ // |scalability_structures_| array.
+ uint8_t current_ss_idx_;
+
+ // Holds received scalability structures.
+ std::array<GofInfoVP9, kMaxGofSaved> scalability_structures_
+ GUARDED_BY(crit_);
+
+ // Holds the picture id and the Gof information for a given TL0 picture index.
+ std::map<uint8_t,
+ std::pair<uint16_t, GofInfoVP9*>,
+ DescendingSeqNumComp<uint8_t>>
+ gof_info_ GUARDED_BY(crit_);
+
+ // Keep track of which picture id and which temporal layer that had the
+ // up switch flag set.
+ std::map<uint16_t, uint8_t> up_switch_ GUARDED_BY(crit_);
+
+ // For every temporal layer, keep a set of which frames that are missing.
+ std::array<std::set<uint16_t, DescendingSeqNumComp<uint16_t, kPicIdLength>>,
+ kMaxTemporalLayers>
+ missing_frames_for_layer_ GUARDED_BY(crit_);
+
+ OnCompleteFrameCallback* frame_callback_;
+};
+
+} // namespace video_coding
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_RTP_FRAME_REFERENCE_FINDER_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/test/rtp_player.cc b/chromium/third_party/webrtc/modules/video_coding/test/rtp_player.cc
index 97d63e0fb54..d5fa9ae936c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/test/rtp_player.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/test/rtp_player.cc
@@ -12,9 +12,11 @@
#include <stdio.h>
+#include <cstdlib>
#include <map>
#include <memory>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
@@ -227,7 +229,6 @@ class SsrcHandlers {
return -1;
}
- handler->rtp_module_->SetNACKStatus(kNackOff);
handler->rtp_header_parser_->RegisterRtpHeaderExtension(
kRtpExtensionTransmissionTimeOffset,
kDefaultTransmissionTimeOffsetExtensionId);
@@ -341,7 +342,7 @@ class RtpPlayerImpl : public RtpPlayerInterface {
assert(packet_source);
assert(packet_source->get());
packet_source_.swap(*packet_source);
- srand(321);
+ std::srand(321);
}
virtual ~RtpPlayerImpl() {}
@@ -434,7 +435,8 @@ class RtpPlayerImpl : public RtpPlayerInterface {
if (no_loss_startup_ > 0) {
no_loss_startup_--;
- } else if ((rand() + 1.0) / (RAND_MAX + 1.0) < loss_rate_) { // NOLINT
+ } else if ((std::rand() + 1.0) / (RAND_MAX + 1.0) <
+ loss_rate_) { // NOLINT
uint16_t seq_num = header.sequenceNumber;
lost_packets_.AddPacket(new RawRtpPacket(data, length, ssrc, seq_num));
DEBUG_LOG1("Dropped packet: %d!", header.header.sequenceNumber);
diff --git a/chromium/third_party/webrtc/modules/video_coding/test/stream_generator.h b/chromium/third_party/webrtc/modules/video_coding/test/stream_generator.h
index 36b26db92e4..9eb957194f1 100644
--- a/chromium/third_party/webrtc/modules/video_coding/test/stream_generator.h
+++ b/chromium/third_party/webrtc/modules/video_coding/test/stream_generator.h
@@ -12,6 +12,7 @@
#include <list>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/video_coding/packet.h"
#include "webrtc/modules/video_coding/test/test_util.h"
#include "webrtc/typedefs.h"
diff --git a/chromium/third_party/webrtc/modules/video_coding/test/vcm_payload_sink_factory.cc b/chromium/third_party/webrtc/modules/video_coding/test/vcm_payload_sink_factory.cc
index e774db16520..8c674ef5042 100644
--- a/chromium/third_party/webrtc/modules/video_coding/test/vcm_payload_sink_factory.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/test/vcm_payload_sink_factory.cc
@@ -14,6 +14,7 @@
#include <algorithm>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/video_coding/test/test_util.h"
#include "webrtc/system_wrappers/include/clock.h"
diff --git a/chromium/third_party/webrtc/modules/video_coding/timing.h b/chromium/third_party/webrtc/modules/video_coding/timing.h
index a45eee38c6c..e593c9acbc3 100644
--- a/chromium/third_party/webrtc/modules/video_coding/timing.h
+++ b/chromium/third_party/webrtc/modules/video_coding/timing.h
@@ -28,7 +28,7 @@ class VCMTiming {
// The primary timing component should be passed
// if this is the dual timing component.
explicit VCMTiming(Clock* clock, VCMTiming* master_timing = NULL);
- ~VCMTiming();
+ virtual ~VCMTiming();
// Resets the timing to the initial state.
void Reset();
@@ -69,11 +69,11 @@ class VCMTiming {
// Returns the receiver system time when the frame with timestamp
// frame_timestamp should be rendered, assuming that the system time currently
// is now_ms.
- int64_t RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms) const;
+ virtual int64_t RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms) const;
// Returns the maximum time in ms that we can wait for a frame to become
// complete before we must pass it to the decoder.
- uint32_t MaxWaitingTime(int64_t render_time_ms, int64_t now_ms) const;
+ virtual uint32_t MaxWaitingTime(int64_t render_time_ms, int64_t now_ms) const;
// Returns the current target delay which is required delay + decode time +
// render delay.
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/frame_dropper.cc b/chromium/third_party/webrtc/modules/video_coding/utility/frame_dropper.cc
index c95048c0743..5de7526ac24 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/frame_dropper.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/frame_dropper.cc
@@ -73,6 +73,7 @@ void FrameDropper::Reset() {
incoming_frame_rate_ = kDefaultIncomingFrameRate;
large_frame_accumulation_count_ = 0;
+ large_frame_accumulation_chunk_size_ = 0;
large_frame_accumulation_spread_ = 0.5 * kDefaultIncomingFrameRate;
drop_next_ = false;
@@ -129,13 +130,6 @@ void FrameDropper::Fill(size_t framesize_bytes, bool delta_frame) {
// Change the level of the accumulator (bucket)
accumulator_ += framesize_kbits;
CapAccumulator();
- LOG(LS_VERBOSE) << "FILL acc " << accumulator_ << " max " << accumulator_max_
- << " count " << large_frame_accumulation_count_ << " chunk "
- << large_frame_accumulation_chunk_size_ << " spread "
- << large_frame_accumulation_spread_ << " delta avg "
- << delta_frame_size_avg_kbits_.filtered() << " SIZE "
- << framesize_kbits << "key frame ratio "
- << key_frame_ratio_.filtered();
}
void FrameDropper::Leak(uint32_t input_framerate) {
@@ -160,10 +154,6 @@ void FrameDropper::Leak(uint32_t input_framerate) {
if (accumulator_ < 0.0f) {
accumulator_ = 0.0f;
}
- LOG(LS_VERBOSE) << "LEAK acc " << accumulator_ << " max " << accumulator_max_
- << " count " << large_frame_accumulation_count_ << " spread "
- << large_frame_accumulation_spread_ << " delta avg "
- << delta_frame_size_avg_kbits_.filtered();
UpdateRatio();
}
@@ -201,8 +191,6 @@ bool FrameDropper::DropFrame() {
drop_next_ = false;
drop_count_ = 0;
}
- LOG(LS_VERBOSE) << " drop_ratio_ " << drop_ratio_.filtered()
- << " drop_count_ " << drop_count_;
if (drop_ratio_.filtered() >= 0.5f) { // Drops per keep
// limit is the number of frames we should drop between each kept frame
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.cc b/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.cc
new file mode 100644
index 00000000000..97f1da30e40
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.cc
@@ -0,0 +1,196 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/utility/ivf_file_writer.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+
+namespace webrtc {
+
+IvfFileWriter::IvfFileWriter(const std::string& file_name,
+ std::unique_ptr<FileWrapper> file,
+ VideoCodecType codec_type)
+ : codec_type_(codec_type),
+ num_frames_(0),
+ width_(0),
+ height_(0),
+ last_timestamp_(-1),
+ using_capture_timestamps_(false),
+ file_name_(file_name),
+ file_(std::move(file)) {}
+
+IvfFileWriter::~IvfFileWriter() {
+ Close();
+}
+
+const size_t kIvfHeaderSize = 32;
+
+std::unique_ptr<IvfFileWriter> IvfFileWriter::Open(const std::string& file_name,
+ VideoCodecType codec_type) {
+ std::unique_ptr<IvfFileWriter> file_writer;
+ std::unique_ptr<FileWrapper> file(FileWrapper::Create());
+ if (file->OpenFile(file_name.c_str(), false) != 0)
+ return file_writer;
+
+ file_writer.reset(new IvfFileWriter(
+ file_name, std::unique_ptr<FileWrapper>(std::move(file)), codec_type));
+ if (!file_writer->WriteHeader())
+ file_writer.reset();
+
+ return file_writer;
+}
+
+bool IvfFileWriter::WriteHeader() {
+ if (file_->Rewind() != 0) {
+ LOG(LS_WARNING) << "Unable to rewind output file " << file_name_;
+ return false;
+ }
+
+ uint8_t ivf_header[kIvfHeaderSize] = {0};
+ ivf_header[0] = 'D';
+ ivf_header[1] = 'K';
+ ivf_header[2] = 'I';
+ ivf_header[3] = 'F';
+ ByteWriter<uint16_t>::WriteLittleEndian(&ivf_header[4], 0); // Version.
+ ByteWriter<uint16_t>::WriteLittleEndian(&ivf_header[6], 32); // Header size.
+
+ switch (codec_type_) {
+ case kVideoCodecVP8:
+ ivf_header[8] = 'V';
+ ivf_header[9] = 'P';
+ ivf_header[10] = '8';
+ ivf_header[11] = '0';
+ break;
+ case kVideoCodecVP9:
+ ivf_header[8] = 'V';
+ ivf_header[9] = 'P';
+ ivf_header[10] = '9';
+ ivf_header[11] = '0';
+ break;
+ case kVideoCodecH264:
+ ivf_header[8] = 'H';
+ ivf_header[9] = '2';
+ ivf_header[10] = '6';
+ ivf_header[11] = '4';
+ break;
+ default:
+ LOG(LS_ERROR) << "Unknown CODEC type: " << codec_type_;
+ return false;
+ }
+
+ ByteWriter<uint16_t>::WriteLittleEndian(&ivf_header[12], width_);
+ ByteWriter<uint16_t>::WriteLittleEndian(&ivf_header[14], height_);
+ // Render timestamps are in ms (1/1000 scale), while RTP timestamps use a
+ // 90kHz clock.
+ ByteWriter<uint32_t>::WriteLittleEndian(
+ &ivf_header[16], using_capture_timestamps_ ? 1000 : 90000);
+ ByteWriter<uint32_t>::WriteLittleEndian(&ivf_header[20], 1);
+ ByteWriter<uint32_t>::WriteLittleEndian(&ivf_header[24],
+ static_cast<uint32_t>(num_frames_));
+ ByteWriter<uint32_t>::WriteLittleEndian(&ivf_header[28], 0); // Reserved.
+
+ if (!file_->Write(ivf_header, kIvfHeaderSize)) {
+ LOG(LS_ERROR) << "Unable to write IVF header for file " << file_name_;
+ return false;
+ }
+
+ return true;
+}
+
+bool IvfFileWriter::InitFromFirstFrame(const EncodedImage& encoded_image) {
+ width_ = encoded_image._encodedWidth;
+ height_ = encoded_image._encodedHeight;
+ RTC_CHECK_GT(width_, 0);
+ RTC_CHECK_GT(height_, 0);
+ using_capture_timestamps_ = encoded_image._timeStamp == 0;
+
+ if (!WriteHeader())
+ return false;
+
+ std::string codec_name;
+ switch (codec_type_) {
+ case kVideoCodecVP8:
+ codec_name = "VP8";
+ break;
+ case kVideoCodecVP9:
+ codec_name = "VP9";
+ break;
+ case kVideoCodecH264:
+ codec_name = "H264";
+ break;
+ default:
+ codec_name = "Unknown";
+ }
+ LOG(LS_WARNING) << "Created IVF file " << file_name_
+ << " for codec data of type " << codec_name
+ << " at resolution " << width_ << " x " << height_
+ << ", using " << (using_capture_timestamps_ ? "1" : "90")
+ << "kHz clock resolution.";
+ return true;
+}
+
+bool IvfFileWriter::WriteFrame(const EncodedImage& encoded_image) {
+ RTC_DCHECK(file_->Open());
+
+ if (num_frames_ == 0 && !InitFromFirstFrame(encoded_image))
+ return false;
+
+ if ((encoded_image._encodedWidth > 0 || encoded_image._encodedHeight > 0) &&
+ (encoded_image._encodedHeight != height_ ||
+ encoded_image._encodedWidth != width_)) {
+ LOG(LS_WARNING)
+ << "Incomig frame has diffferent resolution then previous: (" << width_
+ << "x" << height_ << ") -> (" << encoded_image._encodedWidth << "x"
+ << encoded_image._encodedHeight << ")";
+ }
+
+ int64_t timestamp = using_capture_timestamps_
+ ? encoded_image.capture_time_ms_
+ : wrap_handler_.Unwrap(encoded_image._timeStamp);
+ if (last_timestamp_ != -1 && timestamp <= last_timestamp_) {
+ LOG(LS_WARNING) << "Timestamp no increasing: " << last_timestamp_ << " -> "
+ << timestamp;
+ }
+ last_timestamp_ = timestamp;
+
+ const size_t kFrameHeaderSize = 12;
+ uint8_t frame_header[kFrameHeaderSize] = {};
+ ByteWriter<uint32_t>::WriteLittleEndian(
+ &frame_header[0], static_cast<uint32_t>(encoded_image._length));
+ ByteWriter<uint64_t>::WriteLittleEndian(&frame_header[4], timestamp);
+ if (!file_->Write(frame_header, kFrameHeaderSize) ||
+ !file_->Write(encoded_image._buffer, encoded_image._length)) {
+ LOG(LS_ERROR) << "Unable to write frame to file " << file_name_;
+ return false;
+ }
+
+ ++num_frames_;
+ return true;
+}
+
+bool IvfFileWriter::Close() {
+ if (!file_->Open())
+ return false;
+
+ if (num_frames_ == 0) {
+ // No frame written to file, close and remove it entirely if possible.
+ file_->CloseFile();
+ if (remove(file_name_.c_str()) != 0)
+ LOG(LS_WARNING) << "Failed to remove empty IVF file " << file_name_;
+
+ return true;
+ }
+
+ return WriteHeader() && (file_->CloseFile() == 0);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.h b/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.h
new file mode 100644
index 00000000000..25d68a28034
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_IVF_FILE_WRITER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_IVF_FILE_WRITER_H_
+
+#include <memory>
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/video_frame.h"
+#include "webrtc/system_wrappers/include/file_wrapper.h"
+
+namespace webrtc {
+
+class IvfFileWriter {
+ public:
+ ~IvfFileWriter();
+
+ static std::unique_ptr<IvfFileWriter> Open(const std::string& file_name,
+ VideoCodecType codec_type);
+ bool WriteFrame(const EncodedImage& encoded_image);
+ bool Close();
+
+ private:
+ IvfFileWriter(const std::string& path_name,
+ std::unique_ptr<FileWrapper> file,
+ VideoCodecType codec_type);
+ bool WriteHeader();
+ bool InitFromFirstFrame(const EncodedImage& encoded_image);
+
+ const VideoCodecType codec_type_;
+ size_t num_frames_;
+ uint16_t width_;
+ uint16_t height_;
+ int64_t last_timestamp_;
+ bool using_capture_timestamps_;
+ rtc::TimestampWrapAroundHandler wrap_handler_;
+ const std::string file_name_;
+ std::unique_ptr<FileWrapper> file_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(IvfFileWriter);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_IVF_FILE_WRITER_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer_unittest.cc
new file mode 100644
index 00000000000..bdeef2abd5b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/ivf_file_writer_unittest.cc
@@ -0,0 +1,176 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_coding/utility/ivf_file_writer.h"
+
+#include <memory>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+namespace {
+static const int kHeaderSize = 32;
+static const int kFrameHeaderSize = 12;
+static uint8_t dummy_payload[4] = {0, 1, 2, 3};
+} // namespace
+
+class IvfFileWriterTest : public ::testing::Test {
+ protected:
+ void SetUp() override {
+ const int64_t start_id =
+ reinterpret_cast<int64_t>(this) ^ rtc::TimeMicros();
+ int64_t id = start_id;
+ do {
+ std::ostringstream oss;
+ oss << test::OutputPath() << "ivf_test_file_" << id++ << ".ivf";
+ file_name_ = oss.str();
+ } while (id < start_id + 100 && FileExists());
+ ASSERT_LT(id, start_id + 100);
+ }
+
+ bool WriteDummyTestFrames(int width,
+ int height,
+ int num_frames,
+ bool use_capture_tims_ms) {
+ EncodedImage frame;
+ frame._buffer = dummy_payload;
+ frame._encodedWidth = width;
+ frame._encodedHeight = height;
+ for (int i = 1; i <= num_frames; ++i) {
+ frame._length = i % sizeof(dummy_payload);
+ if (use_capture_tims_ms) {
+ frame.capture_time_ms_ = i;
+ } else {
+ frame._timeStamp = i;
+ }
+ if (!file_writer_->WriteFrame(frame))
+ return false;
+ }
+ return true;
+ }
+
+ void VerifyIvfHeader(FileWrapper* file,
+ const uint8_t fourcc[4],
+ int width,
+ int height,
+ uint32_t num_frames,
+ bool use_capture_tims_ms) {
+ uint8_t data[kHeaderSize];
+ ASSERT_EQ(kHeaderSize, file->Read(data, kHeaderSize));
+
+ uint8_t dkif[4] = {'D', 'K', 'I', 'F'};
+ EXPECT_EQ(0, memcmp(dkif, data, 4));
+ EXPECT_EQ(0u, ByteReader<uint16_t>::ReadLittleEndian(&data[4]));
+ EXPECT_EQ(32u, ByteReader<uint16_t>::ReadLittleEndian(&data[6]));
+ EXPECT_EQ(0, memcmp(fourcc, &data[8], 4));
+ EXPECT_EQ(width, ByteReader<uint16_t>::ReadLittleEndian(&data[12]));
+ EXPECT_EQ(height, ByteReader<uint16_t>::ReadLittleEndian(&data[14]));
+ EXPECT_EQ(use_capture_tims_ms ? 1000u : 90000u,
+ ByteReader<uint32_t>::ReadLittleEndian(&data[16]));
+ EXPECT_EQ(1u, ByteReader<uint32_t>::ReadLittleEndian(&data[20]));
+ EXPECT_EQ(num_frames, ByteReader<uint32_t>::ReadLittleEndian(&data[24]));
+ EXPECT_EQ(0u, ByteReader<uint32_t>::ReadLittleEndian(&data[28]));
+ }
+
+ void VerifyDummyTestFrames(FileWrapper* file, uint32_t num_frames) {
+ const int kMaxFrameSize = 4;
+ for (uint32_t i = 1; i <= num_frames; ++i) {
+ uint8_t frame_header[kFrameHeaderSize];
+ ASSERT_EQ(kFrameHeaderSize, file->Read(frame_header, kFrameHeaderSize));
+ uint32_t frame_length =
+ ByteReader<uint32_t>::ReadLittleEndian(&frame_header[0]);
+ EXPECT_EQ(i % 4, frame_length);
+ uint64_t timestamp =
+ ByteReader<uint64_t>::ReadLittleEndian(&frame_header[4]);
+ EXPECT_EQ(i, timestamp);
+
+ uint8_t data[kMaxFrameSize] = {};
+ ASSERT_EQ(frame_length,
+ static_cast<uint32_t>(file->Read(data, frame_length)));
+ EXPECT_EQ(0, memcmp(data, dummy_payload, frame_length));
+ }
+ }
+
+ void RunBasicFileStructureTest(VideoCodecType codec_type,
+ const uint8_t fourcc[4],
+ bool use_capture_tims_ms) {
+ file_writer_ = IvfFileWriter::Open(file_name_, codec_type);
+ ASSERT_TRUE(file_writer_.get());
+ const int kWidth = 320;
+ const int kHeight = 240;
+ const int kNumFrames = 257;
+ EXPECT_TRUE(
+ WriteDummyTestFrames(kWidth, kHeight, kNumFrames, use_capture_tims_ms));
+ EXPECT_TRUE(file_writer_->Close());
+
+ std::unique_ptr<FileWrapper> out_file(FileWrapper::Create());
+ ASSERT_EQ(0, out_file->OpenFile(file_name_.c_str(), true));
+ VerifyIvfHeader(out_file.get(), fourcc, kWidth, kHeight, kNumFrames,
+ use_capture_tims_ms);
+ VerifyDummyTestFrames(out_file.get(), kNumFrames);
+
+ EXPECT_EQ(0, out_file->CloseFile());
+ EXPECT_EQ(0, remove(file_name_.c_str()));
+ }
+
+ bool FileExists() {
+ std::unique_ptr<FileWrapper> file_wrapper(FileWrapper::Create());
+ return file_wrapper->OpenFile(file_name_.c_str(), true) == 0;
+ }
+
+ std::string file_name_;
+ std::unique_ptr<IvfFileWriter> file_writer_;
+};
+
+TEST_F(IvfFileWriterTest, RemovesUnusedFile) {
+ file_writer_ = IvfFileWriter::Open(file_name_, kVideoCodecVP8);
+ ASSERT_TRUE(file_writer_.get() != nullptr);
+ EXPECT_TRUE(FileExists());
+ EXPECT_TRUE(file_writer_->Close());
+ EXPECT_FALSE(FileExists());
+ EXPECT_FALSE(file_writer_->Close()); // Can't close twice.
+}
+
+TEST_F(IvfFileWriterTest, WritesBasicVP8FileNtpTimestamp) {
+ const uint8_t fourcc[4] = {'V', 'P', '8', '0'};
+ RunBasicFileStructureTest(kVideoCodecVP8, fourcc, false);
+}
+
+TEST_F(IvfFileWriterTest, WritesBasicVP8FileMsTimestamp) {
+ const uint8_t fourcc[4] = {'V', 'P', '8', '0'};
+ RunBasicFileStructureTest(kVideoCodecVP8, fourcc, true);
+}
+
+TEST_F(IvfFileWriterTest, WritesBasicVP9FileNtpTimestamp) {
+ const uint8_t fourcc[4] = {'V', 'P', '9', '0'};
+ RunBasicFileStructureTest(kVideoCodecVP9, fourcc, false);
+}
+
+TEST_F(IvfFileWriterTest, WritesBasicVP9FileMsTimestamp) {
+ const uint8_t fourcc[4] = {'V', 'P', '9', '0'};
+ RunBasicFileStructureTest(kVideoCodecVP9, fourcc, true);
+}
+
+TEST_F(IvfFileWriterTest, WritesBasicH264FileNtpTimestamp) {
+ const uint8_t fourcc[4] = {'H', '2', '6', '4'};
+ RunBasicFileStructureTest(kVideoCodecH264, fourcc, false);
+}
+
+TEST_F(IvfFileWriterTest, WritesBasicH264FileMsTimestamp) {
+ const uint8_t fourcc[4] = {'H', '2', '6', '4'};
+ RunBasicFileStructureTest(kVideoCodecH264, fourcc, true);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.cc b/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.cc
index c6e56697310..bb60ee036e3 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.cc
@@ -11,29 +11,39 @@
namespace webrtc {
+namespace {
static const int kMinFps = 5;
-static const int kMeasureSecondsDownscale = 3;
// Threshold constant used until first downscale (to permit fast rampup).
static const int kMeasureSecondsFastUpscale = 2;
static const int kMeasureSecondsUpscale = 5;
+static const int kMeasureSecondsDownscale = 5;
static const int kFramedropPercentThreshold = 60;
-static const int kHdResolutionThreshold = 700 * 500;
-static const int kHdBitrateThresholdKbps = 500;
-
-const int QualityScaler::kDefaultLowQpDenominator = 3;
-// Note that this is the same for width and height to permit 120x90 in both
-// portrait and landscape mode.
-const int QualityScaler::kDefaultMinDownscaleDimension = 90;
-
-QualityScaler::QualityScaler()
- : low_qp_threshold_(-1),
- framerate_down_(false),
- min_width_(kDefaultMinDownscaleDimension),
- min_height_(kDefaultMinDownscaleDimension) {}
+// Min width/height to downscale to, set to not go below QVGA, but with some
+// margin to permit "almost-QVGA" resolutions, such as QCIF.
+static const int kMinDownscaleDimension = 140;
+// Initial resolutions corresponding to a bitrate. Aa bit above their actual
+// values to permit near-VGA and near-QVGA resolutions to use the same
+// mechanism.
+static const int kVgaBitrateThresholdKbps = 500;
+static const int kVgaNumPixels = 700 * 500; // 640x480
+static const int kQvgaBitrateThresholdKbps = 250;
+static const int kQvgaNumPixels = 400 * 300; // 320x240
+} // namespace
+
+// QP thresholds are chosen to be high enough to be hit in practice when quality
+// is good, but also low enough to not cause a flip-flop behavior (e.g. going up
+// in resolution shouldn't give so bad quality that we should go back down).
+
+const int QualityScaler::kLowVp8QpThreshold = 29;
+const int QualityScaler::kBadVp8QpThreshold = 95;
+
+const int QualityScaler::kLowH264QpThreshold = 22;
+const int QualityScaler::kBadH264QpThreshold = 35;
+
+QualityScaler::QualityScaler() : low_qp_threshold_(-1) {}
void QualityScaler::Init(int low_qp_threshold,
int high_qp_threshold,
- bool use_framerate_reduction,
int initial_bitrate_kbps,
int width,
int height,
@@ -41,7 +51,6 @@ void QualityScaler::Init(int low_qp_threshold,
ClearSamples();
low_qp_threshold_ = low_qp_threshold;
high_qp_threshold_ = high_qp_threshold;
- use_framerate_reduction_ = use_framerate_reduction;
downscale_shift_ = 0;
// Use a faster window for upscaling initially (but be more graceful later).
// This enables faster initial rampups without risking strong up-down
@@ -49,25 +58,24 @@ void QualityScaler::Init(int low_qp_threshold,
measure_seconds_upscale_ = kMeasureSecondsFastUpscale;
const int init_width = width;
const int init_height = height;
- // TODO(glaznev): Investigate using thresholds for other resolutions
- // or threshold tables.
- if (initial_bitrate_kbps > 0 &&
- initial_bitrate_kbps < kHdBitrateThresholdKbps) {
- // Start scaling to roughly VGA.
- while (width * height > kHdResolutionThreshold) {
+ if (initial_bitrate_kbps > 0) {
+ int init_num_pixels = width * height;
+ if (initial_bitrate_kbps < kVgaBitrateThresholdKbps)
+ init_num_pixels = kVgaNumPixels;
+ if (initial_bitrate_kbps < kQvgaBitrateThresholdKbps)
+ init_num_pixels = kQvgaNumPixels;
+ while (width * height > init_num_pixels) {
++downscale_shift_;
width /= 2;
height /= 2;
}
}
+
+ // Zero out width/height so they can be checked against inside
+ // UpdateTargetResolution.
+ res_.width = res_.height = 0;
UpdateTargetResolution(init_width, init_height);
ReportFramerate(fps);
- target_framerate_ = -1;
-}
-
-void QualityScaler::SetMinResolution(int min_width, int min_height) {
- min_width_ = min_width;
- min_height_ = min_height;
}
// Report framerate(fps) to estimate # of samples.
@@ -96,34 +104,14 @@ void QualityScaler::OnEncodeFrame(const VideoFrame& frame) {
int avg_drop = 0;
int avg_qp = 0;
- // When encoder consistently overshoots, framerate reduction and spatial
- // resizing will be triggered to get a smoother video.
if ((framedrop_percent_.GetAverage(num_samples_downscale_, &avg_drop) &&
avg_drop >= kFramedropPercentThreshold) ||
(average_qp_downscale_.GetAverage(num_samples_downscale_, &avg_qp) &&
avg_qp > high_qp_threshold_)) {
- // Reducing frame rate before spatial resolution change.
- // Reduce frame rate only when it is above a certain number.
- // Only one reduction is allowed for now.
- // TODO(jackychen): Allow more than one framerate reduction.
- if (use_framerate_reduction_ && !framerate_down_ && framerate_ >= 20) {
- target_framerate_ = framerate_ / 2;
- framerate_down_ = true;
- // If frame rate has been updated, clear the buffer. We don't want
- // spatial resolution to change right after frame rate change.
- ClearSamples();
- } else {
- AdjustScale(false);
- }
+ AdjustScale(false);
} else if (average_qp_upscale_.GetAverage(num_samples_upscale_, &avg_qp) &&
avg_qp <= low_qp_threshold_) {
- if (use_framerate_reduction_ && framerate_down_) {
- target_framerate_ = -1;
- framerate_down_ = false;
- ClearSamples();
- } else {
- AdjustScale(true);
- }
+ AdjustScale(true);
}
UpdateTargetResolution(frame.width(), frame.height());
}
@@ -132,10 +120,6 @@ QualityScaler::Resolution QualityScaler::GetScaledResolution() const {
return res_;
}
-int QualityScaler::GetTargetFramerate() const {
- return target_framerate_;
-}
-
const VideoFrame& QualityScaler::GetScaledFrame(const VideoFrame& frame) {
Resolution res = GetScaledResolution();
if (res.width == frame.width())
@@ -146,24 +130,39 @@ const VideoFrame& QualityScaler::GetScaledFrame(const VideoFrame& frame) {
if (scaler_.Scale(frame, &scaled_frame_) != 0)
return frame;
+ // TODO(perkj): Refactor the scaler to not own |scaled_frame|. VideoFrame are
+ // just thin wrappers so instead the scaler should return a
+ // rtc::scoped_refptr<VideoFrameBuffer> and a new VideoFrame be created with
+ // the meta data from |frame|. That way we would not have to set all these
+ // meta data.
scaled_frame_.set_ntp_time_ms(frame.ntp_time_ms());
scaled_frame_.set_timestamp(frame.timestamp());
scaled_frame_.set_render_time_ms(frame.render_time_ms());
+ scaled_frame_.set_rotation(frame.rotation());
return scaled_frame_;
}
void QualityScaler::UpdateTargetResolution(int frame_width, int frame_height) {
assert(downscale_shift_ >= 0);
- res_.width = frame_width;
- res_.height = frame_height;
+ int shifts_performed = 0;
for (int shift = downscale_shift_;
- shift > 0 && (res_.width / 2 >= min_width_) &&
- (res_.height / 2 >= min_height_);
- --shift) {
- res_.width /= 2;
- res_.height /= 2;
+ shift > 0 && (frame_width / 2 >= kMinDownscaleDimension) &&
+ (frame_height / 2 >= kMinDownscaleDimension);
+ --shift, ++shifts_performed) {
+ frame_width /= 2;
+ frame_height /= 2;
+ }
+ // Clamp to number of shifts actually performed to not be stuck trying to
+ // scale way beyond QVGA.
+ downscale_shift_ = shifts_performed;
+ if (res_.width == frame_width && res_.height == frame_height) {
+ // No reset done/needed, using same resolution.
+ return;
}
+ res_.width = frame_width;
+ res_.height = frame_height;
+ ClearSamples();
}
void QualityScaler::ClearSamples() {
@@ -184,11 +183,10 @@ void QualityScaler::AdjustScale(bool up) {
if (downscale_shift_ < 0)
downscale_shift_ = 0;
if (!up) {
- // Hit first downscale, start using a slower threshold for going up.
+ // First downscale hit, start using a slower threshold for going up.
measure_seconds_upscale_ = kMeasureSecondsUpscale;
UpdateSampleCounts();
}
- ClearSamples();
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.h b/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.h
index 34dda0e9f37..fe70393c21c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.h
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler.h
@@ -17,8 +17,6 @@
namespace webrtc {
class QualityScaler {
public:
- static const int kDefaultLowQpDenominator;
- static const int kDefaultMinDownscaleDimension;
struct Resolution {
int width;
int height;
@@ -27,22 +25,27 @@ class QualityScaler {
QualityScaler();
void Init(int low_qp_threshold,
int high_qp_threshold,
- bool use_framerate_reduction,
int initial_bitrate_kbps,
int width,
int height,
int fps);
- void SetMinResolution(int min_width, int min_height);
void ReportFramerate(int framerate);
void ReportQP(int qp);
void ReportDroppedFrame();
- void Reset(int framerate, int bitrate, int width, int height);
void OnEncodeFrame(const VideoFrame& frame);
Resolution GetScaledResolution() const;
const VideoFrame& GetScaledFrame(const VideoFrame& frame);
- int GetTargetFramerate() const;
int downscale_shift() const { return downscale_shift_; }
+ // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the
+ // bitstream range of [0, 127] and not the user-level range of [0,63].
+ static const int kLowVp8QpThreshold;
+ static const int kBadVp8QpThreshold;
+
+ // H264 QP is in the range [0, 51].
+ static const int kLowH264QpThreshold;
+ static const int kBadH264QpThreshold;
+
private:
void AdjustScale(bool up);
void UpdateTargetResolution(int frame_width, int frame_height);
@@ -59,17 +62,12 @@ class QualityScaler {
MovingAverage<int> average_qp_downscale_;
int framerate_;
- int target_framerate_;
int low_qp_threshold_;
int high_qp_threshold_;
MovingAverage<int> framedrop_percent_;
Resolution res_;
int downscale_shift_;
- int framerate_down_;
- bool use_framerate_reduction_;
- int min_width_;
- int min_height_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler_unittest.cc
index 72e9db405ed..fdec081c900 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/quality_scaler_unittest.cc
@@ -16,33 +16,24 @@ namespace webrtc {
namespace {
static const int kNumSeconds = 10;
static const int kWidth = 1920;
-static const int kWidthVga = 640;
static const int kHalfWidth = kWidth / 2;
static const int kHeight = 1080;
-static const int kHeightVga = 480;
static const int kFramerate = 30;
static const int kLowQp = 15;
static const int kNormalQp = 30;
+static const int kLowQpThreshold = 18;
static const int kHighQp = 40;
-static const int kMaxQp = 56;
-static const int kDisabledBadQpThreshold = kMaxQp + 1;
+static const int kDisabledBadQpThreshold = 64;
static const int kLowInitialBitrateKbps = 300;
// These values need to be in sync with corresponding constants
// in quality_scaler.cc
-static const int kMeasureSecondsDownscale = 3;
static const int kMeasureSecondsFastUpscale = 2;
static const int kMeasureSecondsUpscale = 5;
+static const int kMeasureSecondsDownscale = 5;
+static const int kMinDownscaleDimension = 140;
} // namespace
class QualityScalerTest : public ::testing::Test {
- public:
- // Temporal and spatial resolution.
- struct Resolution {
- int framerate;
- int width;
- int height;
- };
-
protected:
enum ScaleDirection {
kKeepScaleAtHighQp,
@@ -50,13 +41,11 @@ class QualityScalerTest : public ::testing::Test {
kScaleDownAboveHighQp,
kScaleUp
};
- enum BadQualityMetric { kDropFrame, kReportLowQP };
QualityScalerTest() {
input_frame_.CreateEmptyFrame(kWidth, kHeight, kWidth, kHalfWidth,
kHalfWidth);
- qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator, kHighQp, false,
- 0, 0, 0, kFramerate);
+ qs_.Init(kLowQpThreshold, kHighQp, 0, 0, 0, kFramerate);
qs_.OnEncodeFrame(input_frame_);
}
@@ -103,16 +92,6 @@ class QualityScalerTest : public ::testing::Test {
void DoesNotDownscaleFrameDimensions(int width, int height);
- Resolution TriggerResolutionChange(BadQualityMetric dropframe_lowqp,
- int num_second,
- int initial_framerate);
-
- void VerifyQualityAdaptation(int initial_framerate,
- int seconds_downscale,
- int seconds_upscale,
- bool expect_spatial_resize,
- bool expect_framerate_reduction);
-
void DownscaleEndsAt(int input_width,
int input_height,
int end_width,
@@ -200,7 +179,7 @@ void QualityScalerTest::ContinuouslyDownscalesByHalfDimensionsAndBackUp() {
int min_dimension = initial_min_dimension;
int current_shift = 0;
// Drop all frames to force-trigger downscaling.
- while (min_dimension >= 2 * QualityScaler::kDefaultMinDownscaleDimension) {
+ while (min_dimension >= 2 * kMinDownscaleDimension) {
EXPECT_TRUE(TriggerScale(kScaleDown)) << "No downscale within "
<< kNumSeconds << " seconds.";
qs_.OnEncodeFrame(input_frame_);
@@ -270,133 +249,50 @@ TEST_F(QualityScalerTest, DoesNotDownscaleFrom1Px) {
DoesNotDownscaleFrameDimensions(1, 1);
}
-QualityScalerTest::Resolution QualityScalerTest::TriggerResolutionChange(
- BadQualityMetric dropframe_lowqp,
- int num_second,
- int initial_framerate) {
- QualityScalerTest::Resolution res;
- res.framerate = initial_framerate;
- qs_.OnEncodeFrame(input_frame_);
- res.width = qs_.GetScaledResolution().width;
- res.height = qs_.GetScaledResolution().height;
- for (int i = 0; i < kFramerate * num_second; ++i) {
- switch (dropframe_lowqp) {
- case kReportLowQP:
- qs_.ReportQP(kLowQp);
- break;
- case kDropFrame:
- qs_.ReportDroppedFrame();
- break;
- }
- qs_.OnEncodeFrame(input_frame_);
- // Simulate the case when SetRates is called right after reducing
- // framerate.
- qs_.ReportFramerate(initial_framerate);
- res.framerate = qs_.GetTargetFramerate();
- if (res.framerate != -1)
- qs_.ReportFramerate(res.framerate);
- res.width = qs_.GetScaledResolution().width;
- res.height = qs_.GetScaledResolution().height;
- }
- return res;
-}
-
-void QualityScalerTest::VerifyQualityAdaptation(
- int initial_framerate,
- int seconds_downscale,
- int seconds_upscale,
- bool expect_spatial_resize,
- bool expect_framerate_reduction) {
- qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator,
- kDisabledBadQpThreshold, true, 0, 0, 0, initial_framerate);
- qs_.OnEncodeFrame(input_frame_);
- int init_width = qs_.GetScaledResolution().width;
- int init_height = qs_.GetScaledResolution().height;
-
- // Test reducing framerate by dropping frame continuously.
- QualityScalerTest::Resolution res =
- TriggerResolutionChange(kDropFrame, seconds_downscale, initial_framerate);
-
- if (expect_framerate_reduction) {
- EXPECT_LT(res.framerate, initial_framerate);
- } else {
- // No framerate reduction, video decimator should be disabled.
- EXPECT_EQ(-1, res.framerate);
- }
-
- if (expect_spatial_resize) {
- EXPECT_LT(res.width, init_width);
- EXPECT_LT(res.height, init_height);
- } else {
- EXPECT_EQ(init_width, res.width);
- EXPECT_EQ(init_height, res.height);
- }
-
- // The "seconds * 1.5" is to ensure spatial resolution to recover.
- // For example, in 6 seconds test, framerate reduction happens in the first
- // 3 seconds from 30fps to 15fps and causes the buffer size to be half of the
- // original one. Then it will take only 45 samples to downscale (twice in 90
- // samples). So to recover the resolution changes, we need more than 10
- // seconds (i.e, seconds_upscale * 1.5). This is because the framerate
- // increases before spatial size recovers, so it will take 150 samples to
- // recover spatial size (300 for twice).
- res = TriggerResolutionChange(kReportLowQP, seconds_upscale * 1.5,
- initial_framerate);
- EXPECT_EQ(-1, res.framerate);
- EXPECT_EQ(init_width, res.width);
- EXPECT_EQ(init_height, res.height);
-}
-
-// In 3 seconds test, only framerate adjusting should happen and 5 second
-// upscaling duration, only a framerate adjusting should happen.
-TEST_F(QualityScalerTest, ChangeFramerateOnly) {
- VerifyQualityAdaptation(kFramerate, kMeasureSecondsDownscale,
- kMeasureSecondsUpscale, false, true);
-}
-
-// In 6 seconds test, framerate adjusting and scaling are both
-// triggered, it shows that scaling would happen after framerate
-// adjusting.
-TEST_F(QualityScalerTest, ChangeFramerateAndSpatialSize) {
- VerifyQualityAdaptation(kFramerate, kMeasureSecondsDownscale * 2,
- kMeasureSecondsUpscale * 2, true, true);
-}
-
-// When starting from a low framerate, only spatial size will be changed.
-TEST_F(QualityScalerTest, ChangeSpatialSizeOnly) {
- qs_.ReportFramerate(kFramerate >> 1);
- VerifyQualityAdaptation(kFramerate >> 1, kMeasureSecondsDownscale * 2,
- kMeasureSecondsUpscale * 2, true, false);
-}
-
TEST_F(QualityScalerTest, DoesNotDownscaleBelow2xDefaultMinDimensionsWidth) {
DoesNotDownscaleFrameDimensions(
- 2 * QualityScaler::kDefaultMinDownscaleDimension - 1, 1000);
+ 2 * kMinDownscaleDimension - 1, 1000);
}
TEST_F(QualityScalerTest, DoesNotDownscaleBelow2xDefaultMinDimensionsHeight) {
DoesNotDownscaleFrameDimensions(
- 1000, 2 * QualityScaler::kDefaultMinDownscaleDimension - 1);
+ 1000, 2 * kMinDownscaleDimension - 1);
}
TEST_F(QualityScalerTest, DownscaleToVgaOnLowInitialBitrate) {
- qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator,
- kDisabledBadQpThreshold, true,
- kLowInitialBitrateKbps, kWidth, kHeight, kFramerate);
+ static const int kWidth720p = 1280;
+ static const int kHeight720p = 720;
+ static const int kInitialBitrateKbps = 300;
+ input_frame_.CreateEmptyFrame(kWidth720p, kHeight720p, kWidth720p,
+ kWidth720p / 2, kWidth720p / 2);
+ qs_.Init(kLowQpThreshold, kDisabledBadQpThreshold, kInitialBitrateKbps,
+ kWidth720p, kHeight720p, kFramerate);
qs_.OnEncodeFrame(input_frame_);
int init_width = qs_.GetScaledResolution().width;
int init_height = qs_.GetScaledResolution().height;
- EXPECT_LE(init_width, kWidthVga);
- EXPECT_LE(init_height, kHeightVga);
+ EXPECT_EQ(640, init_width);
+ EXPECT_EQ(360, init_height);
+}
+
+TEST_F(QualityScalerTest, DownscaleToQvgaOnLowerInitialBitrate) {
+ static const int kWidth720p = 1280;
+ static const int kHeight720p = 720;
+ static const int kInitialBitrateKbps = 200;
+ input_frame_.CreateEmptyFrame(kWidth720p, kHeight720p, kWidth720p,
+ kWidth720p / 2, kWidth720p / 2);
+ qs_.Init(kLowQpThreshold, kDisabledBadQpThreshold, kInitialBitrateKbps,
+ kWidth720p, kHeight720p, kFramerate);
+ qs_.OnEncodeFrame(input_frame_);
+ int init_width = qs_.GetScaledResolution().width;
+ int init_height = qs_.GetScaledResolution().height;
+ EXPECT_EQ(320, init_width);
+ EXPECT_EQ(180, init_height);
}
TEST_F(QualityScalerTest, DownscaleAfterMeasuredSecondsThenSlowerBackUp) {
- QualityScalerTest::Resolution initial_res;
- qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator, kHighQp, false, 0,
- kWidth, kHeight, kFramerate);
+ qs_.Init(kLowQpThreshold, kHighQp, 0, kWidth, kHeight, kFramerate);
qs_.OnEncodeFrame(input_frame_);
- initial_res.width = qs_.GetScaledResolution().width;
- initial_res.height = qs_.GetScaledResolution().height;
+ QualityScaler::Resolution initial_res = qs_.GetScaledResolution();
// Should not downscale if less than kMeasureSecondsDownscale seconds passed.
for (int i = 0; i < kFramerate * kMeasureSecondsDownscale - 1; ++i) {
@@ -431,12 +327,10 @@ TEST_F(QualityScalerTest, DownscaleAfterMeasuredSecondsThenSlowerBackUp) {
}
TEST_F(QualityScalerTest, UpscaleQuicklyInitiallyAfterMeasuredSeconds) {
- QualityScalerTest::Resolution initial_res;
- qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator, kHighQp, false,
- kLowInitialBitrateKbps, kWidth, kHeight, kFramerate);
+ qs_.Init(kLowQpThreshold, kHighQp, kLowInitialBitrateKbps, kWidth, kHeight,
+ kFramerate);
qs_.OnEncodeFrame(input_frame_);
- initial_res.width = qs_.GetScaledResolution().width;
- initial_res.height = qs_.GetScaledResolution().height;
+ QualityScaler::Resolution initial_res = qs_.GetScaledResolution();
// Should not upscale if less than kMeasureSecondsFastUpscale seconds passed.
for (int i = 0; i < kFramerate * kMeasureSecondsFastUpscale - 1; ++i) {
@@ -480,36 +374,20 @@ void QualityScalerTest::DownscaleEndsAt(int input_width,
}
}
-TEST_F(QualityScalerTest, DefaultDownscalesTo160x90) {
- DownscaleEndsAt(320, 180, 160, 90);
-}
-
-TEST_F(QualityScalerTest, DefaultDownscalesTo90x160) {
- DownscaleEndsAt(180, 320, 90, 160);
-}
-
-TEST_F(QualityScalerTest, DefaultDownscalesFrom1280x720To160x90) {
- DownscaleEndsAt(1280, 720, 160, 90);
-}
-
-TEST_F(QualityScalerTest, DefaultDoesntDownscaleBelow160x90) {
- DownscaleEndsAt(320 - 1, 180 - 1, 320 - 1, 180 - 1);
+TEST_F(QualityScalerTest, DownscalesTo320x180) {
+ DownscaleEndsAt(640, 360, 320, 180);
}
-TEST_F(QualityScalerTest, DefaultDoesntDownscaleBelow90x160) {
- DownscaleEndsAt(180 - 1, 320 - 1, 180 - 1, 320 - 1);
+TEST_F(QualityScalerTest, DownscalesTo180x320) {
+ DownscaleEndsAt(360, 640, 180, 320);
}
-TEST_F(QualityScalerTest, RespectsMinResolutionWidth) {
- // Should end at 200x100, as width can't go lower.
- qs_.SetMinResolution(200, 10);
- DownscaleEndsAt(1600, 800, 200, 100);
+TEST_F(QualityScalerTest, DownscalesFrom1280x720To320x180) {
+ DownscaleEndsAt(1280, 720, 320, 180);
}
-TEST_F(QualityScalerTest, RespectsMinResolutionHeight) {
- // Should end at 100x200, as height can't go lower.
- qs_.SetMinResolution(10, 200);
- DownscaleEndsAt(800, 1600, 100, 200);
+TEST_F(QualityScalerTest, DoesntDownscaleInitialQvga) {
+ DownscaleEndsAt(320, 180, 320, 180);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/video_coding_utility.gyp b/chromium/third_party/webrtc/modules/video_coding/utility/video_coding_utility.gyp
index 42cbb3d4e03..8edfd619863 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/video_coding_utility.gyp
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/video_coding_utility.gyp
@@ -20,6 +20,8 @@
'sources': [
'frame_dropper.cc',
'frame_dropper.h',
+ 'ivf_file_writer.cc',
+ 'ivf_file_writer.h',
'moving_average.h',
'qp_parser.cc',
'qp_parser.h',
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/vp8_header_parser.cc b/chromium/third_party/webrtc/modules/video_coding/utility/vp8_header_parser.cc
index 631385d0f25..d88fb6cc21e 100644
--- a/chromium/third_party/webrtc/modules/video_coding/utility/vp8_header_parser.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/vp8_header_parser.cc
@@ -74,8 +74,9 @@ static int VP8GetBit(VP8BitReader* const br, int prob) {
uint8_t range = br->range_;
if (br->bits_ < 0) {
VP8LoadNewBytes(br);
+ if (br->eof_)
+ return 0;
}
-
const int pos = br->bits_;
const uint8_t split = (range * prob) >> 8;
const uint8_t value = static_cast<uint8_t>(br->value_ >> pos);
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_coding.gypi b/chromium/third_party/webrtc/modules/video_coding/video_coding.gypi
index 7cfefed3ee4..27454a47115 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_coding.gypi
+++ b/chromium/third_party/webrtc/modules/video_coding/video_coding.gypi
@@ -22,19 +22,19 @@
],
'sources': [
# interfaces
- 'include/bitrate_adjuster.h',
'include/video_coding.h',
'include/video_coding_defines.h',
# headers
'codec_database.h',
'codec_timer.h',
- 'content_metrics_processing.h',
'decoding_state.h',
'encoded_frame.h',
'fec_tables_xor.h',
'frame_buffer.h',
+ 'frame_buffer2.h',
'frame_object.h',
+ 'rtp_frame_reference_finder.h',
'generic_decoder.h',
'generic_encoder.h',
'histogram.h',
@@ -50,8 +50,6 @@
'packet.h',
'packet_buffer.h',
'percentile_filter.h',
- 'qm_select_data.h',
- 'qm_select.h',
'receiver.h',
'rtt_filter.h',
'session_info.h',
@@ -60,14 +58,14 @@
'video_coding_impl.h',
# sources
- 'bitrate_adjuster.cc',
'codec_database.cc',
'codec_timer.cc',
- 'content_metrics_processing.cc',
'decoding_state.cc',
'encoded_frame.cc',
'frame_buffer.cc',
+ 'frame_buffer2.cc',
'frame_object.cc',
+ 'rtp_frame_reference_finder.cc',
'generic_decoder.cc',
'generic_encoder.cc',
'inter_frame_delay.cc',
@@ -80,7 +78,6 @@
'packet.cc',
'packet_buffer.cc',
'percentile_filter.cc',
- 'qm_select.cc',
'receiver.cc',
'rtt_filter.cc',
'session_info.cc',
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.cc b/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.cc
index e5f0ee12222..72bcc9a0594 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.cc
@@ -14,6 +14,7 @@
#include "webrtc/common_types.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/encoded_frame.h"
#include "webrtc/modules/video_coding/jitter_buffer.h"
@@ -53,7 +54,6 @@ class EncodedImageCallbackWrapper : public EncodedImageCallback {
callback_ = callback;
}
- // TODO(andresp): Change to void as return value is ignored.
virtual int32_t Encoded(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
@@ -73,23 +73,19 @@ class VideoCodingModuleImpl : public VideoCodingModule {
public:
VideoCodingModuleImpl(Clock* clock,
EventFactory* event_factory,
- bool owns_event_factory,
VideoEncoderRateObserver* encoder_rate_observer,
- VCMQMSettingsCallback* qm_settings_callback,
NackSender* nack_sender,
- KeyFrameRequestSender* keyframe_request_sender)
+ KeyFrameRequestSender* keyframe_request_sender,
+ EncodedImageCallback* pre_decode_image_callback)
: VideoCodingModule(),
- sender_(clock,
- &post_encode_callback_,
- encoder_rate_observer,
- qm_settings_callback),
+ sender_(clock, &post_encode_callback_, encoder_rate_observer, nullptr),
receiver_(clock,
event_factory,
+ pre_decode_image_callback,
nack_sender,
- keyframe_request_sender),
- own_event_factory_(owns_event_factory ? event_factory : NULL) {}
+ keyframe_request_sender) {}
- virtual ~VideoCodingModuleImpl() { own_event_factory_.reset(); }
+ virtual ~VideoCodingModuleImpl() {}
int64_t TimeUntilNextProcess() override {
int64_t sender_time = sender_.TimeUntilNextProcess();
@@ -132,16 +128,6 @@ class VideoCodingModuleImpl : public VideoCodingModule {
return sender_.SetChannelParameters(target_bitrate, lossRate, rtt);
}
- int32_t RegisterTransportCallback(
- VCMPacketizationCallback* transport) override {
- return sender_.RegisterTransportCallback(transport);
- }
-
- int32_t RegisterSendStatisticsCallback(
- VCMSendStatisticsCallback* sendStats) override {
- return sender_.RegisterSendStatisticsCallback(sendStats);
- }
-
int32_t RegisterProtectionCallback(
VCMProtectionCallback* protection) override {
return sender_.RegisterProtectionCallback(protection);
@@ -156,12 +142,11 @@ class VideoCodingModuleImpl : public VideoCodingModule {
}
int32_t AddVideoFrame(const VideoFrame& videoFrame,
- const VideoContentMetrics* contentMetrics,
const CodecSpecificInfo* codecSpecificInfo) override {
- return sender_.AddVideoFrame(videoFrame, contentMetrics, codecSpecificInfo);
+ return sender_.AddVideoFrame(videoFrame, codecSpecificInfo);
}
- int32_t IntraFrameRequest(int stream_index) override {
+ int32_t IntraFrameRequest(size_t stream_index) override {
return sender_.IntraFrameRequest(stream_index);
}
@@ -212,11 +197,6 @@ class VideoCodingModuleImpl : public VideoCodingModule {
return receiver_.RegisterPacketRequestCallback(callback);
}
- int RegisterRenderBufferSizeCallback(
- VCMRenderBufferSizeCallback* callback) override {
- return receiver_.RegisterRenderBufferSizeCallback(callback);
- }
-
int32_t Decode(uint16_t maxWaitTimeMs) override {
return receiver_.Decode(maxWaitTimeMs);
}
@@ -273,10 +253,6 @@ class VideoCodingModuleImpl : public VideoCodingModule {
return receiver_.SetReceiveChannelParameters(rtt);
}
- void RegisterPreDecodeImageCallback(EncodedImageCallback* observer) override {
- receiver_.RegisterPreDecodeImageCallback(observer);
- }
-
void RegisterPostEncodeImageCallback(
EncodedImageCallback* observer) override {
post_encode_callback_.Register(observer);
@@ -288,7 +264,6 @@ class VideoCodingModuleImpl : public VideoCodingModule {
EncodedImageCallbackWrapper post_encode_callback_;
vcm::VideoSender sender_;
vcm::VideoReceiver receiver_;
- std::unique_ptr<EventFactory> own_event_factory_;
};
} // namespace
@@ -305,7 +280,8 @@ VideoCodingModule* VideoCodingModule::Create(
return VideoCodingModule::Create(clock, encoder_rate_observer,
qm_settings_callback,
nullptr, // NackSender
- nullptr); // KeyframeRequestSender
+ nullptr, // KeyframeRequestSender
+ nullptr); // Pre-decode image callback
}
// Create method for the new jitter buffer.
@@ -314,11 +290,11 @@ VideoCodingModule* VideoCodingModule::Create(
VideoEncoderRateObserver* encoder_rate_observer,
VCMQMSettingsCallback* qm_settings_callback,
NackSender* nack_sender,
- KeyFrameRequestSender* keyframe_request_sender) {
- return new VideoCodingModuleImpl(clock, new EventFactoryImpl, true,
- encoder_rate_observer, qm_settings_callback,
- nack_sender,
- keyframe_request_sender);
+ KeyFrameRequestSender* keyframe_request_sender,
+ EncodedImageCallback* pre_decode_image_callback) {
+ return new VideoCodingModuleImpl(clock, nullptr, encoder_rate_observer,
+ nack_sender, keyframe_request_sender,
+ pre_decode_image_callback);
}
// Create method for current interface, will be removed when the
@@ -338,9 +314,8 @@ VideoCodingModule* VideoCodingModule::Create(
KeyFrameRequestSender* keyframe_request_sender) {
assert(clock);
assert(event_factory);
- return new VideoCodingModuleImpl(clock, event_factory, false, nullptr,
- nullptr, nack_sender,
- keyframe_request_sender);
+ return new VideoCodingModuleImpl(clock, event_factory, nullptr, nack_sender,
+ keyframe_request_sender, nullptr);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.h b/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.h
index f5f9b00206c..c9992b7f9ce 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.h
+++ b/chromium/third_party/webrtc/modules/video_coding/video_coding_impl.h
@@ -14,10 +14,13 @@
#include "webrtc/modules/video_coding/include/video_coding.h"
#include <memory>
+#include <string>
#include <vector>
+#include "webrtc/base/onetimeevent.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/modules/video_coding/codec_database.h"
#include "webrtc/modules/video_coding/frame_buffer.h"
#include "webrtc/modules/video_coding/generic_decoder.h"
@@ -28,7 +31,6 @@
#include "webrtc/modules/video_coding/timing.h"
#include "webrtc/modules/video_coding/utility/qp_parser.h"
#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
@@ -50,14 +52,14 @@ class VCMProcessTimer {
int64_t _latestMs;
};
-class VideoSender {
+class VideoSender : public Module {
public:
typedef VideoCodingModule::SenderNackMode SenderNackMode;
VideoSender(Clock* clock,
EncodedImageCallback* post_encode_callback,
VideoEncoderRateObserver* encoder_rate_observer,
- VCMQMSettingsCallback* qm_settings_callback);
+ VCMSendStatisticsCallback* send_stats_callback);
~VideoSender();
@@ -78,23 +80,20 @@ class VideoSender {
uint8_t lossRate,
int64_t rtt);
- int32_t RegisterTransportCallback(VCMPacketizationCallback* transport);
- int32_t RegisterSendStatisticsCallback(VCMSendStatisticsCallback* sendStats);
int32_t RegisterProtectionCallback(VCMProtectionCallback* protection);
void SetVideoProtection(VCMVideoProtection videoProtection);
int32_t AddVideoFrame(const VideoFrame& videoFrame,
- const VideoContentMetrics* _contentMetrics,
const CodecSpecificInfo* codecSpecificInfo);
- int32_t IntraFrameRequest(int stream_index);
+ int32_t IntraFrameRequest(size_t stream_index);
int32_t EnableFrameDropper(bool enable);
void SuspendBelowMinBitrate();
bool VideoSuspended() const;
- int64_t TimeUntilNextProcess();
- void Process();
+ int64_t TimeUntilNextProcess() override;
+ void Process() override;
private:
void SetEncoderParameters(EncoderParameters params)
@@ -102,12 +101,11 @@ class VideoSender {
Clock* const clock_;
- std::unique_ptr<CriticalSectionWrapper> process_crit_sect_;
rtc::CriticalSection encoder_crit_;
VCMGenericEncoder* _encoder;
- VCMEncodedFrameCallback _encodedFrameCallback GUARDED_BY(encoder_crit_);
media_optimization::MediaOptimization _mediaOpt;
- VCMSendStatisticsCallback* _sendStatsCallback GUARDED_BY(process_crit_sect_);
+ VCMEncodedFrameCallback _encodedFrameCallback GUARDED_BY(encoder_crit_);
+ VCMSendStatisticsCallback* const send_stats_callback_;
VCMCodecDataBase _codecDataBase GUARDED_BY(encoder_crit_);
bool frame_dropper_enabled_ GUARDED_BY(encoder_crit_);
VCMProcessTimer _sendStatsTimer;
@@ -116,21 +114,22 @@ class VideoSender {
VideoCodec current_codec_;
rtc::ThreadChecker main_thread_;
- VCMQMSettingsCallback* const qm_settings_callback_;
VCMProtectionCallback* protection_callback_;
rtc::CriticalSection params_crit_;
EncoderParameters encoder_params_ GUARDED_BY(params_crit_);
bool encoder_has_internal_source_ GUARDED_BY(params_crit_);
+ std::string encoder_name_ GUARDED_BY(params_crit_);
std::vector<FrameType> next_frame_types_ GUARDED_BY(params_crit_);
};
-class VideoReceiver {
+class VideoReceiver : public Module {
public:
typedef VideoCodingModule::ReceiverRobustness ReceiverRobustness;
VideoReceiver(Clock* clock,
EventFactory* event_factory,
+ EncodedImageCallback* pre_decode_image_callback,
NackSender* nack_sender = nullptr,
KeyFrameRequestSender* keyframe_request_sender = nullptr);
~VideoReceiver();
@@ -148,7 +147,6 @@ class VideoReceiver {
VCMDecoderTimingCallback* decoderTiming);
int32_t RegisterFrameTypeCallback(VCMFrameTypeCallback* frameTypeCallback);
int32_t RegisterPacketRequestCallback(VCMPacketRequestCallback* callback);
- int RegisterRenderBufferSizeCallback(VCMRenderBufferSizeCallback* callback);
int32_t Decode(uint16_t maxWaitTimeMs);
@@ -175,50 +173,43 @@ class VideoReceiver {
int32_t SetReceiveChannelParameters(int64_t rtt);
int32_t SetVideoProtection(VCMVideoProtection videoProtection, bool enable);
- int64_t TimeUntilNextProcess();
- void Process();
+ int64_t TimeUntilNextProcess() override;
+ void Process() override;
- void RegisterPreDecodeImageCallback(EncodedImageCallback* observer);
void TriggerDecoderShutdown();
protected:
int32_t Decode(const webrtc::VCMEncodedFrame& frame)
- EXCLUSIVE_LOCKS_REQUIRED(_receiveCritSect);
+ EXCLUSIVE_LOCKS_REQUIRED(receive_crit_);
int32_t RequestKeyFrame();
int32_t RequestSliceLossIndication(const uint64_t pictureID) const;
private:
Clock* const clock_;
- std::unique_ptr<CriticalSectionWrapper> process_crit_sect_;
- CriticalSectionWrapper* _receiveCritSect;
+ rtc::CriticalSection process_crit_;
+ rtc::CriticalSection receive_crit_;
VCMTiming _timing;
VCMReceiver _receiver;
VCMDecodedFrameCallback _decodedFrameCallback;
- VCMFrameTypeCallback* _frameTypeCallback GUARDED_BY(process_crit_sect_);
- VCMReceiveStatisticsCallback* _receiveStatsCallback
- GUARDED_BY(process_crit_sect_);
- VCMDecoderTimingCallback* _decoderTimingCallback
- GUARDED_BY(process_crit_sect_);
- VCMPacketRequestCallback* _packetRequestCallback
- GUARDED_BY(process_crit_sect_);
- VCMRenderBufferSizeCallback* render_buffer_callback_
- GUARDED_BY(process_crit_sect_);
+ VCMFrameTypeCallback* _frameTypeCallback GUARDED_BY(process_crit_);
+ VCMReceiveStatisticsCallback* _receiveStatsCallback GUARDED_BY(process_crit_);
+ VCMDecoderTimingCallback* _decoderTimingCallback GUARDED_BY(process_crit_);
+ VCMPacketRequestCallback* _packetRequestCallback GUARDED_BY(process_crit_);
VCMGenericDecoder* _decoder;
-#ifdef DEBUG_DECODER_BIT_STREAM
- FILE* _bitStreamBeforeDecoder;
-#endif
+
VCMFrameBuffer _frameFromFile;
- bool _scheduleKeyRequest GUARDED_BY(process_crit_sect_);
- bool drop_frames_until_keyframe_ GUARDED_BY(process_crit_sect_);
- size_t max_nack_list_size_ GUARDED_BY(process_crit_sect_);
+ bool _scheduleKeyRequest GUARDED_BY(process_crit_);
+ bool drop_frames_until_keyframe_ GUARDED_BY(process_crit_);
+ size_t max_nack_list_size_ GUARDED_BY(process_crit_);
- VCMCodecDataBase _codecDataBase GUARDED_BY(_receiveCritSect);
- EncodedImageCallback* pre_decode_image_callback_ GUARDED_BY(_receiveCritSect);
+ VCMCodecDataBase _codecDataBase GUARDED_BY(receive_crit_);
+ EncodedImageCallback* pre_decode_image_callback_;
VCMProcessTimer _receiveStatsTimer;
VCMProcessTimer _retransmissionTimer;
VCMProcessTimer _keyRequestTimer;
QpParser qp_parser_;
+ ThreadUnsafeOneTimeEvent first_frame_received_;
};
} // namespace vcm
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_receiver.cc b/chromium/third_party/webrtc/modules/video_coding/video_receiver.cc
index 5aadcf91e0e..a832e2180b9 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_receiver.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/video_receiver.cc
@@ -20,18 +20,15 @@
#include "webrtc/modules/video_coding/video_coding_impl.h"
#include "webrtc/system_wrappers/include/clock.h"
-// #define DEBUG_DECODER_BIT_STREAM
-
namespace webrtc {
namespace vcm {
VideoReceiver::VideoReceiver(Clock* clock,
EventFactory* event_factory,
+ EncodedImageCallback* pre_decode_image_callback,
NackSender* nack_sender,
KeyFrameRequestSender* keyframe_request_sender)
: clock_(clock),
- process_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- _receiveCritSect(CriticalSectionWrapper::CreateCriticalSection()),
_timing(clock_),
_receiver(&_timing,
clock_,
@@ -39,50 +36,36 @@ VideoReceiver::VideoReceiver(Clock* clock,
nack_sender,
keyframe_request_sender),
_decodedFrameCallback(&_timing, clock_),
- _frameTypeCallback(NULL),
- _receiveStatsCallback(NULL),
- _decoderTimingCallback(NULL),
- _packetRequestCallback(NULL),
- render_buffer_callback_(NULL),
- _decoder(NULL),
-#ifdef DEBUG_DECODER_BIT_STREAM
- _bitStreamBeforeDecoder(NULL),
-#endif
+ _frameTypeCallback(nullptr),
+ _receiveStatsCallback(nullptr),
+ _decoderTimingCallback(nullptr),
+ _packetRequestCallback(nullptr),
+ _decoder(nullptr),
_frameFromFile(),
_scheduleKeyRequest(false),
drop_frames_until_keyframe_(false),
max_nack_list_size_(0),
_codecDataBase(nullptr, nullptr),
- pre_decode_image_callback_(NULL),
+ pre_decode_image_callback_(pre_decode_image_callback),
_receiveStatsTimer(1000, clock_),
_retransmissionTimer(10, clock_),
- _keyRequestTimer(500, clock_) {
- assert(clock_);
-#ifdef DEBUG_DECODER_BIT_STREAM
- _bitStreamBeforeDecoder = fopen("decoderBitStream.bit", "wb");
-#endif
-}
+ _keyRequestTimer(500, clock_) {}
-VideoReceiver::~VideoReceiver() {
- delete _receiveCritSect;
-#ifdef DEBUG_DECODER_BIT_STREAM
- fclose(_bitStreamBeforeDecoder);
-#endif
-}
+VideoReceiver::~VideoReceiver() {}
void VideoReceiver::Process() {
// Receive-side statistics
if (_receiveStatsTimer.TimeUntilProcess() == 0) {
_receiveStatsTimer.Processed();
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_receiveStatsCallback != NULL) {
+ rtc::CritScope cs(&process_crit_);
+ if (_receiveStatsCallback != nullptr) {
uint32_t bitRate;
uint32_t frameRate;
_receiver.ReceiveStatistics(&bitRate, &frameRate);
_receiveStatsCallback->OnReceiveRatesUpdated(bitRate, frameRate);
}
- if (_decoderTimingCallback != NULL) {
+ if (_decoderTimingCallback != nullptr) {
int decode_ms;
int max_decode_ms;
int current_delay_ms;
@@ -97,12 +80,6 @@ void VideoReceiver::Process() {
decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
jitter_buffer_ms, min_playout_delay_ms, render_delay_ms);
}
-
- // Size of render buffer.
- if (render_buffer_callback_) {
- int buffer_size_ms = _receiver.RenderBufferSizeMs();
- render_buffer_callback_->RenderBufferSizeMs(buffer_size_ms);
- }
}
// Key frame requests
@@ -110,8 +87,8 @@ void VideoReceiver::Process() {
_keyRequestTimer.Processed();
bool request_key_frame = false;
{
- CriticalSectionScoped cs(process_crit_sect_.get());
- request_key_frame = _scheduleKeyRequest && _frameTypeCallback != NULL;
+ rtc::CritScope cs(&process_crit_);
+ request_key_frame = _scheduleKeyRequest && _frameTypeCallback != nullptr;
}
if (request_key_frame)
RequestKeyFrame();
@@ -129,9 +106,9 @@ void VideoReceiver::Process() {
bool callback_registered = false;
uint16_t length;
{
- CriticalSectionScoped cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
length = max_nack_list_size_;
- callback_registered = _packetRequestCallback != NULL;
+ callback_registered = _packetRequestCallback != nullptr;
}
if (callback_registered && length > 0) {
// Collect sequence numbers from the default receiver.
@@ -142,8 +119,8 @@ void VideoReceiver::Process() {
ret = RequestKeyFrame();
}
if (ret == VCM_OK && !nackList.empty()) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_packetRequestCallback != NULL) {
+ rtc::CritScope cs(&process_crit_);
+ if (_packetRequestCallback != nullptr) {
_packetRequestCallback->ResendPackets(&nackList[0], nackList.size());
}
}
@@ -168,7 +145,7 @@ int64_t VideoReceiver::TimeUntilNextProcess() {
}
int32_t VideoReceiver::SetReceiveChannelParameters(int64_t rtt) {
- CriticalSectionScoped receiveCs(_receiveCritSect);
+ rtc::CritScope cs(&receive_crit_);
_receiver.UpdateRtt(rtt);
return 0;
}
@@ -189,9 +166,11 @@ int32_t VideoReceiver::SetVideoProtection(VCMVideoProtection videoProtection,
}
case kProtectionNackFEC: {
- CriticalSectionScoped cs(_receiveCritSect);
+ rtc::CritScope cs(&receive_crit_);
RTC_DCHECK(enable);
- _receiver.SetNackMode(kNack, media_optimization::kLowRttNackMs, -1);
+ _receiver.SetNackMode(kNack,
+ media_optimization::kLowRttNackMs,
+ media_optimization::kMaxRttDelayThreshold);
_receiver.SetDecodeErrorMode(kNoErrors);
break;
}
@@ -210,14 +189,14 @@ int32_t VideoReceiver::SetVideoProtection(VCMVideoProtection videoProtection,
// ready for rendering.
int32_t VideoReceiver::RegisterReceiveCallback(
VCMReceiveCallback* receiveCallback) {
- CriticalSectionScoped cs(_receiveCritSect);
+ rtc::CritScope cs(&receive_crit_);
_decodedFrameCallback.SetUserReceiveCallback(receiveCallback);
return VCM_OK;
}
int32_t VideoReceiver::RegisterReceiveStatisticsCallback(
VCMReceiveStatisticsCallback* receiveStats) {
- CriticalSectionScoped cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
_receiver.RegisterStatsCallback(receiveStats);
_receiveStatsCallback = receiveStats;
return VCM_OK;
@@ -225,7 +204,7 @@ int32_t VideoReceiver::RegisterReceiveStatisticsCallback(
int32_t VideoReceiver::RegisterDecoderTimingCallback(
VCMDecoderTimingCallback* decoderTiming) {
- CriticalSectionScoped cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
_decoderTimingCallback = decoderTiming;
return VCM_OK;
}
@@ -233,10 +212,10 @@ int32_t VideoReceiver::RegisterDecoderTimingCallback(
// Register an externally defined decoder object.
void VideoReceiver::RegisterExternalDecoder(VideoDecoder* externalDecoder,
uint8_t payloadType) {
- CriticalSectionScoped cs(_receiveCritSect);
- if (externalDecoder == NULL) {
+ rtc::CritScope cs(&receive_crit_);
+ if (externalDecoder == nullptr) {
// Make sure the VCM updates the decoder next time it decodes.
- _decoder = NULL;
+ _decoder = nullptr;
RTC_CHECK(_codecDataBase.DeregisterExternalDecoder(payloadType));
return;
}
@@ -246,25 +225,18 @@ void VideoReceiver::RegisterExternalDecoder(VideoDecoder* externalDecoder,
// Register a frame type request callback.
int32_t VideoReceiver::RegisterFrameTypeCallback(
VCMFrameTypeCallback* frameTypeCallback) {
- CriticalSectionScoped cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
_frameTypeCallback = frameTypeCallback;
return VCM_OK;
}
int32_t VideoReceiver::RegisterPacketRequestCallback(
VCMPacketRequestCallback* callback) {
- CriticalSectionScoped cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
_packetRequestCallback = callback;
return VCM_OK;
}
-int VideoReceiver::RegisterRenderBufferSizeCallback(
- VCMRenderBufferSizeCallback* callback) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- render_buffer_callback_ = callback;
- return VCM_OK;
-}
-
void VideoReceiver::TriggerDecoderShutdown() {
_receiver.TriggerDecoderShutdown();
}
@@ -275,7 +247,7 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
int64_t nextRenderTimeMs;
bool prefer_late_decoding = false;
{
- CriticalSectionScoped cs(_receiveCritSect);
+ rtc::CritScope cs(&receive_crit_);
prefer_late_decoding = _codecDataBase.PrefersLateDecoding();
}
@@ -286,7 +258,7 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
return VCM_FRAME_NOT_READY;
{
- CriticalSectionScoped cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
if (drop_frames_until_keyframe_) {
// Still getting delta frames, schedule another keyframe request as if
// decode failed.
@@ -298,11 +270,6 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
drop_frames_until_keyframe_ = false;
}
}
- CriticalSectionScoped cs(_receiveCritSect);
-
- // If this frame was too late, we should adjust the delay accordingly
- _timing.UpdateCurrentDelay(frame->RenderTimeMs(),
- clock_->TimeInMilliseconds());
if (pre_decode_image_callback_) {
EncodedImage encoded_image(frame->EncodedImage());
@@ -311,18 +278,20 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
encoded_image.qp_ = qp;
}
pre_decode_image_callback_->Encoded(encoded_image, frame->CodecSpecific(),
- NULL);
+ nullptr);
}
-#ifdef DEBUG_DECODER_BIT_STREAM
- if (_bitStreamBeforeDecoder != NULL) {
- // Write bit stream to file for debugging purposes
- if (fwrite(frame->Buffer(), 1, frame->Length(), _bitStreamBeforeDecoder) !=
- frame->Length()) {
- return -1;
- }
+ rtc::CritScope cs(&receive_crit_);
+ // If this frame was too late, we should adjust the delay accordingly
+ _timing.UpdateCurrentDelay(frame->RenderTimeMs(),
+ clock_->TimeInMilliseconds());
+
+ if (first_frame_received_()) {
+ LOG(LS_INFO) << "Received first "
+ << (frame->Complete() ? "complete" : "incomplete")
+ << " decodable video frame";
}
-#endif
+
const int32_t ret = Decode(*frame);
_receiver.ReleaseFrame(frame);
return ret;
@@ -331,8 +300,8 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
int32_t VideoReceiver::RequestSliceLossIndication(
const uint64_t pictureID) const {
TRACE_EVENT1("webrtc", "RequestSLI", "picture_id", pictureID);
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_frameTypeCallback != NULL) {
+ rtc::CritScope cs(&process_crit_);
+ if (_frameTypeCallback != nullptr) {
const int32_t ret =
_frameTypeCallback->SliceLossIndicationRequest(pictureID);
if (ret < 0) {
@@ -346,8 +315,8 @@ int32_t VideoReceiver::RequestSliceLossIndication(
int32_t VideoReceiver::RequestKeyFrame() {
TRACE_EVENT0("webrtc", "RequestKeyFrame");
- CriticalSectionScoped process_cs(process_crit_sect_.get());
- if (_frameTypeCallback != NULL) {
+ rtc::CritScope cs(&process_crit_);
+ if (_frameTypeCallback != nullptr) {
const int32_t ret = _frameTypeCallback->RequestKeyFrame();
if (ret < 0) {
return ret;
@@ -365,7 +334,7 @@ int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) {
"type", frame.FrameType());
// Change decoder if payload type has changed
_decoder = _codecDataBase.GetDecoder(frame, &_decodedFrameCallback);
- if (_decoder == NULL) {
+ if (_decoder == nullptr) {
return VCM_NO_CODEC_REGISTERED;
}
// Decode a frame
@@ -389,7 +358,7 @@ int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) {
ret = VCM_OK;
}
if (request_key_frame) {
- CriticalSectionScoped cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
_scheduleKeyRequest = true;
}
TRACE_EVENT_ASYNC_END0("webrtc", "Video", frame.TimeStamp());
@@ -400,8 +369,8 @@ int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) {
int32_t VideoReceiver::RegisterReceiveCodec(const VideoCodec* receiveCodec,
int32_t numberOfCores,
bool requireKeyFrame) {
- CriticalSectionScoped cs(_receiveCritSect);
- if (receiveCodec == NULL) {
+ rtc::CritScope cs(&receive_crit_);
+ if (receiveCodec == nullptr) {
return VCM_PARAMETER_ERROR;
}
if (!_codecDataBase.RegisterReceiveCodec(receiveCodec, numberOfCores,
@@ -413,8 +382,8 @@ int32_t VideoReceiver::RegisterReceiveCodec(const VideoCodec* receiveCodec,
// Get current received codec
int32_t VideoReceiver::ReceiveCodec(VideoCodec* currentReceiveCodec) const {
- CriticalSectionScoped cs(_receiveCritSect);
- if (currentReceiveCodec == NULL) {
+ rtc::CritScope cs(&receive_crit_);
+ if (currentReceiveCodec == nullptr) {
return VCM_PARAMETER_ERROR;
}
return _codecDataBase.ReceiveCodec(currentReceiveCodec) ? 0 : -1;
@@ -422,7 +391,7 @@ int32_t VideoReceiver::ReceiveCodec(VideoCodec* currentReceiveCodec) const {
// Get current received codec
VideoCodecType VideoReceiver::ReceiveCodec() const {
- CriticalSectionScoped cs(_receiveCritSect);
+ rtc::CritScope cs(&receive_crit_);
return _codecDataBase.ReceiveCodec();
}
@@ -434,7 +403,7 @@ int32_t VideoReceiver::IncomingPacket(const uint8_t* incomingPayload,
TRACE_EVENT1("webrtc", "VCM::PacketKeyFrame", "seqnum",
rtpInfo.header.sequenceNumber);
}
- if (incomingPayload == NULL) {
+ if (incomingPayload == nullptr) {
// The jitter buffer doesn't handle non-zero payload lengths for packets
// without payload.
// TODO(holmer): We should fix this in the jitter buffer.
@@ -443,11 +412,12 @@ int32_t VideoReceiver::IncomingPacket(const uint8_t* incomingPayload,
const VCMPacket packet(incomingPayload, payloadLength, rtpInfo);
int32_t ret = _receiver.InsertPacket(packet, rtpInfo.type.Video.width,
rtpInfo.type.Video.height);
+
// TODO(holmer): Investigate if this somehow should use the key frame
// request scheduling to throttle the requests.
if (ret == VCM_FLUSH_INDICATOR) {
{
- CriticalSectionScoped process_cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
drop_frames_until_keyframe_ = true;
}
RequestKeyFrame();
@@ -484,7 +454,7 @@ uint32_t VideoReceiver::DiscardedPackets() const {
int VideoReceiver::SetReceiverRobustnessMode(
ReceiverRobustness robustnessMode,
VCMDecodeErrorMode decode_error_mode) {
- CriticalSectionScoped cs(_receiveCritSect);
+ rtc::CritScope cs(&receive_crit_);
switch (robustnessMode) {
case VideoCodingModule::kNone:
_receiver.SetNackMode(kNoNack, -1, -1);
@@ -520,7 +490,7 @@ int VideoReceiver::SetReceiverRobustnessMode(
}
void VideoReceiver::SetDecodeErrorMode(VCMDecodeErrorMode decode_error_mode) {
- CriticalSectionScoped cs(_receiveCritSect);
+ rtc::CritScope cs(&receive_crit_);
_receiver.SetDecodeErrorMode(decode_error_mode);
}
@@ -528,7 +498,7 @@ void VideoReceiver::SetNackSettings(size_t max_nack_list_size,
int max_packet_age_to_nack,
int max_incomplete_time_ms) {
if (max_nack_list_size != 0) {
- CriticalSectionScoped process_cs(process_crit_sect_.get());
+ rtc::CritScope cs(&process_crit_);
max_nack_list_size_ = max_nack_list_size;
}
_receiver.SetNackSettings(max_nack_list_size, max_packet_age_to_nack,
@@ -539,11 +509,5 @@ int VideoReceiver::SetMinReceiverDelay(int desired_delay_ms) {
return _receiver.SetMinReceiverDelay(desired_delay_ms);
}
-void VideoReceiver::RegisterPreDecodeImageCallback(
- EncodedImageCallback* observer) {
- CriticalSectionScoped cs(_receiveCritSect);
- pre_decode_image_callback_ = observer;
-}
-
} // namespace vcm
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc
index 05656a5c254..3c414053bc2 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/video_receiver_unittest.cc
@@ -33,7 +33,7 @@ class TestVideoReceiver : public ::testing::Test {
TestVideoReceiver() : clock_(0) {}
virtual void SetUp() {
- receiver_.reset(new VideoReceiver(&clock_, &event_factory_));
+ receiver_.reset(new VideoReceiver(&clock_, &event_factory_, nullptr));
receiver_->RegisterExternalDecoder(&decoder_, kUnusedPayloadType);
const size_t kMaxNackListSize = 250;
const int kMaxPacketAgeToNack = 450;
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_sender.cc b/chromium/third_party/webrtc/modules/video_coding/video_sender.cc
index 49690697889..f52b1c56e7b 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_sender.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/video_sender.cc
@@ -27,27 +27,24 @@ namespace vcm {
VideoSender::VideoSender(Clock* clock,
EncodedImageCallback* post_encode_callback,
VideoEncoderRateObserver* encoder_rate_observer,
- VCMQMSettingsCallback* qm_settings_callback)
+ VCMSendStatisticsCallback* send_stats_callback)
: clock_(clock),
- process_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
_encoder(nullptr),
- _encodedFrameCallback(post_encode_callback),
_mediaOpt(clock_),
- _sendStatsCallback(nullptr),
+ _encodedFrameCallback(post_encode_callback, &_mediaOpt),
+ send_stats_callback_(send_stats_callback),
_codecDataBase(encoder_rate_observer, &_encodedFrameCallback),
frame_dropper_enabled_(true),
_sendStatsTimer(1000, clock_),
current_codec_(),
- qm_settings_callback_(qm_settings_callback),
protection_callback_(nullptr),
encoder_params_({0, 0, 0, 0}),
encoder_has_internal_source_(false),
next_frame_types_(1, kVideoFrameDelta) {
+ _mediaOpt.Reset();
// Allow VideoSender to be created on one thread but used on another, post
// construction. This is currently how this class is being used by at least
// one external project (diffractor).
- _mediaOpt.EnableQM(qm_settings_callback_ != nullptr);
- _mediaOpt.Reset();
main_thread_.DetachFromThread();
}
@@ -55,12 +52,19 @@ VideoSender::~VideoSender() {}
void VideoSender::Process() {
if (_sendStatsTimer.TimeUntilProcess() == 0) {
+ // |_sendStatsTimer.Processed()| must be called. Otherwise
+ // VideoSender::Process() will be called in an infinite loop.
_sendStatsTimer.Processed();
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_sendStatsCallback != nullptr) {
+ if (send_stats_callback_) {
uint32_t bitRate = _mediaOpt.SentBitRate();
uint32_t frameRate = _mediaOpt.SentFrameRate();
- _sendStatsCallback->SendStatistics(bitRate, frameRate);
+ std::string encoder_name;
+ {
+ rtc::CritScope cs(&params_crit_);
+ // Copy the string here so that we don't hold |params_crit_| in the CB.
+ encoder_name = encoder_name_;
+ }
+ send_stats_callback_->SendStatistics(bitRate, frameRate, encoder_name);
}
}
@@ -196,19 +200,38 @@ int VideoSender::FrameRate(unsigned int* framerate) const {
int32_t VideoSender::SetChannelParameters(uint32_t target_bitrate,
uint8_t lossRate,
int64_t rtt) {
- uint32_t target_rate =
- _mediaOpt.SetTargetRates(target_bitrate, lossRate, rtt,
- protection_callback_, qm_settings_callback_);
+ uint32_t target_rate = _mediaOpt.SetTargetRates(target_bitrate, lossRate, rtt,
+ protection_callback_);
uint32_t input_frame_rate = _mediaOpt.InputFrameRate();
- rtc::CritScope cs(&params_crit_);
- encoder_params_ = {target_rate, lossRate, rtt, input_frame_rate};
+ EncoderParameters encoder_params = {target_rate, lossRate, rtt,
+ input_frame_rate};
+ bool encoder_has_internal_source;
+ {
+ rtc::CritScope cs(&params_crit_);
+ encoder_params_ = encoder_params;
+ encoder_has_internal_source = encoder_has_internal_source_;
+ }
+
+ // For encoders with internal sources, we need to tell the encoder directly,
+ // instead of waiting for an AddVideoFrame that will never come (internal
+ // source encoders don't get input frames).
+ if (encoder_has_internal_source) {
+ rtc::CritScope cs(&encoder_crit_);
+ if (_encoder) {
+ SetEncoderParameters(encoder_params);
+ }
+ }
return VCM_OK;
}
void VideoSender::SetEncoderParameters(EncoderParameters params) {
+ // |target_bitrate == 0 | means that the network is down or the send pacer is
+ // full.
+ // TODO(perkj): Consider setting |target_bitrate| == 0 to the encoders.
+ // Especially if |encoder_has_internal_source_ | == true.
if (params.target_bitrate == 0)
return;
@@ -220,24 +243,6 @@ void VideoSender::SetEncoderParameters(EncoderParameters params) {
_encoder->SetEncoderParameters(params);
}
-int32_t VideoSender::RegisterTransportCallback(
- VCMPacketizationCallback* transport) {
- rtc::CritScope lock(&encoder_crit_);
- _encodedFrameCallback.SetMediaOpt(&_mediaOpt);
- _encodedFrameCallback.SetTransportCallback(transport);
- return VCM_OK;
-}
-
-// Register video output information callback which will be called to deliver
-// information about the video stream produced by the encoder, for instance the
-// average frame rate and bit rate.
-int32_t VideoSender::RegisterSendStatisticsCallback(
- VCMSendStatisticsCallback* sendStats) {
- CriticalSectionScoped cs(process_crit_sect_.get());
- _sendStatsCallback = sendStats;
- return VCM_OK;
-}
-
// Register a video protection callback which will be called to deliver the
// requested FEC rate and NACK status (on/off).
// Note: this callback is assumed to only be registered once and before it is
@@ -269,7 +274,6 @@ void VideoSender::SetVideoProtection(VCMVideoProtection videoProtection) {
}
// Add one raw video frame to the encoder, blocking.
int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
- const VideoContentMetrics* contentMetrics,
const CodecSpecificInfo* codecSpecificInfo) {
EncoderParameters encoder_params;
std::vector<FrameType> next_frame_types;
@@ -291,7 +295,6 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
_encoder->OnDroppedFrame();
return VCM_OK;
}
- _mediaOpt.UpdateContentData(contentMetrics);
// TODO(pbos): Make sure setting send codec is synchronized with video
// processing so frame size always matches.
if (!_codecDataBase.MatchesCurrentResolution(videoFrame.width(),
@@ -300,7 +303,8 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
return VCM_PARAMETER_ERROR;
}
VideoFrame converted_frame = videoFrame;
- if (converted_frame.native_handle() && !_encoder->SupportsNativeHandle()) {
+ if (converted_frame.video_frame_buffer()->native_handle() &&
+ !_encoder->SupportsNativeHandle()) {
// This module only supports software encoding.
// TODO(pbos): Offload conversion from the encoder thread.
converted_frame = converted_frame.ConvertNativeToI420Frame();
@@ -313,9 +317,12 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
LOG(LS_ERROR) << "Failed to encode frame. Error code: " << ret;
return ret;
}
+
{
- // Change all keyframe requests to encode delta frames the next time.
rtc::CritScope lock(&params_crit_);
+ encoder_name_ = _encoder->ImplementationName();
+
+ // Change all keyframe requests to encode delta frames the next time.
for (size_t i = 0; i < next_frame_types_.size(); ++i) {
// Check for equality (same requested as before encoding) to not
// accidentally drop a keyframe request while encoding.
@@ -323,16 +330,13 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
next_frame_types_[i] = kVideoFrameDelta;
}
}
- if (qm_settings_callback_)
- qm_settings_callback_->SetTargetFramerate(_encoder->GetTargetFramerate());
return VCM_OK;
}
-int32_t VideoSender::IntraFrameRequest(int stream_index) {
+int32_t VideoSender::IntraFrameRequest(size_t stream_index) {
{
rtc::CritScope lock(&params_crit_);
- if (stream_index < 0 ||
- static_cast<size_t>(stream_index) >= next_frame_types_.size()) {
+ if (stream_index >= next_frame_types_.size()) {
return -1;
}
next_frame_types_[stream_index] = kVideoFrameKey;
@@ -346,7 +350,7 @@ int32_t VideoSender::IntraFrameRequest(int stream_index) {
// encoder_crit_.
rtc::CritScope lock(&encoder_crit_);
rtc::CritScope params_lock(&params_crit_);
- if (static_cast<size_t>(stream_index) >= next_frame_types_.size())
+ if (stream_index >= next_frame_types_.size())
return -1;
if (_encoder != nullptr && _encoder->InternalSource()) {
// Try to request the frame if we have an external encoder with
diff --git a/chromium/third_party/webrtc/modules/video_coding/video_sender_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/video_sender_unittest.cc
index 3f9ba4eadda..5324ceeb0b8 100644
--- a/chromium/third_party/webrtc/modules/video_coding/video_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/video_sender_unittest.cc
@@ -86,19 +86,19 @@ class EmptyFrameGenerator : public FrameGenerator {
std::unique_ptr<VideoFrame> frame_;
};
-class PacketizationCallback : public VCMPacketizationCallback {
+class EncodedImageCallbackImpl : public EncodedImageCallback {
public:
- explicit PacketizationCallback(Clock* clock)
+ explicit EncodedImageCallbackImpl(Clock* clock)
: clock_(clock), start_time_ms_(clock_->TimeInMilliseconds()) {}
- virtual ~PacketizationCallback() {}
+ virtual ~EncodedImageCallbackImpl() {}
- int32_t SendData(uint8_t payload_type,
- const EncodedImage& encoded_image,
- const RTPFragmentationHeader* fragmentation_header,
- const RTPVideoHeader* rtp_video_header) override {
- assert(rtp_video_header);
- frame_data_.push_back(FrameData(encoded_image._length, *rtp_video_header));
+ int32_t Encoded(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const RTPFragmentationHeader* fragmentation) override {
+ assert(codec_specific_info);
+ frame_data_.push_back(
+ FrameData(encoded_image._length, *codec_specific_info));
return 0;
}
@@ -130,11 +130,12 @@ class PacketizationCallback : public VCMPacketizationCallback {
struct FrameData {
FrameData() {}
- FrameData(size_t payload_size, const RTPVideoHeader& rtp_video_header)
- : payload_size(payload_size), rtp_video_header(rtp_video_header) {}
+ FrameData(size_t payload_size, const CodecSpecificInfo& codec_specific_info)
+ : payload_size(payload_size),
+ codec_specific_info(codec_specific_info) {}
size_t payload_size;
- RTPVideoHeader rtp_video_header;
+ CodecSpecificInfo codec_specific_info;
};
int64_t interval_ms() {
@@ -146,9 +147,9 @@ class PacketizationCallback : public VCMPacketizationCallback {
int CountFramesWithinTemporalLayer(int temporal_layer) {
int frames = 0;
for (size_t i = 0; i < frame_data_.size(); ++i) {
- EXPECT_EQ(kRtpVideoVp8, frame_data_[i].rtp_video_header.codec);
+ EXPECT_EQ(kVideoCodecVP8, frame_data_[i].codec_specific_info.codecType);
const uint8_t temporal_idx =
- frame_data_[i].rtp_video_header.codecHeader.VP8.temporalIdx;
+ frame_data_[i].codec_specific_info.codecSpecific.VP8.temporalIdx;
if (temporal_idx <= temporal_layer || temporal_idx == kNoTemporalIdx)
frames++;
}
@@ -158,9 +159,9 @@ class PacketizationCallback : public VCMPacketizationCallback {
size_t SumPayloadBytesWithinTemporalLayer(int temporal_layer) {
size_t payload_size = 0;
for (size_t i = 0; i < frame_data_.size(); ++i) {
- EXPECT_EQ(kRtpVideoVp8, frame_data_[i].rtp_video_header.codec);
+ EXPECT_EQ(kVideoCodecVP8, frame_data_[i].codec_specific_info.codecType);
const uint8_t temporal_idx =
- frame_data_[i].rtp_video_header.codecHeader.VP8.temporalIdx;
+ frame_data_[i].codec_specific_info.codecSpecific.VP8.temporalIdx;
if (temporal_idx <= temporal_layer || temporal_idx == kNoTemporalIdx)
payload_size += frame_data_[i].payload_size;
}
@@ -176,22 +177,20 @@ class TestVideoSender : public ::testing::Test {
protected:
// Note: simulated clock starts at 1 seconds, since parts of webrtc use 0 as
// a special case (e.g. frame rate in media optimization).
- TestVideoSender() : clock_(1000), packetization_callback_(&clock_) {}
+ TestVideoSender() : clock_(1000), encoded_frame_callback_(&clock_) {}
void SetUp() override {
sender_.reset(
- new VideoSender(&clock_, &post_encode_callback_, nullptr, nullptr));
- EXPECT_EQ(0, sender_->RegisterTransportCallback(&packetization_callback_));
+ new VideoSender(&clock_, &encoded_frame_callback_, nullptr, nullptr));
}
void AddFrame() {
assert(generator_.get());
- sender_->AddVideoFrame(*generator_->NextFrame(), NULL, NULL);
+ sender_->AddVideoFrame(*generator_->NextFrame(), NULL);
}
SimulatedClock clock_;
- PacketizationCallback packetization_callback_;
- MockEncodedImageCallback post_encode_callback_;
+ EncodedImageCallbackImpl encoded_frame_callback_;
// Used by subclassing tests, need to outlive sender_.
std::unique_ptr<VideoEncoder> encoder_;
std::unique_ptr<VideoSender> sender_;
@@ -291,9 +290,17 @@ TEST_F(TestVideoSenderWithMockEncoder, TestIntraRequests) {
EXPECT_EQ(-1, sender_->IntraFrameRequest(3));
ExpectIntraRequest(-1);
AddFrame();
+}
- EXPECT_EQ(-1, sender_->IntraFrameRequest(-1));
- ExpectIntraRequest(-1);
+TEST_F(TestVideoSenderWithMockEncoder, TestSetRate) {
+ const uint32_t new_bitrate = settings_.startBitrate + 300;
+ EXPECT_CALL(encoder_, SetRates(new_bitrate, _)).Times(1).WillOnce(Return(0));
+ sender_->SetChannelParameters(new_bitrate * 1000, 0, 200);
+ AddFrame();
+
+ // Expect no call to encoder_.SetRates if the new bitrate is zero.
+ EXPECT_CALL(encoder_, SetRates(new_bitrate, _)).Times(0);
+ sender_->SetChannelParameters(0, 0, 200);
AddFrame();
}
@@ -314,7 +321,19 @@ TEST_F(TestVideoSenderWithMockEncoder, TestIntraRequestsInternalCapture) {
EXPECT_EQ(0, sender_->IntraFrameRequest(2));
// No requests expected since these indices are out of bounds.
EXPECT_EQ(-1, sender_->IntraFrameRequest(3));
- EXPECT_EQ(-1, sender_->IntraFrameRequest(-1));
+}
+
+TEST_F(TestVideoSenderWithMockEncoder, TestEncoderParametersForInternalSource) {
+ // De-register current external encoder.
+ sender_->RegisterExternalEncoder(nullptr, kUnusedPayloadType, false);
+ // Register encoder with internal capture.
+ sender_->RegisterExternalEncoder(&encoder_, kUnusedPayloadType, true);
+ EXPECT_EQ(0, sender_->RegisterSendCodec(&settings_, 1, 1200));
+ // Update encoder bitrate parameters. We expect that to immediately call
+ // SetRates on the encoder without waiting for AddFrame processing.
+ const uint32_t new_bitrate = settings_.startBitrate + 300;
+ EXPECT_CALL(encoder_, SetRates(new_bitrate, _)).Times(1).WillOnce(Return(0));
+ sender_->SetChannelParameters(new_bitrate * 1000, 0, 200);
}
TEST_F(TestVideoSenderWithMockEncoder, EncoderFramerateUpdatedViaProcess) {
@@ -402,8 +421,6 @@ class TestVideoSenderWithVp8 : public TestVideoSender {
void InsertFrames(float framerate, float seconds) {
for (int i = 0; i < seconds * framerate; ++i) {
clock_.AdvanceTimeMilliseconds(1000.0f / framerate);
- EXPECT_CALL(post_encode_callback_, Encoded(_, NULL, NULL))
- .WillOnce(Return(0));
AddFrame();
// SetChannelParameters needs to be called frequently to propagate
// framerate from the media optimization into the encoder.
@@ -422,10 +439,10 @@ class TestVideoSenderWithVp8 : public TestVideoSender {
// It appears that this 5 seconds simulation is needed to allow
// bitrate and framerate to stabilize.
InsertFrames(framerate, short_simulation_interval);
- packetization_callback_.Reset();
+ encoded_frame_callback_.Reset();
InsertFrames(framerate, long_simulation_interval);
- return packetization_callback_.CalculateVp8StreamInfo();
+ return encoded_frame_callback_.CalculateVp8StreamInfo();
}
protected:
diff --git a/chromium/third_party/webrtc/modules/video_processing/BUILD.gn b/chromium/third_party/webrtc/modules/video_processing/BUILD.gn
index 43a8de12557..1177d9b7f0d 100644
--- a/chromium/third_party/webrtc/modules/video_processing/BUILD.gn
+++ b/chromium/third_party/webrtc/modules/video_processing/BUILD.gn
@@ -13,12 +13,6 @@ build_video_processing_sse2 = current_cpu == "x86" || current_cpu == "x64"
source_set("video_processing") {
sources = [
- "brightness_detection.cc",
- "brightness_detection.h",
- "content_analysis.cc",
- "content_analysis.h",
- "deflickering.cc",
- "deflickering.h",
"frame_preprocessor.cc",
"frame_preprocessor.h",
"include/video_processing.h",
@@ -67,7 +61,6 @@ source_set("video_processing") {
if (build_video_processing_sse2) {
source_set("video_processing_sse2") {
sources = [
- "content_analysis_sse2.cc",
"util/denoiser_filter_sse2.cc",
"util/denoiser_filter_sse2.h",
]
diff --git a/chromium/third_party/webrtc/modules/video_processing/brightness_detection.cc b/chromium/third_party/webrtc/modules/video_processing/brightness_detection.cc
deleted file mode 100644
index 7455cf97591..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/brightness_detection.cc
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_processing/brightness_detection.h"
-
-#include <math.h>
-
-#include "webrtc/modules/video_processing/include/video_processing.h"
-
-namespace webrtc {
-
-VPMBrightnessDetection::VPMBrightnessDetection() {
- Reset();
-}
-
-VPMBrightnessDetection::~VPMBrightnessDetection() {}
-
-void VPMBrightnessDetection::Reset() {
- frame_cnt_bright_ = 0;
- frame_cnt_dark_ = 0;
-}
-
-int32_t VPMBrightnessDetection::ProcessFrame(
- const VideoFrame& frame,
- const VideoProcessing::FrameStats& stats) {
- if (frame.IsZeroSize()) {
- return VPM_PARAMETER_ERROR;
- }
- int width = frame.width();
- int height = frame.height();
-
- if (!VideoProcessing::ValidFrameStats(stats)) {
- return VPM_PARAMETER_ERROR;
- }
-
- const uint8_t frame_cnt_alarm = 2;
-
- // Get proportion in lowest bins.
- uint8_t low_th = 20;
- float prop_low = 0;
- for (uint32_t i = 0; i < low_th; i++) {
- prop_low += stats.hist[i];
- }
- prop_low /= stats.num_pixels;
-
- // Get proportion in highest bins.
- unsigned char high_th = 230;
- float prop_high = 0;
- for (uint32_t i = high_th; i < 256; i++) {
- prop_high += stats.hist[i];
- }
- prop_high /= stats.num_pixels;
-
- if (prop_high < 0.4) {
- if (stats.mean < 90 || stats.mean > 170) {
- // Standard deviation of Y
- const uint8_t* buffer = frame.buffer(kYPlane);
- float std_y = 0;
- for (int h = 0; h < height; h += (1 << stats.sub_sampling_factor)) {
- int row = h * width;
- for (int w = 0; w < width; w += (1 << stats.sub_sampling_factor)) {
- std_y +=
- (buffer[w + row] - stats.mean) * (buffer[w + row] - stats.mean);
- }
- }
- std_y = sqrt(std_y / stats.num_pixels);
-
- // Get percentiles.
- uint32_t sum = 0;
- uint32_t median_y = 140;
- uint32_t perc05 = 0;
- uint32_t perc95 = 255;
- float pos_perc05 = stats.num_pixels * 0.05f;
- float pos_median = stats.num_pixels * 0.5f;
- float posPerc95 = stats.num_pixels * 0.95f;
- for (uint32_t i = 0; i < 256; i++) {
- sum += stats.hist[i];
- if (sum < pos_perc05)
- perc05 = i; // 5th perc.
- if (sum < pos_median)
- median_y = i; // 50th perc.
- if (sum < posPerc95)
- perc95 = i; // 95th perc.
- else
- break;
- }
-
- // Check if image is too dark
- if ((std_y < 55) && (perc05 < 50)) {
- if (median_y < 60 || stats.mean < 80 || perc95 < 130 ||
- prop_low > 0.20) {
- frame_cnt_dark_++;
- } else {
- frame_cnt_dark_ = 0;
- }
- } else {
- frame_cnt_dark_ = 0;
- }
-
- // Check if image is too bright
- if ((std_y < 52) && (perc95 > 200) && (median_y > 160)) {
- if (median_y > 185 || stats.mean > 185 || perc05 > 140 ||
- prop_high > 0.25) {
- frame_cnt_bright_++;
- } else {
- frame_cnt_bright_ = 0;
- }
- } else {
- frame_cnt_bright_ = 0;
- }
- } else {
- frame_cnt_dark_ = 0;
- frame_cnt_bright_ = 0;
- }
- } else {
- frame_cnt_bright_++;
- frame_cnt_dark_ = 0;
- }
-
- if (frame_cnt_dark_ > frame_cnt_alarm) {
- return VideoProcessing::kDarkWarning;
- } else if (frame_cnt_bright_ > frame_cnt_alarm) {
- return VideoProcessing::kBrightWarning;
- } else {
- return VideoProcessing::kNoWarning;
- }
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/brightness_detection.h b/chromium/third_party/webrtc/modules/video_processing/brightness_detection.h
deleted file mode 100644
index 78a7ac5e0bf..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/brightness_detection.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_BRIGHTNESS_DETECTION_H_
-#define WEBRTC_MODULES_VIDEO_PROCESSING_BRIGHTNESS_DETECTION_H_
-
-#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class VPMBrightnessDetection {
- public:
- VPMBrightnessDetection();
- ~VPMBrightnessDetection();
-
- void Reset();
- int32_t ProcessFrame(const VideoFrame& frame,
- const VideoProcessing::FrameStats& stats);
-
- private:
- uint32_t frame_cnt_bright_;
- uint32_t frame_cnt_dark_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_PROCESSING_BRIGHTNESS_DETECTION_H_
diff --git a/chromium/third_party/webrtc/modules/video_processing/content_analysis.cc b/chromium/third_party/webrtc/modules/video_processing/content_analysis.cc
deleted file mode 100644
index 54c04da4668..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/content_analysis.cc
+++ /dev/null
@@ -1,281 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#include "webrtc/modules/video_processing/content_analysis.h"
-
-#include <math.h>
-#include <stdlib.h>
-
-#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-namespace webrtc {
-
-VPMContentAnalysis::VPMContentAnalysis(bool runtime_cpu_detection)
- : orig_frame_(NULL),
- prev_frame_(NULL),
- width_(0),
- height_(0),
- skip_num_(1),
- border_(8),
- motion_magnitude_(0.0f),
- spatial_pred_err_(0.0f),
- spatial_pred_err_h_(0.0f),
- spatial_pred_err_v_(0.0f),
- first_frame_(true),
- ca_Init_(false),
- content_metrics_(NULL) {
- ComputeSpatialMetrics = &VPMContentAnalysis::ComputeSpatialMetrics_C;
- TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_C;
-
- if (runtime_cpu_detection) {
-#if defined(WEBRTC_ARCH_X86_FAMILY)
- if (WebRtc_GetCPUInfo(kSSE2)) {
- ComputeSpatialMetrics = &VPMContentAnalysis::ComputeSpatialMetrics_SSE2;
- TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_SSE2;
- }
-#endif
- }
- Release();
-}
-
-VPMContentAnalysis::~VPMContentAnalysis() {
- Release();
-}
-
-VideoContentMetrics* VPMContentAnalysis::ComputeContentMetrics(
- const VideoFrame& inputFrame) {
- if (inputFrame.IsZeroSize())
- return NULL;
-
- // Init if needed (native dimension change).
- if (width_ != inputFrame.width() || height_ != inputFrame.height()) {
- if (VPM_OK != Initialize(inputFrame.width(), inputFrame.height()))
- return NULL;
- }
- // Only interested in the Y plane.
- orig_frame_ = inputFrame.buffer(kYPlane);
-
- // Compute spatial metrics: 3 spatial prediction errors.
- (this->*ComputeSpatialMetrics)();
-
- // Compute motion metrics
- if (first_frame_ == false)
- ComputeMotionMetrics();
-
- // Saving current frame as previous one: Y only.
- memcpy(prev_frame_, orig_frame_, width_ * height_);
-
- first_frame_ = false;
- ca_Init_ = true;
-
- return ContentMetrics();
-}
-
-int32_t VPMContentAnalysis::Release() {
- if (content_metrics_ != NULL) {
- delete content_metrics_;
- content_metrics_ = NULL;
- }
-
- if (prev_frame_ != NULL) {
- delete[] prev_frame_;
- prev_frame_ = NULL;
- }
-
- width_ = 0;
- height_ = 0;
- first_frame_ = true;
-
- return VPM_OK;
-}
-
-int32_t VPMContentAnalysis::Initialize(int width, int height) {
- width_ = width;
- height_ = height;
- first_frame_ = true;
-
- // skip parameter: # of skipped rows: for complexity reduction
- // temporal also currently uses it for column reduction.
- skip_num_ = 1;
-
- // use skipNum = 2 for 4CIF, WHD
- if ((height_ >= 576) && (width_ >= 704)) {
- skip_num_ = 2;
- }
- // use skipNum = 4 for FULLL_HD images
- if ((height_ >= 1080) && (width_ >= 1920)) {
- skip_num_ = 4;
- }
-
- if (content_metrics_ != NULL) {
- delete content_metrics_;
- }
-
- if (prev_frame_ != NULL) {
- delete[] prev_frame_;
- }
-
- // Spatial Metrics don't work on a border of 8. Minimum processing
- // block size is 16 pixels. So make sure the width and height support this.
- if (width_ <= 32 || height_ <= 32) {
- ca_Init_ = false;
- return VPM_PARAMETER_ERROR;
- }
-
- content_metrics_ = new VideoContentMetrics();
- if (content_metrics_ == NULL) {
- return VPM_MEMORY;
- }
-
- prev_frame_ = new uint8_t[width_ * height_]; // Y only.
- if (prev_frame_ == NULL)
- return VPM_MEMORY;
-
- return VPM_OK;
-}
-
-// Compute motion metrics: magnitude over non-zero motion vectors,
-// and size of zero cluster
-int32_t VPMContentAnalysis::ComputeMotionMetrics() {
- // Motion metrics: only one is derived from normalized
- // (MAD) temporal difference
- (this->*TemporalDiffMetric)();
- return VPM_OK;
-}
-
-// Normalized temporal difference (MAD): used as a motion level metric
-// Normalize MAD by spatial contrast: images with more contrast
-// (pixel variance) likely have larger temporal difference
-// To reduce complexity, we compute the metric for a reduced set of points.
-int32_t VPMContentAnalysis::TemporalDiffMetric_C() {
- // size of original frame
- int sizei = height_;
- int sizej = width_;
- uint32_t tempDiffSum = 0;
- uint32_t pixelSum = 0;
- uint64_t pixelSqSum = 0;
-
- uint32_t num_pixels = 0; // Counter for # of pixels.
- const int width_end = ((width_ - 2 * border_) & -16) + border_;
-
- for (int i = border_; i < sizei - border_; i += skip_num_) {
- for (int j = border_; j < width_end; j++) {
- num_pixels += 1;
- int ssn = i * sizej + j;
-
- uint8_t currPixel = orig_frame_[ssn];
- uint8_t prevPixel = prev_frame_[ssn];
-
- tempDiffSum +=
- static_cast<uint32_t>(abs((int16_t)(currPixel - prevPixel)));
- pixelSum += static_cast<uint32_t>(currPixel);
- pixelSqSum += static_cast<uint64_t>(currPixel * currPixel);
- }
- }
-
- // Default.
- motion_magnitude_ = 0.0f;
-
- if (tempDiffSum == 0)
- return VPM_OK;
-
- // Normalize over all pixels.
- float const tempDiffAvg =
- static_cast<float>(tempDiffSum) / static_cast<float>(num_pixels);
- float const pixelSumAvg =
- static_cast<float>(pixelSum) / static_cast<float>(num_pixels);
- float const pixelSqSumAvg =
- static_cast<float>(pixelSqSum) / static_cast<float>(num_pixels);
- float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg);
-
- if (contrast > 0.0) {
- contrast = sqrt(contrast);
- motion_magnitude_ = tempDiffAvg / contrast;
- }
- return VPM_OK;
-}
-
-// Compute spatial metrics:
-// To reduce complexity, we compute the metric for a reduced set of points.
-// The spatial metrics are rough estimates of the prediction error cost for
-// each QM spatial mode: 2x2,1x2,2x1
-// The metrics are a simple estimate of the up-sampling prediction error,
-// estimated assuming sub-sampling for decimation (no filtering),
-// and up-sampling back up with simple bilinear interpolation.
-int32_t VPMContentAnalysis::ComputeSpatialMetrics_C() {
- const int sizei = height_;
- const int sizej = width_;
-
- // Pixel mean square average: used to normalize the spatial metrics.
- uint32_t pixelMSA = 0;
-
- uint32_t spatialErrSum = 0;
- uint32_t spatialErrVSum = 0;
- uint32_t spatialErrHSum = 0;
-
- // make sure work section is a multiple of 16
- const int width_end = ((sizej - 2 * border_) & -16) + border_;
-
- for (int i = border_; i < sizei - border_; i += skip_num_) {
- for (int j = border_; j < width_end; j++) {
- int ssn1 = i * sizej + j;
- int ssn2 = (i + 1) * sizej + j; // bottom
- int ssn3 = (i - 1) * sizej + j; // top
- int ssn4 = i * sizej + j + 1; // right
- int ssn5 = i * sizej + j - 1; // left
-
- uint16_t refPixel1 = orig_frame_[ssn1] << 1;
- uint16_t refPixel2 = orig_frame_[ssn1] << 2;
-
- uint8_t bottPixel = orig_frame_[ssn2];
- uint8_t topPixel = orig_frame_[ssn3];
- uint8_t rightPixel = orig_frame_[ssn4];
- uint8_t leftPixel = orig_frame_[ssn5];
-
- spatialErrSum += static_cast<uint32_t>(abs(static_cast<int16_t>(
- refPixel2 - static_cast<uint16_t>(bottPixel + topPixel + leftPixel +
- rightPixel))));
- spatialErrVSum += static_cast<uint32_t>(abs(static_cast<int16_t>(
- refPixel1 - static_cast<uint16_t>(bottPixel + topPixel))));
- spatialErrHSum += static_cast<uint32_t>(abs(static_cast<int16_t>(
- refPixel1 - static_cast<uint16_t>(leftPixel + rightPixel))));
- pixelMSA += orig_frame_[ssn1];
- }
- }
-
- // Normalize over all pixels.
- const float spatialErr = static_cast<float>(spatialErrSum >> 2);
- const float spatialErrH = static_cast<float>(spatialErrHSum >> 1);
- const float spatialErrV = static_cast<float>(spatialErrVSum >> 1);
- const float norm = static_cast<float>(pixelMSA);
-
- // 2X2:
- spatial_pred_err_ = spatialErr / norm;
- // 1X2:
- spatial_pred_err_h_ = spatialErrH / norm;
- // 2X1:
- spatial_pred_err_v_ = spatialErrV / norm;
- return VPM_OK;
-}
-
-VideoContentMetrics* VPMContentAnalysis::ContentMetrics() {
- if (ca_Init_ == false)
- return NULL;
-
- content_metrics_->spatial_pred_err = spatial_pred_err_;
- content_metrics_->spatial_pred_err_h = spatial_pred_err_h_;
- content_metrics_->spatial_pred_err_v = spatial_pred_err_v_;
- // Motion metric: normalized temporal difference (MAD).
- content_metrics_->motion_magnitude = motion_magnitude_;
-
- return content_metrics_;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/content_analysis.h b/chromium/third_party/webrtc/modules/video_processing/content_analysis.h
deleted file mode 100644
index d3a11bd091d..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/content_analysis.h
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_CONTENT_ANALYSIS_H_
-#define WEBRTC_MODULES_VIDEO_PROCESSING_CONTENT_ANALYSIS_H_
-
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/video_processing/include/video_processing_defines.h"
-#include "webrtc/typedefs.h"
-#include "webrtc/video_frame.h"
-
-namespace webrtc {
-
-class VPMContentAnalysis {
- public:
- // When |runtime_cpu_detection| is true, runtime selection of an optimized
- // code path is allowed.
- explicit VPMContentAnalysis(bool runtime_cpu_detection);
- ~VPMContentAnalysis();
-
- // Initialize ContentAnalysis - should be called prior to
- // extractContentFeature
- // Inputs: width, height
- // Return value: 0 if OK, negative value upon error
- int32_t Initialize(int width, int height);
-
- // Extract content Feature - main function of ContentAnalysis
- // Input: new frame
- // Return value: pointer to structure containing content Analysis
- // metrics or NULL value upon error
- VideoContentMetrics* ComputeContentMetrics(const VideoFrame& inputFrame);
-
- // Release all allocated memory
- // Output: 0 if OK, negative value upon error
- int32_t Release();
-
- private:
- // return motion metrics
- VideoContentMetrics* ContentMetrics();
-
- // Normalized temporal difference metric: for motion magnitude
- typedef int32_t (VPMContentAnalysis::*TemporalDiffMetricFunc)();
- TemporalDiffMetricFunc TemporalDiffMetric;
- int32_t TemporalDiffMetric_C();
-
- // Motion metric method: call 2 metrics (magnitude and size)
- int32_t ComputeMotionMetrics();
-
- // Spatial metric method: computes the 3 frame-average spatial
- // prediction errors (1x2,2x1,2x2)
- typedef int32_t (VPMContentAnalysis::*ComputeSpatialMetricsFunc)();
- ComputeSpatialMetricsFunc ComputeSpatialMetrics;
- int32_t ComputeSpatialMetrics_C();
-
-#if defined(WEBRTC_ARCH_X86_FAMILY)
- int32_t ComputeSpatialMetrics_SSE2();
- int32_t TemporalDiffMetric_SSE2();
-#endif
-
- const uint8_t* orig_frame_;
- uint8_t* prev_frame_;
- int width_;
- int height_;
- int skip_num_;
- int border_;
-
- // Content Metrics: Stores the local average of the metrics.
- float motion_magnitude_; // motion class
- float spatial_pred_err_; // spatial class
- float spatial_pred_err_h_; // spatial class
- float spatial_pred_err_v_; // spatial class
- bool first_frame_;
- bool ca_Init_;
-
- VideoContentMetrics* content_metrics_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_PROCESSING_CONTENT_ANALYSIS_H_
diff --git a/chromium/third_party/webrtc/modules/video_processing/content_analysis_sse2.cc b/chromium/third_party/webrtc/modules/video_processing/content_analysis_sse2.cc
deleted file mode 100644
index 7a60a89b454..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/content_analysis_sse2.cc
+++ /dev/null
@@ -1,271 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_processing/content_analysis.h"
-
-#include <emmintrin.h>
-#include <math.h>
-
-namespace webrtc {
-
-int32_t VPMContentAnalysis::TemporalDiffMetric_SSE2() {
- uint32_t num_pixels = 0; // counter for # of pixels
- const uint8_t* imgBufO = orig_frame_ + border_ * width_ + border_;
- const uint8_t* imgBufP = prev_frame_ + border_ * width_ + border_;
-
- const int32_t width_end = ((width_ - 2 * border_) & -16) + border_;
-
- __m128i sad_64 = _mm_setzero_si128();
- __m128i sum_64 = _mm_setzero_si128();
- __m128i sqsum_64 = _mm_setzero_si128();
- const __m128i z = _mm_setzero_si128();
-
- for (uint16_t i = 0; i < (height_ - 2 * border_); i += skip_num_) {
- __m128i sqsum_32 = _mm_setzero_si128();
-
- const uint8_t* lineO = imgBufO;
- const uint8_t* lineP = imgBufP;
-
- // Work on 16 pixels at a time. For HD content with a width of 1920
- // this loop will run ~67 times (depending on border). Maximum for
- // abs(o-p) and sum(o) will be 255. _mm_sad_epu8 produces 2 64 bit
- // results which are then accumulated. There is no chance of
- // rollover for these two accumulators.
- // o*o will have a maximum of 255*255 = 65025. This will roll over
- // a 16 bit accumulator as 67*65025 > 65535, but will fit in a
- // 32 bit accumulator.
- for (uint16_t j = 0; j < width_end - border_; j += 16) {
- const __m128i o = _mm_loadu_si128((__m128i*)(lineO));
- const __m128i p = _mm_loadu_si128((__m128i*)(lineP));
-
- lineO += 16;
- lineP += 16;
-
- // Abs pixel difference between frames.
- sad_64 = _mm_add_epi64(sad_64, _mm_sad_epu8(o, p));
-
- // sum of all pixels in frame
- sum_64 = _mm_add_epi64(sum_64, _mm_sad_epu8(o, z));
-
- // Squared sum of all pixels in frame.
- const __m128i olo = _mm_unpacklo_epi8(o, z);
- const __m128i ohi = _mm_unpackhi_epi8(o, z);
-
- const __m128i sqsum_32_lo = _mm_madd_epi16(olo, olo);
- const __m128i sqsum_32_hi = _mm_madd_epi16(ohi, ohi);
-
- sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_lo);
- sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_hi);
- }
-
- // Add to 64 bit running sum as to not roll over.
- sqsum_64 =
- _mm_add_epi64(sqsum_64, _mm_add_epi64(_mm_unpackhi_epi32(sqsum_32, z),
- _mm_unpacklo_epi32(sqsum_32, z)));
-
- imgBufO += width_ * skip_num_;
- imgBufP += width_ * skip_num_;
- num_pixels += (width_end - border_);
- }
-
- __m128i sad_final_128;
- __m128i sum_final_128;
- __m128i sqsum_final_128;
-
- // Bring sums out of vector registers and into integer register
- // domain, summing them along the way.
- _mm_store_si128(&sad_final_128, sad_64);
- _mm_store_si128(&sum_final_128, sum_64);
- _mm_store_si128(&sqsum_final_128, sqsum_64);
-
- uint64_t* sad_final_64 = reinterpret_cast<uint64_t*>(&sad_final_128);
- uint64_t* sum_final_64 = reinterpret_cast<uint64_t*>(&sum_final_128);
- uint64_t* sqsum_final_64 = reinterpret_cast<uint64_t*>(&sqsum_final_128);
-
- const uint32_t pixelSum = sum_final_64[0] + sum_final_64[1];
- const uint64_t pixelSqSum = sqsum_final_64[0] + sqsum_final_64[1];
- const uint32_t tempDiffSum = sad_final_64[0] + sad_final_64[1];
-
- // Default.
- motion_magnitude_ = 0.0f;
-
- if (tempDiffSum == 0)
- return VPM_OK;
-
- // Normalize over all pixels.
- const float tempDiffAvg =
- static_cast<float>(tempDiffSum) / static_cast<float>(num_pixels);
- const float pixelSumAvg =
- static_cast<float>(pixelSum) / static_cast<float>(num_pixels);
- const float pixelSqSumAvg =
- static_cast<float>(pixelSqSum) / static_cast<float>(num_pixels);
- float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg);
-
- if (contrast > 0.0) {
- contrast = sqrt(contrast);
- motion_magnitude_ = tempDiffAvg / contrast;
- }
-
- return VPM_OK;
-}
-
-int32_t VPMContentAnalysis::ComputeSpatialMetrics_SSE2() {
- const uint8_t* imgBuf = orig_frame_ + border_ * width_;
- const int32_t width_end = ((width_ - 2 * border_) & -16) + border_;
-
- __m128i se_32 = _mm_setzero_si128();
- __m128i sev_32 = _mm_setzero_si128();
- __m128i seh_32 = _mm_setzero_si128();
- __m128i msa_32 = _mm_setzero_si128();
- const __m128i z = _mm_setzero_si128();
-
- // Error is accumulated as a 32 bit value. Looking at HD content with a
- // height of 1080 lines, or about 67 macro blocks. If the 16 bit row
- // value is maxed out at 65529 for every row, 65529*1080 = 70777800, which
- // will not roll over a 32 bit accumulator.
- // skip_num_ is also used to reduce the number of rows
- for (int32_t i = 0; i < (height_ - 2 * border_); i += skip_num_) {
- __m128i se_16 = _mm_setzero_si128();
- __m128i sev_16 = _mm_setzero_si128();
- __m128i seh_16 = _mm_setzero_si128();
- __m128i msa_16 = _mm_setzero_si128();
-
- // Row error is accumulated as a 16 bit value. There are 8
- // accumulators. Max value of a 16 bit number is 65529. Looking
- // at HD content, 1080p, has a width of 1920, 120 macro blocks.
- // A mb at a time is processed at a time. Absolute max error at
- // a point would be abs(0-255+255+255+255) which equals 1020.
- // 120*1020 = 122400. The probability of hitting this is quite low
- // on well behaved content. A specially crafted image could roll over.
- // border_ could also be adjusted to concentrate on just the center of
- // the images for an HD capture in order to reduce the possiblity of
- // rollover.
- const uint8_t* lineTop = imgBuf - width_ + border_;
- const uint8_t* lineCen = imgBuf + border_;
- const uint8_t* lineBot = imgBuf + width_ + border_;
-
- for (int32_t j = 0; j < width_end - border_; j += 16) {
- const __m128i t = _mm_loadu_si128((__m128i*)(lineTop));
- const __m128i l = _mm_loadu_si128((__m128i*)(lineCen - 1));
- const __m128i c = _mm_loadu_si128((__m128i*)(lineCen));
- const __m128i r = _mm_loadu_si128((__m128i*)(lineCen + 1));
- const __m128i b = _mm_loadu_si128((__m128i*)(lineBot));
-
- lineTop += 16;
- lineCen += 16;
- lineBot += 16;
-
- // center pixel unpacked
- __m128i clo = _mm_unpacklo_epi8(c, z);
- __m128i chi = _mm_unpackhi_epi8(c, z);
-
- // left right pixels unpacked and added together
- const __m128i lrlo =
- _mm_add_epi16(_mm_unpacklo_epi8(l, z), _mm_unpacklo_epi8(r, z));
- const __m128i lrhi =
- _mm_add_epi16(_mm_unpackhi_epi8(l, z), _mm_unpackhi_epi8(r, z));
-
- // top & bottom pixels unpacked and added together
- const __m128i tblo =
- _mm_add_epi16(_mm_unpacklo_epi8(t, z), _mm_unpacklo_epi8(b, z));
- const __m128i tbhi =
- _mm_add_epi16(_mm_unpackhi_epi8(t, z), _mm_unpackhi_epi8(b, z));
-
- // running sum of all pixels
- msa_16 = _mm_add_epi16(msa_16, _mm_add_epi16(chi, clo));
-
- clo = _mm_slli_epi16(clo, 1);
- chi = _mm_slli_epi16(chi, 1);
- const __m128i sevtlo = _mm_subs_epi16(clo, tblo);
- const __m128i sevthi = _mm_subs_epi16(chi, tbhi);
- const __m128i sehtlo = _mm_subs_epi16(clo, lrlo);
- const __m128i sehthi = _mm_subs_epi16(chi, lrhi);
-
- clo = _mm_slli_epi16(clo, 1);
- chi = _mm_slli_epi16(chi, 1);
- const __m128i setlo = _mm_subs_epi16(clo, _mm_add_epi16(lrlo, tblo));
- const __m128i sethi = _mm_subs_epi16(chi, _mm_add_epi16(lrhi, tbhi));
-
- // Add to 16 bit running sum
- se_16 =
- _mm_add_epi16(se_16, _mm_max_epi16(setlo, _mm_subs_epi16(z, setlo)));
- se_16 =
- _mm_add_epi16(se_16, _mm_max_epi16(sethi, _mm_subs_epi16(z, sethi)));
- sev_16 = _mm_add_epi16(sev_16,
- _mm_max_epi16(sevtlo, _mm_subs_epi16(z, sevtlo)));
- sev_16 = _mm_add_epi16(sev_16,
- _mm_max_epi16(sevthi, _mm_subs_epi16(z, sevthi)));
- seh_16 = _mm_add_epi16(seh_16,
- _mm_max_epi16(sehtlo, _mm_subs_epi16(z, sehtlo)));
- seh_16 = _mm_add_epi16(seh_16,
- _mm_max_epi16(sehthi, _mm_subs_epi16(z, sehthi)));
- }
-
- // Add to 32 bit running sum as to not roll over.
- se_32 = _mm_add_epi32(se_32, _mm_add_epi32(_mm_unpackhi_epi16(se_16, z),
- _mm_unpacklo_epi16(se_16, z)));
- sev_32 =
- _mm_add_epi32(sev_32, _mm_add_epi32(_mm_unpackhi_epi16(sev_16, z),
- _mm_unpacklo_epi16(sev_16, z)));
- seh_32 =
- _mm_add_epi32(seh_32, _mm_add_epi32(_mm_unpackhi_epi16(seh_16, z),
- _mm_unpacklo_epi16(seh_16, z)));
- msa_32 =
- _mm_add_epi32(msa_32, _mm_add_epi32(_mm_unpackhi_epi16(msa_16, z),
- _mm_unpacklo_epi16(msa_16, z)));
-
- imgBuf += width_ * skip_num_;
- }
-
- __m128i se_128;
- __m128i sev_128;
- __m128i seh_128;
- __m128i msa_128;
-
- // Bring sums out of vector registers and into integer register
- // domain, summing them along the way.
- _mm_store_si128(&se_128, _mm_add_epi64(_mm_unpackhi_epi32(se_32, z),
- _mm_unpacklo_epi32(se_32, z)));
- _mm_store_si128(&sev_128, _mm_add_epi64(_mm_unpackhi_epi32(sev_32, z),
- _mm_unpacklo_epi32(sev_32, z)));
- _mm_store_si128(&seh_128, _mm_add_epi64(_mm_unpackhi_epi32(seh_32, z),
- _mm_unpacklo_epi32(seh_32, z)));
- _mm_store_si128(&msa_128, _mm_add_epi64(_mm_unpackhi_epi32(msa_32, z),
- _mm_unpacklo_epi32(msa_32, z)));
-
- uint64_t* se_64 = reinterpret_cast<uint64_t*>(&se_128);
- uint64_t* sev_64 = reinterpret_cast<uint64_t*>(&sev_128);
- uint64_t* seh_64 = reinterpret_cast<uint64_t*>(&seh_128);
- uint64_t* msa_64 = reinterpret_cast<uint64_t*>(&msa_128);
-
- const uint32_t spatialErrSum = se_64[0] + se_64[1];
- const uint32_t spatialErrVSum = sev_64[0] + sev_64[1];
- const uint32_t spatialErrHSum = seh_64[0] + seh_64[1];
- const uint32_t pixelMSA = msa_64[0] + msa_64[1];
-
- // Normalize over all pixels.
- const float spatialErr = static_cast<float>(spatialErrSum >> 2);
- const float spatialErrH = static_cast<float>(spatialErrHSum >> 1);
- const float spatialErrV = static_cast<float>(spatialErrVSum >> 1);
- const float norm = static_cast<float>(pixelMSA);
-
- // 2X2:
- spatial_pred_err_ = spatialErr / norm;
-
- // 1X2:
- spatial_pred_err_h_ = spatialErrH / norm;
-
- // 2X1:
- spatial_pred_err_v_ = spatialErrV / norm;
-
- return VPM_OK;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/deflickering.cc b/chromium/third_party/webrtc/modules/video_processing/deflickering.cc
deleted file mode 100644
index 0e936ce9b77..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/deflickering.cc
+++ /dev/null
@@ -1,402 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_processing/deflickering.h"
-
-#include <math.h>
-#include <stdlib.h>
-
-#include "webrtc/base/logging.h"
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/system_wrappers/include/sort.h"
-
-namespace webrtc {
-
-// Detection constants
-// (Q4) Maximum allowed deviation for detection.
-enum { kFrequencyDeviation = 39 };
-// (Q4) Minimum frequency that can be detected.
-enum { kMinFrequencyToDetect = 32 };
-// Number of flickers before we accept detection
-enum { kNumFlickerBeforeDetect = 2 };
-enum { kmean_valueScaling = 4 }; // (Q4) In power of 2
-// Dead-zone region in terms of pixel values
-enum { kZeroCrossingDeadzone = 10 };
-// Deflickering constants.
-// Compute the quantiles over 1 / DownsamplingFactor of the image.
-enum { kDownsamplingFactor = 8 };
-enum { kLog2OfDownsamplingFactor = 3 };
-
-// To generate in Matlab:
-// >> probUW16 = round(2^11 *
-// [0.05,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,0.95,0.97]);
-// >> fprintf('%d, ', probUW16)
-// Resolution reduced to avoid overflow when multiplying with the
-// (potentially) large number of pixels.
-const uint16_t VPMDeflickering::prob_uw16_[kNumProbs] = {
- 102, 205, 410, 614, 819, 1024,
- 1229, 1434, 1638, 1843, 1946, 1987}; // <Q11>
-
-// To generate in Matlab:
-// >> numQuants = 14; maxOnlyLength = 5;
-// >> weightUW16 = round(2^15 *
-// [linspace(0.5, 1.0, numQuants - maxOnlyLength)]);
-// >> fprintf('%d, %d,\n ', weightUW16);
-const uint16_t VPMDeflickering::weight_uw16_[kNumQuants - kMaxOnlyLength] = {
- 16384, 18432, 20480, 22528, 24576, 26624, 28672, 30720, 32768}; // <Q15>
-
-VPMDeflickering::VPMDeflickering() {
- Reset();
-}
-
-VPMDeflickering::~VPMDeflickering() {}
-
-void VPMDeflickering::Reset() {
- mean_buffer_length_ = 0;
- detection_state_ = 0;
- frame_rate_ = 0;
-
- memset(mean_buffer_, 0, sizeof(int32_t) * kMeanBufferLength);
- memset(timestamp_buffer_, 0, sizeof(int32_t) * kMeanBufferLength);
-
- // Initialize the history with a uniformly distributed histogram.
- quant_hist_uw8_[0][0] = 0;
- quant_hist_uw8_[0][kNumQuants - 1] = 255;
- for (int32_t i = 0; i < kNumProbs; i++) {
- // Unsigned round. <Q0>
- quant_hist_uw8_[0][i + 1] =
- static_cast<uint8_t>((prob_uw16_[i] * 255 + (1 << 10)) >> 11);
- }
-
- for (int32_t i = 1; i < kFrameHistory_size; i++) {
- memcpy(quant_hist_uw8_[i], quant_hist_uw8_[0],
- sizeof(uint8_t) * kNumQuants);
- }
-}
-
-int32_t VPMDeflickering::ProcessFrame(VideoFrame* frame,
- VideoProcessing::FrameStats* stats) {
- assert(frame);
- uint32_t frame_memory;
- uint8_t quant_uw8[kNumQuants];
- uint8_t maxquant_uw8[kNumQuants];
- uint8_t minquant_uw8[kNumQuants];
- uint16_t target_quant_uw16[kNumQuants];
- uint16_t increment_uw16;
- uint8_t map_uw8[256];
-
- uint16_t tmp_uw16;
- uint32_t tmp_uw32;
- int width = frame->width();
- int height = frame->height();
-
- if (frame->IsZeroSize()) {
- return VPM_GENERAL_ERROR;
- }
-
- // Stricter height check due to subsampling size calculation below.
- if (height < 2) {
- LOG(LS_ERROR) << "Invalid frame size.";
- return VPM_GENERAL_ERROR;
- }
-
- if (!VideoProcessing::ValidFrameStats(*stats)) {
- return VPM_GENERAL_ERROR;
- }
-
- if (PreDetection(frame->timestamp(), *stats) == -1)
- return VPM_GENERAL_ERROR;
-
- // Flicker detection
- int32_t det_flicker = DetectFlicker();
- if (det_flicker < 0) {
- return VPM_GENERAL_ERROR;
- } else if (det_flicker != 1) {
- return 0;
- }
-
- // Size of luminance component.
- const uint32_t y_size = height * width;
-
- const uint32_t y_sub_size =
- width * (((height - 1) >> kLog2OfDownsamplingFactor) + 1);
- uint8_t* y_sorted = new uint8_t[y_sub_size];
- uint32_t sort_row_idx = 0;
- for (int i = 0; i < height; i += kDownsamplingFactor) {
- memcpy(y_sorted + sort_row_idx * width, frame->buffer(kYPlane) + i * width,
- width);
- sort_row_idx++;
- }
-
- webrtc::Sort(y_sorted, y_sub_size, webrtc::TYPE_UWord8);
-
- uint32_t prob_idx_uw32 = 0;
- quant_uw8[0] = 0;
- quant_uw8[kNumQuants - 1] = 255;
-
- // Ensure we won't get an overflow below.
- // In practice, the number of subsampled pixels will not become this large.
- if (y_sub_size > (1 << 21) - 1) {
- LOG(LS_ERROR) << "Subsampled number of pixels too large.";
- return -1;
- }
-
- for (int32_t i = 0; i < kNumProbs; i++) {
- // <Q0>.
- prob_idx_uw32 = WEBRTC_SPL_UMUL_32_16(y_sub_size, prob_uw16_[i]) >> 11;
- quant_uw8[i + 1] = y_sorted[prob_idx_uw32];
- }
-
- delete[] y_sorted;
- y_sorted = NULL;
-
- // Shift history for new frame.
- memmove(quant_hist_uw8_[1], quant_hist_uw8_[0],
- (kFrameHistory_size - 1) * kNumQuants * sizeof(uint8_t));
- // Store current frame in history.
- memcpy(quant_hist_uw8_[0], quant_uw8, kNumQuants * sizeof(uint8_t));
-
- // We use a frame memory equal to the ceiling of half the frame rate to
- // ensure we capture an entire period of flicker.
- frame_memory = (frame_rate_ + (1 << 5)) >> 5; // Unsigned ceiling. <Q0>
- // frame_rate_ in Q4.
- if (frame_memory > kFrameHistory_size) {
- frame_memory = kFrameHistory_size;
- }
-
- // Get maximum and minimum.
- for (int32_t i = 0; i < kNumQuants; i++) {
- maxquant_uw8[i] = 0;
- minquant_uw8[i] = 255;
- for (uint32_t j = 0; j < frame_memory; j++) {
- if (quant_hist_uw8_[j][i] > maxquant_uw8[i]) {
- maxquant_uw8[i] = quant_hist_uw8_[j][i];
- }
-
- if (quant_hist_uw8_[j][i] < minquant_uw8[i]) {
- minquant_uw8[i] = quant_hist_uw8_[j][i];
- }
- }
- }
-
- // Get target quantiles.
- for (int32_t i = 0; i < kNumQuants - kMaxOnlyLength; i++) {
- // target = w * maxquant_uw8 + (1 - w) * minquant_uw8
- // Weights w = |weight_uw16_| are in Q15, hence the final output has to be
- // right shifted by 8 to end up in Q7.
- target_quant_uw16[i] = static_cast<uint16_t>(
- (weight_uw16_[i] * maxquant_uw8[i] +
- ((1 << 15) - weight_uw16_[i]) * minquant_uw8[i]) >>
- 8); // <Q7>
- }
-
- for (int32_t i = kNumQuants - kMaxOnlyLength; i < kNumQuants; i++) {
- target_quant_uw16[i] = ((uint16_t)maxquant_uw8[i]) << 7;
- }
-
- // Compute the map from input to output pixels.
- uint16_t mapUW16; // <Q7>
- for (int32_t i = 1; i < kNumQuants; i++) {
- // As quant and targetQuant are limited to UWord8, it's safe to use Q7 here.
- tmp_uw32 =
- static_cast<uint32_t>(target_quant_uw16[i] - target_quant_uw16[i - 1]);
- tmp_uw16 = static_cast<uint16_t>(quant_uw8[i] - quant_uw8[i - 1]); // <Q0>
-
- if (tmp_uw16 > 0) {
- increment_uw16 =
- static_cast<uint16_t>(WebRtcSpl_DivU32U16(tmp_uw32,
- tmp_uw16)); // <Q7>
- } else {
- // The value is irrelevant; the loop below will only iterate once.
- increment_uw16 = 0;
- }
-
- mapUW16 = target_quant_uw16[i - 1];
- for (uint32_t j = quant_uw8[i - 1]; j < (uint32_t)(quant_uw8[i] + 1); j++) {
- // Unsigned round. <Q0>
- map_uw8[j] = (uint8_t)((mapUW16 + (1 << 6)) >> 7);
- mapUW16 += increment_uw16;
- }
- }
-
- // Map to the output frame.
- uint8_t* buffer = frame->buffer(kYPlane);
- for (uint32_t i = 0; i < y_size; i++) {
- buffer[i] = map_uw8[buffer[i]];
- }
-
- // Frame was altered, so reset stats.
- VideoProcessing::ClearFrameStats(stats);
-
- return VPM_OK;
-}
-
-/**
- Performs some pre-detection operations. Must be called before
- DetectFlicker().
-
- \param[in] timestamp Timestamp of the current frame.
- \param[in] stats Statistics of the current frame.
-
- \return 0: Success\n
- 2: Detection not possible due to flickering frequency too close to
- zero.\n
- -1: Error
-*/
-int32_t VPMDeflickering::PreDetection(
- const uint32_t timestamp,
- const VideoProcessing::FrameStats& stats) {
- int32_t mean_val; // Mean value of frame (Q4)
- uint32_t frame_rate = 0;
- int32_t meanBufferLength; // Temp variable.
-
- mean_val = ((stats.sum << kmean_valueScaling) / stats.num_pixels);
- // Update mean value buffer.
- // This should be done even though we might end up in an unreliable detection.
- memmove(mean_buffer_ + 1, mean_buffer_,
- (kMeanBufferLength - 1) * sizeof(int32_t));
- mean_buffer_[0] = mean_val;
-
- // Update timestamp buffer.
- // This should be done even though we might end up in an unreliable detection.
- memmove(timestamp_buffer_ + 1, timestamp_buffer_,
- (kMeanBufferLength - 1) * sizeof(uint32_t));
- timestamp_buffer_[0] = timestamp;
-
- /* Compute current frame rate (Q4) */
- if (timestamp_buffer_[kMeanBufferLength - 1] != 0) {
- frame_rate = ((90000 << 4) * (kMeanBufferLength - 1));
- frame_rate /=
- (timestamp_buffer_[0] - timestamp_buffer_[kMeanBufferLength - 1]);
- } else if (timestamp_buffer_[1] != 0) {
- frame_rate = (90000 << 4) / (timestamp_buffer_[0] - timestamp_buffer_[1]);
- }
-
- /* Determine required size of mean value buffer (mean_buffer_length_) */
- if (frame_rate == 0) {
- meanBufferLength = 1;
- } else {
- meanBufferLength =
- (kNumFlickerBeforeDetect * frame_rate) / kMinFrequencyToDetect;
- }
- /* Sanity check of buffer length */
- if (meanBufferLength >= kMeanBufferLength) {
- /* Too long buffer. The flickering frequency is too close to zero, which
- * makes the estimation unreliable.
- */
- mean_buffer_length_ = 0;
- return 2;
- }
- mean_buffer_length_ = meanBufferLength;
-
- if ((timestamp_buffer_[mean_buffer_length_ - 1] != 0) &&
- (mean_buffer_length_ != 1)) {
- frame_rate = ((90000 << 4) * (mean_buffer_length_ - 1));
- frame_rate /=
- (timestamp_buffer_[0] - timestamp_buffer_[mean_buffer_length_ - 1]);
- } else if (timestamp_buffer_[1] != 0) {
- frame_rate = (90000 << 4) / (timestamp_buffer_[0] - timestamp_buffer_[1]);
- }
- frame_rate_ = frame_rate;
-
- return VPM_OK;
-}
-
-/**
- This function detects flicker in the video stream. As a side effect the
- mean value buffer is updated with the new mean value.
-
- \return 0: No flickering detected\n
- 1: Flickering detected\n
- 2: Detection not possible due to unreliable frequency interval
- -1: Error
-*/
-int32_t VPMDeflickering::DetectFlicker() {
- uint32_t i;
- int32_t freqEst; // (Q4) Frequency estimate to base detection upon
- int32_t ret_val = -1;
-
- /* Sanity check for mean_buffer_length_ */
- if (mean_buffer_length_ < 2) {
- /* Not possible to estimate frequency */
- return 2;
- }
- // Count zero crossings with a dead zone to be robust against noise. If the
- // noise std is 2 pixel this corresponds to about 95% confidence interval.
- int32_t deadzone = (kZeroCrossingDeadzone << kmean_valueScaling); // Q4
- int32_t meanOfBuffer = 0; // Mean value of mean value buffer.
- int32_t numZeros = 0; // Number of zeros that cross the dead-zone.
- int32_t cntState = 0; // State variable for zero crossing regions.
- int32_t cntStateOld = 0; // Previous state for zero crossing regions.
-
- for (i = 0; i < mean_buffer_length_; i++) {
- meanOfBuffer += mean_buffer_[i];
- }
- meanOfBuffer += (mean_buffer_length_ >> 1); // Rounding, not truncation.
- meanOfBuffer /= mean_buffer_length_;
-
- // Count zero crossings.
- cntStateOld = (mean_buffer_[0] >= (meanOfBuffer + deadzone));
- cntStateOld -= (mean_buffer_[0] <= (meanOfBuffer - deadzone));
- for (i = 1; i < mean_buffer_length_; i++) {
- cntState = (mean_buffer_[i] >= (meanOfBuffer + deadzone));
- cntState -= (mean_buffer_[i] <= (meanOfBuffer - deadzone));
- if (cntStateOld == 0) {
- cntStateOld = -cntState;
- }
- if (((cntState + cntStateOld) == 0) && (cntState != 0)) {
- numZeros++;
- cntStateOld = cntState;
- }
- }
- // END count zero crossings.
-
- /* Frequency estimation according to:
- * freqEst = numZeros * frame_rate / 2 / mean_buffer_length_;
- *
- * Resolution is set to Q4
- */
- freqEst = ((numZeros * 90000) << 3);
- freqEst /=
- (timestamp_buffer_[0] - timestamp_buffer_[mean_buffer_length_ - 1]);
-
- /* Translate frequency estimate to regions close to 100 and 120 Hz */
- uint8_t freqState = 0; // Current translation state;
- // (0) Not in interval,
- // (1) Within valid interval,
- // (2) Out of range
- int32_t freqAlias = freqEst;
- if (freqEst > kMinFrequencyToDetect) {
- uint8_t aliasState = 1;
- while (freqState == 0) {
- /* Increase frequency */
- freqAlias += (aliasState * frame_rate_);
- freqAlias += ((freqEst << 1) * (1 - (aliasState << 1)));
- /* Compute state */
- freqState = (abs(freqAlias - (100 << 4)) <= kFrequencyDeviation);
- freqState += (abs(freqAlias - (120 << 4)) <= kFrequencyDeviation);
- freqState += 2 * (freqAlias > ((120 << 4) + kFrequencyDeviation));
- /* Switch alias state */
- aliasState++;
- aliasState &= 0x01;
- }
- }
- /* Is frequency estimate within detection region? */
- if (freqState == 1) {
- ret_val = 1;
- } else if (freqState == 0) {
- ret_val = 2;
- } else {
- ret_val = 0;
- }
- return ret_val;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/deflickering.h b/chromium/third_party/webrtc/modules/video_processing/deflickering.h
deleted file mode 100644
index 3ff2723aba8..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/deflickering.h
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_DEFLICKERING_H_
-#define WEBRTC_MODULES_VIDEO_PROCESSING_DEFLICKERING_H_
-
-#include <string.h> // NULL
-
-#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class VPMDeflickering {
- public:
- VPMDeflickering();
- ~VPMDeflickering();
-
- void Reset();
- int32_t ProcessFrame(VideoFrame* frame, VideoProcessing::FrameStats* stats);
-
- private:
- int32_t PreDetection(uint32_t timestamp,
- const VideoProcessing::FrameStats& stats);
-
- int32_t DetectFlicker();
-
- enum { kMeanBufferLength = 32 };
- enum { kFrameHistory_size = 15 };
- enum { kNumProbs = 12 };
- enum { kNumQuants = kNumProbs + 2 };
- enum { kMaxOnlyLength = 5 };
-
- uint32_t mean_buffer_length_;
- uint8_t detection_state_; // 0: No flickering
- // 1: Flickering detected
- // 2: In flickering
- int32_t mean_buffer_[kMeanBufferLength];
- uint32_t timestamp_buffer_[kMeanBufferLength];
- uint32_t frame_rate_;
- static const uint16_t prob_uw16_[kNumProbs];
- static const uint16_t weight_uw16_[kNumQuants - kMaxOnlyLength];
- uint8_t quant_hist_uw8_[kFrameHistory_size][kNumQuants];
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_PROCESSING_DEFLICKERING_H_
diff --git a/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.cc b/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.cc
index fd0d0efb97d..100cdb519ab 100644
--- a/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.cc
@@ -15,29 +15,22 @@
namespace webrtc {
VPMFramePreprocessor::VPMFramePreprocessor()
- : content_metrics_(nullptr),
- resampled_frame_(),
- enable_ca_(false),
- frame_cnt_(0) {
+ : resampled_frame_(), frame_cnt_(0) {
spatial_resampler_ = new VPMSimpleSpatialResampler();
- ca_ = new VPMContentAnalysis(true);
vd_ = new VPMVideoDecimator();
- EnableDenosing(false);
+ EnableDenoising(false);
+ denoised_frame_toggle_ = 0;
}
VPMFramePreprocessor::~VPMFramePreprocessor() {
Reset();
- delete ca_;
delete vd_;
delete spatial_resampler_;
}
void VPMFramePreprocessor::Reset() {
- ca_->Release();
vd_->Reset();
- content_metrics_ = nullptr;
spatial_resampler_->Reset();
- enable_ca_ = false;
frame_cnt_ = 0;
}
@@ -45,10 +38,6 @@ void VPMFramePreprocessor::EnableTemporalDecimation(bool enable) {
vd_->EnableTemporalDecimation(enable);
}
-void VPMFramePreprocessor::EnableContentAnalysis(bool enable) {
- enable_ca_ = enable;
-}
-
void VPMFramePreprocessor::SetInputFrameResampleMode(
VideoFrameResampling resampling_mode) {
spatial_resampler_->SetInputFrameResampleMode(resampling_mode);
@@ -70,15 +59,6 @@ int32_t VPMFramePreprocessor::SetTargetResolution(uint32_t width,
return VPM_OK;
}
-void VPMFramePreprocessor::SetTargetFramerate(int frame_rate) {
- if (frame_rate == -1) {
- vd_->EnableTemporalDecimation(false);
- } else {
- vd_->EnableTemporalDecimation(true);
- vd_->SetTargetFramerate(frame_rate);
- }
-}
-
void VPMFramePreprocessor::UpdateIncomingframe_rate() {
vd_->UpdateIncomingframe_rate();
}
@@ -95,7 +75,7 @@ uint32_t VPMFramePreprocessor::GetDecimatedHeight() const {
return spatial_resampler_->TargetHeight();
}
-void VPMFramePreprocessor::EnableDenosing(bool enable) {
+void VPMFramePreprocessor::EnableDenoising(bool enable) {
if (enable) {
denoiser_.reset(new VideoDenoiser(true));
} else {
@@ -116,9 +96,18 @@ const VideoFrame* VPMFramePreprocessor::PreprocessFrame(
const VideoFrame* current_frame = &frame;
if (denoiser_) {
- denoiser_->DenoiseFrame(*current_frame, &denoised_frame_,
- &denoised_frame_prev_, 0);
- current_frame = &denoised_frame_;
+ VideoFrame* denoised_frame = &denoised_frame_[0];
+ VideoFrame* denoised_frame_prev = &denoised_frame_[1];
+ // Swap the buffer to save one memcpy in DenoiseFrame.
+ if (denoised_frame_toggle_) {
+ denoised_frame = &denoised_frame_[1];
+ denoised_frame_prev = &denoised_frame_[0];
+ }
+ // Invert the flag.
+ denoised_frame_toggle_ ^= 1;
+ denoiser_->DenoiseFrame(*current_frame, denoised_frame, denoised_frame_prev,
+ true);
+ current_frame = denoised_frame;
}
if (spatial_resampler_->ApplyResample(current_frame->width(),
@@ -130,18 +119,8 @@ const VideoFrame* VPMFramePreprocessor::PreprocessFrame(
current_frame = &resampled_frame_;
}
- // Perform content analysis on the frame to be encoded.
- if (enable_ca_ && frame_cnt_ % kSkipFrameCA == 0) {
- // Compute new metrics every |kSkipFramesCA| frames, starting with
- // the first frame.
- content_metrics_ = ca_->ComputeContentMetrics(*current_frame);
- }
++frame_cnt_;
return current_frame;
}
-VideoContentMetrics* VPMFramePreprocessor::GetContentMetrics() const {
- return content_metrics_;
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.h b/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.h
index c35dd0d7aff..4ac6b76e830 100644
--- a/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.h
+++ b/chromium/third_party/webrtc/modules/video_processing/frame_preprocessor.h
@@ -14,7 +14,6 @@
#include <memory>
#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/modules/video_processing/content_analysis.h"
#include "webrtc/modules/video_processing/spatial_resampler.h"
#include "webrtc/modules/video_processing/video_decimator.h"
#include "webrtc/typedefs.h"
@@ -38,17 +37,11 @@ class VPMFramePreprocessor {
void SetInputFrameResampleMode(VideoFrameResampling resampling_mode);
- // Enable content analysis.
- void EnableContentAnalysis(bool enable);
-
// Set target resolution: frame rate and dimension.
int32_t SetTargetResolution(uint32_t width,
uint32_t height,
uint32_t frame_rate);
- // Set target frame rate.
- void SetTargetFramerate(int frame_rate);
-
// Update incoming frame rate/dimension.
void UpdateIncomingframe_rate();
@@ -60,24 +53,20 @@ class VPMFramePreprocessor {
uint32_t GetDecimatedHeight() const;
// Preprocess output:
- void EnableDenosing(bool enable);
+ void EnableDenoising(bool enable);
const VideoFrame* PreprocessFrame(const VideoFrame& frame);
- VideoContentMetrics* GetContentMetrics() const;
private:
// The content does not change so much every frame, so to reduce complexity
// we can compute new content metrics every |kSkipFrameCA| frames.
enum { kSkipFrameCA = 2 };
- VideoContentMetrics* content_metrics_;
- VideoFrame denoised_frame_;
- VideoFrame denoised_frame_prev_;
+ VideoFrame denoised_frame_[2];
VideoFrame resampled_frame_;
VPMSpatialResampler* spatial_resampler_;
- VPMContentAnalysis* ca_;
VPMVideoDecimator* vd_;
std::unique_ptr<VideoDenoiser> denoiser_;
- bool enable_ca_;
+ uint8_t denoised_frame_toggle_;
uint32_t frame_cnt_;
};
diff --git a/chromium/third_party/webrtc/modules/video_processing/include/video_processing.h b/chromium/third_party/webrtc/modules/video_processing/include/video_processing.h
index a8d63588876..e2069ddbe7a 100644
--- a/chromium/third_party/webrtc/modules/video_processing/include/video_processing.h
+++ b/chromium/third_party/webrtc/modules/video_processing/include/video_processing.h
@@ -28,46 +28,9 @@ namespace webrtc {
class VideoProcessing {
public:
- struct FrameStats {
- uint32_t hist[256]; // Frame histogram.
- uint32_t mean;
- uint32_t sum;
- uint32_t num_pixels;
- uint32_t sub_sampling_factor; // Sub-sampling factor, in powers of 2.
- };
-
- enum BrightnessWarning { kNoWarning, kDarkWarning, kBrightWarning };
-
static VideoProcessing* Create();
virtual ~VideoProcessing() {}
- // Retrieves statistics for the input frame. This function must be used to
- // prepare a FrameStats struct for use in certain VPM functions.
- static void GetFrameStats(const VideoFrame& frame, FrameStats* stats);
-
- // Checks the validity of a FrameStats struct. Currently, valid implies only
- // that is had changed from its initialized state.
- static bool ValidFrameStats(const FrameStats& stats);
-
- static void ClearFrameStats(FrameStats* stats);
-
- // Increases/decreases the luminance value. 'delta' can be in the range {}
- static void Brighten(int delta, VideoFrame* frame);
-
- // Detects and removes camera flicker from a video stream. Every frame from
- // the stream must be passed in. A frame will only be altered if flicker has
- // been detected. Has a fixed-point implementation.
- // Frame statistics provided by GetFrameStats(). On return the stats will
- // be reset to zero if the frame was altered. Call GetFrameStats() again
- // if the statistics for the altered frame are required.
- virtual int32_t Deflickering(VideoFrame* frame, FrameStats* stats) = 0;
-
- // Detects if a video frame is excessively bright or dark. Returns a
- // warning if this is the case. Multiple frames should be passed in before
- // expecting a warning. Has a floating-point implementation.
- virtual int32_t BrightnessDetection(const VideoFrame& frame,
- const FrameStats& stats) = 0;
-
// The following functions refer to the pre-processor unit within VPM. The
// pre-processor perfoms spatial/temporal decimation and content analysis on
// the frames prior to encoding.
@@ -79,8 +42,6 @@ class VideoProcessing {
uint32_t height,
uint32_t frame_rate) = 0;
- virtual void SetTargetFramerate(int frame_rate) = 0;
-
virtual uint32_t GetDecimatedFrameRate() = 0;
virtual uint32_t GetDecimatedWidth() const = 0;
virtual uint32_t GetDecimatedHeight() const = 0;
@@ -90,11 +51,8 @@ class VideoProcessing {
virtual void SetInputFrameResampleMode(
VideoFrameResampling resampling_mode) = 0;
- virtual void EnableDenosing(bool enable) = 0;
+ virtual void EnableDenoising(bool enable) = 0;
virtual const VideoFrame* PreprocessFrame(const VideoFrame& frame) = 0;
-
- virtual VideoContentMetrics* GetContentMetrics() const = 0;
- virtual void EnableContentAnalysis(bool enable) = 0;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/test/brightness_detection_test.cc b/chromium/third_party/webrtc/modules/video_processing/test/brightness_detection_test.cc
deleted file mode 100644
index abce518e584..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/test/brightness_detection_test.cc
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <memory>
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/modules/video_processing/test/video_processing_unittest.h"
-
-namespace webrtc {
-
-#if defined(WEBRTC_IOS)
-#define MAYBE_BrightnessDetection DISABLED_BrightnessDetection
-#else
-#define MAYBE_BrightnessDetection BrightnessDetection
-#endif
-TEST_F(VideoProcessingTest, MAYBE_BrightnessDetection) {
- uint32_t frameNum = 0;
- int32_t brightnessWarning = 0;
- uint32_t warningCount = 0;
- std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_) {
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- frameNum++;
- VideoProcessing::FrameStats stats;
- vp_->GetFrameStats(video_frame_, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- ASSERT_GE(brightnessWarning = vp_->BrightnessDetection(video_frame_, stats),
- 0);
- if (brightnessWarning != VideoProcessing::kNoWarning) {
- warningCount++;
- }
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-
- // Expect few warnings
- float warningProportion = static_cast<float>(warningCount) / frameNum * 100;
- printf("\nWarning proportions:\n");
- printf("Stock foreman: %.1f %%\n", warningProportion);
- EXPECT_LT(warningProportion, 10);
-
- rewind(source_file_);
- frameNum = 0;
- warningCount = 0;
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_ &&
- frameNum < 300) {
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- frameNum++;
-
- uint8_t* frame = video_frame_.buffer(kYPlane);
- uint32_t yTmp = 0;
- for (int yIdx = 0; yIdx < width_ * height_; yIdx++) {
- yTmp = frame[yIdx] << 1;
- if (yTmp > 255) {
- yTmp = 255;
- }
- frame[yIdx] = static_cast<uint8_t>(yTmp);
- }
-
- VideoProcessing::FrameStats stats;
- vp_->GetFrameStats(video_frame_, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- ASSERT_GE(brightnessWarning = vp_->BrightnessDetection(video_frame_, stats),
- 0);
- EXPECT_NE(VideoProcessing::kDarkWarning, brightnessWarning);
- if (brightnessWarning == VideoProcessing::kBrightWarning) {
- warningCount++;
- }
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-
- // Expect many brightness warnings
- warningProportion = static_cast<float>(warningCount) / frameNum * 100;
- printf("Bright foreman: %.1f %%\n", warningProportion);
- EXPECT_GT(warningProportion, 95);
-
- rewind(source_file_);
- frameNum = 0;
- warningCount = 0;
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_ &&
- frameNum < 300) {
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- frameNum++;
-
- uint8_t* y_plane = video_frame_.buffer(kYPlane);
- int32_t yTmp = 0;
- for (int yIdx = 0; yIdx < width_ * height_; yIdx++) {
- yTmp = y_plane[yIdx] >> 1;
- y_plane[yIdx] = static_cast<uint8_t>(yTmp);
- }
-
- VideoProcessing::FrameStats stats;
- vp_->GetFrameStats(video_frame_, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- ASSERT_GE(brightnessWarning = vp_->BrightnessDetection(video_frame_, stats),
- 0);
- EXPECT_NE(VideoProcessing::kBrightWarning, brightnessWarning);
- if (brightnessWarning == VideoProcessing::kDarkWarning) {
- warningCount++;
- }
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-
- // Expect many darkness warnings
- warningProportion = static_cast<float>(warningCount) / frameNum * 100;
- printf("Dark foreman: %.1f %%\n\n", warningProportion);
- EXPECT_GT(warningProportion, 90);
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/test/content_metrics_test.cc b/chromium/third_party/webrtc/modules/video_processing/test/content_metrics_test.cc
deleted file mode 100644
index 80bb56489bb..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/test/content_metrics_test.cc
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <memory>
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/modules/video_processing/content_analysis.h"
-#include "webrtc/modules/video_processing/test/video_processing_unittest.h"
-
-namespace webrtc {
-
-#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_ContentAnalysis) {
-#else
-TEST_F(VideoProcessingTest, ContentAnalysis) {
-#endif
- VPMContentAnalysis ca__c(false);
- VPMContentAnalysis ca__sse(true);
- VideoContentMetrics* _cM_c;
- VideoContentMetrics* _cM_SSE;
-
- ca__c.Initialize(width_, height_);
- ca__sse.Initialize(width_, height_);
-
- std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_) {
- // Using ConvertToI420 to add stride to the image.
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- _cM_c = ca__c.ComputeContentMetrics(video_frame_);
- _cM_SSE = ca__sse.ComputeContentMetrics(video_frame_);
-
- ASSERT_EQ(_cM_c->spatial_pred_err, _cM_SSE->spatial_pred_err);
- ASSERT_EQ(_cM_c->spatial_pred_err_v, _cM_SSE->spatial_pred_err_v);
- ASSERT_EQ(_cM_c->spatial_pred_err_h, _cM_SSE->spatial_pred_err_h);
- ASSERT_EQ(_cM_c->motion_magnitude, _cM_SSE->motion_magnitude);
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/test/deflickering_test.cc b/chromium/third_party/webrtc/modules/video_processing/test/deflickering_test.cc
deleted file mode 100644
index 5ff5692cce9..00000000000
--- a/chromium/third_party/webrtc/modules/video_processing/test/deflickering_test.cc
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <stdio.h>
-#include <stdlib.h>
-
-#include <memory>
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/modules/video_processing/test/video_processing_unittest.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/test/testsupport/fileutils.h"
-
-namespace webrtc {
-
-#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_Deflickering) {
-#else
-TEST_F(VideoProcessingTest, Deflickering) {
-#endif
- enum { NumRuns = 30 };
- uint32_t frameNum = 0;
- const uint32_t frame_rate = 15;
-
- int64_t min_runtime = 0;
- int64_t avg_runtime = 0;
-
- // Close automatically opened Foreman.
- fclose(source_file_);
- const std::string input_file =
- webrtc::test::ResourcePath("deflicker_before_cif_short", "yuv");
- source_file_ = fopen(input_file.c_str(), "rb");
- ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file: " << input_file
- << "\n";
-
- const std::string output_file =
- webrtc::test::OutputPath() + "deflicker_output_cif_short.yuv";
- FILE* deflickerFile = fopen(output_file.c_str(), "wb");
- ASSERT_TRUE(deflickerFile != NULL)
- << "Could not open output file: " << output_file << "\n";
-
- printf("\nRun time [us / frame]:\n");
- std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
- TickTime t0;
- TickTime t1;
- TickInterval acc_ticks;
- uint32_t timeStamp = 1;
-
- frameNum = 0;
- while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
- frame_length_) {
- frameNum++;
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_,
- height_, 0, kVideoRotation_0, &video_frame_));
- video_frame_.set_timestamp(timeStamp);
-
- t0 = TickTime::Now();
- VideoProcessing::FrameStats stats;
- vp_->GetFrameStats(video_frame_, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- ASSERT_EQ(0, vp_->Deflickering(&video_frame_, &stats));
- t1 = TickTime::Now();
- acc_ticks += (t1 - t0);
-
- if (run_idx == 0) {
- if (PrintVideoFrame(video_frame_, deflickerFile) < 0) {
- return;
- }
- }
- timeStamp += (90000 / frame_rate);
- }
- ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
-
- printf("%u\n", static_cast<int>(acc_ticks.Microseconds() / frameNum));
- if (acc_ticks.Microseconds() < min_runtime || run_idx == 0) {
- min_runtime = acc_ticks.Microseconds();
- }
- avg_runtime += acc_ticks.Microseconds();
-
- rewind(source_file_);
- }
- ASSERT_EQ(0, fclose(deflickerFile));
- // TODO(kjellander): Add verification of deflicker output file.
-
- printf("\nAverage run time = %d us / frame\n",
- static_cast<int>(avg_runtime / frameNum / NumRuns));
- printf("Min run time = %d us / frame\n\n",
- static_cast<int>(min_runtime / frameNum));
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/test/denoiser_test.cc b/chromium/third_party/webrtc/modules/video_processing/test/denoiser_test.cc
index a45f933bb54..4c13a05d635 100644
--- a/chromium/third_party/webrtc/modules/video_processing/test/denoiser_test.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/test/denoiser_test.cc
@@ -31,18 +31,10 @@ TEST_F(VideoProcessingTest, CopyMem) {
}
}
- memset(dst, 0, 8 * 8);
- df_c->CopyMem8x8(src, 8, dst, 8);
- EXPECT_EQ(0, memcmp(src, dst, 8 * 8));
-
memset(dst, 0, 16 * 16);
df_c->CopyMem16x16(src, 16, dst, 16);
EXPECT_EQ(0, memcmp(src, dst, 16 * 16));
- memset(dst, 0, 8 * 8);
- df_sse_neon->CopyMem16x16(src, 8, dst, 8);
- EXPECT_EQ(0, memcmp(src, dst, 8 * 8));
-
memset(dst, 0, 16 * 16);
df_sse_neon->CopyMem16x16(src, 16, dst, 16);
EXPECT_EQ(0, memcmp(src, dst, 16 * 16));
@@ -87,10 +79,9 @@ TEST_F(VideoProcessingTest, MbDenoise) {
}
}
memset(dst, 0, 16 * 16);
- df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1, false);
+ df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
memset(dst_sse_neon, 0, 16 * 16);
- df_sse_neon->MbDenoise(running_src, 16, dst_sse_neon, 16, src, 16, 0, 1,
- false);
+ df_sse_neon->MbDenoise(running_src, 16, dst_sse_neon, 16, src, 16, 0, 1);
EXPECT_EQ(0, memcmp(dst, dst_sse_neon, 16 * 16));
// Test case: |diff| >= |4 + shift_inc1|
@@ -101,10 +92,9 @@ TEST_F(VideoProcessingTest, MbDenoise) {
}
}
memset(dst, 0, 16 * 16);
- df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1, false);
+ df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
memset(dst_sse_neon, 0, 16 * 16);
- df_sse_neon->MbDenoise(running_src, 16, dst_sse_neon, 16, src, 16, 0, 1,
- false);
+ df_sse_neon->MbDenoise(running_src, 16, dst_sse_neon, 16, src, 16, 0, 1);
EXPECT_EQ(0, memcmp(dst, dst_sse_neon, 16 * 16));
// Test case: |diff| >= 8
@@ -115,10 +105,9 @@ TEST_F(VideoProcessingTest, MbDenoise) {
}
}
memset(dst, 0, 16 * 16);
- df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1, false);
+ df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
memset(dst_sse_neon, 0, 16 * 16);
- df_sse_neon->MbDenoise(running_src, 16, dst_sse_neon, 16, src, 16, 0, 1,
- false);
+ df_sse_neon->MbDenoise(running_src, 16, dst_sse_neon, 16, src, 16, 0, 1);
EXPECT_EQ(0, memcmp(dst, dst_sse_neon, 16 * 16));
// Test case: |diff| > 15
@@ -130,22 +119,23 @@ TEST_F(VideoProcessingTest, MbDenoise) {
}
memset(dst, 0, 16 * 16);
DenoiserDecision decision =
- df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1, false);
+ df_c->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
EXPECT_EQ(COPY_BLOCK, decision);
- decision =
- df_sse_neon->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1, false);
+ decision = df_sse_neon->MbDenoise(running_src, 16, dst, 16, src, 16, 0, 1);
EXPECT_EQ(COPY_BLOCK, decision);
}
TEST_F(VideoProcessingTest, Denoiser) {
+ // Used in swap buffer.
+ int denoised_frame_toggle = 0;
// Create pure C denoiser.
VideoDenoiser denoiser_c(false);
// Create SSE or NEON denoiser.
VideoDenoiser denoiser_sse_neon(true);
VideoFrame denoised_frame_c;
- VideoFrame denoised_frame_track_c;
+ VideoFrame denoised_frame_prev_c;
VideoFrame denoised_frame_sse_neon;
- VideoFrame denoised_frame_track_sse_neon;
+ VideoFrame denoised_frame_prev_sse_neon;
std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
@@ -154,13 +144,25 @@ TEST_F(VideoProcessingTest, Denoiser) {
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
0, kVideoRotation_0, &video_frame_));
- denoiser_c.DenoiseFrame(video_frame_, &denoised_frame_c,
- &denoised_frame_track_c, -1);
- denoiser_sse_neon.DenoiseFrame(video_frame_, &denoised_frame_sse_neon,
- &denoised_frame_track_sse_neon, -1);
-
+ VideoFrame* p_denoised_c = &denoised_frame_c;
+ VideoFrame* p_denoised_prev_c = &denoised_frame_prev_c;
+ VideoFrame* p_denoised_sse_neon = &denoised_frame_sse_neon;
+ VideoFrame* p_denoised_prev_sse_neon = &denoised_frame_prev_sse_neon;
+ // Swap the buffer to save one memcpy in DenoiseFrame.
+ if (denoised_frame_toggle) {
+ p_denoised_c = &denoised_frame_prev_c;
+ p_denoised_prev_c = &denoised_frame_c;
+ p_denoised_sse_neon = &denoised_frame_prev_sse_neon;
+ p_denoised_prev_sse_neon = &denoised_frame_sse_neon;
+ }
+ denoiser_c.DenoiseFrame(video_frame_, p_denoised_c, p_denoised_prev_c,
+ false);
+ denoiser_sse_neon.DenoiseFrame(video_frame_, p_denoised_sse_neon,
+ p_denoised_prev_sse_neon, false);
+ // Invert the flag.
+ denoised_frame_toggle ^= 1;
// Denoising results should be the same for C and SSE/NEON denoiser.
- ASSERT_TRUE(test::FramesEqual(denoised_frame_c, denoised_frame_sse_neon));
+ ASSERT_TRUE(test::FramesEqual(*p_denoised_c, *p_denoised_sse_neon));
}
ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
}
diff --git a/chromium/third_party/webrtc/modules/video_processing/test/video_processing_unittest.cc b/chromium/third_party/webrtc/modules/video_processing/test/video_processing_unittest.cc
index 0d18d0a4c89..9e61b51884f 100644
--- a/chromium/third_party/webrtc/modules/video_processing/test/video_processing_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/test/video_processing_unittest.cc
@@ -15,8 +15,8 @@
#include <memory>
#include <string>
+#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
namespace webrtc {
@@ -51,8 +51,6 @@ static void TestSize(const VideoFrame& source_frame,
int target_height,
double expected_psnr,
VideoProcessing* vpm);
-static bool CompareFrames(const webrtc::VideoFrame& frame1,
- const webrtc::VideoFrame& frame2);
static void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
const VideoFrame& processed);
@@ -73,9 +71,12 @@ void VideoProcessingTest::SetUp() {
video_frame_.CreateEmptyFrame(width_, height_, width_,
half_width_, half_width_);
// Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
- memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane));
- memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane));
- memset(video_frame_.buffer(kVPlane), 0, video_frame_.allocated_size(kVPlane));
+ memset(video_frame_.video_frame_buffer()->MutableDataY(), 0,
+ video_frame_.allocated_size(kYPlane));
+ memset(video_frame_.video_frame_buffer()->MutableDataU(), 0,
+ video_frame_.allocated_size(kUPlane));
+ memset(video_frame_.video_frame_buffer()->MutableDataV(), 0,
+ video_frame_.allocated_size(kVPlane));
const std::string video_file =
webrtc::test::ResourcePath("foreman_cif", "yuv");
source_file_ = fopen(video_file.c_str(), "rb");
@@ -93,108 +94,6 @@ void VideoProcessingTest::TearDown() {
}
#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_HandleNullBuffer) {
-#else
-TEST_F(VideoProcessingTest, HandleNullBuffer) {
-#endif
- // TODO(mikhal/stefan): Do we need this one?
- VideoProcessing::FrameStats stats;
- // Video frame with unallocated buffer.
- VideoFrame videoFrame;
-
- vp_->GetFrameStats(videoFrame, &stats);
- EXPECT_EQ(stats.num_pixels, 0u);
-
- EXPECT_EQ(-1, vp_->Deflickering(&videoFrame, &stats));
-
- EXPECT_EQ(-3, vp_->BrightnessDetection(videoFrame, stats));
-}
-
-#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_HandleBadStats) {
-#else
-TEST_F(VideoProcessingTest, HandleBadStats) {
-#endif
- VideoProcessing::FrameStats stats;
- vp_->ClearFrameStats(&stats);
- std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- ASSERT_EQ(frame_length_,
- fread(video_buffer.get(), 1, frame_length_, source_file_));
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
-
- EXPECT_EQ(-1, vp_->Deflickering(&video_frame_, &stats));
-
- EXPECT_EQ(-3, vp_->BrightnessDetection(video_frame_, stats));
-}
-
-#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_IdenticalResultsAfterReset) {
-#else
-TEST_F(VideoProcessingTest, IdenticalResultsAfterReset) {
-#endif
- VideoFrame video_frame2;
- VideoProcessing::FrameStats stats;
- // Only testing non-static functions here.
- std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- ASSERT_EQ(frame_length_,
- fread(video_buffer.get(), 1, frame_length_, source_file_));
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- vp_->GetFrameStats(video_frame_, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- video_frame2.CopyFrame(video_frame_);
- ASSERT_EQ(0, vp_->Deflickering(&video_frame_, &stats));
-
- // Retrieve frame stats again in case Deflickering() has zeroed them.
- vp_->GetFrameStats(video_frame2, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- ASSERT_EQ(0, vp_->Deflickering(&video_frame2, &stats));
- EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
-
- ASSERT_EQ(frame_length_,
- fread(video_buffer.get(), 1, frame_length_, source_file_));
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
- vp_->GetFrameStats(video_frame_, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- video_frame2.CopyFrame(video_frame_);
- ASSERT_EQ(0, vp_->BrightnessDetection(video_frame_, stats));
-
- ASSERT_EQ(0, vp_->BrightnessDetection(video_frame2, stats));
- EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
-}
-
-#if defined(WEBRTC_IOS)
-TEST_F(VideoProcessingTest, DISABLED_FrameStats) {
-#else
-TEST_F(VideoProcessingTest, FrameStats) {
-#endif
- VideoProcessing::FrameStats stats;
- vp_->ClearFrameStats(&stats);
- std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
- ASSERT_EQ(frame_length_,
- fread(video_buffer.get(), 1, frame_length_, source_file_));
- EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
- 0, kVideoRotation_0, &video_frame_));
-
- EXPECT_FALSE(vp_->ValidFrameStats(stats));
- vp_->GetFrameStats(video_frame_, &stats);
- EXPECT_GT(stats.num_pixels, 0u);
- EXPECT_TRUE(vp_->ValidFrameStats(stats));
-
- printf("\nFrameStats\n");
- printf("mean: %u\nnum_pixels: %u\nsubSamplFactor: %u\nsum: %u\n\n",
- static_cast<unsigned int>(stats.mean),
- static_cast<unsigned int>(stats.num_pixels),
- static_cast<unsigned int>(stats.sub_sampling_factor),
- static_cast<unsigned int>(stats.sum));
-
- vp_->ClearFrameStats(&stats);
- EXPECT_FALSE(vp_->ValidFrameStats(stats));
-}
-
-#if defined(WEBRTC_IOS)
TEST_F(VideoProcessingTest, DISABLED_PreprocessorLogic) {
#else
TEST_F(VideoProcessingTest, PreprocessorLogic) {
@@ -230,8 +129,6 @@ TEST_F(VideoProcessingTest, Resampler) {
rewind(source_file_);
ASSERT_TRUE(source_file_ != NULL) << "Cannot read input file \n";
- // CA not needed here
- vp_->EnableContentAnalysis(false);
// no temporal decimation
vp_->EnableTemporalDecimation(false);
@@ -248,11 +145,12 @@ TEST_F(VideoProcessingTest, Resampler) {
for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
// Initiate test timer.
- const TickTime time_start = TickTime::Now();
+ const int64_t time_start = rtc::TimeNanos();
// Init the sourceFrame with a timestamp.
- video_frame_.set_render_time_ms(time_start.MillisecondTimestamp());
- video_frame_.set_timestamp(time_start.MillisecondTimestamp() * 90);
+ int64_t time_start_ms = time_start / rtc::kNumNanosecsPerMillisec;
+ video_frame_.set_render_time_ms(time_start_ms);
+ video_frame_.set_timestamp(time_start_ms * 90);
// Test scaling to different sizes: source is of |width|/|height| = 352/288.
// Pure scaling:
@@ -295,7 +193,8 @@ TEST_F(VideoProcessingTest, Resampler) {
TestSize(video_frame_, cropped_source_frame, 281, 175, 29.3, vp_);
// Stop timer.
- const int64_t runtime = (TickTime::Now() - time_start).Microseconds();
+ const int64_t runtime =
+ (rtc::TimeNanos() - time_start) / rtc::kNumNanosecsPerMicrosec;
if (runtime < min_runtime || run_idx == 0) {
min_runtime = runtime;
}
@@ -378,22 +277,6 @@ void TestSize(const VideoFrame& source_frame,
target_height);
}
-bool CompareFrames(const webrtc::VideoFrame& frame1,
- const webrtc::VideoFrame& frame2) {
- for (int plane = 0; plane < webrtc::kNumOfPlanes; plane++) {
- webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
- int allocated_size1 = frame1.allocated_size(plane_type);
- int allocated_size2 = frame2.allocated_size(plane_type);
- if (allocated_size1 != allocated_size2)
- return false;
- const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
- const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
- if (memcmp(plane_buffer1, plane_buffer2, allocated_size1))
- return false;
- }
- return true;
-}
-
void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
const VideoFrame& processed) {
// Skip if writing to files is not enabled.
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.cc b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.cc
index b111a0e4123..376dec74a35 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.cc
@@ -45,14 +45,6 @@ std::unique_ptr<DenoiserFilter> DenoiserFilter::Create(
filter.reset(new DenoiserFilterNEON());
if (cpu_type != nullptr)
*cpu_type = CPU_NEON;
-#elif defined(WEBRTC_DETECT_NEON)
- if (WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) {
- filter.reset(new DenoiserFilterNEON());
- if (cpu_type != nullptr)
- *cpu_type = CPU_NEON;
- } else {
- filter.reset(new DenoiserFilterC());
- }
#else
filter.reset(new DenoiserFilterC());
#endif
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.h b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.h
index f2c7570083d..1254a88d3c9 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.h
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter.h
@@ -25,12 +25,6 @@ extern const int kSumDiffThresholdHigh;
enum DenoiserDecision { COPY_BLOCK, FILTER_BLOCK };
enum CpuType { CPU_NEON, CPU_NOT_NEON };
-struct DenoiseMetrics {
- uint32_t var;
- uint32_t sad;
- uint8_t denoise;
- bool is_skin;
-};
class DenoiserFilter {
public:
@@ -43,10 +37,6 @@ class DenoiserFilter {
int src_stride,
uint8_t* dst,
int dst_stride) = 0;
- virtual void CopyMem8x8(const uint8_t* src,
- int src_stride,
- uint8_t* dst,
- int dst_stride) = 0;
virtual uint32_t Variance16x8(const uint8_t* a,
int a_stride,
const uint8_t* b,
@@ -59,8 +49,7 @@ class DenoiserFilter {
const uint8_t* sig,
int sig_stride,
uint8_t motion_magnitude,
- int increase_denoising,
- bool denoise_always) = 0;
+ int increase_denoising) = 0;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.cc b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.cc
index 8c84f4989c2..1b3c0b70987 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.cc
@@ -25,17 +25,6 @@ void DenoiserFilterC::CopyMem16x16(const uint8_t* src,
}
}
-void DenoiserFilterC::CopyMem8x8(const uint8_t* src,
- int src_stride,
- uint8_t* dst,
- int dst_stride) {
- for (int i = 0; i < 8; i++) {
- memcpy(dst, src, 8);
- src += src_stride;
- dst += dst_stride;
- }
-}
-
uint32_t DenoiserFilterC::Variance16x8(const uint8_t* a,
int a_stride,
const uint8_t* b,
@@ -66,8 +55,7 @@ DenoiserDecision DenoiserFilterC::MbDenoise(uint8_t* mc_running_avg_y,
const uint8_t* sig,
int sig_stride,
uint8_t motion_magnitude,
- int increase_denoising,
- bool denoise_always) {
+ int increase_denoising) {
int sum_diff_thresh = 0;
int sum_diff = 0;
int adj_val[3] = {3, 4, 6};
@@ -137,60 +125,10 @@ DenoiserDecision DenoiserFilterC::MbDenoise(uint8_t* mc_running_avg_y,
sum_diff += col_sum[c];
}
- if (denoise_always)
- sum_diff_thresh = INT_MAX;
- else if (increase_denoising)
- sum_diff_thresh = kSumDiffThresholdHigh;
- else
- sum_diff_thresh = kSumDiffThreshold;
- if (abs(sum_diff) > sum_diff_thresh) {
- int delta = ((abs(sum_diff) - sum_diff_thresh) >> 8) + 1;
- // Only apply the adjustment for max delta up to 3.
- if (delta < 4) {
- sig -= sig_stride * 16;
- mc_running_avg_y -= mc_avg_y_stride * 16;
- running_avg_y -= avg_y_stride * 16;
- for (int r = 0; r < 16; ++r) {
- for (int c = 0; c < 16; ++c) {
- int diff = mc_running_avg_y[c] - sig[c];
- int adjustment = abs(diff);
- if (adjustment > delta)
- adjustment = delta;
- if (diff > 0) {
- // Bring denoised signal down.
- if (running_avg_y[c] - adjustment < 0)
- running_avg_y[c] = 0;
- else
- running_avg_y[c] = running_avg_y[c] - adjustment;
- col_sum[c] -= adjustment;
- } else if (diff < 0) {
- // Bring denoised signal up.
- if (running_avg_y[c] + adjustment > 255)
- running_avg_y[c] = 255;
- else
- running_avg_y[c] = running_avg_y[c] + adjustment;
- col_sum[c] += adjustment;
- }
- }
- sig += sig_stride;
- mc_running_avg_y += mc_avg_y_stride;
- running_avg_y += avg_y_stride;
- }
-
- sum_diff = 0;
- for (int c = 0; c < 16; ++c) {
- if (col_sum[c] >= 128) {
- col_sum[c] = 127;
- }
- sum_diff += col_sum[c];
- }
-
- if (abs(sum_diff) > sum_diff_thresh)
- return COPY_BLOCK;
- } else {
- return COPY_BLOCK;
- }
- }
+ sum_diff_thresh =
+ increase_denoising ? kSumDiffThresholdHigh : kSumDiffThreshold;
+ if (abs(sum_diff) > sum_diff_thresh)
+ return COPY_BLOCK;
return FILTER_BLOCK;
}
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.h b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.h
index 3e52c3e47c9..d8b6c5eb797 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.h
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_c.h
@@ -22,10 +22,6 @@ class DenoiserFilterC : public DenoiserFilter {
int src_stride,
uint8_t* dst,
int dst_stride) override;
- void CopyMem8x8(const uint8_t* src,
- int src_stride,
- uint8_t* dst,
- int dst_stride) override;
uint32_t Variance16x8(const uint8_t* a,
int a_stride,
const uint8_t* b,
@@ -38,8 +34,7 @@ class DenoiserFilterC : public DenoiserFilter {
const uint8_t* sig,
int sig_stride,
uint8_t motion_magnitude,
- int increase_denoising,
- bool denoise_always) override;
+ int increase_denoising) override;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.cc b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.cc
index 2920305f71b..68c94cbdb7a 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.cc
@@ -14,6 +14,8 @@
namespace webrtc {
+const int kSumDiffThresholdHighNeon = 600;
+
static int HorizontalAddS16x8(const int16x8_t v_16x8) {
const int32x4_t a = vpaddlq_s16(v_16x8);
const int64x2_t b = vpaddlq_s32(a);
@@ -75,20 +77,6 @@ void DenoiserFilterNEON::CopyMem16x16(const uint8_t* src,
}
}
-void DenoiserFilterNEON::CopyMem8x8(const uint8_t* src,
- int src_stride,
- uint8_t* dst,
- int dst_stride) {
- uint8x8_t vtmp;
-
- for (int r = 0; r < 8; r++) {
- vtmp = vld1_u8(src);
- vst1_u8(dst, vtmp);
- src += src_stride;
- dst += dst_stride;
- }
-}
-
uint32_t DenoiserFilterNEON::Variance16x8(const uint8_t* a,
int a_stride,
const uint8_t* b,
@@ -106,8 +94,7 @@ DenoiserDecision DenoiserFilterNEON::MbDenoise(uint8_t* mc_running_avg_y,
const uint8_t* sig,
int sig_stride,
uint8_t motion_magnitude,
- int increase_denoising,
- bool denoise_always) {
+ int increase_denoising) {
// If motion_magnitude is small, making the denoiser more aggressive by
// increasing the adjustment for each level, level1 adjustment is
// increased, the deltas stay the same.
@@ -190,92 +177,13 @@ DenoiserDecision DenoiserFilterNEON::MbDenoise(uint8_t* mc_running_avg_y,
}
// Too much adjustments => copy block.
- {
- int64x1_t x = vqadd_s64(vget_high_s64(v_sum_diff_total),
- vget_low_s64(v_sum_diff_total));
- int sum_diff = vget_lane_s32(vabs_s32(vreinterpret_s32_s64(x)), 0);
- if (denoise_always)
- sum_diff_thresh = INT_MAX;
- else if (increase_denoising)
- sum_diff_thresh = kSumDiffThresholdHigh;
- else
- sum_diff_thresh = kSumDiffThreshold;
- if (sum_diff > sum_diff_thresh) {
- // Before returning to copy the block (i.e., apply no denoising),
- // checK if we can still apply some (weaker) temporal filtering to
- // this block, that would otherwise not be denoised at all. Simplest
- // is to apply an additional adjustment to running_avg_y to bring it
- // closer to sig. The adjustment is capped by a maximum delta, and
- // chosen such that in most cases the resulting sum_diff will be
- // within the accceptable range given by sum_diff_thresh.
-
- // The delta is set by the excess of absolute pixel diff over the
- // threshold.
- int delta = ((sum_diff - sum_diff_thresh) >> 8) + 1;
- // Only apply the adjustment for max delta up to 3.
- if (delta < 4) {
- const uint8x16_t k_delta = vmovq_n_u8(delta);
- sig -= sig_stride * 16;
- mc_running_avg_y -= mc_running_avg_y_stride * 16;
- running_avg_y -= running_avg_y_stride * 16;
- for (int r = 0; r < 16; ++r) {
- uint8x16_t v_running_avg_y = vld1q_u8(running_avg_y);
- const uint8x16_t v_sig = vld1q_u8(sig);
- const uint8x16_t v_mc_running_avg_y = vld1q_u8(mc_running_avg_y);
-
- // Calculate absolute difference and sign masks.
- const uint8x16_t v_abs_diff = vabdq_u8(v_sig, v_mc_running_avg_y);
- const uint8x16_t v_diff_pos_mask =
- vcltq_u8(v_sig, v_mc_running_avg_y);
- const uint8x16_t v_diff_neg_mask =
- vcgtq_u8(v_sig, v_mc_running_avg_y);
- // Clamp absolute difference to delta to get the adjustment.
- const uint8x16_t v_abs_adjustment = vminq_u8(v_abs_diff, (k_delta));
-
- const uint8x16_t v_pos_adjustment =
- vandq_u8(v_diff_pos_mask, v_abs_adjustment);
- const uint8x16_t v_neg_adjustment =
- vandq_u8(v_diff_neg_mask, v_abs_adjustment);
-
- v_running_avg_y = vqsubq_u8(v_running_avg_y, v_pos_adjustment);
- v_running_avg_y = vqaddq_u8(v_running_avg_y, v_neg_adjustment);
-
- // Store results.
- vst1q_u8(running_avg_y, v_running_avg_y);
-
- {
- const int8x16_t v_sum_diff =
- vqsubq_s8(vreinterpretq_s8_u8(v_neg_adjustment),
- vreinterpretq_s8_u8(v_pos_adjustment));
-
- const int16x8_t fe_dc_ba_98_76_54_32_10 = vpaddlq_s8(v_sum_diff);
- const int32x4_t fedc_ba98_7654_3210 =
- vpaddlq_s16(fe_dc_ba_98_76_54_32_10);
- const int64x2_t fedcba98_76543210 =
- vpaddlq_s32(fedc_ba98_7654_3210);
-
- v_sum_diff_total = vqaddq_s64(v_sum_diff_total, fedcba98_76543210);
- }
- // Update pointers for next iteration.
- sig += sig_stride;
- mc_running_avg_y += mc_running_avg_y_stride;
- running_avg_y += running_avg_y_stride;
- }
- {
- // Update the sum of all pixel differences of this MB.
- x = vqadd_s64(vget_high_s64(v_sum_diff_total),
- vget_low_s64(v_sum_diff_total));
- sum_diff = vget_lane_s32(vabs_s32(vreinterpret_s32_s64(x)), 0);
-
- if (sum_diff > sum_diff_thresh) {
- return COPY_BLOCK;
- }
- }
- } else {
- return COPY_BLOCK;
- }
- }
- }
+ int64x1_t x = vqadd_s64(vget_high_s64(v_sum_diff_total),
+ vget_low_s64(v_sum_diff_total));
+ int sum_diff = vget_lane_s32(vabs_s32(vreinterpret_s32_s64(x)), 0);
+ sum_diff_thresh =
+ increase_denoising ? kSumDiffThresholdHighNeon : kSumDiffThreshold;
+ if (sum_diff > sum_diff_thresh)
+ return COPY_BLOCK;
// Tell above level that block was filtered.
running_avg_y -= running_avg_y_stride * 16;
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.h b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.h
index 2e3ea268290..55850bd1ea5 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.h
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_neon.h
@@ -22,10 +22,6 @@ class DenoiserFilterNEON : public DenoiserFilter {
int src_stride,
uint8_t* dst,
int dst_stride) override;
- void CopyMem8x8(const uint8_t* src,
- int src_stride,
- uint8_t* dst,
- int dst_stride) override;
uint32_t Variance16x8(const uint8_t* a,
int a_stride,
const uint8_t* b,
@@ -38,8 +34,7 @@ class DenoiserFilterNEON : public DenoiserFilter {
const uint8_t* sig,
int sig_stride,
uint8_t motion_magnitude,
- int increase_denoising,
- bool denoise_always) override;
+ int increase_denoising) override;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc
index 614b6c94859..0545a97398e 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.cc
@@ -9,7 +9,6 @@
*/
#include <emmintrin.h>
-
#include "webrtc/modules/video_processing/util/denoiser_filter_sse2.h"
namespace webrtc {
@@ -110,18 +109,6 @@ void DenoiserFilterSSE2::CopyMem16x16(const uint8_t* src,
}
}
-// TODO(jackychen): Optimize this function using SSE2.
-void DenoiserFilterSSE2::CopyMem8x8(const uint8_t* src,
- int src_stride,
- uint8_t* dst,
- int dst_stride) {
- for (int i = 0; i < 8; i++) {
- memcpy(dst, src, 8);
- src += src_stride;
- dst += dst_stride;
- }
-}
-
uint32_t DenoiserFilterSSE2::Variance16x8(const uint8_t* src,
int src_stride,
const uint8_t* ref,
@@ -139,8 +126,8 @@ DenoiserDecision DenoiserFilterSSE2::MbDenoise(uint8_t* mc_running_avg_y,
const uint8_t* sig,
int sig_stride,
uint8_t motion_magnitude,
- int increase_denoising,
- bool denoise_always) {
+ int increase_denoising) {
+ DenoiserDecision decision = FILTER_BLOCK;
unsigned int sum_diff_thresh = 0;
int shift_inc =
(increase_denoising && motion_magnitude <= kMotionMagnitudeThreshold) ? 1
@@ -210,76 +197,13 @@ DenoiserDecision DenoiserFilterSSE2::MbDenoise(uint8_t* mc_running_avg_y,
running_avg_y += avg_y_stride;
}
- {
- // Compute the sum of all pixel differences of this MB.
- unsigned int abs_sum_diff = AbsSumDiff16x1(acc_diff);
- if (denoise_always)
- sum_diff_thresh = INT_MAX;
- else if (increase_denoising)
- sum_diff_thresh = kSumDiffThresholdHigh;
- else
- sum_diff_thresh = kSumDiffThreshold;
- if (abs_sum_diff > sum_diff_thresh) {
- // Before returning to copy the block (i.e., apply no denoising),
- // check if we can still apply some (weaker) temporal filtering to
- // this block, that would otherwise not be denoised at all. Simplest
- // is to apply an additional adjustment to running_avg_y to bring it
- // closer to sig. The adjustment is capped by a maximum delta, and
- // chosen such that in most cases the resulting sum_diff will be
- // within the acceptable range given by sum_diff_thresh.
-
- // The delta is set by the excess of absolute pixel diff over the
- // threshold.
- int delta = ((abs_sum_diff - sum_diff_thresh) >> 8) + 1;
- // Only apply the adjustment for max delta up to 3.
- if (delta < 4) {
- const __m128i k_delta = _mm_set1_epi8(delta);
- sig -= sig_stride * 16;
- mc_running_avg_y -= mc_avg_y_stride * 16;
- running_avg_y -= avg_y_stride * 16;
- for (int r = 0; r < 16; ++r) {
- __m128i v_running_avg_y =
- _mm_loadu_si128(reinterpret_cast<__m128i*>(&running_avg_y[0]));
- // Calculate differences.
- const __m128i v_sig =
- _mm_loadu_si128(reinterpret_cast<const __m128i*>(&sig[0]));
- const __m128i v_mc_running_avg_y =
- _mm_loadu_si128(reinterpret_cast<__m128i*>(&mc_running_avg_y[0]));
- const __m128i pdiff = _mm_subs_epu8(v_mc_running_avg_y, v_sig);
- const __m128i ndiff = _mm_subs_epu8(v_sig, v_mc_running_avg_y);
- // Obtain the sign. FF if diff is negative.
- const __m128i diff_sign = _mm_cmpeq_epi8(pdiff, k_0);
- // Clamp absolute difference to delta to get the adjustment.
- const __m128i adj = _mm_min_epu8(_mm_or_si128(pdiff, ndiff), k_delta);
- // Restore the sign and get positive and negative adjustments.
- __m128i padj, nadj;
- padj = _mm_andnot_si128(diff_sign, adj);
- nadj = _mm_and_si128(diff_sign, adj);
- // Calculate filtered value.
- v_running_avg_y = _mm_subs_epu8(v_running_avg_y, padj);
- v_running_avg_y = _mm_adds_epu8(v_running_avg_y, nadj);
- _mm_storeu_si128(reinterpret_cast<__m128i*>(running_avg_y),
- v_running_avg_y);
-
- // Accumulate the adjustments.
- acc_diff = _mm_subs_epi8(acc_diff, padj);
- acc_diff = _mm_adds_epi8(acc_diff, nadj);
-
- // Update pointers for next iteration.
- sig += sig_stride;
- mc_running_avg_y += mc_avg_y_stride;
- running_avg_y += avg_y_stride;
- }
- abs_sum_diff = AbsSumDiff16x1(acc_diff);
- if (abs_sum_diff > sum_diff_thresh) {
- return COPY_BLOCK;
- }
- } else {
- return COPY_BLOCK;
- }
- }
- }
- return FILTER_BLOCK;
+ // Compute the sum of all pixel differences of this MB.
+ unsigned int abs_sum_diff = AbsSumDiff16x1(acc_diff);
+ sum_diff_thresh =
+ increase_denoising ? kSumDiffThresholdHigh : kSumDiffThreshold;
+ if (abs_sum_diff > sum_diff_thresh)
+ decision = COPY_BLOCK;
+ return decision;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.h b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.h
index 395fa10eca0..731344c809c 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.h
+++ b/chromium/third_party/webrtc/modules/video_processing/util/denoiser_filter_sse2.h
@@ -22,10 +22,6 @@ class DenoiserFilterSSE2 : public DenoiserFilter {
int src_stride,
uint8_t* dst,
int dst_stride) override;
- void CopyMem8x8(const uint8_t* src,
- int src_stride,
- uint8_t* dst,
- int dst_stride) override;
uint32_t Variance16x8(const uint8_t* a,
int a_stride,
const uint8_t* b,
@@ -38,8 +34,7 @@ class DenoiserFilterSSE2 : public DenoiserFilter {
const uint8_t* sig,
int sig_stride,
uint8_t motion_magnitude,
- int increase_denoising,
- bool denoise_always) override;
+ int increase_denoising) override;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.cc b/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.cc
index 87beac38ae5..3b0d59ef717 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.cc
@@ -9,6 +9,9 @@
*/
#include "webrtc/modules/video_processing/util/noise_estimation.h"
+#if DISPLAYNEON
+#include <android/log.h>
+#endif
namespace webrtc {
@@ -27,10 +30,10 @@ void NoiseEstimation::GetNoise(int mb_index, uint32_t var, uint32_t luma) {
consec_low_var_[mb_index]++;
num_static_block_++;
if (consec_low_var_[mb_index] >= kConsecLowVarFrame &&
- (luma >> 8) < kAverageLumaMax && (luma >> 8) > kAverageLumaMin) {
+ (luma >> 6) < kAverageLumaMax && (luma >> 6) > kAverageLumaMin) {
// Normalized var by the average luma value, this gives more weight to
// darker blocks.
- int nor_var = var / (luma >> 12);
+ int nor_var = var / (luma >> 10);
noise_var_ +=
nor_var > kBlockSelectionVarMax ? kBlockSelectionVarMax : nor_var;
num_noisy_block_++;
@@ -46,25 +49,36 @@ void NoiseEstimation::UpdateNoiseLevel() {
// condition more reasonable.
// No enough samples implies the motion of the camera or too many moving
// objects in the frame.
- if (num_static_block_ < (0.65 * mb_cols_ * mb_rows_) || !num_noisy_block_) {
+ if (num_static_block_ <
+ (0.65 * mb_cols_ * mb_rows_ / NOISE_SUBSAMPLE_INTERVAL) ||
+ !num_noisy_block_) {
+#if DISPLAY
+ printf("Not enough samples. %d \n", num_static_block_);
+#elif DISPLAYNEON
+ __android_log_print(ANDROID_LOG_DEBUG, "DISPLAY",
+ "Not enough samples. %d \n", num_static_block_);
+#endif
noise_var_ = 0;
noise_var_accum_ = 0;
- num_static_block_ = 0;
num_noisy_block_ = 0;
-#if DISPLAY
- printf("Not enough samples.\n");
-#endif
+ num_static_block_ = 0;
return;
} else {
- // Normalized by the number of noisy blocks.
- noise_var_ /= num_noisy_block_;
- // Get the percentage of static blocks.
- percent_static_block_ =
- static_cast<double>(num_static_block_) / (mb_cols_ * mb_rows_);
#if DISPLAY
- printf("%d %d fraction = %.3f\n", num_static_block_, mb_cols_ * mb_rows_,
+ printf("%d %d fraction = %.3f\n", num_static_block_,
+ mb_cols_ * mb_rows_ / NOISE_SUBSAMPLE_INTERVAL,
percent_static_block_);
+#elif DISPLAYNEON
+ __android_log_print(ANDROID_LOG_DEBUG, "DISPLAY", "%d %d fraction = %.3f\n",
+ num_static_block_,
+ mb_cols_ * mb_rows_ / NOISE_SUBSAMPLE_INTERVAL,
+ percent_static_block_);
#endif
+ // Normalized by the number of noisy blocks.
+ noise_var_ /= num_noisy_block_;
+ // Get the percentage of static blocks.
+ percent_static_block_ = static_cast<double>(num_static_block_) /
+ (mb_cols_ * mb_rows_ / NOISE_SUBSAMPLE_INTERVAL);
num_noisy_block_ = 0;
num_static_block_ = 0;
}
@@ -75,12 +89,16 @@ void NoiseEstimation::UpdateNoiseLevel() {
} else {
noise_var_accum_ = (noise_var_accum_ * 15 + noise_var_) / 16;
}
- // Reset noise_var_ for the next frame.
- noise_var_ = 0;
#if DISPLAY
printf("noise_var_accum_ = %.1f, noise_var_ = %d.\n", noise_var_accum_,
noise_var_);
+#elif DISPLAYNEON
+ __android_log_print(ANDROID_LOG_DEBUG, "DISPLAY",
+ "noise_var_accum_ = %.1f, noise_var_ = %d.\n",
+ noise_var_accum_, noise_var_);
#endif
+ // Reset noise_var_ for the next frame.
+ noise_var_ = 0;
}
uint8_t NoiseEstimation::GetNoiseLevel() {
diff --git a/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.h b/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.h
index ca5cc2324fb..294bfb3a731 100644
--- a/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.h
+++ b/chromium/third_party/webrtc/modules/video_processing/util/noise_estimation.h
@@ -11,28 +11,36 @@
#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_NOISE_ESTIMATION_H_
#define WEBRTC_MODULES_VIDEO_PROCESSING_UTIL_NOISE_ESTIMATION_H_
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_processing/include/video_processing_defines.h"
#include "webrtc/modules/video_processing/util/denoiser_filter.h"
namespace webrtc {
-#define EXPERIMENTAL 0
-#define DISPLAY 0
+#define DISPLAY 0 // Rectangle diagnostics
+#define DISPLAYNEON 0 // Rectangle diagnostics on NEON
-const int kNoiseThreshold = 200;
+const int kNoiseThreshold = 150;
const int kNoiseThresholdNeon = 70;
const int kConsecLowVarFrame = 6;
const int kAverageLumaMin = 20;
const int kAverageLumaMax = 220;
const int kBlockSelectionVarMax = kNoiseThreshold << 1;
+// TODO(jackychen): To test different sampling strategy.
+// Collect noise data every NOISE_SUBSAMPLE_INTERVAL blocks.
+#define NOISE_SUBSAMPLE_INTERVAL 41
+
class NoiseEstimation {
public:
void Init(int width, int height, CpuType cpu_type);
+ // Collect noise data from one qualified block.
void GetNoise(int mb_index, uint32_t var, uint32_t luma);
+ // Reset the counter for consecutive low-var blocks.
void ResetConsecLowVar(int mb_index);
+ // Update noise level for current frame.
void UpdateNoiseLevel();
// 0: low noise, 1: high noise
uint8_t GetNoiseLevel();
@@ -42,13 +50,13 @@ class NoiseEstimation {
int height_;
int mb_rows_;
int mb_cols_;
+ int num_noisy_block_;
+ int num_static_block_;
CpuType cpu_type_;
uint32_t noise_var_;
double noise_var_accum_;
- int num_noisy_block_;
- int num_static_block_;
double percent_static_block_;
- rtc::scoped_ptr<uint32_t[]> consec_low_var_;
+ std::unique_ptr<uint32_t[]> consec_low_var_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/video_decimator.cc b/chromium/third_party/webrtc/modules/video_processing/video_decimator.cc
index 63e347b026e..c6623fa836c 100644
--- a/chromium/third_party/webrtc/modules/video_processing/video_decimator.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/video_decimator.cc
@@ -9,9 +9,9 @@
*/
#include "webrtc/base/checks.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/video_processing/include/video_processing.h"
#include "webrtc/modules/video_processing/video_decimator.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#define VD_MIN(a, b) ((a) < (b)) ? (a) : (b)
@@ -95,7 +95,7 @@ bool VPMVideoDecimator::DropFrame() {
}
uint32_t VPMVideoDecimator::GetDecimatedFrameRate() {
- ProcessIncomingframe_rate(TickTime::MillisecondTimestamp());
+ ProcessIncomingframe_rate(rtc::TimeMillis());
if (!enable_temporal_decimation_) {
return static_cast<uint32_t>(incoming_frame_rate_ + 0.5f);
}
@@ -104,12 +104,12 @@ uint32_t VPMVideoDecimator::GetDecimatedFrameRate() {
}
uint32_t VPMVideoDecimator::Inputframe_rate() {
- ProcessIncomingframe_rate(TickTime::MillisecondTimestamp());
+ ProcessIncomingframe_rate(rtc::TimeMillis());
return static_cast<uint32_t>(incoming_frame_rate_ + 0.5f);
}
void VPMVideoDecimator::UpdateIncomingframe_rate() {
- int64_t now = TickTime::MillisecondTimestamp();
+ int64_t now = rtc::TimeMillis();
if (incoming_frame_times_[0] == 0) {
// First no shift.
} else {
diff --git a/chromium/third_party/webrtc/modules/video_processing/video_denoiser.cc b/chromium/third_party/webrtc/modules/video_processing/video_denoiser.cc
index b00da5c90a1..f116f882cd6 100644
--- a/chromium/third_party/webrtc/modules/video_processing/video_denoiser.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/video_denoiser.cc
@@ -7,305 +7,347 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
+
#include "webrtc/common_video/libyuv/include/scaler.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_processing/video_denoiser.h"
namespace webrtc {
+#if DISPLAY || DISPLAYNEON
+static void CopyMem8x8(const uint8_t* src,
+ int src_stride,
+ uint8_t* dst,
+ int dst_stride) {
+ for (int i = 0; i < 8; i++) {
+ memcpy(dst, src, 8);
+ src += src_stride;
+ dst += dst_stride;
+ }
+}
+
+static void ShowRect(const std::unique_ptr<DenoiserFilter>& filter,
+ const std::unique_ptr<uint8_t[]>& d_status,
+ const std::unique_ptr<uint8_t[]>& moving_edge_red,
+ const std::unique_ptr<uint8_t[]>& x_density,
+ const std::unique_ptr<uint8_t[]>& y_density,
+ const uint8_t* u_src,
+ const uint8_t* v_src,
+ uint8_t* u_dst,
+ uint8_t* v_dst,
+ int mb_rows_,
+ int mb_cols_,
+ int stride_u_,
+ int stride_v_) {
+ for (int mb_row = 0; mb_row < mb_rows_; ++mb_row) {
+ for (int mb_col = 0; mb_col < mb_cols_; ++mb_col) {
+ int mb_index = mb_row * mb_cols_ + mb_col;
+ const uint8_t* mb_src_u =
+ u_src + (mb_row << 3) * stride_u_ + (mb_col << 3);
+ const uint8_t* mb_src_v =
+ v_src + (mb_row << 3) * stride_v_ + (mb_col << 3);
+ uint8_t* mb_dst_u = u_dst + (mb_row << 3) * stride_u_ + (mb_col << 3);
+ uint8_t* mb_dst_v = v_dst + (mb_row << 3) * stride_v_ + (mb_col << 3);
+ uint8_t uv_tmp[8 * 8];
+ memset(uv_tmp, 200, 8 * 8);
+ if (d_status[mb_index] == 1) {
+ // Paint to red.
+ CopyMem8x8(mb_src_u, stride_u_, mb_dst_u, stride_u_);
+ CopyMem8x8(uv_tmp, 8, mb_dst_v, stride_v_);
+ } else if (moving_edge_red[mb_row * mb_cols_ + mb_col] &&
+ x_density[mb_col] * y_density[mb_row]) {
+ // Paint to blue.
+ CopyMem8x8(uv_tmp, 8, mb_dst_u, stride_u_);
+ CopyMem8x8(mb_src_v, stride_v_, mb_dst_v, stride_v_);
+ } else {
+ CopyMem8x8(mb_src_u, stride_u_, mb_dst_u, stride_u_);
+ CopyMem8x8(mb_src_v, stride_v_, mb_dst_v, stride_v_);
+ }
+ }
+ }
+}
+#endif
+
VideoDenoiser::VideoDenoiser(bool runtime_cpu_detection)
: width_(0),
height_(0),
filter_(DenoiserFilter::Create(runtime_cpu_detection, &cpu_type_)),
ne_(new NoiseEstimation()) {}
-#if EXPERIMENTAL
-// Check the mb position(1: close to the center, 3: close to the border).
-static int PositionCheck(int mb_row, int mb_col, int mb_rows, int mb_cols) {
- if ((mb_row >= (mb_rows >> 3)) && (mb_row <= (7 * mb_rows >> 3)) &&
- (mb_col >= (mb_cols >> 3)) && (mb_col <= (7 * mb_cols >> 3)))
+void VideoDenoiser::DenoiserReset(const VideoFrame& frame,
+ VideoFrame* denoised_frame,
+ VideoFrame* denoised_frame_prev) {
+ width_ = frame.width();
+ height_ = frame.height();
+ mb_cols_ = width_ >> 4;
+ mb_rows_ = height_ >> 4;
+ stride_y_ = frame.video_frame_buffer()->StrideY();
+ stride_u_ = frame.video_frame_buffer()->StrideU();
+ stride_v_ = frame.video_frame_buffer()->StrideV();
+
+ // Allocate an empty buffer for denoised_frame_prev.
+ denoised_frame_prev->CreateEmptyFrame(width_, height_, stride_y_, stride_u_,
+ stride_v_);
+ // Allocate and initialize denoised_frame with key frame.
+ denoised_frame->CreateFrame(
+ frame.video_frame_buffer()->DataY(),
+ frame.video_frame_buffer()->DataU(),
+ frame.video_frame_buffer()->DataV(),
+ width_, height_, stride_y_, stride_u_, stride_v_, kVideoRotation_0);
+ // Set time parameters to the output frame.
+ denoised_frame->set_timestamp(frame.timestamp());
+ denoised_frame->set_render_time_ms(frame.render_time_ms());
+
+ // Init noise estimator and allocate buffers.
+ ne_->Init(width_, height_, cpu_type_);
+ moving_edge_.reset(new uint8_t[mb_cols_ * mb_rows_]);
+ mb_filter_decision_.reset(new DenoiserDecision[mb_cols_ * mb_rows_]);
+ x_density_.reset(new uint8_t[mb_cols_]);
+ y_density_.reset(new uint8_t[mb_rows_]);
+ moving_object_.reset(new uint8_t[mb_cols_ * mb_rows_]);
+}
+
+int VideoDenoiser::PositionCheck(int mb_row, int mb_col, int noise_level) {
+ if (noise_level == 0)
return 1;
- else if ((mb_row >= (mb_rows >> 4)) && (mb_row <= (15 * mb_rows >> 4)) &&
- (mb_col >= (mb_cols >> 4)) && (mb_col <= (15 * mb_cols >> 4)))
+ if ((mb_row <= (mb_rows_ >> 4)) || (mb_col <= (mb_cols_ >> 4)) ||
+ (mb_col >= (15 * mb_cols_ >> 4)))
+ return 3;
+ else if ((mb_row <= (mb_rows_ >> 3)) || (mb_col <= (mb_cols_ >> 3)) ||
+ (mb_col >= (7 * mb_cols_ >> 3)))
return 2;
else
- return 3;
+ return 1;
}
-static void ReduceFalseDetection(const std::unique_ptr<uint8_t[]>& d_status,
- std::unique_ptr<uint8_t[]>* d_status_tmp1,
- std::unique_ptr<uint8_t[]>* d_status_tmp2,
- int noise_level,
- int mb_rows,
- int mb_cols) {
- // Draft. This can be optimized. This code block is to reduce false detection
- // in moving object detection.
- int mb_row_min = noise_level ? mb_rows >> 3 : 1;
- int mb_col_min = noise_level ? mb_cols >> 3 : 1;
- int mb_row_max = noise_level ? (7 * mb_rows >> 3) : mb_rows - 2;
- int mb_col_max = noise_level ? (7 * mb_cols >> 3) : mb_cols - 2;
- memcpy((*d_status_tmp1).get(), d_status.get(), mb_rows * mb_cols);
- // Up left.
- for (int mb_row = mb_row_min; mb_row <= mb_row_max; ++mb_row) {
- for (int mb_col = mb_col_min; mb_col <= mb_col_max; ++mb_col) {
- (*d_status_tmp1)[mb_row * mb_cols + mb_col] |=
- ((*d_status_tmp1)[(mb_row - 1) * mb_cols + mb_col] |
- (*d_status_tmp1)[mb_row * mb_cols + mb_col - 1]);
+void VideoDenoiser::ReduceFalseDetection(
+ const std::unique_ptr<uint8_t[]>& d_status,
+ std::unique_ptr<uint8_t[]>* moving_edge_red,
+ int noise_level) {
+ // From up left corner.
+ int mb_col_stop = mb_cols_ - 1;
+ for (int mb_row = 0; mb_row <= mb_rows_ - 1; ++mb_row) {
+ for (int mb_col = 0; mb_col <= mb_col_stop; ++mb_col) {
+ if (d_status[mb_row * mb_cols_ + mb_col]) {
+ mb_col_stop = mb_col - 1;
+ break;
+ }
+ (*moving_edge_red)[mb_row * mb_cols_ + mb_col] = 0;
}
}
- memcpy((*d_status_tmp2).get(), (*d_status_tmp1).get(), mb_rows * mb_cols);
- memcpy((*d_status_tmp1).get(), d_status.get(), mb_rows * mb_cols);
- // Bottom left.
- for (int mb_row = mb_row_max; mb_row >= mb_row_min; --mb_row) {
- for (int mb_col = mb_col_min; mb_col <= mb_col_max; ++mb_col) {
- (*d_status_tmp1)[mb_row * mb_cols + mb_col] |=
- ((*d_status_tmp1)[(mb_row + 1) * mb_cols + mb_col] |
- (*d_status_tmp1)[mb_row * mb_cols + mb_col - 1]);
- (*d_status_tmp2)[mb_row * mb_cols + mb_col] &=
- (*d_status_tmp1)[mb_row * mb_cols + mb_col];
+ // From bottom left corner.
+ mb_col_stop = mb_cols_ - 1;
+ for (int mb_row = mb_rows_ - 1; mb_row >= 0; --mb_row) {
+ for (int mb_col = 0; mb_col <= mb_col_stop; ++mb_col) {
+ if (d_status[mb_row * mb_cols_ + mb_col]) {
+ mb_col_stop = mb_col - 1;
+ break;
+ }
+ (*moving_edge_red)[mb_row * mb_cols_ + mb_col] = 0;
}
}
- memcpy((*d_status_tmp1).get(), d_status.get(), mb_rows * mb_cols);
- // Up right.
- for (int mb_row = mb_row_min; mb_row <= mb_row_max; ++mb_row) {
- for (int mb_col = mb_col_max; mb_col >= mb_col_min; --mb_col) {
- (*d_status_tmp1)[mb_row * mb_cols + mb_col] |=
- ((*d_status_tmp1)[(mb_row - 1) * mb_cols + mb_col] |
- (*d_status_tmp1)[mb_row * mb_cols + mb_col + 1]);
- (*d_status_tmp2)[mb_row * mb_cols + mb_col] &=
- (*d_status_tmp1)[mb_row * mb_cols + mb_col];
+ // From up right corner.
+ mb_col_stop = 0;
+ for (int mb_row = 0; mb_row <= mb_rows_ - 1; ++mb_row) {
+ for (int mb_col = mb_cols_ - 1; mb_col >= mb_col_stop; --mb_col) {
+ if (d_status[mb_row * mb_cols_ + mb_col]) {
+ mb_col_stop = mb_col + 1;
+ break;
+ }
+ (*moving_edge_red)[mb_row * mb_cols_ + mb_col] = 0;
}
}
- memcpy((*d_status_tmp1).get(), d_status.get(), mb_rows * mb_cols);
- // Bottom right.
- for (int mb_row = mb_row_max; mb_row >= mb_row_min; --mb_row) {
- for (int mb_col = mb_col_max; mb_col >= mb_col_min; --mb_col) {
- (*d_status_tmp1)[mb_row * mb_cols + mb_col] |=
- ((*d_status_tmp1)[(mb_row + 1) * mb_cols + mb_col] |
- (*d_status_tmp1)[mb_row * mb_cols + mb_col + 1]);
- (*d_status_tmp2)[mb_row * mb_cols + mb_col] &=
- (*d_status_tmp1)[mb_row * mb_cols + mb_col];
+ // From bottom right corner.
+ mb_col_stop = 0;
+ for (int mb_row = mb_rows_ - 1; mb_row >= 0; --mb_row) {
+ for (int mb_col = mb_cols_ - 1; mb_col >= mb_col_stop; --mb_col) {
+ if (d_status[mb_row * mb_cols_ + mb_col]) {
+ mb_col_stop = mb_col + 1;
+ break;
+ }
+ (*moving_edge_red)[mb_row * mb_cols_ + mb_col] = 0;
}
}
}
-static bool TrailingBlock(const std::unique_ptr<uint8_t[]>& d_status,
- int mb_row,
- int mb_col,
- int mb_rows,
- int mb_cols) {
- int mb_index = mb_row * mb_cols + mb_col;
- if (!mb_row || !mb_col || mb_row == mb_rows - 1 || mb_col == mb_cols - 1)
- return false;
- return d_status[mb_index + 1] || d_status[mb_index - 1] ||
- d_status[mb_index + mb_cols] || d_status[mb_index - mb_cols];
+bool VideoDenoiser::IsTrailingBlock(const std::unique_ptr<uint8_t[]>& d_status,
+ int mb_row,
+ int mb_col) {
+ bool ret = false;
+ int mb_index = mb_row * mb_cols_ + mb_col;
+ if (!mb_row || !mb_col || mb_row == mb_rows_ - 1 || mb_col == mb_cols_ - 1)
+ ret = false;
+ else
+ ret = d_status[mb_index + 1] || d_status[mb_index - 1] ||
+ d_status[mb_index + mb_cols_] || d_status[mb_index - mb_cols_];
+ return ret;
}
-#endif
-#if DISPLAY
-void ShowRect(const std::unique_ptr<DenoiserFilter>& filter,
- const std::unique_ptr<uint8_t[]>& d_status,
- const std::unique_ptr<uint8_t[]>& d_status_tmp2,
- const std::unique_ptr<uint8_t[]>& x_density,
- const std::unique_ptr<uint8_t[]>& y_density,
- const uint8_t* u_src,
- const uint8_t* v_src,
- uint8_t* u_dst,
- uint8_t* v_dst,
- int mb_rows,
- int mb_cols,
- int stride_u,
- int stride_v) {
- for (int mb_row = 0; mb_row < mb_rows; ++mb_row) {
- for (int mb_col = 0; mb_col < mb_cols; ++mb_col) {
- int mb_index = mb_row * mb_cols + mb_col;
- const uint8_t* mb_src_u =
- u_src + (mb_row << 3) * stride_u + (mb_col << 3);
- const uint8_t* mb_src_v =
- v_src + (mb_row << 3) * stride_v + (mb_col << 3);
- uint8_t* mb_dst_u = u_dst + (mb_row << 3) * stride_u + (mb_col << 3);
- uint8_t* mb_dst_v = v_dst + (mb_row << 3) * stride_v + (mb_col << 3);
- uint8_t y_tmp_255[8 * 8];
- memset(y_tmp_255, 200, 8 * 8);
- // x_density_[mb_col] * y_density_[mb_row]
- if (d_status[mb_index] == 1) {
- // Paint to red.
- filter->CopyMem8x8(mb_src_u, stride_u, mb_dst_u, stride_u);
- filter->CopyMem8x8(y_tmp_255, 8, mb_dst_v, stride_v);
-#if EXPERIMENTAL
- } else if (d_status_tmp2[mb_row * mb_cols + mb_col] &&
- x_density[mb_col] * y_density[mb_row]) {
-#else
- } else if (x_density[mb_col] * y_density[mb_row]) {
-#endif
- // Paint to blue.
- filter->CopyMem8x8(y_tmp_255, 8, mb_dst_u, stride_u);
- filter->CopyMem8x8(mb_src_v, stride_v, mb_dst_v, stride_v);
- } else {
- filter->CopyMem8x8(mb_src_u, stride_u, mb_dst_u, stride_u);
- filter->CopyMem8x8(mb_src_v, stride_v, mb_dst_v, stride_v);
+void VideoDenoiser::CopySrcOnMOB(const uint8_t* y_src, uint8_t* y_dst) {
+ // Loop over to copy src block if the block is marked as moving object block
+ // or if the block may cause trailing artifacts.
+ for (int mb_row = 0; mb_row < mb_rows_; ++mb_row) {
+ const int mb_index_base = mb_row * mb_cols_;
+ const int offset_base = (mb_row << 4) * stride_y_;
+ const uint8_t* mb_src_base = y_src + offset_base;
+ uint8_t* mb_dst_base = y_dst + offset_base;
+ for (int mb_col = 0; mb_col < mb_cols_; ++mb_col) {
+ const int mb_index = mb_index_base + mb_col;
+ const uint32_t offset_col = mb_col << 4;
+ const uint8_t* mb_src = mb_src_base + offset_col;
+ uint8_t* mb_dst = mb_dst_base + offset_col;
+      // Check if the block is a moving object block or may cause trailing
+      // artifacts.
+ if (mb_filter_decision_[mb_index] != FILTER_BLOCK ||
+ IsTrailingBlock(moving_edge_, mb_row, mb_col) ||
+ (x_density_[mb_col] * y_density_[mb_row] &&
+ moving_object_[mb_row * mb_cols_ + mb_col])) {
+ // Copy y source.
+ filter_->CopyMem16x16(mb_src, stride_y_, mb_dst, stride_y_);
+ }
+ }
+ }
+}
+
+void VideoDenoiser::CopyLumaOnMargin(const uint8_t* y_src, uint8_t* y_dst) {
+ if ((mb_rows_ << 4) != height_) {
+ const uint8_t* margin_y_src = y_src + (mb_rows_ << 4) * stride_y_;
+ uint8_t* margin_y_dst = y_dst + (mb_rows_ << 4) * stride_y_;
+ memcpy(margin_y_dst, margin_y_src, (height_ - (mb_rows_ << 4)) * stride_y_);
+ }
+ if ((mb_cols_ << 4) != width_) {
+ const uint8_t* margin_y_src = y_src + (mb_cols_ << 4);
+ uint8_t* margin_y_dst = y_dst + (mb_cols_ << 4);
+ for (int i = 0; i < height_; ++i) {
+ for (int j = mb_cols_ << 4; j < width_; ++j) {
+ margin_y_dst[i * stride_y_ + j] = margin_y_src[i * stride_y_ + j];
}
}
}
}
-#endif
void VideoDenoiser::DenoiseFrame(const VideoFrame& frame,
VideoFrame* denoised_frame,
VideoFrame* denoised_frame_prev,
- int noise_level_prev) {
- int stride_y = frame.stride(kYPlane);
- int stride_u = frame.stride(kUPlane);
- int stride_v = frame.stride(kVPlane);
- // If previous width and height are different from current frame's, then no
- // denoising for the current frame.
+ bool noise_estimation_enabled) {
+ // If previous width and height are different from current frame's, need to
+ // reallocate the buffers and no denoising for the current frame.
if (width_ != frame.width() || height_ != frame.height()) {
- width_ = frame.width();
- height_ = frame.height();
- denoised_frame->CreateFrame(frame.buffer(kYPlane), frame.buffer(kUPlane),
- frame.buffer(kVPlane), width_, height_,
- stride_y, stride_u, stride_v, kVideoRotation_0);
- denoised_frame_prev->CreateFrame(
- frame.buffer(kYPlane), frame.buffer(kUPlane), frame.buffer(kVPlane),
- width_, height_, stride_y, stride_u, stride_v, kVideoRotation_0);
- // Setting time parameters to the output frame.
- denoised_frame->set_timestamp(frame.timestamp());
- denoised_frame->set_render_time_ms(frame.render_time_ms());
- ne_->Init(width_, height_, cpu_type_);
+ DenoiserReset(frame, denoised_frame, denoised_frame_prev);
return;
}
- // For 16x16 block.
- int mb_cols = width_ >> 4;
- int mb_rows = height_ >> 4;
- if (metrics_.get() == nullptr)
- metrics_.reset(new DenoiseMetrics[mb_cols * mb_rows]());
- if (d_status_.get() == nullptr) {
- d_status_.reset(new uint8_t[mb_cols * mb_rows]());
-#if EXPERIMENTAL
- d_status_tmp1_.reset(new uint8_t[mb_cols * mb_rows]());
- d_status_tmp2_.reset(new uint8_t[mb_cols * mb_rows]());
-#endif
- x_density_.reset(new uint8_t[mb_cols]());
- y_density_.reset(new uint8_t[mb_rows]());
- }
- // Denoise on Y plane.
- uint8_t* y_dst = denoised_frame->buffer(kYPlane);
- uint8_t* u_dst = denoised_frame->buffer(kUPlane);
- uint8_t* v_dst = denoised_frame->buffer(kVPlane);
- uint8_t* y_dst_prev = denoised_frame_prev->buffer(kYPlane);
- const uint8_t* y_src = frame.buffer(kYPlane);
- const uint8_t* u_src = frame.buffer(kUPlane);
- const uint8_t* v_src = frame.buffer(kVPlane);
- uint8_t noise_level = noise_level_prev == -1 ? 0 : ne_->GetNoiseLevel();
- // Temporary buffer to store denoising result.
- uint8_t y_tmp[16 * 16] = {0};
- memset(x_density_.get(), 0, mb_cols);
- memset(y_density_.get(), 0, mb_rows);
+ // Set buffer pointers.
+ const uint8_t* y_src = frame.video_frame_buffer()->DataY();
+ const uint8_t* u_src = frame.video_frame_buffer()->DataU();
+ const uint8_t* v_src = frame.video_frame_buffer()->DataV();
+ uint8_t* y_dst = denoised_frame->video_frame_buffer()->MutableDataY();
+ uint8_t* u_dst = denoised_frame->video_frame_buffer()->MutableDataU();
+ uint8_t* v_dst = denoised_frame->video_frame_buffer()->MutableDataV();
+ uint8_t* y_dst_prev =
+ denoised_frame_prev->video_frame_buffer()->MutableDataY();
+ memset(x_density_.get(), 0, mb_cols_);
+ memset(y_density_.get(), 0, mb_rows_);
+ memset(moving_object_.get(), 1, mb_cols_ * mb_rows_);
+ uint8_t noise_level = noise_estimation_enabled ? ne_->GetNoiseLevel() : 0;
+ int thr_var_base = 16 * 16 * 2;
// Loop over blocks to accumulate/extract noise level and update x/y_density
// factors for moving object detection.
- for (int mb_row = 0; mb_row < mb_rows; ++mb_row) {
- for (int mb_col = 0; mb_col < mb_cols; ++mb_col) {
- const uint8_t* mb_src = y_src + (mb_row << 4) * stride_y + (mb_col << 4);
- uint8_t* mb_dst_prev =
- y_dst_prev + (mb_row << 4) * stride_y + (mb_col << 4);
- int mb_index = mb_row * mb_cols + mb_col;
-#if EXPERIMENTAL
- int pos_factor = PositionCheck(mb_row, mb_col, mb_rows, mb_cols);
- uint32_t thr_var_adp = 16 * 16 * 5 * (noise_level ? pos_factor : 1);
-#else
- uint32_t thr_var_adp = 16 * 16 * 5;
-#endif
- int brightness = 0;
- for (int i = 0; i < 16; ++i) {
- for (int j = 0; j < 16; ++j) {
- brightness += mb_src[i * stride_y + j];
+ for (int mb_row = 0; mb_row < mb_rows_; ++mb_row) {
+ const int mb_index_base = mb_row * mb_cols_;
+ const int offset_base = (mb_row << 4) * stride_y_;
+ const uint8_t* mb_src_base = y_src + offset_base;
+ uint8_t* mb_dst_base = y_dst + offset_base;
+ uint8_t* mb_dst_prev_base = y_dst_prev + offset_base;
+ for (int mb_col = 0; mb_col < mb_cols_; ++mb_col) {
+ const int mb_index = mb_index_base + mb_col;
+ const bool ne_enable = (mb_index % NOISE_SUBSAMPLE_INTERVAL == 0);
+ const int pos_factor = PositionCheck(mb_row, mb_col, noise_level);
+ const uint32_t thr_var_adp = thr_var_base * pos_factor;
+ const uint32_t offset_col = mb_col << 4;
+ const uint8_t* mb_src = mb_src_base + offset_col;
+ uint8_t* mb_dst = mb_dst_base + offset_col;
+ uint8_t* mb_dst_prev = mb_dst_prev_base + offset_col;
+
+ // TODO(jackychen): Need SSE2/NEON opt.
+ int luma = 0;
+ if (ne_enable) {
+ for (int i = 4; i < 12; ++i) {
+ for (int j = 4; j < 12; ++j) {
+ luma += mb_src[i * stride_y_ + j];
+ }
}
}
- // Get the denoised block.
- filter_->MbDenoise(mb_dst_prev, stride_y, y_tmp, 16, mb_src, stride_y, 0,
- 1, true);
- // The variance is based on the denoised blocks in time T and T-1.
- metrics_[mb_index].var = filter_->Variance16x8(
- mb_dst_prev, stride_y, y_tmp, 16, &metrics_[mb_index].sad);
+ // Get the filtered block and filter_decision.
+ mb_filter_decision_[mb_index] =
+ filter_->MbDenoise(mb_dst_prev, stride_y_, mb_dst, stride_y_, mb_src,
+ stride_y_, 0, noise_level);
- if (metrics_[mb_index].var > thr_var_adp) {
- ne_->ResetConsecLowVar(mb_index);
- d_status_[mb_index] = 1;
-#if EXPERIMENTAL
- if (noise_level == 0 || pos_factor < 3) {
- x_density_[mb_col] += 1;
- y_density_[mb_row] += 1;
+ // If filter decision is FILTER_BLOCK, no need to check moving edge.
+ // It is unlikely for a moving edge block to be filtered in current
+ // setting.
+ if (mb_filter_decision_[mb_index] == FILTER_BLOCK) {
+ uint32_t sse_t = 0;
+ if (ne_enable) {
+ // The variance used in noise estimation is based on the src block in
+          // time t (mb_src) and filtered block in time t-1 (mb_dst_prev).
+ uint32_t noise_var = filter_->Variance16x8(mb_dst_prev, stride_y_,
+ mb_src, stride_y_, &sse_t);
+ ne_->GetNoise(mb_index, noise_var, luma);
}
-#else
- x_density_[mb_col] += 1;
- y_density_[mb_row] += 1;
-#endif
+ moving_edge_[mb_index] = 0; // Not a moving edge block.
} else {
uint32_t sse_t = 0;
- // The variance is based on the src blocks in time T and denoised block
- // in time T-1.
- uint32_t noise_var = filter_->Variance16x8(mb_dst_prev, stride_y,
- mb_src, stride_y, &sse_t);
- ne_->GetNoise(mb_index, noise_var, brightness);
- d_status_[mb_index] = 0;
- }
- // Track denoised frame.
- filter_->CopyMem16x16(y_tmp, 16, mb_dst_prev, stride_y);
- }
- }
-
-#if EXPERIMENTAL
- ReduceFalseDetection(d_status_, &d_status_tmp1_, &d_status_tmp2_, noise_level,
- mb_rows, mb_cols);
-#endif
-
- // Denoise each MB based on the results of moving objects detection.
- for (int mb_row = 0; mb_row < mb_rows; ++mb_row) {
- for (int mb_col = 0; mb_col < mb_cols; ++mb_col) {
- const uint8_t* mb_src = y_src + (mb_row << 4) * stride_y + (mb_col << 4);
- uint8_t* mb_dst = y_dst + (mb_row << 4) * stride_y + (mb_col << 4);
- const uint8_t* mb_src_u =
- u_src + (mb_row << 3) * stride_u + (mb_col << 3);
- const uint8_t* mb_src_v =
- v_src + (mb_row << 3) * stride_v + (mb_col << 3);
- uint8_t* mb_dst_u = u_dst + (mb_row << 3) * stride_u + (mb_col << 3);
- uint8_t* mb_dst_v = v_dst + (mb_row << 3) * stride_v + (mb_col << 3);
-#if EXPERIMENTAL
- if ((!d_status_tmp2_[mb_row * mb_cols + mb_col] ||
- x_density_[mb_col] * y_density_[mb_row] == 0) &&
- !TrailingBlock(d_status_, mb_row, mb_col, mb_rows, mb_cols)) {
-#else
- if (x_density_[mb_col] * y_density_[mb_row] == 0) {
-#endif
- if (filter_->MbDenoise(mb_dst, stride_y, y_tmp, 16, mb_src, stride_y, 0,
- noise_level, false) == FILTER_BLOCK) {
- filter_->CopyMem16x16(y_tmp, 16, mb_dst, stride_y);
+ // The variance used in MOD is based on the filtered blocks in time
+ // T (mb_dst) and T-1 (mb_dst_prev).
+ uint32_t noise_var = filter_->Variance16x8(mb_dst_prev, stride_y_,
+ mb_dst, stride_y_, &sse_t);
+ if (noise_var > thr_var_adp) { // Moving edge checking.
+ if (ne_enable) {
+ ne_->ResetConsecLowVar(mb_index);
+ }
+ moving_edge_[mb_index] = 1; // Mark as moving edge block.
+ x_density_[mb_col] += (pos_factor < 3);
+ y_density_[mb_row] += (pos_factor < 3);
} else {
- // Copy y source.
- filter_->CopyMem16x16(mb_src, stride_y, mb_dst, stride_y);
+ moving_edge_[mb_index] = 0;
+ if (ne_enable) {
+ // The variance used in noise estimation is based on the src block
+            // in time t (mb_src) and filtered block in time t-1 (mb_dst_prev).
+ uint32_t noise_var = filter_->Variance16x8(
+ mb_dst_prev, stride_y_, mb_src, stride_y_, &sse_t);
+ ne_->GetNoise(mb_index, noise_var, luma);
+ }
}
- } else {
- // Copy y source.
- filter_->CopyMem16x16(mb_src, stride_y, mb_dst, stride_y);
}
- filter_->CopyMem8x8(mb_src_u, stride_u, mb_dst_u, stride_u);
- filter_->CopyMem8x8(mb_src_v, stride_v, mb_dst_v, stride_v);
- }
- }
+ } // End of for loop
+ } // End of for loop
-#if DISPLAY // Rectangle diagnostics
- // Show rectangular region
- ShowRect(filter_, d_status_, d_status_tmp2_, x_density_, y_density_, u_src,
- v_src, u_dst, v_dst, mb_rows, mb_cols, stride_u, stride_v);
-#endif
+ ReduceFalseDetection(moving_edge_, &moving_object_, noise_level);
+
+ CopySrcOnMOB(y_src, y_dst);
+
+ // When frame width/height not divisible by 16, copy the margin to
+ // denoised_frame.
+ if ((mb_rows_ << 4) != height_ || (mb_cols_ << 4) != width_)
+ CopyLumaOnMargin(y_src, y_dst);
- // Setting time parameters to the output frame.
+ // TODO(jackychen): Need SSE2/NEON opt.
+ // Copy u/v planes.
+ memcpy(u_dst, u_src, (height_ >> 1) * stride_u_);
+ memcpy(v_dst, v_src, (height_ >> 1) * stride_v_);
+
+ // Set time parameters to the output frame.
denoised_frame->set_timestamp(frame.timestamp());
denoised_frame->set_render_time_ms(frame.render_time_ms());
- return;
+
+#if DISPLAY || DISPLAYNEON
+ // Show rectangular region
+ ShowRect(filter_, moving_edge_, moving_object_, x_density_, y_density_, u_src,
+ v_src, u_dst, v_dst, mb_rows_, mb_cols_, stride_u_, stride_v_);
+#endif
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/video_denoiser.h b/chromium/third_party/webrtc/modules/video_processing/video_denoiser.h
index 03b30d91c7f..114f663c03d 100644
--- a/chromium/third_party/webrtc/modules/video_processing/video_denoiser.h
+++ b/chromium/third_party/webrtc/modules/video_processing/video_denoiser.h
@@ -22,25 +22,58 @@ namespace webrtc {
class VideoDenoiser {
public:
explicit VideoDenoiser(bool runtime_cpu_detection);
+
void DenoiseFrame(const VideoFrame& frame,
VideoFrame* denoised_frame,
- VideoFrame* denoised_frame_track,
- int noise_level_prev);
+ VideoFrame* denoised_frame_prev,
+ bool noise_estimation_enabled);
private:
+ void DenoiserReset(const VideoFrame& frame,
+ VideoFrame* denoised_frame,
+ VideoFrame* denoised_frame_prev);
+
+ // Check the mb position, return 1: close to the frame center (between 1/8
+ // and 7/8 of width/height), 3: close to the border (out of 1/16 and 15/16
+ // of width/height), 2: in between.
+ int PositionCheck(int mb_row, int mb_col, int noise_level);
+
+ // To reduce false detection in moving object detection (MOD).
+ void ReduceFalseDetection(const std::unique_ptr<uint8_t[]>& d_status,
+ std::unique_ptr<uint8_t[]>* d_status_red,
+ int noise_level);
+
+  // Return whether a block might cause a trailing artifact by checking if one
+ // its neighbor blocks is a moving edge block.
+ bool IsTrailingBlock(const std::unique_ptr<uint8_t[]>& d_status,
+ int mb_row,
+ int mb_col);
+
+ // Copy input blocks to dst buffer on moving object blocks (MOB).
+ void CopySrcOnMOB(const uint8_t* y_src, uint8_t* y_dst);
+
+ // Copy luma margin blocks when frame width/height not divisible by 16.
+ void CopyLumaOnMargin(const uint8_t* y_src, uint8_t* y_dst);
+
int width_;
int height_;
+ int mb_rows_;
+ int mb_cols_;
+ int stride_y_;
+ int stride_u_;
+ int stride_v_;
CpuType cpu_type_;
- std::unique_ptr<DenoiseMetrics[]> metrics_;
std::unique_ptr<DenoiserFilter> filter_;
std::unique_ptr<NoiseEstimation> ne_;
- std::unique_ptr<uint8_t[]> d_status_;
-#if EXPERIMENTAL
- std::unique_ptr<uint8_t[]> d_status_tmp1_;
- std::unique_ptr<uint8_t[]> d_status_tmp2_;
-#endif
+ // 1 for moving edge block, 0 for static block.
+ std::unique_ptr<uint8_t[]> moving_edge_;
+ // 1 for moving object block, 0 for static block.
+ std::unique_ptr<uint8_t[]> moving_object_;
+ // x_density_ and y_density_ are used in MOD process.
std::unique_ptr<uint8_t[]> x_density_;
std::unique_ptr<uint8_t[]> y_density_;
+ // Save the return values by MbDenoise for each block.
+ std::unique_ptr<DenoiserDecision[]> mb_filter_decision_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/video_processing.gypi b/chromium/third_party/webrtc/modules/video_processing/video_processing.gypi
index 5bf0ea36c36..3e90fd21571 100644
--- a/chromium/third_party/webrtc/modules/video_processing/video_processing.gypi
+++ b/chromium/third_party/webrtc/modules/video_processing/video_processing.gypi
@@ -20,12 +20,6 @@
'sources': [
'include/video_processing.h',
'include/video_processing_defines.h',
- 'brightness_detection.cc',
- 'brightness_detection.h',
- 'content_analysis.cc',
- 'content_analysis.h',
- 'deflickering.cc',
- 'deflickering.h',
'frame_preprocessor.cc',
'frame_preprocessor.h',
'spatial_resampler.cc',
@@ -62,7 +56,6 @@
'target_name': 'video_processing_sse2',
'type': 'static_library',
'sources': [
- 'content_analysis_sse2.cc',
'util/denoiser_filter_sse2.cc',
'util/denoiser_filter_sse2.h',
],
diff --git a/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.cc b/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.cc
index f34886f10f2..86f75bf239d 100644
--- a/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.cc
@@ -18,21 +18,6 @@
namespace webrtc {
-namespace {
-
-int GetSubSamplingFactor(int width, int height) {
- if (width * height >= 640 * 480) {
- return 3;
- } else if (width * height >= 352 * 288) {
- return 2;
- } else if (width * height >= 176 * 144) {
- return 1;
- } else {
- return 0;
- }
-}
-} // namespace
-
VideoProcessing* VideoProcessing::Create() {
return new VideoProcessingImpl();
}
@@ -40,83 +25,6 @@ VideoProcessing* VideoProcessing::Create() {
VideoProcessingImpl::VideoProcessingImpl() {}
VideoProcessingImpl::~VideoProcessingImpl() {}
-void VideoProcessing::GetFrameStats(const VideoFrame& frame,
- FrameStats* stats) {
- ClearFrameStats(stats); // The histogram needs to be zeroed out.
- if (frame.IsZeroSize()) {
- return;
- }
-
- int width = frame.width();
- int height = frame.height();
- stats->sub_sampling_factor = GetSubSamplingFactor(width, height);
-
- const uint8_t* buffer = frame.buffer(kYPlane);
- // Compute histogram and sum of frame
- for (int i = 0; i < height; i += (1 << stats->sub_sampling_factor)) {
- int k = i * width;
- for (int j = 0; j < width; j += (1 << stats->sub_sampling_factor)) {
- stats->hist[buffer[k + j]]++;
- stats->sum += buffer[k + j];
- }
- }
-
- stats->num_pixels = (width * height) / ((1 << stats->sub_sampling_factor) *
- (1 << stats->sub_sampling_factor));
- assert(stats->num_pixels > 0);
-
- // Compute mean value of frame
- stats->mean = stats->sum / stats->num_pixels;
-}
-
-bool VideoProcessing::ValidFrameStats(const FrameStats& stats) {
- if (stats.num_pixels == 0) {
- LOG(LS_WARNING) << "Invalid frame stats.";
- return false;
- }
- return true;
-}
-
-void VideoProcessing::ClearFrameStats(FrameStats* stats) {
- stats->mean = 0;
- stats->sum = 0;
- stats->num_pixels = 0;
- stats->sub_sampling_factor = 0;
- memset(stats->hist, 0, sizeof(stats->hist));
-}
-
-void VideoProcessing::Brighten(int delta, VideoFrame* frame) {
- RTC_DCHECK(!frame->IsZeroSize());
- RTC_DCHECK(frame->width() > 0);
- RTC_DCHECK(frame->height() > 0);
-
- int num_pixels = frame->width() * frame->height();
-
- int look_up[256];
- for (int i = 0; i < 256; i++) {
- int val = i + delta;
- look_up[i] = ((((val < 0) ? 0 : val) > 255) ? 255 : val);
- }
-
- uint8_t* temp_ptr = frame->buffer(kYPlane);
- for (int i = 0; i < num_pixels; i++) {
- *temp_ptr = static_cast<uint8_t>(look_up[*temp_ptr]);
- temp_ptr++;
- }
-}
-
-int32_t VideoProcessingImpl::Deflickering(VideoFrame* frame,
- FrameStats* stats) {
- rtc::CritScope mutex(&mutex_);
- return deflickering_.ProcessFrame(frame, stats);
-}
-
-int32_t VideoProcessingImpl::BrightnessDetection(const VideoFrame& frame,
- const FrameStats& stats) {
- rtc::CritScope mutex(&mutex_);
- return brightness_detection_.ProcessFrame(frame, stats);
-}
-
void VideoProcessingImpl::EnableTemporalDecimation(bool enable) {
rtc::CritScope mutex(&mutex_);
frame_pre_processor_.EnableTemporalDecimation(enable);
@@ -135,11 +43,6 @@ int32_t VideoProcessingImpl::SetTargetResolution(uint32_t width,
return frame_pre_processor_.SetTargetResolution(width, height, frame_rate);
}
-void VideoProcessingImpl::SetTargetFramerate(int frame_rate) {
- rtc::CritScope cs(&mutex_);
- frame_pre_processor_.SetTargetFramerate(frame_rate);
-}
-
uint32_t VideoProcessingImpl::GetDecimatedFrameRate() {
rtc::CritScope cs(&mutex_);
return frame_pre_processor_.GetDecimatedFrameRate();
@@ -155,9 +58,9 @@ uint32_t VideoProcessingImpl::GetDecimatedHeight() const {
return frame_pre_processor_.GetDecimatedHeight();
}
-void VideoProcessingImpl::EnableDenosing(bool enable) {
+void VideoProcessingImpl::EnableDenoising(bool enable) {
rtc::CritScope cs(&mutex_);
- frame_pre_processor_.EnableDenosing(enable);
+ frame_pre_processor_.EnableDenoising(enable);
}
const VideoFrame* VideoProcessingImpl::PreprocessFrame(
@@ -166,14 +69,4 @@ const VideoFrame* VideoProcessingImpl::PreprocessFrame(
return frame_pre_processor_.PreprocessFrame(frame);
}
-VideoContentMetrics* VideoProcessingImpl::GetContentMetrics() const {
- rtc::CritScope mutex(&mutex_);
- return frame_pre_processor_.GetContentMetrics();
-}
-
-void VideoProcessingImpl::EnableContentAnalysis(bool enable) {
- rtc::CritScope mutex(&mutex_);
- frame_pre_processor_.EnableContentAnalysis(enable);
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.h b/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.h
index 1d9a3775cf1..21e23c904dd 100644
--- a/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.h
+++ b/chromium/third_party/webrtc/modules/video_processing/video_processing_impl.h
@@ -13,8 +13,6 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/modules/video_processing/brightness_detection.h"
-#include "webrtc/modules/video_processing/deflickering.h"
#include "webrtc/modules/video_processing/frame_preprocessor.h"
namespace webrtc {
@@ -26,28 +24,20 @@ class VideoProcessingImpl : public VideoProcessing {
~VideoProcessingImpl() override;
// Implements VideoProcessing.
- int32_t Deflickering(VideoFrame* frame, FrameStats* stats) override;
- int32_t BrightnessDetection(const VideoFrame& frame,
- const FrameStats& stats) override;
void EnableTemporalDecimation(bool enable) override;
void SetInputFrameResampleMode(VideoFrameResampling resampling_mode) override;
- void EnableContentAnalysis(bool enable) override;
int32_t SetTargetResolution(uint32_t width,
uint32_t height,
uint32_t frame_rate) override;
- void SetTargetFramerate(int frame_rate) override;
uint32_t GetDecimatedFrameRate() override;
uint32_t GetDecimatedWidth() const override;
uint32_t GetDecimatedHeight() const override;
- void EnableDenosing(bool enable) override;
+ void EnableDenoising(bool enable) override;
const VideoFrame* PreprocessFrame(const VideoFrame& frame) override;
- VideoContentMetrics* GetContentMetrics() const override;
private:
rtc::CriticalSection mutex_;
- VPMDeflickering deflickering_ GUARDED_BY(mutex_);
- VPMBrightnessDetection brightness_detection_;
- VPMFramePreprocessor frame_pre_processor_;
+ VPMFramePreprocessor frame_pre_processor_ GUARDED_BY(mutex_);
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/BUILD.gn b/chromium/third_party/webrtc/modules/video_render/BUILD.gn
deleted file mode 100644
index 0771bd7080c..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/BUILD.gn
+++ /dev/null
@@ -1,178 +0,0 @@
-# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-import("../../build/webrtc.gni")
-
-source_set("video_render_module") {
- sources = [
- "external/video_render_external_impl.cc",
- "external/video_render_external_impl.h",
- "i_video_render.h",
- "video_render.h",
- "video_render_defines.h",
- "video_render_impl.h",
- ]
-
- deps = [
- "../..:webrtc_common",
- "../../common_video",
- "../../system_wrappers",
- "../utility",
- ]
-
- configs += [ "../..:common_config" ]
- public_configs = [ "../..:common_inherited_config" ]
-
- if (is_clang) {
- # Suppress warnings from Chrome's Clang plugins.
- # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
- configs -= [ "//build/config/clang:find_bad_constructs" ]
- }
-}
-
-source_set("video_render") {
- sources = [
- "video_render_impl.cc",
- ]
- deps = [
- ":video_render_module",
- "../../system_wrappers",
- ]
-
- configs += [ "../..:common_config" ]
- public_configs = [ "../..:common_inherited_config" ]
-
- if (is_clang) {
- # Suppress warnings from Chrome's Clang plugins.
- # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
- configs -= [ "//build/config/clang:find_bad_constructs" ]
- }
-}
-
-if (!build_with_chromium) {
- config("video_render_internal_impl_config") {
- if (is_ios) {
- libs = [
- "OpenGLES.framework",
- "QuartzCore.framework",
- ]
- }
- }
-
- source_set("video_render_internal_impl") {
- libs = []
- sources = [
- "video_render_internal_impl.cc",
- ]
- deps = [
- ":video_render_module",
- "../../system_wrappers",
- ]
-
- if (is_linux) {
- sources += [
- "linux/video_render_linux_impl.cc",
- "linux/video_render_linux_impl.h",
- "linux/video_x11_channel.cc",
- "linux/video_x11_channel.h",
- "linux/video_x11_render.cc",
- "linux/video_x11_render.h",
- ]
-
- deps += [ "../..:webrtc_common" ]
-
- libs += [ "Xext" ]
- }
- if (is_mac) {
- sources += [
- "mac/cocoa_full_screen_window.h",
- "mac/cocoa_full_screen_window.mm",
- "mac/cocoa_render_view.h",
- "mac/cocoa_render_view.mm",
- "mac/video_render_agl.cc",
- "mac/video_render_agl.h",
- "mac/video_render_mac_carbon_impl.cc",
- "mac/video_render_mac_carbon_impl.h",
- "mac/video_render_mac_cocoa_impl.h",
- "mac/video_render_mac_cocoa_impl.mm",
- "mac/video_render_nsopengl.h",
- "mac/video_render_nsopengl.mm",
- ]
-
- libs += [
- "CoreVideo.framework",
- "QTKit.framework",
- ]
- }
- if (is_win) {
- sources += [
- "windows/i_video_render_win.h",
- "windows/video_render_direct3d9.cc",
- "windows/video_render_direct3d9.h",
- "windows/video_render_windows_impl.cc",
- "windows/video_render_windows_impl.h",
- ]
-
- directxsdk_exists =
- exec_script("//build/dir_exists.py",
- [ rebase_path("//third_party/directxsdk/files",
- root_build_dir) ],
- "trim string") == "True"
- if (directxsdk_exists) {
- directxsdk_path = "//third_party/directxsdk/files"
- } else {
- directxsdk_path =
- exec_script("../../build/find_directx_sdk.py", [], "trim string")
- }
- include_dirs = [ directxsdk_path + "/Include" ]
- }
- if (is_android) {
- sources += [
- "android/video_render_android_impl.cc",
- "android/video_render_android_impl.h",
- "android/video_render_android_native_opengl2.cc",
- "android/video_render_android_native_opengl2.h",
- "android/video_render_android_surface_view.cc",
- "android/video_render_android_surface_view.h",
- "android/video_render_opengles20.cc",
- "android/video_render_opengles20.h",
- ]
-
- libs += [ "GLESv2" ]
- }
- if (is_ios) {
- sources += [
- "ios/open_gles20.h",
- "ios/open_gles20.mm",
- "ios/video_render_ios_channel.h",
- "ios/video_render_ios_channel.mm",
- "ios/video_render_ios_gles20.h",
- "ios/video_render_ios_gles20.mm",
- "ios/video_render_ios_impl.h",
- "ios/video_render_ios_impl.mm",
- "ios/video_render_ios_view.h",
- "ios/video_render_ios_view.mm",
- ]
-
- deps += [ "../..:webrtc_common" ]
-
- cflags = [ "-fobjc-arc" ] # CLANG_ENABLE_OBJC_ARC = YES.
- }
-
- all_dependent_configs = [ ":video_render_internal_impl_config" ]
-
- configs += [ "../..:common_config" ]
- public_configs = [ "../..:common_inherited_config" ]
-
- if (is_clang) {
- # Suppress warnings from Chrome's Clang plugins.
- # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
- configs -= [ "//build/config/clang:find_bad_constructs" ]
- }
- }
-}
diff --git a/chromium/third_party/webrtc/modules/video_render/OWNERS b/chromium/third_party/webrtc/modules/video_render/OWNERS
deleted file mode 100644
index 3aaa5328f5c..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/OWNERS
+++ /dev/null
@@ -1,12 +0,0 @@
-mflodman@webrtc.org
-perkj@webrtc.org
-tkchin@webrtc.org
-
-per-file *.isolate=kjellander@webrtc.org
-
-# These are for the common case of adding or renaming files. If you're doing
-# structural changes, please get a review from a reviewer in this file.
-per-file *.gyp=*
-per-file *.gypi=*
-
-per-file BUILD.gn=kjellander@webrtc.org
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.cc b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.cc
deleted file mode 100644
index 9affb23d99f..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.cc
+++ /dev/null
@@ -1,316 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/android/video_render_android_impl.h"
-
-#include "webrtc/modules/video_render/video_render_internal.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-#ifdef ANDROID
-#include <android/log.h>
-#include <stdio.h>
-
-#undef WEBRTC_TRACE
-#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
-#else
-#include "webrtc/system_wrappers/include/trace.h"
-#endif
-
-namespace webrtc {
-
-JavaVM* VideoRenderAndroid::g_jvm = NULL;
-
-int32_t SetRenderAndroidVM(JavaVM* javaVM) {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
- VideoRenderAndroid::g_jvm = javaVM;
- return 0;
-}
-
-VideoRenderAndroid::VideoRenderAndroid(
- const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool /*fullscreen*/):
- _id(id),
- _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
- _renderType(videoRenderType),
- _ptrWindow((jobject)(window)),
- _javaShutDownFlag(false),
- _javaShutdownEvent(*EventWrapper::Create()),
- _javaRenderEvent(*EventWrapper::Create()),
- _lastJavaRenderEvent(0),
- _javaRenderJniEnv(NULL) {
-}
-
-VideoRenderAndroid::~VideoRenderAndroid() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "VideoRenderAndroid dtor");
-
- if (_javaRenderThread)
- StopRender();
-
- for (AndroidStreamMap::iterator it = _streamsMap.begin();
- it != _streamsMap.end();
- ++it) {
- delete it->second;
- }
- delete &_javaShutdownEvent;
- delete &_javaRenderEvent;
- delete &_critSect;
-}
-
-int32_t VideoRenderAndroid::ChangeWindow(void* /*window*/) {
- return -1;
-}
-
-VideoRenderCallback*
-VideoRenderAndroid::AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right,
- const float bottom) {
- CriticalSectionScoped cs(&_critSect);
-
- AndroidStream* renderStream = NULL;
- AndroidStreamMap::iterator item = _streamsMap.find(streamId);
- if (item != _streamsMap.end() && item->second != NULL) {
- WEBRTC_TRACE(kTraceInfo,
- kTraceVideoRenderer,
- -1,
- "%s: Render stream already exists",
- __FUNCTION__);
- return renderStream;
- }
-
- renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top,
- right, bottom, *this);
- if (renderStream) {
- _streamsMap[streamId] = renderStream;
- }
- else {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
- return NULL;
- }
- return renderStream;
-}
-
-int32_t VideoRenderAndroid::DeleteIncomingRenderStream(
- const uint32_t streamId) {
- CriticalSectionScoped cs(&_critSect);
-
- AndroidStreamMap::iterator item = _streamsMap.find(streamId);
- if (item == _streamsMap.end()) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
- return -1;
- }
- delete item->second;
- _streamsMap.erase(item);
- return 0;
-}
-
-int32_t VideoRenderAndroid::GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const {
- return -1;
-}
-
-int32_t VideoRenderAndroid::StartRender() {
- CriticalSectionScoped cs(&_critSect);
-
- if (_javaRenderThread) {
- // StartRender is called when this stream should start render.
- // However StopRender is not called when the streams stop rendering.
- // Thus the the thread is only deleted when the renderer is removed.
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
- "%s, Render thread already exist", __FUNCTION__);
- return 0;
- }
-
- _javaRenderThread.reset(new rtc::PlatformThread(JavaRenderThreadFun, this,
- "AndroidRenderThread"));
-
- _javaRenderThread->Start();
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: thread started",
- __FUNCTION__);
- _javaRenderThread->SetPriority(rtc::kRealtimePriority);
- return 0;
-}
-
-int32_t VideoRenderAndroid::StopRender() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
- {
- CriticalSectionScoped cs(&_critSect);
- if (!_javaRenderThread)
- {
- return -1;
- }
- _javaShutDownFlag = true;
- _javaRenderEvent.Set();
- }
-
- _javaShutdownEvent.Wait(3000);
- CriticalSectionScoped cs(&_critSect);
- _javaRenderThread->Stop();
- _javaRenderThread.reset();
-
- return 0;
-}
-
-void VideoRenderAndroid::ReDraw() {
- CriticalSectionScoped cs(&_critSect);
- // Allow redraw if it was more than 20ms since last.
- if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) {
- _lastJavaRenderEvent = TickTime::MillisecondTimestamp();
- _javaRenderEvent.Set();
- }
-}
-
-bool VideoRenderAndroid::JavaRenderThreadFun(void* obj) {
- return static_cast<VideoRenderAndroid*> (obj)->JavaRenderThreadProcess();
-}
-
-bool VideoRenderAndroid::JavaRenderThreadProcess()
-{
- _javaRenderEvent.Wait(1000);
-
- CriticalSectionScoped cs(&_critSect);
- if (!_javaRenderJniEnv) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !_javaRenderJniEnv) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, _javaRenderJniEnv);
- return false;
- }
- }
-
- for (AndroidStreamMap::iterator it = _streamsMap.begin();
- it != _streamsMap.end();
- ++it) {
- it->second->DeliverFrame(_javaRenderJniEnv);
- }
-
- if (_javaShutDownFlag) {
- if (g_jvm->DetachCurrentThread() < 0)
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- else {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "%s: Java thread detached", __FUNCTION__);
- }
- _javaRenderJniEnv = NULL;
- _javaShutDownFlag = false;
- _javaShutdownEvent.Set();
- return false; // Do not run this thread again.
- }
- return true;
-}
-
-VideoRenderType VideoRenderAndroid::RenderType() {
- return _renderType;
-}
-
-RawVideoType VideoRenderAndroid::PerferedVideoType() {
- return kVideoI420;
-}
-
-bool VideoRenderAndroid::FullScreen() {
- return false;
-}
-
-int32_t VideoRenderAndroid::GetGraphicsMemory(
- uint64_t& /*totalGraphicsMemory*/,
- uint64_t& /*availableGraphicsMemory*/) const {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderAndroid::GetScreenResolution(
- uint32_t& /*screenWidth*/,
- uint32_t& /*screenHeight*/) const {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-uint32_t VideoRenderAndroid::RenderFrameRate(
- const uint32_t /*streamId*/) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderAndroid::SetStreamCropping(
- const uint32_t /*streamId*/,
- const float /*left*/,
- const float /*top*/,
- const float /*right*/,
- const float /*bottom*/) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderAndroid::SetTransparentBackground(const bool enable) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderAndroid::ConfigureRenderer(
- const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderAndroid::SetText(
- const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float rigth, const float bottom) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderAndroid::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right,
- const float bottom) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Android", __FUNCTION__);
- return -1;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.h b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.h
deleted file mode 100644
index 06fd7a1c7cd..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_impl.h
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
-
-#include <jni.h>
-
-#include <map>
-#include <memory>
-
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/modules/video_render/i_video_render.h"
-
-
-namespace webrtc {
-
-//#define ANDROID_LOG
-
-class CriticalSectionWrapper;
-class EventWrapper;
-
-// The object a module user uses to send new frames to the java renderer
-// Base class for android render streams.
-
-class AndroidStream : public VideoRenderCallback {
- public:
- // DeliverFrame is called from a thread connected to the Java VM.
- // Used for Delivering frame for rendering.
- virtual void DeliverFrame(JNIEnv* jniEnv)=0;
-
- virtual ~AndroidStream() {};
-};
-
-class VideoRenderAndroid: IVideoRender {
- public:
- VideoRenderAndroid(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen);
-
- virtual ~VideoRenderAndroid();
-
- virtual int32_t Init()=0;
-
- virtual int32_t ChangeWindow(void* window);
-
- virtual VideoRenderCallback* AddIncomingRenderStream(
- const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t DeleteIncomingRenderStream(
- const uint32_t streamId);
-
- virtual int32_t GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const;
-
- virtual int32_t StartRender();
-
- virtual int32_t StopRender();
-
- virtual void ReDraw();
-
- // Properties
-
- virtual VideoRenderType RenderType();
-
- virtual RawVideoType PerferedVideoType();
-
- virtual bool FullScreen();
-
- virtual int32_t GetGraphicsMemory(
- uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const;
-
- virtual int32_t GetScreenResolution(
- uint32_t& screenWidth,
- uint32_t& screenHeight) const;
-
- virtual uint32_t RenderFrameRate(const uint32_t streamId);
-
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float rigth, const float bottom);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey, const float left,
- const float top, const float right,
- const float bottom);
- static JavaVM* g_jvm;
-
- protected:
- virtual AndroidStream* CreateAndroidRenderChannel(
- int32_t streamId,
- int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom,
- VideoRenderAndroid& renderer) = 0;
-
- int32_t _id;
- CriticalSectionWrapper& _critSect;
- VideoRenderType _renderType;
- jobject _ptrWindow;
-
- private:
- static bool JavaRenderThreadFun(void* obj);
- bool JavaRenderThreadProcess();
-
- // Map with streams to render.
- typedef std::map<int32_t, AndroidStream*> AndroidStreamMap;
- AndroidStreamMap _streamsMap;
- // True if the _javaRenderThread thread shall be detached from the JVM.
- bool _javaShutDownFlag;
- EventWrapper& _javaShutdownEvent;
- EventWrapper& _javaRenderEvent;
- int64_t _lastJavaRenderEvent;
- JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread.
- // TODO(pbos): Remove unique_ptr and use the member directly.
- std::unique_ptr<rtc::PlatformThread> _javaRenderThread;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc
deleted file mode 100644
index 286776e317b..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc
+++ /dev/null
@@ -1,450 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/android/video_render_android_native_opengl2.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-#ifdef ANDROID_LOG
-#include <android/log.h>
-#include <stdio.h>
-
-#undef WEBRTC_TRACE
-#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
-#else
-#include "webrtc/system_wrappers/include/trace.h"
-#endif
-
-namespace webrtc {
-
-AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer(
- const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
- VideoRenderAndroid(id, videoRenderType, window, fullscreen),
- _javaRenderObj(NULL),
- _javaRenderClass(NULL) {
-}
-
-bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window) {
- if (!g_jvm) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
- "RendererAndroid():UseOpenGL No JVM set.");
- return false;
- }
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(
- kTraceError,
- kTraceVideoRenderer,
- -1,
- "RendererAndroid(): Could not attach thread to JVM (%d, %p)",
- res, env);
- return false;
- }
- isAttached = true;
- }
-
- // get the renderer class
- jclass javaRenderClassLocal =
- env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
- if (!javaRenderClassLocal) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
- "%s: could not find ViEAndroidRenderer class",
- __FUNCTION__);
- return false;
- }
-
- // get the method ID for UseOpenGL
- jmethodID cidUseOpenGL = env->GetStaticMethodID(javaRenderClassLocal,
- "UseOpenGL2",
- "(Ljava/lang/Object;)Z");
- if (cidUseOpenGL == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
- "%s: could not get UseOpenGL ID", __FUNCTION__);
- return false;
- }
- jboolean res = env->CallStaticBooleanMethod(javaRenderClassLocal,
- cidUseOpenGL, (jobject) window);
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
- return res;
-}
-
-AndroidNativeOpenGl2Renderer::~AndroidNativeOpenGl2Renderer() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "AndroidNativeOpenGl2Renderer dtor");
- if (g_jvm) {
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- env = NULL;
- }
- else {
- isAttached = true;
- }
- }
-
- env->DeleteGlobalRef(_javaRenderObj);
- env->DeleteGlobalRef(_javaRenderClass);
-
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
- "%s: Could not detach thread from JVM",
- __FUNCTION__);
- }
- }
- }
-}
-
-int32_t AndroidNativeOpenGl2Renderer::Init() {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
- if (!g_jvm) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "(%s): Not a valid Java VM pointer.", __FUNCTION__);
- return -1;
- }
- if (!_ptrWindow) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
- "(%s): No window have been provided.", __FUNCTION__);
- return -1;
- }
-
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- return -1;
- }
- isAttached = true;
- }
-
- // get the ViEAndroidGLES20 class
- jclass javaRenderClassLocal =
- env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
- if (!javaRenderClassLocal) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not find ViEAndroidGLES20", __FUNCTION__);
- return -1;
- }
-
- // create a global reference to the class (to tell JNI that
- // we are referencing it after this function has returned)
- _javaRenderClass =
- reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
- if (!_javaRenderClass) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not create Java SurfaceHolder class reference",
- __FUNCTION__);
- return -1;
- }
-
- // Delete local class ref, we only use the global ref
- env->DeleteLocalRef(javaRenderClassLocal);
-
- // create a reference to the object (to tell JNI that we are referencing it
- // after this function has returned)
- _javaRenderObj = env->NewGlobalRef(_ptrWindow);
- if (!_javaRenderObj) {
- WEBRTC_TRACE(
- kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not create Java SurfaceRender object reference",
- __FUNCTION__);
- return -1;
- }
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done",
- __FUNCTION__);
- return 0;
-
-}
-AndroidStream*
-AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
- int32_t streamId,
- int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom,
- VideoRenderAndroid& renderer) {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d",
- __FUNCTION__, streamId);
- AndroidNativeOpenGl2Channel* stream =
- new AndroidNativeOpenGl2Channel(streamId, g_jvm, renderer,
- _javaRenderObj);
- if (stream && stream->Init(zOrder, left, top, right, bottom) == 0)
- return stream;
- else {
- delete stream;
- }
- return NULL;
-}
-
-AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(
- uint32_t streamId,
- JavaVM* jvm,
- VideoRenderAndroid& renderer,jobject javaRenderObj):
- _id(streamId),
- _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
- _renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj),
- _registerNativeCID(NULL), _deRegisterNativeCID(NULL),
- _openGLRenderer(streamId) {
-
-}
-AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "AndroidNativeOpenGl2Channel dtor");
- if (_jvm) {
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = _jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- env = NULL;
- } else {
- isAttached = true;
- }
- }
- if (env && _deRegisterNativeCID) {
- env->CallVoidMethod(_javaRenderObj, _deRegisterNativeCID);
- }
-
- if (isAttached) {
- if (_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
- "%s: Could not detach thread from JVM",
- __FUNCTION__);
- }
- }
- }
-
- delete &_renderCritSect;
-}
-
-int32_t AndroidNativeOpenGl2Channel::Init(int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
- "%s: AndroidNativeOpenGl2Channel", __FUNCTION__);
- if (!_jvm) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Not a valid Java VM pointer", __FUNCTION__);
- return -1;
- }
-
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = _jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__, res, env);
- return -1;
- }
- isAttached = true;
- }
-
- jclass javaRenderClass =
- env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
- if (!javaRenderClass) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not find ViESurfaceRenderer", __FUNCTION__);
- return -1;
- }
-
- // get the method ID for the ReDraw function
- _redrawCid = env->GetMethodID(javaRenderClass, "ReDraw", "()V");
- if (_redrawCid == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not get ReDraw ID", __FUNCTION__);
- return -1;
- }
-
- _registerNativeCID = env->GetMethodID(javaRenderClass,
- "RegisterNativeObject", "(J)V");
- if (_registerNativeCID == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not get RegisterNativeObject ID", __FUNCTION__);
- return -1;
- }
-
- _deRegisterNativeCID = env->GetMethodID(javaRenderClass,
- "DeRegisterNativeObject", "()V");
- if (_deRegisterNativeCID == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not get DeRegisterNativeObject ID",
- __FUNCTION__);
- return -1;
- }
-
- JNINativeMethod nativeFunctions[2] = {
- { "DrawNative",
- "(J)V",
- (void*) &AndroidNativeOpenGl2Channel::DrawNativeStatic, },
- { "CreateOpenGLNative",
- "(JII)I",
- (void*) &AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic },
- };
- if (env->RegisterNatives(javaRenderClass, nativeFunctions, 2) == 0) {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1,
- "%s: Registered native functions", __FUNCTION__);
- }
- else {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
- "%s: Failed to register native functions", __FUNCTION__);
- return -1;
- }
-
- env->CallVoidMethod(_javaRenderObj, _registerNativeCID, (jlong) this);
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
-
- if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0) {
- return -1;
- }
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
- "%s: AndroidNativeOpenGl2Channel done", __FUNCTION__);
- return 0;
-}
-
-int32_t AndroidNativeOpenGl2Channel::RenderFrame(const uint32_t /*streamId*/,
- const VideoFrame& videoFrame) {
- // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
- _renderCritSect.Enter();
- _bufferToRender = videoFrame;
- _renderCritSect.Leave();
- _renderer.ReDraw();
- return 0;
-}
-
-/*Implements AndroidStream
- * Calls the Java object and render the buffer in _bufferToRender
- */
-void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv) {
- //TickTime timeNow=TickTime::Now();
-
- //Draw the Surface
- jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid);
-
- // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id,
- // "%s: time to deliver %lld" ,__FUNCTION__,
- // (TickTime::Now()-timeNow).Milliseconds());
-}
-
-/*
- * JNI callback from Java class. Called when the render
- * want to render a frame. Called from the GLRenderThread
- * Method: DrawNative
- * Signature: (J)V
- */
-void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic(
- JNIEnv * env, jobject, jlong context) {
- AndroidNativeOpenGl2Channel* renderChannel =
- reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
- renderChannel->DrawNative();
-}
-
-void AndroidNativeOpenGl2Channel::DrawNative() {
- _renderCritSect.Enter();
- _openGLRenderer.Render(_bufferToRender);
- _renderCritSect.Leave();
-}
-
-/*
- * JNI callback from Java class. Called when the GLSurfaceview
- * have created a surface. Called from the GLRenderThread
- * Method: CreateOpenGLNativeStatic
- * Signature: (JII)I
- */
-jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(
- JNIEnv * env,
- jobject,
- jlong context,
- jint width,
- jint height) {
- AndroidNativeOpenGl2Channel* renderChannel =
- reinterpret_cast<AndroidNativeOpenGl2Channel*> (context);
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__);
- return renderChannel->CreateOpenGLNative(width, height);
-}
-
-jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(
- int width, int height) {
- return _openGLRenderer.Setup(width, height);
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.h b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.h
deleted file mode 100644
index 8be247b8342..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.h
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
-
-#include <jni.h>
-
-#include "webrtc/modules/video_render/android/video_render_android_impl.h"
-#include "webrtc/modules/video_render/android/video_render_opengles20.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-
-class AndroidNativeOpenGl2Channel: public AndroidStream {
- public:
- AndroidNativeOpenGl2Channel(
- uint32_t streamId,
- JavaVM* jvm,
- VideoRenderAndroid& renderer,jobject javaRenderObj);
- ~AndroidNativeOpenGl2Channel();
-
- int32_t Init(int32_t zOrder, const float left, const float top,
- const float right, const float bottom);
-
- //Implement VideoRenderCallback
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame);
-
- //Implements AndroidStream
- virtual void DeliverFrame(JNIEnv* jniEnv);
-
- private:
- static jint JNICALL CreateOpenGLNativeStatic(
- JNIEnv * env,
- jobject,
- jlong context,
- jint width,
- jint height);
- jint CreateOpenGLNative(int width, int height);
-
- static void JNICALL DrawNativeStatic(JNIEnv * env,jobject, jlong context);
- void DrawNative();
- uint32_t _id;
- CriticalSectionWrapper& _renderCritSect;
-
- VideoFrame _bufferToRender;
- VideoRenderAndroid& _renderer;
- JavaVM* _jvm;
- jobject _javaRenderObj;
-
- jmethodID _redrawCid;
- jmethodID _registerNativeCID;
- jmethodID _deRegisterNativeCID;
- VideoRenderOpenGles20 _openGLRenderer;
-};
-
-
-class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid {
- public:
- AndroidNativeOpenGl2Renderer(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen);
-
- ~AndroidNativeOpenGl2Renderer();
- static bool UseOpenGL2(void* window);
-
- int32_t Init();
- virtual AndroidStream* CreateAndroidRenderChannel(
- int32_t streamId,
- int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom,
- VideoRenderAndroid& renderer);
-
- private:
- jobject _javaRenderObj;
- jclass _javaRenderClass;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.cc b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.cc
deleted file mode 100644
index ea3b106b1ed..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.cc
+++ /dev/null
@@ -1,474 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_render/android/video_render_android_surface_view.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-#ifdef ANDROID_LOG
-#include <android/log.h>
-#include <stdio.h>
-
-#undef WEBRTC_TRACE
-#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
-#else
-#include "webrtc/system_wrappers/include/trace.h"
-#endif
-
-namespace webrtc {
-
-AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(
- const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
- VideoRenderAndroid(id,videoRenderType,window,fullscreen),
- _javaRenderObj(NULL),
- _javaRenderClass(NULL) {
-}
-
-AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer() {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "AndroidSurfaceViewRenderer dtor");
- if(g_jvm) {
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__,
- res,
- env);
- env=NULL;
- }
- else {
- isAttached = true;
- }
- }
- env->DeleteGlobalRef(_javaRenderObj);
- env->DeleteGlobalRef(_javaRenderClass);
-
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning,
- kTraceVideoRenderer,
- _id,
- "%s: Could not detach thread from JVM",
- __FUNCTION__);
- }
- }
- }
-}
-
-int32_t AndroidSurfaceViewRenderer::Init() {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
- if (!g_jvm) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "(%s): Not a valid Java VM pointer.",
- __FUNCTION__);
- return -1;
- }
- if(!_ptrWindow) {
- WEBRTC_TRACE(kTraceWarning,
- kTraceVideoRenderer,
- _id,
- "(%s): No window have been provided.",
- __FUNCTION__);
- return -1;
- }
-
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = g_jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__,
- res,
- env);
- return -1;
- }
- isAttached = true;
- }
-
- // get the ViESurfaceRender class
- jclass javaRenderClassLocal =
- env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
- if (!javaRenderClassLocal) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not find ViESurfaceRenderer",
- __FUNCTION__);
- return -1;
- }
-
- // create a global reference to the class (to tell JNI that
- // we are referencing it after this function has returned)
- _javaRenderClass =
- reinterpret_cast<jclass>(env->NewGlobalRef(javaRenderClassLocal));
- if (!_javaRenderClass) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not create Java ViESurfaceRenderer class reference",
- __FUNCTION__);
- return -1;
- }
-
- // Delete local class ref, we only use the global ref
- env->DeleteLocalRef(javaRenderClassLocal);
-
- // get the method ID for the constructor
- jmethodID cid = env->GetMethodID(_javaRenderClass,
- "<init>",
- "(Landroid/view/SurfaceView;)V");
- if (cid == NULL) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not get constructor ID",
- __FUNCTION__);
- return -1; /* exception thrown */
- }
-
- // construct the object
- jobject javaRenderObjLocal = env->NewObject(_javaRenderClass,
- cid,
- _ptrWindow);
- if (!javaRenderObjLocal) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not create Java Render",
- __FUNCTION__);
- return -1;
- }
-
- // create a reference to the object (to tell JNI that we are referencing it
- // after this function has returned)
- _javaRenderObj = env->NewGlobalRef(javaRenderObjLocal);
- if (!_javaRenderObj) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not create Java SurfaceRender object reference",
- __FUNCTION__);
- return -1;
- }
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (g_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning,
- kTraceVideoRenderer,
- _id,
- "%s: Could not detach thread from JVM", __FUNCTION__);
- }
- }
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__);
- return 0;
-}
-
-AndroidStream*
-AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(
- int32_t streamId,
- int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom,
- VideoRenderAndroid& renderer) {
- WEBRTC_TRACE(kTraceDebug,
- kTraceVideoRenderer,
- _id,
- "%s: Id %d",
- __FUNCTION__,
- streamId);
- AndroidSurfaceViewChannel* stream =
- new AndroidSurfaceViewChannel(streamId, g_jvm, renderer, _javaRenderObj);
- if(stream && stream->Init(zOrder, left, top, right, bottom) == 0)
- return stream;
- else
- delete stream;
- return NULL;
-}
-
-AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(
- uint32_t streamId,
- JavaVM* jvm,
- VideoRenderAndroid& renderer,
- jobject javaRenderObj) :
- _id(streamId),
- _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
- _renderer(renderer),
- _jvm(jvm),
- _javaRenderObj(javaRenderObj),
-#ifndef ANDROID_NDK_8_OR_ABOVE
- _javaByteBufferObj(NULL),
- _directBuffer(NULL),
-#endif
- _bitmapWidth(0),
- _bitmapHeight(0) {
-}
-
-AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel() {
- WEBRTC_TRACE(kTraceInfo,
- kTraceVideoRenderer,
- _id,
- "AndroidSurfaceViewChannel dtor");
- delete &_renderCritSect;
- if(_jvm) {
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if ( _jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = _jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__,
- res,
- env);
- env=NULL;
- }
- else {
- isAttached = true;
- }
- }
-
- env->DeleteGlobalRef(_javaByteBufferObj);
- if (isAttached) {
- if (_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning,
- kTraceVideoRenderer,
- _id,
- "%s: Could not detach thread from JVM",
- __FUNCTION__);
- }
- }
- }
-}
-
-int32_t AndroidSurfaceViewChannel::Init(
- int32_t /*zOrder*/,
- const float left,
- const float top,
- const float right,
- const float bottom) {
-
- WEBRTC_TRACE(kTraceDebug,
- kTraceVideoRenderer,
- _id,
- "%s: AndroidSurfaceViewChannel",
- __FUNCTION__);
- if (!_jvm) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: Not a valid Java VM pointer",
- __FUNCTION__);
- return -1;
- }
-
- if( (top > 1 || top < 0) ||
- (right > 1 || right < 0) ||
- (bottom > 1 || bottom < 0) ||
- (left > 1 || left < 0)) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Wrong coordinates", __FUNCTION__);
- return -1;
- }
-
- // get the JNI env for this thread
- bool isAttached = false;
- JNIEnv* env = NULL;
- if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
- // try to attach the thread and get the env
- // Attach this thread to JVM
- jint res = _jvm->AttachCurrentThread(&env, NULL);
-
- // Get the JNI env for this thread
- if ((res < 0) || !env) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: Could not attach thread to JVM (%d, %p)",
- __FUNCTION__,
- res,
- env);
- return -1;
- }
- isAttached = true;
- }
-
- jclass javaRenderClass =
- env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
- if (!javaRenderClass) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not find ViESurfaceRenderer",
- __FUNCTION__);
- return -1;
- }
-
- // get the method ID for the CreateIntArray
- _createByteBufferCid =
- env->GetMethodID(javaRenderClass,
- "CreateByteBuffer",
- "(II)Ljava/nio/ByteBuffer;");
- if (_createByteBufferCid == NULL) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not get CreateByteBuffer ID",
- __FUNCTION__);
- return -1; /* exception thrown */
- }
-
- // get the method ID for the DrawByteBuffer function
- _drawByteBufferCid = env->GetMethodID(javaRenderClass,
- "DrawByteBuffer",
- "()V");
- if (_drawByteBufferCid == NULL) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not get DrawByteBuffer ID",
- __FUNCTION__);
- return -1; /* exception thrown */
- }
-
- // get the method ID for the SetCoordinates function
- _setCoordinatesCid = env->GetMethodID(javaRenderClass,
- "SetCoordinates",
- "(FFFF)V");
- if (_setCoordinatesCid == NULL) {
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: could not get SetCoordinates ID",
- __FUNCTION__);
- return -1; /* exception thrown */
- }
-
- env->CallVoidMethod(_javaRenderObj, _setCoordinatesCid,
- left, top, right, bottom);
-
- // Detach this thread if it was attached
- if (isAttached) {
- if (_jvm->DetachCurrentThread() < 0) {
- WEBRTC_TRACE(kTraceWarning,
- kTraceVideoRenderer,
- _id,
- "%s: Could not detach thread from JVM",
- __FUNCTION__);
- }
- }
-
- WEBRTC_TRACE(kTraceDebug,
- kTraceVideoRenderer,
- _id,
- "%s: AndroidSurfaceViewChannel done",
- __FUNCTION__);
- return 0;
-}
-
-int32_t AndroidSurfaceViewChannel::RenderFrame(const uint32_t /*streamId*/,
- const VideoFrame& videoFrame) {
- // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
- _renderCritSect.Enter();
- _bufferToRender = videoFrame;
- _renderCritSect.Leave();
- _renderer.ReDraw();
- return 0;
-}
-
-
-/*Implements AndroidStream
- * Calls the Java object and render the buffer in _bufferToRender
- */
-void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
- _renderCritSect.Enter();
-
- if (_bitmapWidth != _bufferToRender.width() ||
- _bitmapHeight != _bufferToRender.height()) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d "
- "%d",__FUNCTION__,
- _bufferToRender.width(), _bufferToRender.height());
- if (_javaByteBufferObj) {
- jniEnv->DeleteGlobalRef(_javaByteBufferObj);
- _javaByteBufferObj = NULL;
- _directBuffer = NULL;
- }
-
- jobject javaByteBufferObj =
- jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid,
- _bufferToRender.width(),
- _bufferToRender.height());
- _javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
- if (!_javaByteBufferObj) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
- "create Java ByteBuffer object reference", __FUNCTION__);
- _renderCritSect.Leave();
- return;
- } else {
- _directBuffer = static_cast<unsigned char*>
- (jniEnv->GetDirectBufferAddress(_javaByteBufferObj));
- _bitmapWidth = _bufferToRender.width();
- _bitmapHeight = _bufferToRender.height();
- }
- }
-
- if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
- const int conversionResult =
- ConvertFromI420(_bufferToRender, kRGB565, 0, _directBuffer);
-
- if (conversionResult < 0) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
- " failed.", __FUNCTION__);
- _renderCritSect.Leave();
- return;
- }
- }
- _renderCritSect.Leave();
- // Draw the Surface
- jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid);
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.h b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.h
deleted file mode 100644
index 0f029b54f34..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_surface_view.h
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
-
-#include <jni.h>
-
-#include "webrtc/modules/video_render/android/video_render_android_impl.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-
-class AndroidSurfaceViewChannel : public AndroidStream {
- public:
- AndroidSurfaceViewChannel(uint32_t streamId,
- JavaVM* jvm,
- VideoRenderAndroid& renderer,
- jobject javaRenderObj);
- ~AndroidSurfaceViewChannel();
-
- int32_t Init(int32_t zOrder, const float left, const float top,
- const float right, const float bottom);
-
- //Implement VideoRenderCallback
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame);
-
- //Implements AndroidStream
- virtual void DeliverFrame(JNIEnv* jniEnv);
-
- private:
- uint32_t _id;
- CriticalSectionWrapper& _renderCritSect;
-
- VideoFrame _bufferToRender;
- VideoRenderAndroid& _renderer;
- JavaVM* _jvm;
- jobject _javaRenderObj;
-
- jobject _javaByteBufferObj;
- unsigned char* _directBuffer;
- jmethodID _createByteBufferCid;
- jmethodID _drawByteBufferCid;
-
- jmethodID _setCoordinatesCid;
- int _bitmapWidth;
- int _bitmapHeight;
-};
-
-class AndroidSurfaceViewRenderer : private VideoRenderAndroid {
- public:
- AndroidSurfaceViewRenderer(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen);
- ~AndroidSurfaceViewRenderer();
- int32_t Init();
- virtual AndroidStream* CreateAndroidRenderChannel(
- int32_t streamId,
- int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom,
- VideoRenderAndroid& renderer);
- private:
- jobject _javaRenderObj;
- jclass _javaRenderClass;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.cc b/chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.cc
deleted file mode 100644
index 45db56a4f6e..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.cc
+++ /dev/null
@@ -1,397 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <GLES2/gl2.h>
-#include <GLES2/gl2ext.h>
-
-#include <stdio.h>
-#include <stdlib.h>
-
-#include "webrtc/modules/video_render/android/video_render_opengles20.h"
-
-//#define ANDROID_LOG
-
-#ifdef ANDROID_LOG
-#include <android/log.h>
-#include <stdio.h>
-
-#undef WEBRTC_TRACE
-#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
-#else
-#include "webrtc/system_wrappers/include/trace.h"
-#endif
-
-namespace webrtc {
-
-const char VideoRenderOpenGles20::g_indices[] = { 0, 3, 2, 0, 2, 1 };
-
-const char VideoRenderOpenGles20::g_vertextShader[] = {
- "attribute vec4 aPosition;\n"
- "attribute vec2 aTextureCoord;\n"
- "varying vec2 vTextureCoord;\n"
- "void main() {\n"
- " gl_Position = aPosition;\n"
- " vTextureCoord = aTextureCoord;\n"
- "}\n" };
-
-// The fragment shader.
-// Do YUV to RGB565 conversion.
-const char VideoRenderOpenGles20::g_fragmentShader[] = {
- "precision mediump float;\n"
- "uniform sampler2D Ytex;\n"
- "uniform sampler2D Utex,Vtex;\n"
- "varying vec2 vTextureCoord;\n"
- "void main(void) {\n"
- " float nx,ny,r,g,b,y,u,v;\n"
- " mediump vec4 txl,ux,vx;"
- " nx=vTextureCoord[0];\n"
- " ny=vTextureCoord[1];\n"
- " y=texture2D(Ytex,vec2(nx,ny)).r;\n"
- " u=texture2D(Utex,vec2(nx,ny)).r;\n"
- " v=texture2D(Vtex,vec2(nx,ny)).r;\n"
-
- //" y = v;\n"+
- " y=1.1643*(y-0.0625);\n"
- " u=u-0.5;\n"
- " v=v-0.5;\n"
-
- " r=y+1.5958*v;\n"
- " g=y-0.39173*u-0.81290*v;\n"
- " b=y+2.017*u;\n"
- " gl_FragColor=vec4(r,g,b,1.0);\n"
- "}\n" };
-
-VideoRenderOpenGles20::VideoRenderOpenGles20(int32_t id) :
- _id(id),
- _textureWidth(-1),
- _textureHeight(-1) {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
- __FUNCTION__, (int) _id);
-
- const GLfloat vertices[20] = {
- // X, Y, Z, U, V
- -1, -1, 0, 0, 1, // Bottom Left
- 1, -1, 0, 1, 1, //Bottom Right
- 1, 1, 0, 1, 0, //Top Right
- -1, 1, 0, 0, 0 }; //Top Left
-
- memcpy(_vertices, vertices, sizeof(_vertices));
-}
-
-VideoRenderOpenGles20::~VideoRenderOpenGles20() {
-}
-
-int32_t VideoRenderOpenGles20::Setup(int32_t width, int32_t height) {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
- "%s: width %d, height %d", __FUNCTION__, (int) width,
- (int) height);
-
- printGLString("Version", GL_VERSION);
- printGLString("Vendor", GL_VENDOR);
- printGLString("Renderer", GL_RENDERER);
- printGLString("Extensions", GL_EXTENSIONS);
-
- int maxTextureImageUnits[2];
- int maxTextureSize[2];
- glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureImageUnits);
- glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxTextureSize);
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
- "%s: number of textures %d, size %d", __FUNCTION__,
- (int) maxTextureImageUnits[0], (int) maxTextureSize[0]);
-
- _program = createProgram(g_vertextShader, g_fragmentShader);
- if (!_program) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not create program", __FUNCTION__);
- return -1;
- }
-
- int positionHandle = glGetAttribLocation(_program, "aPosition");
- checkGlError("glGetAttribLocation aPosition");
- if (positionHandle == -1) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not get aPosition handle", __FUNCTION__);
- return -1;
- }
-
- int textureHandle = glGetAttribLocation(_program, "aTextureCoord");
- checkGlError("glGetAttribLocation aTextureCoord");
- if (textureHandle == -1) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not get aTextureCoord handle", __FUNCTION__);
- return -1;
- }
-
- // set the vertices array in the shader
- // _vertices contains 4 vertices with 5 coordinates.
- // 3 for (xyz) for the vertices and 2 for the texture
- glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false,
- 5 * sizeof(GLfloat), _vertices);
- checkGlError("glVertexAttribPointer aPosition");
-
- glEnableVertexAttribArray(positionHandle);
- checkGlError("glEnableVertexAttribArray positionHandle");
-
- // set the texture coordinate array in the shader
- // _vertices contains 4 vertices with 5 coordinates.
- // 3 for (xyz) for the vertices and 2 for the texture
- glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5
- * sizeof(GLfloat), &_vertices[3]);
- checkGlError("glVertexAttribPointer maTextureHandle");
- glEnableVertexAttribArray(textureHandle);
- checkGlError("glEnableVertexAttribArray textureHandle");
-
- glUseProgram(_program);
- int i = glGetUniformLocation(_program, "Ytex");
- checkGlError("glGetUniformLocation");
- glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
- checkGlError("glUniform1i Ytex");
-
- i = glGetUniformLocation(_program, "Utex");
- checkGlError("glGetUniformLocation Utex");
- glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
- checkGlError("glUniform1i Utex");
-
- i = glGetUniformLocation(_program, "Vtex");
- checkGlError("glGetUniformLocation");
- glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
- checkGlError("glUniform1i");
-
- glViewport(0, 0, width, height);
- checkGlError("glViewport");
- return 0;
-}
-
-// SetCoordinates
-// Sets the coordinates where the stream shall be rendered.
-// Values must be between 0 and 1.
-int32_t VideoRenderOpenGles20::SetCoordinates(int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- if ((top > 1 || top < 0) || (right > 1 || right < 0) ||
- (bottom > 1 || bottom < 0) || (left > 1 || left < 0)) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Wrong coordinates", __FUNCTION__);
- return -1;
- }
-
- // X, Y, Z, U, V
- // -1, -1, 0, 0, 1, // Bottom Left
- // 1, -1, 0, 1, 1, //Bottom Right
- // 1, 1, 0, 1, 0, //Top Right
- // -1, 1, 0, 0, 0 //Top Left
-
- // Bottom Left
- _vertices[0] = (left * 2) - 1;
- _vertices[1] = -1 * (2 * bottom) + 1;
- _vertices[2] = zOrder;
-
- //Bottom Right
- _vertices[5] = (right * 2) - 1;
- _vertices[6] = -1 * (2 * bottom) + 1;
- _vertices[7] = zOrder;
-
- //Top Right
- _vertices[10] = (right * 2) - 1;
- _vertices[11] = -1 * (2 * top) + 1;
- _vertices[12] = zOrder;
-
- //Top Left
- _vertices[15] = (left * 2) - 1;
- _vertices[16] = -1 * (2 * top) + 1;
- _vertices[17] = zOrder;
-
- return 0;
-}
-
-int32_t VideoRenderOpenGles20::Render(const VideoFrame& frameToRender) {
- if (frameToRender.IsZeroSize()) {
- return -1;
- }
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
- __FUNCTION__, (int) _id);
-
- glUseProgram(_program);
- checkGlError("glUseProgram");
-
- if (_textureWidth != (GLsizei) frameToRender.width() ||
- _textureHeight != (GLsizei) frameToRender.height()) {
- SetupTextures(frameToRender);
- }
- UpdateTextures(frameToRender);
-
- glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, g_indices);
- checkGlError("glDrawArrays");
-
- return 0;
-}
-
-GLuint VideoRenderOpenGles20::loadShader(GLenum shaderType,
- const char* pSource) {
- GLuint shader = glCreateShader(shaderType);
- if (shader) {
- glShaderSource(shader, 1, &pSource, NULL);
- glCompileShader(shader);
- GLint compiled = 0;
- glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
- if (!compiled) {
- GLint infoLen = 0;
- glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
- if (infoLen) {
- char* buf = (char*) malloc(infoLen);
- if (buf) {
- glGetShaderInfoLog(shader, infoLen, NULL, buf);
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not compile shader %d: %s",
- __FUNCTION__, shaderType, buf);
- free(buf);
- }
- glDeleteShader(shader);
- shader = 0;
- }
- }
- }
- return shader;
-}
-
-GLuint VideoRenderOpenGles20::createProgram(const char* pVertexSource,
- const char* pFragmentSource) {
- GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
- if (!vertexShader) {
- return 0;
- }
-
- GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
- if (!pixelShader) {
- return 0;
- }
-
- GLuint program = glCreateProgram();
- if (program) {
- glAttachShader(program, vertexShader);
- checkGlError("glAttachShader");
- glAttachShader(program, pixelShader);
- checkGlError("glAttachShader");
- glLinkProgram(program);
- GLint linkStatus = GL_FALSE;
- glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
- if (linkStatus != GL_TRUE) {
- GLint bufLength = 0;
- glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
- if (bufLength) {
- char* buf = (char*) malloc(bufLength);
- if (buf) {
- glGetProgramInfoLog(program, bufLength, NULL, buf);
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Could not link program: %s",
- __FUNCTION__, buf);
- free(buf);
- }
- }
- glDeleteProgram(program);
- program = 0;
- }
- }
- return program;
-}
-
-void VideoRenderOpenGles20::printGLString(const char *name, GLenum s) {
- const char *v = (const char *) glGetString(s);
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "GL %s = %s\n",
- name, v);
-}
-
-void VideoRenderOpenGles20::checkGlError(const char* op) {
-#ifdef ANDROID_LOG
- for (GLint error = glGetError(); error; error = glGetError()) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "after %s() glError (0x%x)\n", op, error);
- }
-#else
- return;
-#endif
-}
-
-static void InitializeTexture(int name, int id, int width, int height) {
- glActiveTexture(name);
- glBindTexture(GL_TEXTURE_2D, id);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
-}
-
-void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
- "%s: width %d, height %d", __FUNCTION__,
- frameToRender.width(), frameToRender.height());
-
- const GLsizei width = frameToRender.width();
- const GLsizei height = frameToRender.height();
-
- glGenTextures(3, _textureIds); //Generate the Y, U and V texture
- InitializeTexture(GL_TEXTURE0, _textureIds[0], width, height);
- InitializeTexture(GL_TEXTURE1, _textureIds[1], width / 2, height / 2);
- InitializeTexture(GL_TEXTURE2, _textureIds[2], width / 2, height / 2);
-
- checkGlError("SetupTextures");
-
- _textureWidth = width;
- _textureHeight = height;
-}
-
-// Uploads a plane of pixel data, accounting for stride != width*bpp.
-static void GlTexSubImage2D(GLsizei width, GLsizei height, int stride,
- const uint8_t* plane) {
- if (stride == width) {
- // Yay! We can upload the entire plane in a single GL call.
- glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE,
- GL_UNSIGNED_BYTE,
- static_cast<const GLvoid*>(plane));
- } else {
- // Boo! Since GLES2 doesn't have GL_UNPACK_ROW_LENGTH and Android doesn't
- // have GL_EXT_unpack_subimage we have to upload a row at a time. Ick.
- for (int row = 0; row < height; ++row) {
- glTexSubImage2D(GL_TEXTURE_2D, 0, 0, row, width, 1, GL_LUMINANCE,
- GL_UNSIGNED_BYTE,
- static_cast<const GLvoid*>(plane + (row * stride)));
- }
- }
-}
-
-void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender) {
- const GLsizei width = frameToRender.width();
- const GLsizei height = frameToRender.height();
-
- glActiveTexture(GL_TEXTURE0);
- glBindTexture(GL_TEXTURE_2D, _textureIds[0]);
- GlTexSubImage2D(width, height, frameToRender.stride(kYPlane),
- frameToRender.buffer(kYPlane));
-
- glActiveTexture(GL_TEXTURE1);
- glBindTexture(GL_TEXTURE_2D, _textureIds[1]);
- GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kUPlane),
- frameToRender.buffer(kUPlane));
-
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, _textureIds[2]);
- GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kVPlane),
- frameToRender.buffer(kVPlane));
-
- checkGlError("UpdateTextures");
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.h b/chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.h
deleted file mode 100644
index 57e2a10d42e..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_opengles20.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
-
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-#include <GLES2/gl2.h>
-#include <GLES2/gl2ext.h>
-
-namespace webrtc
-{
-
-class VideoRenderOpenGles20 {
- public:
- VideoRenderOpenGles20(int32_t id);
- ~VideoRenderOpenGles20();
-
- int32_t Setup(int32_t widht, int32_t height);
- int32_t Render(const VideoFrame& frameToRender);
- int32_t SetCoordinates(int32_t zOrder, const float left, const float top,
- const float right, const float bottom);
-
- private:
- void printGLString(const char *name, GLenum s);
- void checkGlError(const char* op);
- GLuint loadShader(GLenum shaderType, const char* pSource);
- GLuint createProgram(const char* pVertexSource,
- const char* pFragmentSource);
- void SetupTextures(const VideoFrame& frameToRender);
- void UpdateTextures(const VideoFrame& frameToRender);
-
- int32_t _id;
- GLuint _textureIds[3]; // Texture id of Y,U and V texture.
- GLuint _program;
- GLsizei _textureWidth;
- GLsizei _textureHeight;
-
- GLfloat _vertices[20];
- static const char g_indices[];
-
- static const char g_vertextShader[];
- static const char g_fragmentShader[];
-
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.cc b/chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.cc
deleted file mode 100644
index 58df07875ec..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.cc
+++ /dev/null
@@ -1,195 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/external/video_render_external_impl.h"
-
-namespace webrtc {
-
-VideoRenderExternalImpl::VideoRenderExternalImpl(
- const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
- _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
- _fullscreen(fullscreen)
-{
-}
-
-VideoRenderExternalImpl::~VideoRenderExternalImpl()
-{
- delete &_critSect;
-}
-
-int32_t VideoRenderExternalImpl::Init()
-{
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::ChangeWindow(void* window)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-VideoRenderCallback*
-VideoRenderExternalImpl::AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_critSect);
- return this;
-}
-
-int32_t VideoRenderExternalImpl::DeleteIncomingRenderStream(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const
-{
- CriticalSectionScoped cs(&_critSect);
-
- zOrder = 0;
- left = 0;
- top = 0;
- right = 0;
- bottom = 0;
-
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::StartRender()
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::StopRender()
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-VideoRenderType VideoRenderExternalImpl::RenderType()
-{
- return kRenderExternal;
-}
-
-RawVideoType VideoRenderExternalImpl::PerferedVideoType()
-{
- return kVideoI420;
-}
-
-bool VideoRenderExternalImpl::FullScreen()
-{
- CriticalSectionScoped cs(&_critSect);
- return _fullscreen;
-}
-
-int32_t VideoRenderExternalImpl::GetGraphicsMemory(
- uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const
-{
- totalGraphicsMemory = 0;
- availableGraphicsMemory = 0;
- return -1;
-}
-
-int32_t VideoRenderExternalImpl::GetScreenResolution(
- uint32_t& screenWidth,
- uint32_t& screenHeight) const
-{
- CriticalSectionScoped cs(&_critSect);
- screenWidth = 0;
- screenHeight = 0;
- return 0;
-}
-
-uint32_t VideoRenderExternalImpl::RenderFrameRate(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::SetStreamCropping(
- const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::ConfigureRenderer(
- const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::SetTransparentBackground(
- const bool enable)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::SetText(
- const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-int32_t VideoRenderExternalImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_critSect);
- return 0;
-}
-
-// VideoRenderCallback
-int32_t VideoRenderExternalImpl::RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame) {
- return 0;
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.h b/chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.h
deleted file mode 100644
index a8b663fff7e..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/external/video_render_external_impl.h
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
-
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/video_render/i_video_render.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-
-namespace webrtc {
-
-// Class definitions
-class VideoRenderExternalImpl: IVideoRender, public VideoRenderCallback
-{
-public:
- /*
- * Constructor/destructor
- */
-
- VideoRenderExternalImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window, const bool fullscreen);
-
- virtual ~VideoRenderExternalImpl();
-
- virtual int32_t Init();
-
- virtual int32_t ChangeWindow(void* window);
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- virtual VideoRenderCallback
- * AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t
- DeleteIncomingRenderStream(const uint32_t streamId);
-
- virtual int32_t
- GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender();
-
- virtual int32_t StopRender();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual VideoRenderType RenderType();
-
- virtual RawVideoType PerferedVideoType();
-
- virtual bool FullScreen();
-
- virtual int32_t
- GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const;
-
- virtual int32_t
- GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const;
-
- virtual uint32_t RenderFrameRate(const uint32_t streamId);
-
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey, const float left,
- const float top, const float right,
- const float bottom);
-
- // VideoRenderCallback
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame);
-
-private:
- CriticalSectionWrapper& _critSect;
- bool _fullscreen;
-};
-
-} // namespace webrtc
-
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/i_video_render.h b/chromium/third_party/webrtc/modules/video_render/i_video_render.h
deleted file mode 100644
index e6ec7a4680b..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/i_video_render.h
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
-
-#include "webrtc/modules/video_render/video_render.h"
-
-namespace webrtc {
-
-// Class definitions
-class IVideoRender
-{
-public:
- /*
- * Constructor/destructor
- */
-
- virtual ~IVideoRender() {}
-
- virtual int32_t Init() = 0;
-
- virtual int32_t ChangeWindow(void* window) = 0;
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- virtual VideoRenderCallback
- * AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- virtual int32_t
- DeleteIncomingRenderStream(const uint32_t streamId) = 0;
-
- virtual int32_t
- GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const = 0;
- // Implemented in common code?
- //virtual uint32_t GetNumIncomingRenderStreams() const = 0;
- //virtual bool HasIncomingRenderStream(const uint16_t stramId) const = 0;
-
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender() = 0;
-
- virtual int32_t StopRender() = 0;
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
- virtual VideoRenderType RenderType() = 0;
-
- virtual RawVideoType PerferedVideoType() = 0;
-
- virtual bool FullScreen() = 0;
-
- // TODO: This should be treated in platform specific code only
- virtual int32_t
- GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const = 0;
-
- virtual int32_t
- GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const = 0;
-
- virtual uint32_t RenderFrameRate(const uint32_t streamId) = 0;
-
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- virtual int32_t SetTransparentBackground(const bool enable) = 0;
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float rigth,
- const float bottom) = 0;
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
-};
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.h b/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.h
deleted file mode 100644
index 880ddb5231f..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
-
-#include <OpenGLES/ES2/glext.h>
-
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-/*
- * This OpenGles20 is the class of renderer for VideoFrame into a GLES 2.0
- * windows used in the VideoRenderIosView class.
- */
-namespace webrtc {
-class OpenGles20 {
- public:
- OpenGles20();
- ~OpenGles20();
-
- bool Setup(int32_t width, int32_t height);
- bool Render(const VideoFrame& frame);
-
- // SetCoordinates
- // Sets the coordinates where the stream shall be rendered.
- // Values must be between 0 and 1.
- bool SetCoordinates(const float z_order,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- private:
- // Compile and load the vertex and fragment shaders defined at the top of
- // open_gles20.mm
- GLuint LoadShader(GLenum shader_type, const char* shader_source);
-
- GLuint CreateProgram(const char* vertex_source, const char* fragment_source);
-
- // Initialize the textures by the frame width and height
- void SetupTextures(const VideoFrame& frame);
-
- // Update the textures by the YUV data from the frame
- void UpdateTextures(const VideoFrame& frame);
-
- GLuint texture_ids_[3]; // Texture id of Y,U and V texture.
- GLuint program_;
- GLsizei texture_width_;
- GLsizei texture_height_;
-
- GLfloat vertices_[20];
- static const char indices_[];
- static const char vertext_shader_[];
- static const char fragment_shader_[];
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm b/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm
deleted file mode 100644
index d1735280f26..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm
+++ /dev/null
@@ -1,330 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#if !defined(__has_feature) || !__has_feature(objc_arc)
-#error "This file requires ARC support."
-#endif
-
-// This files is mostly copied from
-// webrtc/modules/video_render/android/video_render_opengles20.h
-
-// TODO(sjlee): unify this copy with the android one.
-#include "webrtc/modules/video_render/ios/open_gles20.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-using namespace webrtc;
-
-const char OpenGles20::indices_[] = {0, 3, 2, 0, 2, 1};
-
-const char OpenGles20::vertext_shader_[] = {
- "attribute vec4 aPosition;\n"
- "attribute vec2 aTextureCoord;\n"
- "varying vec2 vTextureCoord;\n"
- "void main() {\n"
- " gl_Position = aPosition;\n"
- " vTextureCoord = aTextureCoord;\n"
- "}\n"};
-
-// The fragment shader.
-// Do YUV to RGB565 conversion.
-const char OpenGles20::fragment_shader_[] = {
- "precision mediump float;\n"
- "uniform sampler2D Ytex;\n"
- "uniform sampler2D Utex,Vtex;\n"
- "varying vec2 vTextureCoord;\n"
- "void main(void) {\n"
- " float nx,ny,r,g,b,y,u,v;\n"
- " mediump vec4 txl,ux,vx;"
- " nx=vTextureCoord[0];\n"
- " ny=vTextureCoord[1];\n"
- " y=texture2D(Ytex,vec2(nx,ny)).r;\n"
- " u=texture2D(Utex,vec2(nx,ny)).r;\n"
- " v=texture2D(Vtex,vec2(nx,ny)).r;\n"
- " y=1.1643*(y-0.0625);\n"
- " u=u-0.5;\n"
- " v=v-0.5;\n"
- " r=y+1.5958*v;\n"
- " g=y-0.39173*u-0.81290*v;\n"
- " b=y+2.017*u;\n"
- " gl_FragColor=vec4(r,g,b,1.0);\n"
- "}\n"};
-
-OpenGles20::OpenGles20() : texture_width_(-1), texture_height_(-1) {
- texture_ids_[0] = 0;
- texture_ids_[1] = 0;
- texture_ids_[2] = 0;
-
- program_ = 0;
-
- const GLfloat vertices[20] = {
- // X, Y, Z, U, V
- -1, -1, 0, 0, 1, // Bottom Left
- 1, -1, 0, 1, 1, // Bottom Right
- 1, 1, 0, 1, 0, // Top Right
- -1, 1, 0, 0, 0}; // Top Left
-
- memcpy(vertices_, vertices, sizeof(vertices_));
-}
-
-OpenGles20::~OpenGles20() {
- if (program_) {
- glDeleteTextures(3, texture_ids_);
- glDeleteProgram(program_);
- }
-}
-
-bool OpenGles20::Setup(int32_t width, int32_t height) {
- program_ = CreateProgram(vertext_shader_, fragment_shader_);
- if (!program_) {
- return false;
- }
-
- int position_handle = glGetAttribLocation(program_, "aPosition");
- int texture_handle = glGetAttribLocation(program_, "aTextureCoord");
-
- // set the vertices array in the shader
- // vertices_ contains 4 vertices with 5 coordinates.
- // 3 for (xyz) for the vertices and 2 for the texture
- glVertexAttribPointer(
- position_handle, 3, GL_FLOAT, false, 5 * sizeof(GLfloat), vertices_);
-
- glEnableVertexAttribArray(position_handle);
-
- // set the texture coordinate array in the shader
- // vertices_ contains 4 vertices with 5 coordinates.
- // 3 for (xyz) for the vertices and 2 for the texture
- glVertexAttribPointer(
- texture_handle, 2, GL_FLOAT, false, 5 * sizeof(GLfloat), &vertices_[3]);
- glEnableVertexAttribArray(texture_handle);
-
- glUseProgram(program_);
- int i = glGetUniformLocation(program_, "Ytex");
- glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
-
- i = glGetUniformLocation(program_, "Utex");
- glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
-
- i = glGetUniformLocation(program_, "Vtex");
- glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
-
- glViewport(0, 0, width, height);
- return true;
-}
-
-bool OpenGles20::SetCoordinates(const float z_order,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- if (top > 1 || top < 0 || right > 1 || right < 0 || bottom > 1 ||
- bottom < 0 || left > 1 || left < 0) {
- return false;
- }
-
- // Bottom Left
- vertices_[0] = (left * 2) - 1;
- vertices_[1] = -1 * (2 * bottom) + 1;
- vertices_[2] = z_order;
-
- // Bottom Right
- vertices_[5] = (right * 2) - 1;
- vertices_[6] = -1 * (2 * bottom) + 1;
- vertices_[7] = z_order;
-
- // Top Right
- vertices_[10] = (right * 2) - 1;
- vertices_[11] = -1 * (2 * top) + 1;
- vertices_[12] = z_order;
-
- // Top Left
- vertices_[15] = (left * 2) - 1;
- vertices_[16] = -1 * (2 * top) + 1;
- vertices_[17] = z_order;
-
- return true;
-}
-
-bool OpenGles20::Render(const VideoFrame& frame) {
- if (texture_width_ != (GLsizei)frame.width() ||
- texture_height_ != (GLsizei)frame.height()) {
- SetupTextures(frame);
- }
- UpdateTextures(frame);
-
- glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, indices_);
-
- return true;
-}
-
-GLuint OpenGles20::LoadShader(GLenum shader_type, const char* shader_source) {
- GLuint shader = glCreateShader(shader_type);
- if (shader) {
- glShaderSource(shader, 1, &shader_source, NULL);
- glCompileShader(shader);
-
- GLint compiled = 0;
- glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
- if (!compiled) {
- GLint info_len = 0;
- glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &info_len);
- if (info_len) {
- char* buf = (char*)malloc(info_len);
- glGetShaderInfoLog(shader, info_len, NULL, buf);
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- 0,
- "%s: Could not compile shader %d: %s",
- __FUNCTION__,
- shader_type,
- buf);
- free(buf);
- }
- glDeleteShader(shader);
- shader = 0;
- }
- }
- return shader;
-}
-
-GLuint OpenGles20::CreateProgram(const char* vertex_source,
- const char* fragment_source) {
- GLuint vertex_shader = LoadShader(GL_VERTEX_SHADER, vertex_source);
- if (!vertex_shader) {
- return -1;
- }
-
- GLuint fragment_shader = LoadShader(GL_FRAGMENT_SHADER, fragment_source);
- if (!fragment_shader) {
- return -1;
- }
-
- GLuint program = glCreateProgram();
- if (program) {
- glAttachShader(program, vertex_shader);
- glAttachShader(program, fragment_shader);
- glLinkProgram(program);
- GLint link_status = GL_FALSE;
- glGetProgramiv(program, GL_LINK_STATUS, &link_status);
- if (link_status != GL_TRUE) {
- GLint info_len = 0;
- glGetProgramiv(program, GL_INFO_LOG_LENGTH, &info_len);
- if (info_len) {
- char* buf = (char*)malloc(info_len);
- glGetProgramInfoLog(program, info_len, NULL, buf);
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- 0,
- "%s: Could not link program: %s",
- __FUNCTION__,
- buf);
- free(buf);
- }
- glDeleteProgram(program);
- program = 0;
- }
- }
-
- if (vertex_shader) {
- glDeleteShader(vertex_shader);
- }
-
- if (fragment_shader) {
- glDeleteShader(fragment_shader);
- }
-
- return program;
-}
-
-static void InitializeTexture(int name, int id, int width, int height) {
- glActiveTexture(name);
- glBindTexture(GL_TEXTURE_2D, id);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
- glTexImage2D(GL_TEXTURE_2D,
- 0,
- GL_LUMINANCE,
- width,
- height,
- 0,
- GL_LUMINANCE,
- GL_UNSIGNED_BYTE,
- NULL);
-}
-
-void OpenGles20::SetupTextures(const VideoFrame& frame) {
- const GLsizei width = frame.width();
- const GLsizei height = frame.height();
-
- if (!texture_ids_[0]) {
- glGenTextures(3, texture_ids_); // Generate the Y, U and V texture
- }
-
- InitializeTexture(GL_TEXTURE0, texture_ids_[0], width, height);
- InitializeTexture(GL_TEXTURE1, texture_ids_[1], width / 2, height / 2);
- InitializeTexture(GL_TEXTURE2, texture_ids_[2], width / 2, height / 2);
-
- texture_width_ = width;
- texture_height_ = height;
-}
-
-// Uploads a plane of pixel data, accounting for stride != width*bpp.
-static void GlTexSubImage2D(GLsizei width,
- GLsizei height,
- int stride,
- const uint8_t* plane) {
- if (stride == width) {
- // Yay! We can upload the entire plane in a single GL call.
- glTexSubImage2D(GL_TEXTURE_2D,
- 0,
- 0,
- 0,
- width,
- height,
- GL_LUMINANCE,
- GL_UNSIGNED_BYTE,
- static_cast<const GLvoid*>(plane));
- } else {
- // Boo! Since GLES2 doesn't have GL_UNPACK_ROW_LENGTH and iOS doesn't
- // have GL_EXT_unpack_subimage we have to upload a row at a time. Ick.
- for (int row = 0; row < height; ++row) {
- glTexSubImage2D(GL_TEXTURE_2D,
- 0,
- 0,
- row,
- width,
- 1,
- GL_LUMINANCE,
- GL_UNSIGNED_BYTE,
- static_cast<const GLvoid*>(plane + (row * stride)));
- }
- }
-}
-
-void OpenGles20::UpdateTextures(const VideoFrame& frame) {
- const GLsizei width = frame.width();
- const GLsizei height = frame.height();
-
- glActiveTexture(GL_TEXTURE0);
- glBindTexture(GL_TEXTURE_2D, texture_ids_[0]);
- GlTexSubImage2D(width, height, frame.stride(kYPlane), frame.buffer(kYPlane));
-
- glActiveTexture(GL_TEXTURE1);
- glBindTexture(GL_TEXTURE_2D, texture_ids_[1]);
- GlTexSubImage2D(
- width / 2, height / 2, frame.stride(kUPlane), frame.buffer(kUPlane));
-
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, texture_ids_[2]);
- GlTexSubImage2D(
- width / 2, height / 2, frame.stride(kVPlane), frame.buffer(kVPlane));
-}
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.h b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.h
deleted file mode 100644
index a15ba393dc0..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.h
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
-
-#include "webrtc/modules/video_render/video_render_defines.h"
-#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
-
-namespace webrtc {
-
-class VideoRenderIosGles20;
-
-class VideoRenderIosChannel : public VideoRenderCallback {
- public:
- explicit VideoRenderIosChannel(VideoRenderIosView* view);
- virtual ~VideoRenderIosChannel();
-
- // Implementation of VideoRenderCallback.
- int32_t RenderFrame(const uint32_t stream_id,
- const VideoFrame& video_frame) override;
-
- int SetStreamSettings(const float z_order,
- const float left,
- const float top,
- const float right,
- const float bottom);
- bool IsUpdated();
- bool RenderOffScreenBuffer();
-
- private:
- VideoRenderIosView* view_;
- VideoFrame* current_frame_;
- bool buffer_is_updated_;
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm
deleted file mode 100644
index b2b15857f93..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#if !defined(__has_feature) || !__has_feature(objc_arc)
-#error "This file requires ARC support."
-#endif
-
-#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
-
-using namespace webrtc;
-
-VideoRenderIosChannel::VideoRenderIosChannel(VideoRenderIosView* view)
- : view_(view), current_frame_(new VideoFrame()), buffer_is_updated_(false) {
-}
-
-VideoRenderIosChannel::~VideoRenderIosChannel() { delete current_frame_; }
-
-int32_t VideoRenderIosChannel::RenderFrame(const uint32_t stream_id,
- const VideoFrame& video_frame) {
- current_frame_->CopyFrame(video_frame);
- current_frame_->set_render_time_ms(0);
- buffer_is_updated_ = true;
-
- return 0;
-}
-
-bool VideoRenderIosChannel::RenderOffScreenBuffer() {
- if (![view_ renderFrame:current_frame_]) {
- return false;
- }
-
- buffer_is_updated_ = false;
-
- return true;
-}
-
-bool VideoRenderIosChannel::IsUpdated() { return buffer_is_updated_; }
-
-int VideoRenderIosChannel::SetStreamSettings(const float z_order,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- if (![view_ setCoordinatesForZOrder:z_order
- Left:left
- Top:bottom
- Right:right
- Bottom:top]) {
-
- return -1;
- }
-
- return 0;
-}
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.h b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.h
deleted file mode 100644
index d4e04e79d73..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.h
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
-
-#include <list>
-#include <map>
-#include <memory>
-
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
-#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-class EventTimerWrapper;
-
-class VideoRenderIosGles20 {
- public:
- VideoRenderIosGles20(VideoRenderIosView* view,
- bool full_screen,
- int render_id);
- virtual ~VideoRenderIosGles20();
-
- int Init();
- VideoRenderIosChannel* CreateEaglChannel(int channel,
- int z_order,
- float left,
- float top,
- float right,
- float bottom);
- int DeleteEaglChannel(int channel);
- bool HasChannel(int channel);
- bool ScreenUpdateProcess();
- int GetWindowRect(Rect& rect); // NOLINT
-
- int GetScreenResolution(uint& screen_width, uint& screen_height); // NOLINT
- int SetStreamCropping(const uint stream_id,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- int ChangeWindow(void* new_window);
- int StartRender();
- int StopRender();
-
- protected:
- static bool ScreenUpdateThreadProc(void* obj);
-
- private:
- bool RenderOffScreenBuffers();
- int SwapAndDisplayBuffers();
-
- private:
- std::unique_ptr<CriticalSectionWrapper> gles_crit_sec_;
- EventTimerWrapper* screen_update_event_;
- // TODO(pbos): Remove unique_ptr and use member directly.
- std::unique_ptr<rtc::PlatformThread> screen_update_thread_;
-
- VideoRenderIosView* view_;
- Rect window_rect_;
- int window_width_;
- int window_height_;
- bool is_full_screen_;
- GLint backing_width_;
- GLint backing_height_;
- GLuint view_renderbuffer_;
- GLuint view_framebuffer_;
- GLuint depth_renderbuffer_;
- std::map<int, VideoRenderIosChannel*> agl_channels_;
- std::multimap<int, int> z_order_to_channel_;
- EAGLContext* gles_context_;
- bool is_rendering_;
-};
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
deleted file mode 100644
index 6ad5db8b8cb..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
+++ /dev/null
@@ -1,285 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#if !defined(__has_feature) || !__has_feature(objc_arc)
-#error "This file requires ARC support."
-#endif
-
-#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-
-using namespace webrtc;
-
-VideoRenderIosGles20::VideoRenderIosGles20(VideoRenderIosView* view,
- bool full_screen,
- int render_id)
- : gles_crit_sec_(CriticalSectionWrapper::CreateCriticalSection()),
- screen_update_event_(0),
- view_(view),
- window_rect_(),
- window_width_(0),
- window_height_(0),
- is_full_screen_(full_screen),
- agl_channels_(),
- z_order_to_channel_(),
- gles_context_([view context]),
- is_rendering_(true) {
- screen_update_thread_.reset(new rtc::PlatformThread(
- ScreenUpdateThreadProc, this, "ScreenUpdateGles20"));
- screen_update_event_ = EventTimerWrapper::Create();
- GetWindowRect(window_rect_);
-}
-
-VideoRenderIosGles20::~VideoRenderIosGles20() {
- // Signal event to exit thread, then delete it
- rtc::PlatformThread* thread_wrapper = screen_update_thread_.release();
-
- if (thread_wrapper) {
- screen_update_event_->Set();
- screen_update_event_->StopTimer();
-
- thread_wrapper->Stop();
- delete thread_wrapper;
- delete screen_update_event_;
- screen_update_event_ = NULL;
- is_rendering_ = FALSE;
- }
-
- // Delete all channels
- std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
- while (it != agl_channels_.end()) {
- delete it->second;
- agl_channels_.erase(it);
- it = agl_channels_.begin();
- }
- agl_channels_.clear();
-
- // Clean the zOrder map
- std::multimap<int, int>::iterator z_it = z_order_to_channel_.begin();
- while (z_it != z_order_to_channel_.end()) {
- z_order_to_channel_.erase(z_it);
- z_it = z_order_to_channel_.begin();
- }
- z_order_to_channel_.clear();
-}
-
-int VideoRenderIosGles20::Init() {
- CriticalSectionScoped cs(gles_crit_sec_.get());
-
- if (!view_) {
- view_ = [[VideoRenderIosView alloc] init];
- }
-
- if (![view_ createContext]) {
- return -1;
- }
-
- screen_update_thread_->Start();
- screen_update_thread_->SetPriority(rtc::kRealtimePriority);
-
- // Start the event triggering the render process
- unsigned int monitor_freq = 60;
- screen_update_event_->StartTimer(true, 1000 / monitor_freq);
-
- window_width_ = window_rect_.right - window_rect_.left;
- window_height_ = window_rect_.bottom - window_rect_.top;
-
- return 0;
-}
-
-VideoRenderIosChannel* VideoRenderIosGles20::CreateEaglChannel(int channel,
- int z_order,
- float left,
- float top,
- float right,
- float bottom) {
- CriticalSectionScoped cs(gles_crit_sec_.get());
-
- if (HasChannel(channel)) {
- return NULL;
- }
-
- VideoRenderIosChannel* new_eagl_channel = new VideoRenderIosChannel(view_);
-
- if (new_eagl_channel->SetStreamSettings(z_order, left, top, right, bottom) ==
- -1) {
- return NULL;
- }
-
- agl_channels_[channel] = new_eagl_channel;
- z_order_to_channel_.insert(std::pair<int, int>(z_order, channel));
-
- return new_eagl_channel;
-}
-
-int VideoRenderIosGles20::DeleteEaglChannel(int channel) {
- CriticalSectionScoped cs(gles_crit_sec_.get());
-
- std::map<int, VideoRenderIosChannel*>::iterator it;
- it = agl_channels_.find(channel);
- if (it != agl_channels_.end()) {
- delete it->second;
- agl_channels_.erase(it);
- } else {
- return -1;
- }
-
- std::multimap<int, int>::iterator z_it = z_order_to_channel_.begin();
- while (z_it != z_order_to_channel_.end()) {
- if (z_it->second == channel) {
- z_order_to_channel_.erase(z_it);
- break;
- }
- z_it++;
- }
-
- return 0;
-}
-
-bool VideoRenderIosGles20::HasChannel(int channel) {
- CriticalSectionScoped cs(gles_crit_sec_.get());
-
- std::map<int, VideoRenderIosChannel*>::iterator it =
- agl_channels_.find(channel);
-
- if (it != agl_channels_.end()) {
- return true;
- }
-
- return false;
-}
-
-// Rendering process
-bool VideoRenderIosGles20::ScreenUpdateThreadProc(void* obj) {
- return static_cast<VideoRenderIosGles20*>(obj)->ScreenUpdateProcess();
-}
-
-bool VideoRenderIosGles20::ScreenUpdateProcess() {
- screen_update_event_->Wait(100);
-
- CriticalSectionScoped cs(gles_crit_sec_.get());
-
- if (!is_rendering_) {
- return false;
- }
-
- if (!screen_update_thread_) {
- return false;
- }
-
- if (GetWindowRect(window_rect_) == -1) {
- return true;
- }
-
- if (window_width_ != (window_rect_.right - window_rect_.left) ||
- window_height_ != (window_rect_.bottom - window_rect_.top)) {
- window_width_ = window_rect_.right - window_rect_.left;
- window_height_ = window_rect_.bottom - window_rect_.top;
- }
-
- // Check if there are any updated buffers
- bool updated = false;
-
- std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
- while (it != agl_channels_.end()) {
- VideoRenderIosChannel* agl_channel = it->second;
-
- updated = agl_channel->IsUpdated();
- if (updated) {
- break;
- }
- it++;
- }
-
- if (updated) {
- // At least one buffer has been updated, we need to repaint the texture
- // Loop through all channels starting highest zOrder ending with lowest.
- for (std::multimap<int, int>::reverse_iterator r_it =
- z_order_to_channel_.rbegin();
- r_it != z_order_to_channel_.rend();
- r_it++) {
- int channel_id = r_it->second;
- std::map<int, VideoRenderIosChannel*>::iterator it =
- agl_channels_.find(channel_id);
-
- VideoRenderIosChannel* agl_channel = it->second;
-
- agl_channel->RenderOffScreenBuffer();
- }
-
- [view_ presentFramebuffer];
- }
-
- return true;
-}
-
-int VideoRenderIosGles20::GetWindowRect(Rect& rect) {
- CriticalSectionScoped cs(gles_crit_sec_.get());
-
- if (!view_) {
- return -1;
- }
-
- CGRect bounds = [view_ bounds];
- rect.top = bounds.origin.y;
- rect.left = bounds.origin.x;
- rect.bottom = bounds.size.height + bounds.origin.y;
- rect.right = bounds.size.width + bounds.origin.x;
-
- return 0;
-}
-
-int VideoRenderIosGles20::ChangeWindow(void* new_window) {
- CriticalSectionScoped cs(gles_crit_sec_.get());
-
- view_ = (__bridge VideoRenderIosView*)new_window;
-
- return 0;
-}
-
-int VideoRenderIosGles20::StartRender() {
- is_rendering_ = true;
- return 0;
-}
-
-int VideoRenderIosGles20::StopRender() {
- is_rendering_ = false;
- return 0;
-}
-
-int VideoRenderIosGles20::GetScreenResolution(uint& screen_width,
- uint& screen_height) {
- screen_width = [view_ bounds].size.width;
- screen_height = [view_ bounds].size.height;
- return 0;
-}
-
-int VideoRenderIosGles20::SetStreamCropping(const uint stream_id,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- // Check if there are any updated buffers
- // bool updated = false;
- uint counter = 0;
-
- std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
- while (it != agl_channels_.end()) {
- if (counter == stream_id) {
- VideoRenderIosChannel* agl_channel = it->second;
- agl_channel->SetStreamSettings(0, left, top, right, bottom);
- }
- counter++;
- it++;
- }
-
- return 0;
-}
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h
deleted file mode 100644
index 04a74933008..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
-
-#include <list>
-#include <map>
-#include <memory>
-
-#include "webrtc/modules/video_render/i_video_render.h"
-
-namespace webrtc {
-
-class VideoRenderIosGles20;
-class CriticalSectionWrapper;
-
-class VideoRenderIosImpl : IVideoRender {
- public:
- explicit VideoRenderIosImpl(const int32_t id,
- void* window,
- const bool full_screen);
-
- ~VideoRenderIosImpl();
-
- // Implementation of IVideoRender.
- int32_t Init() override;
- int32_t ChangeWindow(void* window) override;
-
- VideoRenderCallback* AddIncomingRenderStream(const uint32_t stream_id,
- const uint32_t z_order,
- const float left,
- const float top,
- const float right,
- const float bottom) override;
-
- int32_t DeleteIncomingRenderStream(const uint32_t stream_id) override;
-
- int32_t GetIncomingRenderStreamProperties(const uint32_t stream_id,
- uint32_t& z_order,
- float& left,
- float& top,
- float& right,
- float& bottom) const override;
-
- int32_t StartRender() override;
- int32_t StopRender() override;
-
- VideoRenderType RenderType() override;
- RawVideoType PerferedVideoType() override;
- bool FullScreen() override;
- int32_t GetGraphicsMemory(
- uint64_t& total_graphics_memory,
- uint64_t& available_graphics_memory) const override; // NOLINT
- int32_t GetScreenResolution(
- uint32_t& screen_width,
- uint32_t& screen_height) const override; // NOLINT
- uint32_t RenderFrameRate(const uint32_t stream_id);
- int32_t SetStreamCropping(const uint32_t stream_id,
- const float left,
- const float top,
- const float right,
- const float bottom) override;
- int32_t ConfigureRenderer(const uint32_t stream_id,
- const unsigned int z_order,
- const float left,
- const float top,
- const float right,
- const float bottom) override;
- int32_t SetTransparentBackground(const bool enable) override;
- int32_t SetText(const uint8_t text_id,
- const uint8_t* text,
- const int32_t text_length,
- const uint32_t text_color_ref,
- const uint32_t background_color_ref,
- const float left,
- const float top,
- const float right,
- const float bottom) override;
- int32_t SetBitmap(const void* bit_map,
- const uint8_t picture_id,
- const void* color_key,
- const float left,
- const float top,
- const float right,
- const float bottom);
- int32_t FullScreenRender(void* window, const bool enable);
-
- private:
- int32_t id_;
- void* ptr_window_;
- bool full_screen_;
-
- CriticalSectionWrapper* crit_sec_;
- std::unique_ptr<VideoRenderIosGles20> ptr_ios_render_;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm
deleted file mode 100644
index 0ef411d56f8..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#if !defined(__has_feature) || !__has_feature(objc_arc)
-#error "This file requires ARC support."
-#endif
-
-#include "webrtc/modules/video_render/ios/video_render_ios_impl.h"
-#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-using namespace webrtc;
-
-#define IOS_UNSUPPORTED() \
- WEBRTC_TRACE(kTraceError, \
- kTraceVideoRenderer, \
- id_, \
- "%s is not supported on the iOS platform.", \
- __FUNCTION__); \
- return -1;
-
-VideoRenderIosImpl::VideoRenderIosImpl(const int32_t id,
- void* window,
- const bool full_screen)
- : id_(id),
- ptr_window_(window),
- full_screen_(full_screen),
- crit_sec_(CriticalSectionWrapper::CreateCriticalSection()) {}
-
-VideoRenderIosImpl::~VideoRenderIosImpl() {
- delete crit_sec_;
-}
-
-int32_t VideoRenderIosImpl::Init() {
- CriticalSectionScoped cs(crit_sec_);
-
- ptr_ios_render_.reset(new VideoRenderIosGles20(
- (__bridge VideoRenderIosView*)ptr_window_, full_screen_, id_));
-
- return ptr_ios_render_->Init();
- ;
-}
-
-int32_t VideoRenderIosImpl::ChangeWindow(void* window) {
- CriticalSectionScoped cs(crit_sec_);
- if (window == NULL) {
- return -1;
- }
-
- ptr_window_ = window;
-
- return ptr_ios_render_->ChangeWindow(ptr_window_);
-}
-
-VideoRenderCallback* VideoRenderIosImpl::AddIncomingRenderStream(
- const uint32_t stream_id,
- const uint32_t z_order,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- CriticalSectionScoped cs(crit_sec_);
- if (!ptr_window_) {
- return NULL;
- }
-
- return ptr_ios_render_->CreateEaglChannel(
- stream_id, z_order, left, top, right, bottom);
-}
-
-int32_t VideoRenderIosImpl::DeleteIncomingRenderStream(
- const uint32_t stream_id) {
- CriticalSectionScoped cs(crit_sec_);
-
- return ptr_ios_render_->DeleteEaglChannel(stream_id);
-}
-
-int32_t VideoRenderIosImpl::GetIncomingRenderStreamProperties(
- const uint32_t stream_id,
- uint32_t& z_order,
- float& left,
- float& top,
- float& right,
- float& bottom) const {
- IOS_UNSUPPORTED();
-}
-
-int32_t VideoRenderIosImpl::StartRender() {
- return ptr_ios_render_->StartRender();
-}
-
-int32_t VideoRenderIosImpl::StopRender() {
- return ptr_ios_render_->StopRender();
-}
-
-VideoRenderType VideoRenderIosImpl::RenderType() { return kRenderiOS; }
-
-RawVideoType VideoRenderIosImpl::PerferedVideoType() { return kVideoI420; }
-
-bool VideoRenderIosImpl::FullScreen() { IOS_UNSUPPORTED(); }
-
-int32_t VideoRenderIosImpl::GetGraphicsMemory(
- uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const {
- IOS_UNSUPPORTED();
-}
-
-int32_t VideoRenderIosImpl::GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const {
- return ptr_ios_render_->GetScreenResolution(screenWidth, screenHeight);
-}
-
-uint32_t VideoRenderIosImpl::RenderFrameRate(const uint32_t streamId) {
- IOS_UNSUPPORTED();
-}
-
-int32_t VideoRenderIosImpl::SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- return ptr_ios_render_->SetStreamCropping(streamId, left, top, right, bottom);
-}
-
-int32_t VideoRenderIosImpl::ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- IOS_UNSUPPORTED();
-}
-
-int32_t VideoRenderIosImpl::SetTransparentBackground(const bool enable) {
- IOS_UNSUPPORTED();
-}
-
-int32_t VideoRenderIosImpl::SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- IOS_UNSUPPORTED();
-}
-
-int32_t VideoRenderIosImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom) {
- IOS_UNSUPPORTED();
-}
-
-int32_t VideoRenderIosImpl::FullScreenRender(void* window, const bool enable) {
- IOS_UNSUPPORTED();
-}
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h
deleted file mode 100644
index d110bc78bd2..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
-
-#import <UIKit/UIKit.h>
-#import <QuartzCore/QuartzCore.h>
-
-#include "webrtc/modules/video_render/ios/open_gles20.h"
-
-@interface VideoRenderIosView : UIView
-
-- (BOOL)createContext;
-- (BOOL)presentFramebuffer;
-- (BOOL)renderFrame:(webrtc::VideoFrame*)frameToRender;
-- (BOOL)setCoordinatesForZOrder:(const float)zOrder
- Left:(const float)left
- Top:(const float)top
- Right:(const float)right
- Bottom:(const float)bottom;
-
-@property(nonatomic, retain) EAGLContext* context;
-
-@end
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm
deleted file mode 100644
index b106ffa5c4f..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#if !defined(__has_feature) || !__has_feature(objc_arc)
-#error "This file requires ARC support."
-#endif
-
-#include <memory>
-
-#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-using namespace webrtc;
-
-@implementation VideoRenderIosView {
- EAGLContext* _context;
- std::unique_ptr<webrtc::OpenGles20> _gles_renderer20;
- int _frameBufferWidth;
- int _frameBufferHeight;
- unsigned int _defaultFrameBuffer;
- unsigned int _colorRenderBuffer;
-}
-
-@synthesize context = context_;
-
-+ (Class)layerClass {
- return [CAEAGLLayer class];
-}
-
-- (id)initWithCoder:(NSCoder*)coder {
- // init super class
- self = [super initWithCoder:coder];
- if (self) {
- _gles_renderer20.reset(new OpenGles20());
- }
- return self;
-}
-
-- (id)init {
- // init super class
- self = [super init];
- if (self) {
- _gles_renderer20.reset(new OpenGles20());
- }
- return self;
-}
-
-- (id)initWithFrame:(CGRect)frame {
- // init super class
- self = [super initWithFrame:frame];
- if (self) {
- _gles_renderer20.reset(new OpenGles20());
- }
- return self;
-}
-
-- (void)dealloc {
- if (_defaultFrameBuffer) {
- glDeleteFramebuffers(1, &_defaultFrameBuffer);
- _defaultFrameBuffer = 0;
- }
-
- if (_colorRenderBuffer) {
- glDeleteRenderbuffers(1, &_colorRenderBuffer);
- _colorRenderBuffer = 0;
- }
-
- [EAGLContext setCurrentContext:nil];
-}
-
-- (NSString*)description {
- return [NSString stringWithFormat:
- @"A WebRTC implemented subclass of UIView."
- "+Class method is overwritten, along with custom methods"];
-}
-
-- (BOOL)createContext {
- // create OpenGLES context from self layer class
- CAEAGLLayer* eagl_layer = (CAEAGLLayer*)self.layer;
- eagl_layer.opaque = YES;
- eagl_layer.drawableProperties =
- [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:NO],
- kEAGLDrawablePropertyRetainedBacking,
- kEAGLColorFormatRGBA8,
- kEAGLDrawablePropertyColorFormat,
- nil];
- _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
-
- if (!_context) {
- return NO;
- }
-
- if (![EAGLContext setCurrentContext:_context]) {
- return NO;
- }
-
- // generates and binds the OpenGLES buffers
- glGenFramebuffers(1, &_defaultFrameBuffer);
- glBindFramebuffer(GL_FRAMEBUFFER, _defaultFrameBuffer);
-
- // Create color render buffer and allocate backing store.
- glGenRenderbuffers(1, &_colorRenderBuffer);
- glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderBuffer);
- [_context renderbufferStorage:GL_RENDERBUFFER
- fromDrawable:(CAEAGLLayer*)self.layer];
- glGetRenderbufferParameteriv(
- GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_frameBufferWidth);
- glGetRenderbufferParameteriv(
- GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_frameBufferHeight);
- glFramebufferRenderbuffer(GL_FRAMEBUFFER,
- GL_COLOR_ATTACHMENT0,
- GL_RENDERBUFFER,
- _colorRenderBuffer);
-
- if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
- return NO;
- }
-
- // set the frame buffer
- glBindFramebuffer(GL_FRAMEBUFFER, _defaultFrameBuffer);
- glViewport(0, 0, self.frame.size.width, self.frame.size.height);
-
- return _gles_renderer20->Setup([self bounds].size.width,
- [self bounds].size.height);
-}
-
-- (BOOL)presentFramebuffer {
- if (![_context presentRenderbuffer:GL_RENDERBUFFER]) {
- WEBRTC_TRACE(kTraceWarning,
- kTraceVideoRenderer,
- 0,
- "%s:%d [context present_renderbuffer] "
- "returned false",
- __FUNCTION__,
- __LINE__);
- }
- return YES;
-}
-
-- (BOOL)renderFrame:(VideoFrame*)frameToRender {
- if (![EAGLContext setCurrentContext:_context]) {
- return NO;
- }
-
- return _gles_renderer20->Render(*frameToRender);
-}
-
-- (BOOL)setCoordinatesForZOrder:(const float)zOrder
- Left:(const float)left
- Top:(const float)top
- Right:(const float)right
- Bottom:(const float)bottom {
- return _gles_renderer20->SetCoordinates(zOrder, left, top, right, bottom);
-}
-
-@end
diff --git a/chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.cc b/chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.cc
deleted file mode 100644
index 7e53dfdf809..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.cc
+++ /dev/null
@@ -1,261 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/linux/video_render_linux_impl.h"
-
-#include "webrtc/modules/video_render/linux/video_x11_render.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-#include <X11/Xlib.h>
-
-namespace webrtc {
-
-VideoRenderLinuxImpl::VideoRenderLinuxImpl(
- const int32_t id,
- const VideoRenderType videoRenderType,
- void* window, const bool fullscreen) :
- _id(id),
- _renderLinuxCritsect(
- *CriticalSectionWrapper::CreateCriticalSection()),
- _ptrWindow(window), _ptrX11Render(NULL)
-{
-}
-
-VideoRenderLinuxImpl::~VideoRenderLinuxImpl()
-{
- if (_ptrX11Render)
- delete _ptrX11Render;
-
- delete &_renderLinuxCritsect;
-}
-
-int32_t VideoRenderLinuxImpl::Init()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
- __FUNCTION__);
-
- CriticalSectionScoped cs(&_renderLinuxCritsect);
- _ptrX11Render = new VideoX11Render((Window) _ptrWindow);
- if (!_ptrX11Render)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s",
- "Failed to create instance of VideoX11Render object");
- return -1;
- }
- int retVal = _ptrX11Render->Init();
- if (retVal == -1)
- {
- return -1;
- }
-
- return 0;
-
-}
-
-int32_t VideoRenderLinuxImpl::ChangeWindow(void* window)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
- __FUNCTION__);
-
- CriticalSectionScoped cs(&_renderLinuxCritsect);
- _ptrWindow = window;
-
- if (_ptrX11Render)
- {
- return _ptrX11Render->ChangeWindow((Window) window);
- }
-
- return -1;
-}
-
-VideoRenderCallback* VideoRenderLinuxImpl::AddIncomingRenderStream(
- const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
- __FUNCTION__);
- CriticalSectionScoped cs(&_renderLinuxCritsect);
-
- VideoRenderCallback* renderCallback = NULL;
- if (_ptrX11Render)
- {
- VideoX11Channel* renderChannel =
- _ptrX11Render->CreateX11RenderChannel(streamId, zOrder, left,
- top, right, bottom);
- if (!renderChannel)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "Render channel creation failed for stream id: %d",
- streamId);
- return NULL;
- }
- renderCallback = (VideoRenderCallback *) renderChannel;
- }
- else
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "_ptrX11Render is NULL");
- return NULL;
- }
- return renderCallback;
-}
-
-int32_t VideoRenderLinuxImpl::DeleteIncomingRenderStream(
- const uint32_t streamId)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
- __FUNCTION__);
- CriticalSectionScoped cs(&_renderLinuxCritsect);
-
- if (_ptrX11Render)
- {
- return _ptrX11Render->DeleteX11RenderChannel(streamId);
- }
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
- __FUNCTION__);
- CriticalSectionScoped cs(&_renderLinuxCritsect);
-
- if (_ptrX11Render)
- {
- return _ptrX11Render->GetIncomingStreamProperties(streamId, zOrder,
- left, top, right,
- bottom);
- }
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::StartRender()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
- __FUNCTION__);
- return 0;
-}
-
-int32_t VideoRenderLinuxImpl::StopRender()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
- __FUNCTION__);
- return 0;
-}
-
-VideoRenderType VideoRenderLinuxImpl::RenderType()
-{
- return kRenderX11;
-}
-
-RawVideoType VideoRenderLinuxImpl::PerferedVideoType()
-{
- return kVideoI420;
-}
-
-bool VideoRenderLinuxImpl::FullScreen()
-{
- return false;
-}
-
-int32_t VideoRenderLinuxImpl::GetGraphicsMemory(
- uint64_t& /*totalGraphicsMemory*/,
- uint64_t& /*availableGraphicsMemory*/) const
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Linux", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::GetScreenResolution(
- uint32_t& /*screenWidth*/,
- uint32_t& /*screenHeight*/) const
-{
- return -1;
-}
-
-uint32_t VideoRenderLinuxImpl::RenderFrameRate(const uint32_t /*streamId*/)
-{
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::SetStreamCropping(
- const uint32_t /*streamId*/,
- const float /*left*/,
- const float /*top*/,
- const float /*right*/,
- const float /*bottom*/)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Linux", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::SetTransparentBackground(const bool /*enable*/)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Linux", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::ConfigureRenderer(
- const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Linux", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::SetText(
- const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float rigth,
- const float bottom)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Linux", __FUNCTION__);
- return -1;
-}
-
-int32_t VideoRenderLinuxImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s - not supported on Linux", __FUNCTION__);
- return -1;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.h b/chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.h
deleted file mode 100644
index 0e9ae54c18f..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/linux/video_render_linux_impl.h
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
-
-#include "webrtc/modules/video_render/i_video_render.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-
-class VideoX11Render;
-
-// Class definitions
-class VideoRenderLinuxImpl: IVideoRender
-{
-public:
- /*
- * Constructor/destructor
- */
-
- VideoRenderLinuxImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window, const bool fullscreen);
-
- virtual ~VideoRenderLinuxImpl();
-
- virtual int32_t Init();
-
- virtual int32_t ChangeWindow(void* window);
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- virtual VideoRenderCallback
- * AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t
- DeleteIncomingRenderStream(const uint32_t streamId);
-
- virtual int32_t
- GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender();
-
- virtual int32_t StopRender();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual VideoRenderType RenderType();
-
- virtual RawVideoType PerferedVideoType();
-
- virtual bool FullScreen();
-
- virtual int32_t
- GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const;
-
- virtual int32_t
- GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const;
-
- virtual uint32_t RenderFrameRate(const uint32_t streamId);
-
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float rigth, const float bottom);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom);
-
-private:
- int32_t _id;
- CriticalSectionWrapper& _renderLinuxCritsect;
-
- void* _ptrWindow;
-
- // X11 Render
- VideoX11Render* _ptrX11Render;
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.cc b/chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.cc
deleted file mode 100644
index 8d86b7c72ad..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.cc
+++ /dev/null
@@ -1,315 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/linux/video_x11_channel.h"
-
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-#define DISP_MAX 128
-
-static Display *dispArray[DISP_MAX];
-static int dispCount = 0;
-
-
-VideoX11Channel::VideoX11Channel(int32_t id) :
- _crit(*CriticalSectionWrapper::CreateCriticalSection()), _display(NULL),
- _shminfo(), _image(NULL), _window(0L), _gc(NULL),
- _width(DEFAULT_RENDER_FRAME_WIDTH),
- _height(DEFAULT_RENDER_FRAME_HEIGHT), _outWidth(0), _outHeight(0),
- _xPos(0), _yPos(0), _prepared(false), _dispCount(0), _buffer(NULL),
- _top(0.0), _left(0.0), _right(0.0), _bottom(0.0),
- _Id(id)
-{
-}
-
-VideoX11Channel::~VideoX11Channel()
-{
- if (_prepared)
- {
- _crit.Enter();
- ReleaseWindow();
- _crit.Leave();
- }
- delete &_crit;
-}
-
-int32_t VideoX11Channel::RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame) {
- CriticalSectionScoped cs(&_crit);
- if (_width != videoFrame.width() || _height
- != videoFrame.height()) {
- if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
- return -1;
- }
- }
- return DeliverFrame(videoFrame);
-}
-
-int32_t VideoX11Channel::FrameSizeChange(int32_t width,
- int32_t height,
- int32_t /*numberOfStreams */)
-{
- CriticalSectionScoped cs(&_crit);
- if (_prepared)
- {
- RemoveRenderer();
- }
- if (CreateLocalRenderer(width, height) == -1)
- {
- return -1;
- }
-
- return 0;
-}
-
-int32_t VideoX11Channel::DeliverFrame(const VideoFrame& videoFrame) {
- CriticalSectionScoped cs(&_crit);
- if (!_prepared) {
- return 0;
- }
-
- if (!dispArray[_dispCount]) {
- return -1;
- }
-
- ConvertFromI420(videoFrame, kARGB, 0, _buffer);
-
- // Put image in window.
- XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width,
- _height, True);
-
- // Very important for the image to update properly!
- XSync(_display, False);
- return 0;
-}
-
-int32_t VideoX11Channel::GetFrameSize(int32_t& width, int32_t& height)
-{
- width = _width;
- height = _height;
-
- return 0;
-}
-
-int32_t VideoX11Channel::Init(Window window, float left, float top,
- float right, float bottom)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
- __FUNCTION__);
- CriticalSectionScoped cs(&_crit);
-
- _window = window;
- _left = left;
- _right = right;
- _top = top;
- _bottom = bottom;
-
- _display = XOpenDisplay(NULL); // Use default display
- if (!_window || !_display)
- {
- return -1;
- }
-
- if (dispCount < DISP_MAX)
- {
- dispArray[dispCount] = _display;
- _dispCount = dispCount;
- dispCount++;
- }
- else
- {
- return -1;
- }
-
- if ((1 < left || left < 0) || (1 < top || top < 0) || (1 < right || right
- < 0) || (1 < bottom || bottom < 0))
- {
- return -1;
- }
-
- // calculate position and size of rendered video
- int x, y;
- unsigned int winWidth, winHeight, borderwidth, depth;
- Window rootret;
- if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
- &winHeight, &borderwidth, &depth) == 0)
- {
- return -1;
- }
-
- _xPos = (int32_t) (winWidth * left);
- _yPos = (int32_t) (winHeight * top);
- _outWidth = (int32_t) (winWidth * (right - left));
- _outHeight = (int32_t) (winHeight * (bottom - top));
- if (_outWidth % 2)
- _outWidth++; // the renderer want's sizes that are multiples of two
- if (_outHeight % 2)
- _outHeight++;
-
- _gc = XCreateGC(_display, _window, 0, 0);
- if (!_gc) {
- // Failed to create the graphics context.
- assert(false);
- return -1;
- }
-
- if (CreateLocalRenderer(winWidth, winHeight) == -1)
- {
- return -1;
- }
- return 0;
-
-}
-
-int32_t VideoX11Channel::ChangeWindow(Window window)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
- __FUNCTION__);
- CriticalSectionScoped cs(&_crit);
-
- // Stop the rendering, if we are rendering...
- RemoveRenderer();
- _window = window;
-
- // calculate position and size of rendered video
- int x, y;
- unsigned int winWidth, winHeight, borderwidth, depth;
- Window rootret;
- if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
- &winHeight, &borderwidth, &depth) == -1)
- {
- return -1;
- }
- _xPos = (int) (winWidth * _left);
- _yPos = (int) (winHeight * _top);
- _outWidth = (int) (winWidth * (_right - _left));
- _outHeight = (int) (winHeight * (_bottom - _top));
- if (_outWidth % 2)
- _outWidth++; // the renderer want's sizes that are multiples of two
- if (_outHeight % 2)
- _outHeight++;
-
- // Prepare rendering using the
- if (CreateLocalRenderer(_width, _height) == -1)
- {
- return -1;
- }
- return 0;
-}
-
-int32_t VideoX11Channel::ReleaseWindow()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
- __FUNCTION__);
- CriticalSectionScoped cs(&_crit);
-
- RemoveRenderer();
- if (_gc) {
- XFreeGC(_display, _gc);
- _gc = NULL;
- }
- if (_display)
- {
- XCloseDisplay(_display);
- _display = NULL;
- }
- return 0;
-}
-
-int32_t VideoX11Channel::CreateLocalRenderer(int32_t width, int32_t height)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
- __FUNCTION__);
- CriticalSectionScoped cs(&_crit);
-
- if (!_window || !_display)
- {
- return -1;
- }
-
- if (_prepared)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _Id,
- "Renderer already prepared, exits.");
- return -1;
- }
-
- _width = width;
- _height = height;
-
- // create shared memory image
- _image = XShmCreateImage(_display, CopyFromParent, 24, ZPixmap, NULL,
- &_shminfo, _width, _height); // this parameter needs to be the same for some reason.
- _shminfo.shmid = shmget(IPC_PRIVATE, (_image->bytes_per_line
- * _image->height), IPC_CREAT | 0777);
- _shminfo.shmaddr = _image->data = (char*) shmat(_shminfo.shmid, 0, 0);
- if (_image->data == reinterpret_cast<char*>(-1))
- {
- return -1;
- }
- _buffer = (unsigned char*) _image->data;
- _shminfo.readOnly = False;
-
- // attach image to display
- if (!XShmAttach(_display, &_shminfo))
- {
- //printf("XShmAttach failed !\n");
- return -1;
- }
- XSync(_display, False);
-
- _prepared = true;
- return 0;
-}
-
-int32_t VideoX11Channel::RemoveRenderer()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
- __FUNCTION__);
-
- if (!_prepared)
- {
- return 0;
- }
- _prepared = false;
-
- // Free the memory.
- XShmDetach(_display, &_shminfo);
- XDestroyImage( _image );
- _image = NULL;
- shmdt(_shminfo.shmaddr);
- _shminfo.shmaddr = NULL;
- _buffer = NULL;
- shmctl(_shminfo.shmid, IPC_RMID, 0);
- _shminfo.shmid = 0;
- return 0;
-}
-
-int32_t VideoX11Channel::GetStreamProperties(uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
- __FUNCTION__);
-
- zOrder = 0; // no z-order support yet
- left = _left;
- top = _top;
- right = _right;
- bottom = _bottom;
-
- return 0;
-}
-
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.h b/chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.h
deleted file mode 100644
index 6eb402e12ee..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_channel.h
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
-
-#include <sys/shm.h>
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-#include <X11/Xlib.h>
-#include <X11/Xutil.h>
-#include <X11/extensions/XShm.h>
-
-namespace webrtc {
-class CriticalSectionWrapper;
-
-#define DEFAULT_RENDER_FRAME_WIDTH 352
-#define DEFAULT_RENDER_FRAME_HEIGHT 288
-
-
-class VideoX11Channel: public VideoRenderCallback
-{
-public:
- VideoX11Channel(int32_t id);
-
- virtual ~VideoX11Channel();
-
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame);
-
- int32_t FrameSizeChange(int32_t width, int32_t height,
- int32_t numberOfStreams);
- int32_t DeliverFrame(const VideoFrame& videoFrame);
- int32_t GetFrameSize(int32_t& width, int32_t& height);
- int32_t Init(Window window, float left, float top, float right,
- float bottom);
- int32_t ChangeWindow(Window window);
- int32_t
- GetStreamProperties(uint32_t& zOrder, float& left,
- float& top, float& right, float& bottom) const;
- int32_t ReleaseWindow();
-
- bool IsPrepared()
- {
- return _prepared;
- }
-
-private:
-
- int32_t
- CreateLocalRenderer(int32_t width, int32_t height);
- int32_t RemoveRenderer();
-
- //FIXME a better place for this method? the GetWidthHeight no longer
- // supported by common_video.
- int GetWidthHeight(VideoType type, int bufferSize, int& width,
- int& height);
-
- CriticalSectionWrapper& _crit;
-
- Display* _display;
- XShmSegmentInfo _shminfo;
- XImage* _image;
- Window _window;
- GC _gc;
- int32_t _width; // incoming frame width
- int32_t _height; // incoming frame height
- int32_t _outWidth; // render frame width
- int32_t _outHeight; // render frame height
- int32_t _xPos; // position within window
- int32_t _yPos;
- bool _prepared; // true if ready to use
- int32_t _dispCount;
-
- unsigned char* _buffer;
- float _top;
- float _left;
- float _right;
- float _bottom;
-
- int32_t _Id;
-
-};
-
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.cc b/chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.cc
deleted file mode 100644
index 5eb4f36f95f..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.cc
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/linux/video_x11_channel.h"
-#include "webrtc/modules/video_render/linux/video_x11_render.h"
-
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-VideoX11Render::VideoX11Render(Window window) :
- _window(window),
- _critSect(*CriticalSectionWrapper::CreateCriticalSection())
-{
-}
-
-VideoX11Render::~VideoX11Render()
-{
- delete &_critSect;
-}
-
-int32_t VideoX11Render::Init()
-{
- CriticalSectionScoped cs(&_critSect);
-
- _streamIdToX11ChannelMap.clear();
-
- return 0;
-}
-
-int32_t VideoX11Render::ChangeWindow(Window window)
-{
- CriticalSectionScoped cs(&_critSect);
- VideoX11Channel* renderChannel = NULL;
-
- std::map<int, VideoX11Channel*>::iterator iter =
- _streamIdToX11ChannelMap.begin();
-
- while (iter != _streamIdToX11ChannelMap.end())
- {
- renderChannel = iter->second;
- if (renderChannel)
- {
- renderChannel->ChangeWindow(window);
- }
- iter++;
- }
-
- _window = window;
-
- return 0;
-}
-
-VideoX11Channel* VideoX11Render::CreateX11RenderChannel(
- int32_t streamId,
- int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_critSect);
- VideoX11Channel* renderChannel = NULL;
-
- std::map<int, VideoX11Channel*>::iterator iter =
- _streamIdToX11ChannelMap.find(streamId);
-
- if (iter == _streamIdToX11ChannelMap.end())
- {
- renderChannel = new VideoX11Channel(streamId);
- if (!renderChannel)
- {
- WEBRTC_TRACE(
- kTraceError,
- kTraceVideoRenderer,
- -1,
- "Failed to create VideoX11Channel for streamId : %d",
- streamId);
- return NULL;
- }
- renderChannel->Init(_window, left, top, right, bottom);
- _streamIdToX11ChannelMap[streamId] = renderChannel;
- }
- else
- {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
- "Render Channel already exists for streamId: %d", streamId);
- renderChannel = iter->second;
- }
-
- return renderChannel;
-}
-
-int32_t VideoX11Render::DeleteX11RenderChannel(int32_t streamId)
-{
- CriticalSectionScoped cs(&_critSect);
-
- std::map<int, VideoX11Channel*>::iterator iter =
- _streamIdToX11ChannelMap.find(streamId);
- if (iter != _streamIdToX11ChannelMap.end())
- {
- VideoX11Channel *renderChannel = iter->second;
- if (renderChannel)
- {
- renderChannel->ReleaseWindow();
- delete renderChannel;
- renderChannel = NULL;
- }
- _streamIdToX11ChannelMap.erase(iter);
- }
-
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
- "No VideoX11Channel object exists for stream id: %d",
- streamId);
- return -1;
-}
-
-int32_t VideoX11Render::GetIncomingStreamProperties(
- int32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom)
-{
- CriticalSectionScoped cs(&_critSect);
-
- std::map<int, VideoX11Channel*>::iterator iter =
- _streamIdToX11ChannelMap.find(streamId);
- if (iter != _streamIdToX11ChannelMap.end())
- {
- VideoX11Channel *renderChannel = iter->second;
- if (renderChannel)
- {
- renderChannel->GetStreamProperties(zOrder, left, top, right, bottom);
- }
- }
-
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
- "No VideoX11Channel object exists for stream id: %d",
- streamId);
- return -1;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.h b/chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.h
deleted file mode 100644
index 23b83bd67bb..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/linux/video_x11_render.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
-
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-#include <X11/Xlib.h>
-#include <map>
-
-namespace webrtc {
-class CriticalSectionWrapper;
-
-class VideoX11Channel;
-
-class VideoX11Render
-{
-
-public:
- VideoX11Render(Window window);
- ~VideoX11Render();
-
- int32_t Init();
- int32_t ChangeWindow(Window window);
-
- VideoX11Channel* CreateX11RenderChannel(int32_t streamId,
- int32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- int32_t DeleteX11RenderChannel(int32_t streamId);
-
- int32_t GetIncomingStreamProperties(int32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom);
-
-private:
- Window _window;
- CriticalSectionWrapper& _critSect;
- std::map<int, VideoX11Channel*> _streamIdToX11ChannelMap;
-
-};
-
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.h b/chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.h
deleted file mode 100644
index c8e98bba674..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.h
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-//
-// cocoa_full_screen_window.h
-//
-//
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
-
-#import <Cocoa/Cocoa.h>
-//#define GRAB_ALL_SCREENS 1
-
-@interface CocoaFullScreenWindow : NSObject {
- NSWindow* _window;
-}
-
--(id)init;
--(void)grabFullScreen;
--(void)releaseFullScreen;
--(NSWindow*)window;
-
-@end
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.mm b/chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.mm
deleted file mode 100644
index b57223b4dfd..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_full_screen_window.mm
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/mac/cocoa_full_screen_window.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-using namespace webrtc;
-
-@implementation CocoaFullScreenWindow
-
--(id)init{
-
- self = [super init];
- if(!self){
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d COULD NOT CREATE INSTANCE", __FUNCTION__, __LINE__);
- return nil;
- }
-
-
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, 0, "%s:%d Created instance", __FUNCTION__, __LINE__);
- return self;
-}
-
--(void)grabFullScreen{
-
-#ifdef GRAB_ALL_SCREENS
- if(CGCaptureAllDisplays() != kCGErrorSuccess)
-#else
- if(CGDisplayCapture(kCGDirectMainDisplay) != kCGErrorSuccess)
-#endif
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not capture main level", __FUNCTION__, __LINE__);
- }
-
- // get the shielding window level
- int windowLevel = CGShieldingWindowLevel();
-
- // get the screen rect of main display
- NSRect screenRect = [[NSScreen mainScreen]frame];
-
- _window = [[NSWindow alloc]initWithContentRect:screenRect
- styleMask:NSBorderlessWindowMask
- backing:NSBackingStoreBuffered
- defer:NO
- screen:[NSScreen mainScreen]];
-
- [_window setLevel:windowLevel];
- [_window setBackgroundColor:[NSColor blackColor]];
- [_window makeKeyAndOrderFront:nil];
-
-}
-
--(void)releaseFullScreen
-{
- [_window orderOut:self];
-
-#ifdef GRAB_ALL_SCREENS
- if(CGReleaseAllDisplays() != kCGErrorSuccess)
-#else
- if(CGDisplayRelease(kCGDirectMainDisplay) != kCGErrorSuccess)
-#endif
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not release the displays", __FUNCTION__, __LINE__);
- }
-}
-
-- (NSWindow*)window
-{
- return _window;
-}
-
-- (void) dealloc
-{
- [self releaseFullScreen];
- [super dealloc];
-}
-
-
-
-@end
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.h b/chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.h
deleted file mode 100644
index 15a8108dec7..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.h
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-//
-// cocoa_render_view.h
-//
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
-
-#import <Cocoa/Cocoa.h>
-#import <OpenGL/gl.h>
-#import <OpenGL/glu.h>
-#import <OpenGL/OpenGL.h>
-
-@interface CocoaRenderView : NSOpenGLView {
- NSOpenGLContext* _nsOpenGLContext;
-}
-
--(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt;
--(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt;
--(NSOpenGLContext*)nsOpenGLContext;
-@end
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.mm b/chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.mm
deleted file mode 100644
index 4631ff31a4f..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/cocoa_render_view.mm
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Cocoa/Cocoa.h>
-#import <AppKit/AppKit.h>
-
-#include "webrtc/modules/video_render/mac/cocoa_render_view.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-using namespace webrtc;
-
-@implementation CocoaRenderView
-
--(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt{
-
- self = [super initWithFrame:[self frame] pixelFormat:fmt];
- if (self == nil){
-
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__);
- }
-
-
- _nsOpenGLContext = [self openGLContext];
-
-}
-
--(NSOpenGLContext*)nsOpenGLContext {
- return _nsOpenGLContext;
-}
-
--(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt{
-
- NSRect screenRect = [[NSScreen mainScreen]frame];
-// [_windowRef setFrame:screenRect];
-// [_windowRef setBounds:screenRect];
- self = [super initWithFrame:screenRect pixelFormat:fmt];
- if (self == nil){
-
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__);
- }
-
- _nsOpenGLContext = [self openGLContext];
-
-}
-
-@end
-
-
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.cc b/chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.cc
deleted file mode 100644
index 3243563b2bf..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.cc
+++ /dev/null
@@ -1,1987 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-
-#if defined(CARBON_RENDERING)
-
-#include "webrtc/modules/video_render/mac/video_render_agl.h"
-
-// includes
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-/*
- *
- * VideoChannelAGL
- *
- */
-
-#pragma mark VideoChannelAGL constructor
-
-VideoChannelAGL::VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner) :
- _aglContext( aglContext),
- _id( iId),
- _owner( owner),
- _width( 0),
- _height( 0),
- _stretchedWidth( 0),
- _stretchedHeight( 0),
- _startWidth( 0.0f),
- _startHeight( 0.0f),
- _stopWidth( 0.0f),
- _stopHeight( 0.0f),
- _xOldWidth( 0),
- _yOldHeight( 0),
- _oldStretchedHeight(0),
- _oldStretchedWidth( 0),
- _buffer( 0),
- _bufferSize( 0),
- _incomingBufferSize(0),
- _bufferIsUpdated( false),
- _sizeInitialized( false),
- _numberOfStreams( 0),
- _bVideoSizeStartedChanging(false),
- _pixelFormat( GL_RGBA),
- _pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
- _texture( 0)
-
-{
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Constructor", __FUNCTION__, __LINE__);
-}
-
-VideoChannelAGL::~VideoChannelAGL()
-{
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Destructor", __FUNCTION__, __LINE__);
- if (_buffer)
- {
- delete [] _buffer;
- _buffer = NULL;
- }
-
- aglSetCurrentContext(_aglContext);
-
- if (_texture != 0)
- {
- glDeleteTextures(1, (const GLuint*) &_texture);
- _texture = 0;
- }
-}
-
-int32_t VideoChannelAGL::RenderFrame(const uint32_t streamId,
- VideoFrame& videoFrame) {
- _owner->LockAGLCntx();
- if (_width != videoFrame.width() ||
- _height != videoFrame.height()) {
- if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d FrameSize
- Change returned an error", __FUNCTION__, __LINE__);
- _owner->UnlockAGLCntx();
- return -1;
- }
- }
-
- _owner->UnlockAGLCntx();
- return DeliverFrame(videoFrame);
-}
-
-int VideoChannelAGL::UpdateSize(int /*width*/, int /*height*/)
-{
- _owner->LockAGLCntx();
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelAGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
-{
-
- _owner->LockAGLCntx();
- _stretchedHeight = stretchHeight;
- _stretchedWidth = stretchWidth;
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelAGL::FrameSizeChange(int width, int height, int numberOfStreams)
-{
- // We'll get a new frame size from VideoAPI, prepare the buffer
-
- _owner->LockAGLCntx();
-
- if (width == _width && _height == height)
- {
- // We already have a correct buffer size
- _numberOfStreams = numberOfStreams;
- _owner->UnlockAGLCntx();
- return 0;
- }
-
- _width = width;
- _height = height;
-
- // Delete the old buffer, create a new one with correct size.
- if (_buffer)
- {
- delete [] _buffer;
- _bufferSize = 0;
- }
-
- _incomingBufferSize = CalcBufferSize(kI420, _width, _height);
- _bufferSize = CalcBufferSize(kARGB, _width, _height);//_width * _height * bytesPerPixel;
- _buffer = new unsigned char [_bufferSize];
- memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
-
- if (aglSetCurrentContext(_aglContext) == false)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- // Delete a possible old texture
- if (_texture != 0)
- {
- glDeleteTextures(1, (const GLuint*) &_texture);
- _texture = 0;
- }
-
- // Create a new texture
- glGenTextures(1, (GLuint *) &_texture);
-
- GLenum glErr = glGetError();
-
- if (glErr != GL_NO_ERROR)
- {
- }
-
- // Do the setup for both textures
- // Note: we setup two textures even if we're not running full screen
- glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
-
- // Set texture parameters
- glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0);
-
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
-
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
- //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
- //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
-
- glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
-
- glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
-
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
-
- // Maximum width/height for a texture
- GLint texSize;
- glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);
-
- if (texSize < _width || texSize < _height)
- {
- // Image too big for memory
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- // Set up th texture type and size
- glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
- 0, // level
- GL_RGBA, // internal format
- _width, // width
- _height, // height
- 0, // border 0/1 = off/on
- _pixelFormat, // format, GL_BGRA
- _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
- _buffer); // pixel data
-
- glErr = glGetError();
- if (glErr != GL_NO_ERROR)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-// Called from video engine when a new frame should be rendered.
-int VideoChannelAGL::DeliverFrame(const VideoFrame& videoFrame) {
- _owner->LockAGLCntx();
-
- if (_texture == 0) {
- _owner->UnlockAGLCntx();
- return 0;
- }
-
- if (CalcBufferSize(kI420, videoFrame.width(), videoFrame.height()) !=
- _incomingBufferSize) {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- // Setting stride = width.
- int rgbret = ConvertFromYV12(videoFrame, kBGRA, 0, _buffer);
- if (rgbret < 0) {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- aglSetCurrentContext(_aglContext);
-
- // Put the new frame into the graphic card texture.
- // Make sure this texture is the active one
- glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
- GLenum glErr = glGetError();
- if (glErr != GL_NO_ERROR) {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- // Copy buffer to texture
- glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
- 0, // Level, not use
- 0, // start point x, (low left of pic)
- 0, // start point y,
- _width, // width
- _height, // height
- _pixelFormat, // pictue format for _buffer
- _pixelDataType, // data type of _buffer
- (const GLvoid*) _buffer); // the pixel data
-
- if (glGetError() != GL_NO_ERROR) {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- _bufferIsUpdated = true;
- _owner->UnlockAGLCntx();
-
- return 0;
-}
-
-int VideoChannelAGL::RenderOffScreenBuffer()
-{
-
- _owner->LockAGLCntx();
-
- if (_texture == 0)
- {
- _owner->UnlockAGLCntx();
- return 0;
- }
-
- GLfloat xStart = 2.0f * _startWidth - 1.0f;
- GLfloat xStop = 2.0f * _stopWidth - 1.0f;
- GLfloat yStart = 1.0f - 2.0f * _stopHeight;
- GLfloat yStop = 1.0f - 2.0f * _startHeight;
-
- aglSetCurrentContext(_aglContext);
- glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
-
- if(_stretchedWidth != _oldStretchedWidth || _stretchedHeight != _oldStretchedHeight)
- {
- glViewport(0, 0, _stretchedWidth, _stretchedHeight);
- }
- _oldStretchedHeight = _stretchedHeight;
- _oldStretchedWidth = _stretchedWidth;
-
- // Now really put the texture into the framebuffer
- glLoadIdentity();
-
- glEnable(GL_TEXTURE_RECTANGLE_EXT);
-
- glBegin(GL_POLYGON);
- {
- glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
- glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
- glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
- glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
- }
- glEnd();
-
- glDisable(GL_TEXTURE_RECTANGLE_EXT);
-
- _bufferIsUpdated = false;
-
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelAGL::IsUpdated(bool& isUpdated)
-{
- _owner->LockAGLCntx();
- isUpdated = _bufferIsUpdated;
- _owner->UnlockAGLCntx();
-
- return 0;
-}
-
-int VideoChannelAGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight)
-{
-
- _owner->LockAGLCntx();
-
- _startWidth = startWidth;
- _stopWidth = stopWidth;
- _startHeight = startHeight;
- _stopHeight = stopHeight;
-
- int oldWidth = _width;
- int oldHeight = _height;
- int oldNumberOfStreams = _numberOfStreams;
-
- _width = 0;
- _height = 0;
-
- int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams);
-
- _owner->UnlockAGLCntx();
-
- return retVal;
-}
-
-int VideoChannelAGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/)
-{
- return -1;
-}
-
-#pragma mark VideoRenderAGL WindowRef constructor
-
-VideoRenderAGL::VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId) :
-_hiviewRef( 0),
-_windowRef( windowRef),
-_fullScreen( fullscreen),
-_id( iId),
-_renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
-_screenUpdateEvent( 0),
-_isHIViewRef( false),
-_aglContext( 0),
-_windowWidth( 0),
-_windowHeight( 0),
-_lastWindowWidth( -1),
-_lastWindowHeight( -1),
-_lastHiViewWidth( -1),
-_lastHiViewHeight( -1),
-_currentParentWindowHeight( 0),
-_currentParentWindowWidth( 0),
-_currentParentWindowBounds( ),
-_windowHasResized( false),
-_lastParentWindowBounds( ),
-_currentHIViewBounds( ),
-_lastHIViewBounds( ),
-_windowRect( ),
-_aglChannels( ),
-_zOrderToChannel( ),
-_hiviewEventHandlerRef( NULL),
-_windowEventHandlerRef( NULL),
-_currentViewBounds( ),
-_lastViewBounds( ),
-_renderingIsPaused( false),
-
-{
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s");
-
- _screenUpdateThread.reset(
- new rtc::PlatformThread(ScreenUpdateThreadProc, this, "ScreenUpdate"));
- _screenUpdateEvent = EventWrapper::Create();
-
- if(!IsValidWindowPtr(_windowRef))
- {
- //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef:0x%x", __FUNCTION__, __LINE__, _windowRef);
- }
- else
- {
- //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d WindowRef 0x%x is valid", __FUNCTION__, __LINE__, _windowRef);
- }
-
- GetWindowRect(_windowRect);
-
- _lastViewBounds.origin.x = 0;
- _lastViewBounds.origin.y = 0;
- _lastViewBounds.size.width = 0;
- _lastViewBounds.size.height = 0;
-
-}
-
-// this is a static function. It has been registered (in class constructor) to be called on various window redrawing or resizing.
-// Since it is a static method, I have passed in "this" as the userData (one and only allowed) parameter, then calling member methods on it.
-#pragma mark WindowRef Event Handler
-pascal OSStatus VideoRenderAGL::sHandleWindowResized (EventHandlerCallRef /*nextHandler*/,
- EventRef theEvent,
- void* userData)
-{
- WindowRef windowRef = NULL;
-
- int eventType = GetEventKind(theEvent);
-
- // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
- GetEventParameter (theEvent,
- kEventParamDirectObject,
- typeWindowRef,
- NULL,
- sizeof (WindowRef),
- NULL,
- &windowRef);
-
- VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
-
- bool updateUI = true;
- if(kEventWindowBoundsChanged == eventType)
- {
- }
- else if(kEventWindowBoundsChanging == eventType)
- {
- }
- else if(kEventWindowZoomed == eventType)
- {
- }
- else if(kEventWindowExpanding == eventType)
- {
- }
- else if(kEventWindowExpanded == eventType)
- {
- }
- else if(kEventWindowClickResizeRgn == eventType)
- {
- }
- else if(kEventWindowClickDragRgn == eventType)
- {
- }
- else
- {
- updateUI = false;
- }
-
- if(true == updateUI)
- {
- obj->ParentWindowResized(windowRef);
- obj->UpdateClipping();
- obj->RenderOffScreenBuffers();
- }
-
- return noErr;
-}
-
-#pragma mark VideoRenderAGL HIViewRef constructor
-
-VideoRenderAGL::VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId) :
-_hiviewRef( windowRef),
-_windowRef( 0),
-_fullScreen( fullscreen),
-_id( iId),
-_renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
-_screenUpdateEvent( 0),
-_isHIViewRef( false),
-_aglContext( 0),
-_windowWidth( 0),
-_windowHeight( 0),
-_lastWindowWidth( -1),
-_lastWindowHeight( -1),
-_lastHiViewWidth( -1),
-_lastHiViewHeight( -1),
-_currentParentWindowHeight( 0),
-_currentParentWindowWidth( 0),
-_currentParentWindowBounds( ),
-_windowHasResized( false),
-_lastParentWindowBounds( ),
-_currentHIViewBounds( ),
-_lastHIViewBounds( ),
-_windowRect( ),
-_aglChannels( ),
-_zOrderToChannel( ),
-_hiviewEventHandlerRef( NULL),
-_windowEventHandlerRef( NULL),
-_currentViewBounds( ),
-_lastViewBounds( ),
-_renderingIsPaused( false),
-{
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Constructor", __FUNCTION__, __LINE__);
- // _renderCritSec = CriticalSectionWrapper::CreateCriticalSection();
-
- _screenUpdateThread.reset(new rtc::PlatformThread(
- ScreenUpdateThreadProc, this, "ScreenUpdateThread"));
- _screenUpdateEvent = EventWrapper::Create();
-
- GetWindowRect(_windowRect);
-
- _lastViewBounds.origin.x = 0;
- _lastViewBounds.origin.y = 0;
- _lastViewBounds.size.width = 0;
- _lastViewBounds.size.height = 0;
-
-#ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
- // This gets the parent window of the HIViewRef that's passed in and installs a WindowRef event handler on it
- // The event handler looks for window resize events and adjusts the offset of the controls.
-
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef's parent window", __FUNCTION__, __LINE__);
-
-
- static const EventTypeSpec windowEventTypes[] =
- {
- kEventClassWindow, kEventWindowBoundsChanged,
- kEventClassWindow, kEventWindowBoundsChanging,
- kEventClassWindow, kEventWindowZoomed,
- kEventClassWindow, kEventWindowExpanded,
- kEventClassWindow, kEventWindowClickResizeRgn,
- kEventClassWindow, kEventWindowClickDragRgn
- };
-
- WindowRef parentWindow = HIViewGetWindow(windowRef);
-
- InstallWindowEventHandler (parentWindow,
- NewEventHandlerUPP (sHandleWindowResized),
- GetEventTypeCount(windowEventTypes),
- windowEventTypes,
- (void *) this, // this is an arbitrary parameter that will be passed on to your event handler when it is called later
- &_windowEventHandlerRef);
-
-#endif
-
-#ifdef NEW_HIVIEW_EVENT_HANDLER
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef", __FUNCTION__, __LINE__);
-
- static const EventTypeSpec hiviewEventTypes[] =
- {
- kEventClassControl, kEventControlBoundsChanged,
- kEventClassControl, kEventControlDraw
- // kEventControlDragLeave
- // kEventControlDragReceive
- // kEventControlGetFocusPart
- // kEventControlApplyBackground
- // kEventControlDraw
- // kEventControlHit
-
- };
-
- HIViewInstallEventHandler(_hiviewRef,
- NewEventHandlerUPP(sHandleHiViewResized),
- GetEventTypeCount(hiviewEventTypes),
- hiviewEventTypes,
- (void *) this,
- &_hiviewEventHandlerRef);
-
-#endif
-}
-
-// this is a static function. It has been registered (in constructor) to be called on various window redrawing or resizing.
-// Since it is a static method, I have passed in "this" as the userData (one and only allowed) parameter, then calling member methods on it.
-#pragma mark HIViewRef Event Handler
-pascal OSStatus VideoRenderAGL::sHandleHiViewResized (EventHandlerCallRef nextHandler, EventRef theEvent, void* userData)
-{
- //static int callbackCounter = 1;
- HIViewRef hiviewRef = NULL;
-
- // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
- int eventType = GetEventKind(theEvent);
- OSStatus status = noErr;
- status = GetEventParameter (theEvent,
- kEventParamDirectObject,
- typeControlRef,
- NULL,
- sizeof (ControlRef),
- NULL,
- &hiviewRef);
-
- VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
- WindowRef parentWindow = HIViewGetWindow(hiviewRef);
- bool updateUI = true;
-
- if(kEventControlBoundsChanged == eventType)
- {
- }
- else if(kEventControlDraw == eventType)
- {
- }
- else
- {
- updateUI = false;
- }
-
- if(true == updateUI)
- {
- obj->ParentWindowResized(parentWindow);
- obj->UpdateClipping();
- obj->RenderOffScreenBuffers();
- }
-
- return status;
-}
-
-VideoRenderAGL::~VideoRenderAGL()
-{
-
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Destructor", __FUNCTION__, __LINE__);
-
-
-#ifdef USE_EVENT_HANDLERS
- // remove event handlers
- OSStatus status;
- if(_isHIViewRef)
- {
- status = RemoveEventHandler(_hiviewEventHandlerRef);
- }
- else
- {
- status = RemoveEventHandler(_windowEventHandlerRef);
- }
- if(noErr != status)
- {
- if(_isHIViewRef)
- {
-
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
- }
- else
- {
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
- }
- }
-
-#endif
-
- OSStatus status;
-#ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
- if(_windowEventHandlerRef)
- {
- status = RemoveEventHandler(_windowEventHandlerRef);
- if(status != noErr)
- {
- //WEBRTC_TRACE(kTraceDebug, "%s:%d failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
- }
- }
-#endif
-
-#ifdef NEW_HIVIEW_EVENT_HANDLER
- if(_hiviewEventHandlerRef)
- {
- status = RemoveEventHandler(_hiviewEventHandlerRef);
- if(status != noErr)
- {
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
- }
- }
-#endif
-
- // Signal event to exit thread, then delete it
- rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
-
- if (tmpPtr)
- {
- _screenUpdateEvent->Set();
- _screenUpdateEvent->StopTimer();
-
- tmpPtr->Stop();
- delete tmpPtr;
- delete _screenUpdateEvent;
- _screenUpdateEvent = NULL;
- }
-
- if (_aglContext != 0)
- {
- aglSetCurrentContext(_aglContext);
- aglDestroyContext(_aglContext);
- _aglContext = 0;
- }
-
- // Delete all channels
- std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
- while (it!= _aglChannels.end())
- {
- delete it->second;
- _aglChannels.erase(it);
- it = _aglChannels.begin();
- }
- _aglChannels.clear();
-
- // Clean the zOrder map
- std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
- while(zIt != _zOrderToChannel.end())
- {
- _zOrderToChannel.erase(zIt);
- zIt = _zOrderToChannel.begin();
- }
- _zOrderToChannel.clear();
-
- //delete _renderCritSec;
-
-
-}
-
-int VideoRenderAGL::GetOpenGLVersion(int& aglMajor, int& aglMinor)
-{
- aglGetVersion((GLint *) &aglMajor, (GLint *) &aglMinor);
- return 0;
-}
-
-int VideoRenderAGL::Init()
-{
- LockAGLCntx();
-
- // Start rendering thread...
- if (!_screenUpdateThread)
- {
- UnlockAGLCntx();
- //WEBRTC_TRACE(kTraceError, "%s:%d Thread not created", __FUNCTION__, __LINE__);
- return -1;
- }
- _screenUpdateThread->Start();
- _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
-
- // Start the event triggering the render process
- unsigned int monitorFreq = 60;
- _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);
-
- // Create mixing textures
- if (CreateMixingContext() == -1)
- {
- //WEBRTC_TRACE(kTraceError, "%s:%d Could not create a mixing context", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return -1;
- }
-
- UnlockAGLCntx();
- return 0;
-}
-
-VideoChannelAGL* VideoRenderAGL::CreateAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
-{
-
- LockAGLCntx();
-
- //WEBRTC_TRACE(kTraceInfo, "%s:%d Creating AGL channel: %d", __FUNCTION__, __LINE__, channel);
-
- if (HasChannel(channel))
- {
- //WEBRTC_TRACE(kTraceError, "%s:%d Channel already exists", __FUNCTION__, __LINE__);
- UnlockAGLCntx();k
- return NULL;
- }
-
- if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
- {
- // There are already one channel using this zOrder
- // TODO: Allow multiple channels with same zOrder
- }
-
- VideoChannelAGL* newAGLChannel = new VideoChannelAGL(_aglContext, _id, this);
-
- if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
- {
- if (newAGLChannel)
- {
- delete newAGLChannel;
- newAGLChannel = NULL;
- }
- //WEBRTC_LOG(kTraceError, "Could not create AGL channel");
- //WEBRTC_TRACE(kTraceError, "%s:%d Could not create AGL channel", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return NULL;
- }
-k
- _aglChannels[channel] = newAGLChannel;
- _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
-
- UnlockAGLCntx();
- return newAGLChannel;
-}
-
-int VideoRenderAGL::DeleteAllAGLChannels()
-{
- CriticalSectionScoped cs(&_renderCritSec);
-
- //WEBRTC_TRACE(kTraceInfo, "%s:%d Deleting all AGL channels", __FUNCTION__, __LINE__);
- //int i = 0 ;
- std::map<int, VideoChannelAGL*>::iterator it;
- it = _aglChannels.begin();
-
- while (it != _aglChannels.end())
- {
- VideoChannelAGL* channel = it->second;
- if (channel)
- delete channel;
-
- _aglChannels.erase(it);
- it = _aglChannels.begin();
- }
- _aglChannels.clear();
- return 0;
-}
-
-int VideoRenderAGL::DeleteAGLChannel(int channel)
-{
- CriticalSectionScoped cs(&_renderCritSec);
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Deleting AGL channel %d", __FUNCTION__, __LINE__, channel);
-
- std::map<int, VideoChannelAGL*>::iterator it;
- it = _aglChannels.find(channel);
- if (it != _aglChannels.end())
- {
- delete it->second;
- _aglChannels.erase(it);
- }
- else
- {
- //WEBRTC_TRACE(kTraceWarning, "%s:%d Channel not found", __FUNCTION__, __LINE__);
- return -1;
- }
-
- std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
- while( zIt != _zOrderToChannel.end())
- {
- if (zIt->second == channel)
- {
- _zOrderToChannel.erase(zIt);
- break;
- }
- zIt++;// = _zOrderToChannel.begin();
- }
-
- return 0;
-}
-
-int VideoRenderAGL::StopThread()
-{
- CriticalSectionScoped cs(&_renderCritSec);
- rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
-
- if (tmpPtr)
- {
- _screenUpdateEvent->Set();
- _renderCritSec.Leave();
- tmpPtr->Stop();
- delete tmpPtr;
- _renderCritSec.Enter();
- }
-
- delete _screenUpdateEvent;
- _screenUpdateEvent = NULL;
-
- return 0;
-}
-
-bool VideoRenderAGL::IsFullScreen()
-{
- CriticalSectionScoped cs(&_renderCritSec);
- return _fullScreen;
-}
-
-bool VideoRenderAGL::HasChannels()
-{
-
- CriticalSectionScoped cs(&_renderCritSec);
-
- if (_aglChannels.begin() != _aglChannels.end())
- {
- return true;
- }
-
- return false;
-}
-
-bool VideoRenderAGL::HasChannel(int channel)
-{
- CriticalSectionScoped cs(&_renderCritSec);
-
- std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
- if (it != _aglChannels.end())
- {
- return true;
- }
-
- return false;
-}
-
-int VideoRenderAGL::GetChannels(std::list<int>& channelList)
-{
-
- CriticalSectionScoped cs(&_renderCritSec);
- std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
-
- while (it != _aglChannels.end())
- {
- channelList.push_back(it->first);
- it++;
- }
-
- return 0;
-}
-
-VideoChannelAGL* VideoRenderAGL::ConfigureAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
-{
-
- CriticalSectionScoped cs(&_renderCritSec);
-
- std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
-
- if (it != _aglChannels.end())
- {
- VideoChannelAGL* aglChannel = it->second;
- if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
- {
- return NULL;
- }
-
- std::multimap<int, int>::iterator it = _zOrderToChannel.begin();
- while(it != _zOrderToChannel.end())
- {
- if (it->second == channel)
- {
- if (it->first != zOrder)
- {
- _zOrderToChannel.erase(it);
- _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
- }
- break;
- }
- it++;
- }
- return aglChannel;
- }
-
- return NULL;
-}
-
-bool VideoRenderAGL::ScreenUpdateThreadProc(void* obj)
-{
- return static_cast<VideoRenderAGL*>(obj)->ScreenUpdateProcess();
-}
-
-bool VideoRenderAGL::ScreenUpdateProcess()
-{
- _screenUpdateEvent->Wait(100);
-
- LockAGLCntx();
-
- if (!_screenUpdateThread)
- {
- UnlockAGLCntx();
- return false;
- }
-
- if (aglSetCurrentContext(_aglContext) == GL_FALSE)
- {
- UnlockAGLCntx();
- return true;
- }
-
- if (GetWindowRect(_windowRect) == -1)
- {
- UnlockAGLCntx();
- return true;
- }
-
- if (_windowWidth != (_windowRect.right - _windowRect.left)
- || _windowHeight != (_windowRect.bottom - _windowRect.top))
- {
- // We have a new window size, update the context.
- if (aglUpdateContext(_aglContext) == GL_FALSE)
- {
- UnlockAGLCntx();
- return true;
- }
- _windowWidth = _windowRect.right - _windowRect.left;
- _windowHeight = _windowRect.bottom - _windowRect.top;
- }
-
- // this section will poll to see if the window size has changed
- // this is causing problem w/invalid windowRef
- // this code has been modified and exists now in the window event handler
-#ifndef NEW_HIVIEW_PARENT_EVENT_HANDLER
- if (_isHIViewRef)
- {
-
- if(FALSE == HIViewIsValid(_hiviewRef))
- {
-
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalid windowRef", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return true;
- }
- WindowRef window = HIViewGetWindow(_hiviewRef);
-
- if(FALSE == IsValidWindowPtr(window))
- {
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalide hiviewRef", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return true;
- }
- if (window == NULL)
- {
- //WEBRTC_TRACE(kTraceDebug, "%s:%d WindowRef = NULL", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return true;
- }
-
- if(FALSE == MacIsWindowVisible(window))
- {
- //WEBRTC_TRACE(kTraceDebug, "%s:%d MacIsWindowVisible == FALSE. Returning early", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return true;
- }
-
- HIRect viewBounds; // Placement and size for HIView
- int windowWidth = 0; // Parent window width
- int windowHeight = 0; // Parent window height
-
- // NOTE: Calling GetWindowBounds with kWindowStructureRgn will crash intermittentaly if the OS decides it needs to push it into the back for a moment.
- // To counter this, we get the titlebar height on class construction and then add it to the content region here. Content regions seems not to crash
- Rect contentBounds =
- { 0, 0, 0, 0}; // The bounds for the parent window
-
-#if defined(USE_CONTENT_RGN)
- GetWindowBounds(window, kWindowContentRgn, &contentBounds);
-#elif defined(USE_STRUCT_RGN)
- GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
-#endif
-
- Rect globalBounds =
- { 0, 0, 0, 0}; // The bounds for the parent window
- globalBounds.top = contentBounds.top;
- globalBounds.right = contentBounds.right;
- globalBounds.bottom = contentBounds.bottom;
- globalBounds.left = contentBounds.left;
-
- windowHeight = globalBounds.bottom - globalBounds.top;
- windowWidth = globalBounds.right - globalBounds.left;
-
- // Get the size of the HIViewRef
- HIViewGetBounds(_hiviewRef, &viewBounds);
- HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
-
- // Check if this is the first call..
- if (_lastWindowHeight == -1 &&
- _lastWindowWidth == -1)
- {
- _lastWindowWidth = windowWidth;
- _lastWindowHeight = windowHeight;
-
- _lastViewBounds.origin.x = viewBounds.origin.x;
- _lastViewBounds.origin.y = viewBounds.origin.y;
- _lastViewBounds.size.width = viewBounds.size.width;
- _lastViewBounds.size.height = viewBounds.size.height;
- }
- sfasdfasdf
-
- bool resized = false;
-
- // Check if parent window size has changed
- if (windowHeight != _lastWindowHeight ||
- windowWidth != _lastWindowWidth)
- {
- resized = true;
- }
-
- // Check if the HIView has new size or is moved in the parent window
- if (_lastViewBounds.origin.x != viewBounds.origin.x ||
- _lastViewBounds.origin.y != viewBounds.origin.y ||
- _lastViewBounds.size.width != viewBounds.size.width ||
- _lastViewBounds.size.height != viewBounds.size.height)
- {
- // The HiView is resized or has moved.
- resized = true;
- }
-
- if (resized)
- {
-
- //WEBRTC_TRACE(kTraceDebug, "%s:%d Window has resized", __FUNCTION__, __LINE__);
-
- // Calculate offset between the windows
- // {x, y, widht, height}, x,y = lower left corner
- const GLint offs[4] =
- { (int)(0.5f + viewBounds.origin.x),
- (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
- viewBounds.size.width, viewBounds.size.height};
-
- //WEBRTC_TRACE(kTraceDebug, "%s:%d contentBounds t:%d r:%d b:%d l:%d", __FUNCTION__, __LINE__,
- contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
- //WEBRTC_TRACE(kTraceDebug, "%s:%d windowHeight=%d", __FUNCTION__, __LINE__, windowHeight);
- //WEBRTC_TRACE(kTraceDebug, "%s:%d offs[4] = %d, %d, %d, %d", __FUNCTION__, __LINE__, offs[0], offs[1], offs[2], offs[3]);
-
- aglSetDrawable (_aglContext, GetWindowPort(window));
- aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
- aglEnable(_aglContext, AGL_BUFFER_RECT);
-
- // We need to change the viewport too if the HIView size has changed
- glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
-
- }
- _lastWindowWidth = windowWidth;
- _lastWindowHeight = windowHeight;
-
- _lastViewBounds.origin.x = viewBounds.origin.x;
- _lastViewBounds.origin.y = viewBounds.origin.y;
- _lastViewBounds.size.width = viewBounds.size.width;
- _lastViewBounds.size.height = viewBounds.size.height;
-
- }
-#endif
- if (_fullScreen)
- {
- // TODO
- // We use double buffers, must always update
- //RenderOffScreenBuffersToBackBuffer();
- }
- else
- {
- // Check if there are any updated buffers
- bool updated = false;
-
- // TODO: check if window size is updated!
- // TODO Improvement: Walk through the zOrder Map to only render the ones in need of update
- std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
- while (it != _aglChannels.end())
- {
-
- VideoChannelAGL* aglChannel = it->second;
- aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
- aglChannel->IsUpdated(updated);
- if (updated)
- {
- break;
- }
- it++;
- }
-
- if (updated)
- {
- // At least on buffers is updated, we need to repaint the texture
- if (RenderOffScreenBuffers() != -1)
- {
- // MF
- //SwapAndDisplayBuffers();
- }
- else
- {
- // Error updating the mixing texture, don't swap.
- }
- }
- }
-
- UnlockAGLCntx();
-
- //WEBRTC_LOG(kTraceDebug, "Leaving ScreenUpdateProcess()");
- return true;
-}
-
-void VideoRenderAGL::ParentWindowResized(WindowRef window)
-{
- //WEBRTC_LOG(kTraceDebug, "%s HIViewRef:%d owner window has resized", __FUNCTION__, (int)_hiviewRef);
-
- LockAGLCntx();
-k
- // set flag
- _windowHasResized = false;
-
- if(FALSE == HIViewIsValid(_hiviewRef))
- {
- //WEBRTC_LOG(kTraceDebug, "invalid windowRef");
- UnlockAGLCntx();
- return;
- }
-
- if(FALSE == IsValidWindowPtr(window))
- {
- //WEBRTC_LOG(kTraceError, "invalid windowRef");
- UnlockAGLCntx();
- return;
- }
-
- if (window == NULL)
- {
- //WEBRTC_LOG(kTraceError, "windowRef = NULL");
- UnlockAGLCntx();
- return;
- }
-
- if(FALSE == MacIsWindowVisible(window))
- {
- //WEBRTC_LOG(kTraceDebug, "MacIsWindowVisible = FALSE. Returning early.");
- UnlockAGLCntx();
- return;
- }
-
- Rect contentBounds =
- { 0, 0, 0, 0};
-
-#if defined(USE_CONTENT_RGN)
- GetWindowBounds(window, kWindowContentRgn, &contentBounds);
-#elif defined(USE_STRUCT_RGN)
- GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
-#endif
-
- //WEBRTC_LOG(kTraceDebug, "%s contentBounds t:%d r:%d b:%d l:%d", __FUNCTION__, contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
-
- // update global vars
- _currentParentWindowBounds.top = contentBounds.top;
- _currentParentWindowBounds.left = contentBounds.left;
- _currentParentWindowBounds.bottom = contentBounds.bottom;
- _currentParentWindowBounds.right = contentBounds.right;
-
- _currentParentWindowWidth = _currentParentWindowBounds.right - _currentParentWindowBounds.left;
- _currentParentWindowHeight = _currentParentWindowBounds.bottom - _currentParentWindowBounds.top;
-
- _windowHasResized = true;
-
- // ********* update AGL offsets
- HIRect viewBounds;
- HIViewGetBounds(_hiviewRef, &viewBounds);
- HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
-
- const GLint offs[4] =
- { (int)(0.5f + viewBounds.origin.x),
- (int)(0.5f + _currentParentWindowHeight - (viewBounds.origin.y + viewBounds.size.height)),
- viewBounds.size.width, viewBounds.size.height};
- //WEBRTC_LOG(kTraceDebug, "%s _currentParentWindowHeight=%d", __FUNCTION__, _currentParentWindowHeight);
- //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
-
- aglSetCurrentContext(_aglContext);
- aglSetDrawable (_aglContext, GetWindowPort(window));
- aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
- aglEnable(_aglContext, AGL_BUFFER_RECT);
-
- // We need to change the viewport too if the HIView size has changed
- glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
-
- UnlockAGLCntx();
-
- return;
-}
-
-int VideoRenderAGL::CreateMixingContext()
-{
-
- LockAGLCntx();
-
- //WEBRTC_LOG(kTraceDebug, "Entering CreateMixingContext()");
-
- // Use both AGL_ACCELERATED and AGL_NO_RECOVERY to make sure
- // a hardware renderer is used and not a software renderer.
-
- GLint attributes[] =
- {
- AGL_DOUBLEBUFFER,
- AGL_WINDOW,
- AGL_RGBA,
- AGL_NO_RECOVERY,
- AGL_ACCELERATED,
- AGL_RED_SIZE, 8,
- AGL_GREEN_SIZE, 8,
- AGL_BLUE_SIZE, 8,
- AGL_ALPHA_SIZE, 8,
- AGL_DEPTH_SIZE, 24,
- AGL_NONE,
- };
-
- AGLPixelFormat aglPixelFormat;
-
- // ***** Set up the OpenGL Context *****
-
- // Get a pixel format for the attributes above
- aglPixelFormat = aglChoosePixelFormat(NULL, 0, attributes);
- if (NULL == aglPixelFormat)
- {
- //WEBRTC_LOG(kTraceError, "Could not create pixel format");
- UnlockAGLCntx();
- return -1;
- }
-
- // Create an AGL context
- _aglContext = aglCreateContext(aglPixelFormat, NULL);
- if (_aglContext == NULL)
- {
- //WEBRTC_LOG(kTraceError, "Could no create AGL context");
- UnlockAGLCntx();
- return -1;
- }
-
- // Release the pixel format memory
- aglDestroyPixelFormat(aglPixelFormat);
-
- // Set the current AGL context for the rest of the settings
- if (aglSetCurrentContext(_aglContext) == false)
- {
- //WEBRTC_LOG(kTraceError, "Could not set current context: %d", aglGetError());
- UnlockAGLCntx();
- return -1;
- }
-
- if (_isHIViewRef)
- {
- //---------------------------
- // BEGIN: new test code
-#if 0
- // Don't use this one!
- // There seems to be an OS X bug that can't handle
- // movements and resizing of the parent window
- // and or the HIView
- if (aglSetHIViewRef(_aglContext,_hiviewRef) == false)
- {
- //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError());
- UnlockAGLCntx();
- return -1;
- }
-#else
-
- // Get the parent window for this control
- WindowRef window = GetControlOwner(_hiviewRef);
-
- Rect globalBounds =
- { 0,0,0,0}; // The bounds for the parent window
- HIRect viewBounds; // Placemnt in the parent window and size.
- int windowHeight = 0;
-
- // Rect titleBounds = {0,0,0,0};
- // GetWindowBounds(window, kWindowTitleBarRgn, &titleBounds);
- // _titleBarHeight = titleBounds.top - titleBounds.bottom;
- // if(0 == _titleBarHeight)
- // {
- // //WEBRTC_LOG(kTraceError, "Titlebar height = 0");
- // //return -1;
- // }
-
-
- // Get the bounds for the parent window
-#if defined(USE_CONTENT_RGN)
- GetWindowBounds(window, kWindowContentRgn, &globalBounds);
-#elif defined(USE_STRUCT_RGN)
- GetWindowBounds(window, kWindowStructureRgn, &globalBounds);
-#endif
- windowHeight = globalBounds.bottom - globalBounds.top;
-
- // Get the bounds for the HIView
- HIViewGetBounds(_hiviewRef, &viewBounds);
-
- HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
-
- const GLint offs[4] =
- { (int)(0.5f + viewBounds.origin.x),
- (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
- viewBounds.size.width, viewBounds.size.height};
-
- //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
-
-
- aglSetDrawable (_aglContext, GetWindowPort(window));
- aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
- aglEnable(_aglContext, AGL_BUFFER_RECT);
-
- GLint surfaceOrder = 1; // 1: above window, -1 below.
- //OSStatus status = aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder);
- aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder);
-
- glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
-#endif
-
- }
- else
- {
- if(GL_FALSE == aglSetDrawable (_aglContext, GetWindowPort(_windowRef)))
- {
- //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError());
- UnlockAGLCntx();
- return -1;
- }
- }
-
- _windowWidth = _windowRect.right - _windowRect.left;
- _windowHeight = _windowRect.bottom - _windowRect.top;
-
- // opaque surface
- int surfaceOpacity = 1;
- if (aglSetInteger(_aglContext, AGL_SURFACE_OPACITY, (const GLint *) &surfaceOpacity) == false)
- {
- //WEBRTC_LOG(kTraceError, "Could not set surface opacity: %d", aglGetError());
- UnlockAGLCntx();
- return -1;
- }
-
- // 1 -> sync to screen rat, slow...
- //int swapInterval = 0; // 0 don't sync with vertical trace
- int swapInterval = 0; // 1 sync with vertical trace
- if (aglSetInteger(_aglContext, AGL_SWAP_INTERVAL, (const GLint *) &swapInterval) == false)
- {
- //WEBRTC_LOG(kTraceError, "Could not set swap interval: %d", aglGetError());
- UnlockAGLCntx();
- return -1;
- }
-
- // Update the rect with the current size
- if (GetWindowRect(_windowRect) == -1)
- {
- //WEBRTC_LOG(kTraceError, "Could not get window size");
- UnlockAGLCntx();
- return -1;
- }
-
- // Disable not needed functionality to increase performance
- glDisable(GL_DITHER);
- glDisable(GL_ALPHA_TEST);
- glDisable(GL_STENCIL_TEST);
- glDisable(GL_FOG);
- glDisable(GL_TEXTURE_2D);
- glPixelZoom(1.0, 1.0);
-
- glDisable(GL_BLEND);
- glDisable(GL_DEPTH_TEST);
- glDepthMask(GL_FALSE);
- glDisable(GL_CULL_FACE);
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
- GLenum glErr = glGetError();
-
- if (glErr)
- {
- }
-
- UpdateClipping();
-
- //WEBRTC_LOG(kTraceDebug, "Leaving CreateMixingContext()");
-
- UnlockAGLCntx();
- return 0;
-}
-
-int VideoRenderAGL::RenderOffScreenBuffers()
-{
- LockAGLCntx();
-
- // Get the current window size, it might have changed since last render.
- if (GetWindowRect(_windowRect) == -1)
- {
- //WEBRTC_LOG(kTraceError, "Could not get window rect");
- UnlockAGLCntx();
- return -1;
- }
-
- if (aglSetCurrentContext(_aglContext) == false)
- {
- //WEBRTC_LOG(kTraceError, "Could not set current context for rendering");
- UnlockAGLCntx();
- return -1;
- }
-
- // HERE - onl if updated!
- glClear(GL_COLOR_BUFFER_BIT);
-
- // Loop through all channels starting highest zOrder ending with lowest.
- for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
- rIt != _zOrderToChannel.rend();
- rIt++)
- {
- int channelId = rIt->second;
- std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channelId);
-
- VideoChannelAGL* aglChannel = it->second;
-
- aglChannel->RenderOffScreenBuffer();
- }
-
- SwapAndDisplayBuffers();
-
- UnlockAGLCntx();
- return 0;
-}
-
-int VideoRenderAGL::SwapAndDisplayBuffers()
-{
-
- LockAGLCntx();
- if (_fullScreen)
- {
- // TODO:
- // Swap front and back buffers, rendering taking care of in the same call
- //aglSwapBuffers(_aglContext);
- // Update buffer index to the idx for the next rendering!
- //_textureIdx = (_textureIdx + 1) & 1;
- }
- else
- {
- // Single buffer rendering, only update context.
- glFlush();
- aglSwapBuffers(_aglContext);
- HIViewSetNeedsDisplay(_hiviewRef, true);
- }
-
- UnlockAGLCntx();
- return 0;
-}
-
-int VideoRenderAGL::GetWindowRect(Rect& rect)
-{
-
- LockAGLCntx();
-
- if (_isHIViewRef)
- {
- if (_hiviewRef)
- {
- HIRect HIViewRect1;
- if(FALSE == HIViewIsValid(_hiviewRef))
- {
- rect.top = 0;
- rect.left = 0;
- rect.right = 0;
- rect.bottom = 0;
- //WEBRTC_LOG(kTraceError,"GetWindowRect() HIViewIsValid() returned false");
- UnlockAGLCntx();
- }
- HIViewGetBounds(_hiviewRef,&HIViewRect1);
- HIRectConvert(&HIViewRect1, 1, NULL, 2, NULL);
- if(HIViewRect1.origin.x < 0)
- {
- rect.top = 0;
- //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.top = 0");
- }
- else
- {
- rect.top = HIViewRect1.origin.x;
- }
-
- if(HIViewRect1.origin.y < 0)
- {
- rect.left = 0;
- //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.left = 0");
- }
- else
- {
- rect.left = HIViewRect1.origin.y;
- }
-
- if(HIViewRect1.size.width < 0)
- {
- rect.right = 0;
- //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.right = 0");
- }
- else
- {
- rect.right = HIViewRect1.size.width;
- }
-
- if(HIViewRect1.size.height < 0)
- {
- rect.bottom = 0;
- //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.bottom = 0");
- }
- else
- {
- rect.bottom = HIViewRect1.size.height;
- }
-
- ////WEBRTC_LOG(kTraceDebug,"GetWindowRect() HIViewRef: rect.top = %d, rect.left = %d, rect.right = %d, rect.bottom =%d in GetWindowRect", rect.top,rect.left,rect.right,rect.bottom);
- UnlockAGLCntx();
- }
- else
- {
- //WEBRTC_LOG(kTraceError, "invalid HIViewRef");
- UnlockAGLCntx();
- }
- }
- else
- {
- if (_windowRef)
- {
- GetWindowBounds(_windowRef, kWindowContentRgn, &rect);
- UnlockAGLCntx();
- }
- else
- {
- //WEBRTC_LOG(kTraceError, "No WindowRef");
- UnlockAGLCntx();
- }
- }
-}
-
-int VideoRenderAGL::UpdateClipping()
-{
- //WEBRTC_LOG(kTraceDebug, "Entering UpdateClipping()");
- LockAGLCntx();
-
- if(_isHIViewRef)
- {
- if(FALSE == HIViewIsValid(_hiviewRef))
- {
- //WEBRTC_LOG(kTraceError, "UpdateClipping() _isHIViewRef is invalid. Returning -1");
- UnlockAGLCntx();
- return -1;
- }
-
- RgnHandle visibleRgn = NewRgn();
- SetEmptyRgn (visibleRgn);
-
- if(-1 == CalculateVisibleRegion((ControlRef)_hiviewRef, visibleRgn, true))
- {
- }
-
- if(GL_FALSE == aglSetCurrentContext(_aglContext))
- {
- GLenum glErr = aglGetError();
- //WEBRTC_LOG(kTraceError, "aglSetCurrentContext returned FALSE with error code %d at line %d", glErr, __LINE__);
- }
-
- if(GL_FALSE == aglEnable(_aglContext, AGL_CLIP_REGION))
- {
- GLenum glErr = aglGetError();
- //WEBRTC_LOG(kTraceError, "aglEnable returned FALSE with error code %d at line %d\n", glErr, __LINE__);
- }
-
- if(GL_FALSE == aglSetInteger(_aglContext, AGL_CLIP_REGION, (const GLint*)visibleRgn))
- {
- GLenum glErr = aglGetError();
- //WEBRTC_LOG(kTraceError, "aglSetInteger returned FALSE with error code %d at line %d\n", glErr, __LINE__);
- }
-
- DisposeRgn(visibleRgn);
- }
- else
- {
- //WEBRTC_LOG(kTraceDebug, "Not using a hiviewref!\n");
- }
-
- //WEBRTC_LOG(kTraceDebug, "Leaving UpdateClipping()");
- UnlockAGLCntx();
- return true;
-}
-
-int VideoRenderAGL::CalculateVisibleRegion(ControlRef control, RgnHandle &visibleRgn, bool clipChildren)
-{
-
- // LockAGLCntx();
-
- //WEBRTC_LOG(kTraceDebug, "Entering CalculateVisibleRegion()");
- OSStatus osStatus = 0;
- OSErr osErr = 0;
-
- RgnHandle tempRgn = NewRgn();
- if (IsControlVisible(control))
- {
- RgnHandle childRgn = NewRgn();
- WindowRef window = GetControlOwner(control);
- ControlRef rootControl;
- GetRootControl(window, &rootControl); // 'wvnc'
- ControlRef masterControl;
- osStatus = GetSuperControl(rootControl, &masterControl);
- // //WEBRTC_LOG(kTraceDebug, "IBM GetSuperControl=%d", osStatus);
-
- if (masterControl != NULL)
- {
- CheckValidRegion(visibleRgn);
- // init visibleRgn with region of 'wvnc'
- osStatus = GetControlRegion(rootControl, kControlStructureMetaPart, visibleRgn);
- // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__);
- //GetSuperControl(rootControl, &rootControl);
- ControlRef tempControl = control, lastControl = 0;
- while (tempControl != masterControl) // current control != master
-
- {
- CheckValidRegion(tempRgn);
-
- // //WEBRTC_LOG(kTraceDebug, "IBM tempControl=%d masterControl=%d", tempControl, masterControl);
- ControlRef subControl;
-
- osStatus = GetControlRegion(tempControl, kControlStructureMetaPart, tempRgn); // intersect the region of the current control with visibleRgn
- // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__);
- CheckValidRegion(tempRgn);
-
- osErr = HIViewConvertRegion(tempRgn, tempControl, rootControl);
- // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d : %d", osErr, __LINE__);
- CheckValidRegion(tempRgn);
-
- SectRgn(tempRgn, visibleRgn, visibleRgn);
- CheckValidRegion(tempRgn);
- CheckValidRegion(visibleRgn);
- if (EmptyRgn(visibleRgn)) // if the region is empty, bail
- break;
-
- if (clipChildren || tempControl != control) // clip children if true, cut out the tempControl if it's not one passed to this function
-
- {
- UInt16 numChildren;
- osStatus = CountSubControls(tempControl, &numChildren); // count the subcontrols
- // //WEBRTC_LOG(kTraceDebug, "IBM CountSubControls=%d : %d", osStatus, __LINE__);
-
- // //WEBRTC_LOG(kTraceDebug, "IBM numChildren=%d", numChildren);
- for (int i = 0; i < numChildren; i++)
- {
- osErr = GetIndexedSubControl(tempControl, numChildren - i, &subControl); // retrieve the subcontrol in order by zorder
- // //WEBRTC_LOG(kTraceDebug, "IBM GetIndexedSubControls=%d : %d", osErr, __LINE__);
- if ( subControl == lastControl ) // break because of zorder
-
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM breaking because of zorder %d", __LINE__);
- break;
- }
-
- if (!IsControlVisible(subControl)) // dont' clip invisible controls
-
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM continue. Control is not visible %d", __LINE__);
- continue;
- }
-
- if(!subControl) continue;
-
- osStatus = GetControlRegion(subControl, kControlStructureMetaPart, tempRgn); //get the region of the current control and union to childrg
- // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d %d", osStatus, __LINE__);
- CheckValidRegion(tempRgn);
- if(osStatus != 0)
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__);
- continue;
- }
- if(!tempRgn)
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !tempRgn %d", osStatus, __LINE__);
- continue;
- }
-
- osStatus = HIViewConvertRegion(tempRgn, subControl, rootControl);
- CheckValidRegion(tempRgn);
- // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d %d", osStatus, __LINE__);
- if(osStatus != 0)
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__);
- continue;
- }
- if(!rootControl)
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !rootControl %d", osStatus, __LINE__);
- continue;
- }
-
- UnionRgn(tempRgn, childRgn, childRgn);
- CheckValidRegion(tempRgn);
- CheckValidRegion(childRgn);
- CheckValidRegion(visibleRgn);
- if(!childRgn)
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !childRgn %d", osStatus, __LINE__);
- continue;
- }
-
- } // next child control
- }
- lastControl = tempControl;
- GetSuperControl(tempControl, &subControl);
- tempControl = subControl;
- }
-
- DiffRgn(visibleRgn, childRgn, visibleRgn);
- CheckValidRegion(visibleRgn);
- CheckValidRegion(childRgn);
- DisposeRgn(childRgn);
- }
- else
- {
- CopyRgn(tempRgn, visibleRgn);
- CheckValidRegion(tempRgn);
- CheckValidRegion(visibleRgn);
- }
- DisposeRgn(tempRgn);
- }
-
- //WEBRTC_LOG(kTraceDebug, "Leaving CalculateVisibleRegion()");
- //_aglCritPtr->Leave();
- return 0;
-}
-
-bool VideoRenderAGL::CheckValidRegion(RgnHandle rHandle)
-{
-
- Handle hndSize = (Handle)rHandle;
- long size = GetHandleSize(hndSize);
- if(0 == size)
- {
-
- OSErr memErr = MemError();
- if(noErr != memErr)
- {
- // //WEBRTC_LOG(kTraceError, "IBM ERROR Could not get size of handle. MemError() returned %d", memErr);
- }
- else
- {
- // //WEBRTC_LOG(kTraceError, "IBM ERROR Could not get size of handle yet MemError() returned noErr");
- }
-
- }
- else
- {
- // //WEBRTC_LOG(kTraceDebug, "IBM handleSize = %d", size);
- }
-
- if(false == IsValidRgnHandle(rHandle))
- {
- // //WEBRTC_LOG(kTraceError, "IBM ERROR Invalid Region found : $%d", rHandle);
- assert(false);
- }
-
- int err = QDError();
- switch(err)
- {
- case 0:
- break;
- case -147:
- //WEBRTC_LOG(kTraceError, "ERROR region too big");
- assert(false);
- break;
-
- case -149:
- //WEBRTC_LOG(kTraceError, "ERROR not enough stack");
- assert(false);
- break;
-
- default:
- //WEBRTC_LOG(kTraceError, "ERROR Unknown QDError %d", err);
- assert(false);
- break;
- }
-
- return true;
-}
-
-int VideoRenderAGL::ChangeWindow(void* newWindowRef)
-{
-
- LockAGLCntx();
-
- UnlockAGLCntx();
- return -1;
-}
-
-int32_t VideoRenderAGL::StartRender()
-{
-
- LockAGLCntx();
- const unsigned int MONITOR_FREQ = 60;
- if(TRUE == _renderingIsPaused)
- {
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Rendering is paused. Restarting now", __FUNCTION__, __LINE__);
-
- // we already have the thread. Most likely StopRender() was called and they were paused
- if(FALSE == _screenUpdateThread->Start())
- {
- //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return -1;
- }
- _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
- if(FALSE == _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ))
- {
- //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateEvent", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return -1;
- }
-
- return 0;
- }
-
- _screenUpdateThread.reset(
- new rtc::PlatformThread(ScreenUpdateThreadProc, this, "ScreenUpdate"));
- _screenUpdateEvent = EventWrapper::Create();
-
- if (!_screenUpdateThread)
- {
- //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return -1;
- }
-
- _screenUpdateThread->Start();
- _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
- _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ);
-
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Started screenUpdateThread", __FUNCTION__, __LINE__);
-
- UnlockAGLCntx();
- return 0;
-}
-
-int32_t VideoRenderAGL::StopRender()
-{
- LockAGLCntx();
-
- if(!_screenUpdateThread || !_screenUpdateEvent)
- {
- _renderingIsPaused = TRUE;
- UnlockAGLCntx();
- return 0;
- }
-
- if(FALSE == _screenUpdateThread->Stop() || FALSE == _screenUpdateEvent->StopTimer())
- {
- _renderingIsPaused = FALSE;
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Could not stop either: screenUpdateThread or screenUpdateEvent", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return -1;
- }
-
- _renderingIsPaused = TRUE;
-
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Stopped screenUpdateThread", __FUNCTION__, __LINE__);
- UnlockAGLCntx();
- return 0;
-}
-
-int32_t VideoRenderAGL::DeleteAGLChannel(const uint32_t streamID)
-{
-
- LockAGLCntx();
-
- std::map<int, VideoChannelAGL*>::iterator it;
- it = _aglChannels.begin();
-
- while (it != _aglChannels.end())
- {
- VideoChannelAGL* channel = it->second;
- //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Deleting channel %d", __FUNCTION__, __LINE__, streamID);
- delete channel;
- it++;
- }
- _aglChannels.clear();
-
- UnlockAGLCntx();
- return 0;
-}
-
-int32_t VideoRenderAGL::GetChannelProperties(const uint16_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom)
-{
-
- LockAGLCntx();
- UnlockAGLCntx();
- return -1;
-
-}
-
-void VideoRenderAGL::LockAGLCntx()
-{
- _renderCritSec.Enter();
-}
-void VideoRenderAGL::UnlockAGLCntx()
-{
- _renderCritSec.Leave();
-}
-
-} // namespace webrtc
-
-#endif // CARBON_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.h b/chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.h
deleted file mode 100644
index c0a60597e98..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_agl.h
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-
-#if defined(CARBON_RENDERING)
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
-
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-#define NEW_HIVIEW_PARENT_EVENT_HANDLER 1
-#define NEW_HIVIEW_EVENT_HANDLER 1
-#define USE_STRUCT_RGN
-
-#include <AGL/agl.h>
-#include <Carbon/Carbon.h>
-#include <OpenGL/OpenGL.h>
-#include <OpenGL/glext.h>
-#include <OpenGL/glu.h>
-#include <list>
-#include <map>
-#include <memory>
-
-class VideoRenderAGL;
-
-namespace webrtc {
-class CriticalSectionWrapper;
-class EventWrapper;
-
-class VideoChannelAGL : public VideoRenderCallback {
- public:
-
- VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner);
- virtual ~VideoChannelAGL();
- virtual int FrameSizeChange(int width, int height, int numberOfStreams);
- virtual int DeliverFrame(const VideoFrame& videoFrame);
- virtual int UpdateSize(int width, int height);
- int SetStreamSettings(int streamId, float startWidth, float startHeight,
- float stopWidth, float stopHeight);
- int SetStreamCropSettings(int streamId, float startWidth, float startHeight,
- float stopWidth, float stopHeight);
- int RenderOffScreenBuffer();
- int IsUpdated(bool& isUpdated);
- virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
- virtual int32_t RenderFrame(const uint32_t streamId, VideoFrame& videoFrame);
-
- private:
-
- AGLContext _aglContext;
- int _id;
- VideoRenderAGL* _owner;
- int _width;
- int _height;
- int _stretchedWidth;
- int _stretchedHeight;
- float _startHeight;
- float _startWidth;
- float _stopWidth;
- float _stopHeight;
- int _xOldWidth;
- int _yOldHeight;
- int _oldStretchedHeight;
- int _oldStretchedWidth;
- unsigned char* _buffer;
- size_t _bufferSize;
- size_t _incomingBufferSize;
- bool _bufferIsUpdated;
- bool _sizeInitialized;
- int _numberOfStreams;
- bool _bVideoSizeStartedChanging;
- GLenum _pixelFormat;
- GLenum _pixelDataType;
- unsigned int _texture;
-};
-
-class VideoRenderAGL {
- public:
- VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId);
- VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId);
- ~VideoRenderAGL();
-
- int Init();
- VideoChannelAGL* CreateAGLChannel(int channel, int zOrder, float startWidth,
- float startHeight, float stopWidth,
- float stopHeight);
- VideoChannelAGL* ConfigureAGLChannel(int channel, int zOrder,
- float startWidth, float startHeight,
- float stopWidth, float stopHeight);
- int DeleteAGLChannel(int channel);
- int DeleteAllAGLChannels();
- int StopThread();
- bool IsFullScreen();
- bool HasChannels();
- bool HasChannel(int channel);
- int GetChannels(std::list<int>& channelList);
- void LockAGLCntx();
- void UnlockAGLCntx();
-
- static int GetOpenGLVersion(int& aglMajor, int& aglMinor);
-
- // ********** new module functions ************ //
- int ChangeWindow(void* newWindowRef);
- int32_t StartRender();
- int32_t StopRender();
- int32_t DeleteAGLChannel(const uint32_t streamID);
- int32_t GetChannelProperties(const uint16_t streamId, uint32_t& zOrder,
- float& left, float& top, float& right,
- float& bottom);
-
- protected:
- static bool ScreenUpdateThreadProc(void* obj);
- bool ScreenUpdateProcess();
- int GetWindowRect(Rect& rect);
-
- private:
- int CreateMixingContext();
- int RenderOffScreenBuffers();
- int SwapAndDisplayBuffers();
- int UpdateClipping();
- int CalculateVisibleRegion(ControlRef control, RgnHandle& visibleRgn,
- bool clipChildren);
- bool CheckValidRegion(RgnHandle rHandle);
- void ParentWindowResized(WindowRef window);
-
- // Carbon GUI event handlers
- static pascal OSStatus sHandleWindowResized(
- EventHandlerCallRef nextHandler, EventRef theEvent, void* userData);
- static pascal OSStatus sHandleHiViewResized(
- EventHandlerCallRef nextHandler, EventRef theEvent, void* userData);
-
- HIViewRef _hiviewRef;
- WindowRef _windowRef;
- bool _fullScreen;
- int _id;
- webrtc::CriticalSectionWrapper& _renderCritSec;
- // TODO(pbos): Remove unique_ptr and use PlatformThread directly.
- std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
- webrtc::EventWrapper* _screenUpdateEvent;
- bool _isHIViewRef;
- AGLContext _aglContext;
- int _windowWidth;
- int _windowHeight;
- int _lastWindowWidth;
- int _lastWindowHeight;
- int _lastHiViewWidth;
- int _lastHiViewHeight;
- int _currentParentWindowHeight;
- int _currentParentWindowWidth;
- Rect _currentParentWindowBounds;
- bool _windowHasResized;
- Rect _lastParentWindowBounds;
- Rect _currentHIViewBounds;
- Rect _lastHIViewBounds;
- Rect _windowRect;
- std::map<int, VideoChannelAGL*> _aglChannels;
- std::multimap<int, int> _zOrderToChannel;
- EventHandlerRef _hiviewEventHandlerRef;
- EventHandlerRef _windowEventHandlerRef;
- HIRect _currentViewBounds;
- HIRect _lastViewBounds;
- bool _renderingIsPaused;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
-
-#endif // CARBON_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc b/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc
deleted file mode 100644
index f85be5fb5e2..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc
+++ /dev/null
@@ -1,280 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-#if defined(CARBON_RENDERING)
-
-#include <AGL/agl.h>
-#include "webrtc/modules/video_render/mac/video_render_agl.h"
-#include "webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-VideoRenderMacCarbonImpl::VideoRenderMacCarbonImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
-_id(id),
-_renderMacCarbonCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
-_fullScreen(fullscreen),
-_ptrWindow(window)
-{
-
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
-
-}
-
-VideoRenderMacCarbonImpl::~VideoRenderMacCarbonImpl()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__);
- delete &_renderMacCarbonCritsect;
-}
-
-int32_t
-VideoRenderMacCarbonImpl::Init()
-{
- CriticalSectionScoped cs(&_renderMacCarbonCritsect);
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
-
- if (!_ptrWindow)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
- return -1;
- }
-
- // We don't know if the user passed us a WindowRef or a HIViewRef, so test.
- bool referenceIsValid = false;
-
- // Check if it's a valid WindowRef
- //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d _ptrWindowRef before WindowRef cast: %x", __FUNCTION__, __LINE__, _ptrWindowRef);
- WindowRef* windowRef = static_cast<WindowRef*>(_ptrWindow);
- //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d _ptrWindowRef after cast: %x", __FUNCTION__, __LINE__, _ptrWindowRef);
- if (IsValidWindowPtr(*windowRef))
- {
- _ptrCarbonRender = new VideoRenderAGL(*windowRef, _fullScreen, _id);
- referenceIsValid = true;
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with WindowRef:%x", __FUNCTION__, __LINE__, *windowRef);
- }
- else
- {
- HIViewRef* hiviewRef = static_cast<HIViewRef*>(_ptrWindow);
- if (HIViewIsValid(*hiviewRef))
- {
- _ptrCarbonRender = new VideoRenderAGL(*hiviewRef, _fullScreen, _id);
- referenceIsValid = true;
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with HIViewRef:%x", __FUNCTION__, __LINE__, hiviewRef);
- }
- }
-
- if(!referenceIsValid)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef/HIViewRef Returning -1", __FUNCTION__, __LINE__);
- return -1;
- }
-
- if(!_ptrCarbonRender)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to create an instance of VideoRenderAGL. Returning -1", __FUNCTION__, __LINE__);
- }
-
- int retVal = _ptrCarbonRender->Init();
- if (retVal == -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to init CarbonRenderer", __FUNCTION__, __LINE__);
- return -1;
- }
-
- return 0;
-}
-
-int32_t
-VideoRenderMacCarbonImpl::ChangeWindow(void* window)
-{
- return -1;
- CriticalSectionScoped cs(&_renderMacCarbonCritsect);
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing ID to ", __FUNCTION__, window);
-
- if (window == NULL)
- {
- return -1;
- }
- _ptrWindow = window;
-
-
- _ptrWindow = window;
-
- return 0;
-}
-
-VideoRenderCallback*
-VideoRenderMacCarbonImpl::AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
-
- CriticalSectionScoped cs(&_renderMacCarbonCritsect);
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
- VideoChannelAGL* AGLChannel = NULL;
-
- if(!_ptrWindow)
- {
- }
-
- if(!AGLChannel)
- {
- AGLChannel = _ptrCocoaRender->CreateNSGLChannel(streamId, zOrder, left, top, right, bottom);
- }
-
- return AGLChannel;
-
-}
-
-int32_t
-VideoRenderMacCarbonImpl::DeleteIncomingRenderStream(const uint32_t streamId)
-{
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
- CriticalSectionScoped cs(&_renderMacCarbonCritsect);
- _ptrCarbonRender->DeleteAGLChannel(streamId);
-
- return 0;
-}
-
-int32_t
-VideoRenderMacCarbonImpl::GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const
-{
- return -1;
- return _ptrCarbonRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom);
-}
-
-int32_t
-VideoRenderMacCarbonImpl::StartRender()
-{
- return _ptrCarbonRender->StartRender();
-}
-
-int32_t
-VideoRenderMacCarbonImpl::StopRender()
-{
- return _ptrCarbonRender->StopRender();
-}
-
-VideoRenderType
-VideoRenderMacCarbonImpl::RenderType()
-{
- return kRenderCarbon;
-}
-
-RawVideoType
-VideoRenderMacCarbonImpl::PerferedVideoType()
-{
- return kVideoI420;
-}
-
-bool
-VideoRenderMacCarbonImpl::FullScreen()
-{
- return false;
-}
-
-int32_t
-VideoRenderMacCarbonImpl::GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const
-{
- totalGraphicsMemory = 0;
- availableGraphicsMemory = 0;
- return 0;
-}
-
-int32_t
-VideoRenderMacCarbonImpl::GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const
-{
- CriticalSectionScoped cs(&_renderMacCarbonCritsect);
- //NSScreen* mainScreen = [NSScreen mainScreen];
-
- //NSRect frame = [mainScreen frame];
-
- //screenWidth = frame.size.width;
- //screenHeight = frame.size.height;
- return 0;
-}
-
-uint32_t
-VideoRenderMacCarbonImpl::RenderFrameRate(const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_renderMacCarbonCritsect);
- return 0;
-}
-
-int32_t
-VideoRenderMacCarbonImpl::SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return 0;
-}
-
-int32_t VideoRenderMacCarbonImpl::ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return 0;
-}
-
-int32_t
-VideoRenderMacCarbonImpl::SetTransparentBackground(const bool enable)
-{
- return 0;
-}
-
-int32_t VideoRenderMacCarbonImpl::SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return 0;
-}
-
-int32_t VideoRenderMacCarbonImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return 0;
-}
-
-
-} // namespace webrtc
-
-#endif // CARBON_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h b/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h
deleted file mode 100644
index 9ad3a6cdd18..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-#if defined(CARBON_RENDERING)
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
-
-#include "webrtc/modules/video_render/i_video_render.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-class VideoRenderAGL;
-
-// Class definitions
-class VideoRenderMacCarbonImpl : IVideoRender
-{
-public:
- /*
- * Constructor/destructor
- */
-
- VideoRenderMacCarbonImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen);
-
- virtual ~VideoRenderMacCarbonImpl();
-
- virtual int32_t Init();
-
- virtual int32_t ChangeWindow(void* window);
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
- virtual VideoRenderCallback* AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t DeleteIncomingRenderStream(const uint32_t streamId);
-
- virtual int32_t GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender();
-
- virtual int32_t StopRender();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual VideoRenderType RenderType();
-
- virtual RawVideoType PerferedVideoType();
-
- virtual bool FullScreen();
-
- virtual int32_t GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const;
-
- virtual int32_t GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const;
-
- virtual uint32_t RenderFrameRate(const uint32_t streamId);
-
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t FullScreenRender(void* window, const bool enable)
- {
- // not supported in Carbon at this time
- return -1;
- }
-
-private:
- int32_t _id;
- CriticalSectionWrapper& _renderMacCarbonCritsect;
- bool _fullScreen;
- void* _ptrWindow;
- VideoRenderAGL* _ptrCarbonRender;
-
-};
-
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
-#endif // CARBON_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h b/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h
deleted file mode 100644
index 21add272bbd..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-
-#if defined(COCOA_RENDERING)
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
-
-#include "webrtc/modules/video_render/i_video_render.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-class VideoRenderNSOpenGL;
-
-// Class definitions
-class VideoRenderMacCocoaImpl : IVideoRender
-{
-public:
- /*
- * Constructor/destructor
- */
-
- VideoRenderMacCocoaImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen);
-
- virtual ~VideoRenderMacCocoaImpl();
-
- virtual int32_t Init();
-
- virtual int32_t ChangeWindow(void* window);
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
- virtual VideoRenderCallback* AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t DeleteIncomingRenderStream(const uint32_t streamId);
-
- virtual int32_t GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender();
-
- virtual int32_t StopRender();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual VideoRenderType RenderType();
-
- virtual RawVideoType PerferedVideoType();
-
- virtual bool FullScreen();
-
- virtual int32_t GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const;
-
- virtual int32_t GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const;
-
- virtual uint32_t RenderFrameRate(const uint32_t streamId);
-
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t FullScreenRender(void* window, const bool enable);
-
-private:
- int32_t _id;
- CriticalSectionWrapper& _renderMacCocoaCritsect;
- bool _fullScreen;
- void* _ptrWindow;
- VideoRenderNSOpenGL* _ptrCocoaRender;
-
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
-#endif // COCOA_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm b/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm
deleted file mode 100644
index 5b017fecc0c..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm
+++ /dev/null
@@ -1,253 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-#if defined(COCOA_RENDERING)
-
-#include "webrtc/modules/video_render/mac/cocoa_render_view.h"
-#include "webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h"
-#include "webrtc/modules/video_render/mac/video_render_nsopengl.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-VideoRenderMacCocoaImpl::VideoRenderMacCocoaImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
-_id(id),
-_renderMacCocoaCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
-_fullScreen(fullscreen),
-_ptrWindow(window)
-{
-
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
-}
-
-VideoRenderMacCocoaImpl::~VideoRenderMacCocoaImpl()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__);
- delete &_renderMacCocoaCritsect;
- if (_ptrCocoaRender)
- {
- delete _ptrCocoaRender;
- _ptrCocoaRender = NULL;
- }
-}
-
-int32_t
-VideoRenderMacCocoaImpl::Init()
-{
-
- CriticalSectionScoped cs(&_renderMacCocoaCritsect);
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
-
- // cast ptrWindow from void* to CocoaRenderer. Void* was once NSOpenGLView, and CocoaRenderer is NSOpenGLView.
- _ptrCocoaRender = new VideoRenderNSOpenGL((CocoaRenderView*)_ptrWindow, _fullScreen, _id);
- if (!_ptrWindow)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
- return -1;
- }
- int retVal = _ptrCocoaRender->Init();
- if (retVal == -1)
- {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Failed to init %s:%d", __FUNCTION__, __LINE__);
- return -1;
- }
-
- return 0;
-}
-
-int32_t
-VideoRenderMacCocoaImpl::ChangeWindow(void* window)
-{
-
- CriticalSectionScoped cs(&_renderMacCocoaCritsect);
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing ID to ", __FUNCTION__, window);
-
- if (window == NULL)
- {
- return -1;
- }
- _ptrWindow = window;
-
-
- _ptrWindow = window;
- _ptrCocoaRender->ChangeWindow((CocoaRenderView*)_ptrWindow);
-
- return 0;
-}
-
-VideoRenderCallback*
-VideoRenderMacCocoaImpl::AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_renderMacCocoaCritsect);
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
- VideoChannelNSOpenGL* nsOpenGLChannel = NULL;
-
- if(!_ptrWindow)
- {
- }
-
- if(!nsOpenGLChannel)
- {
- nsOpenGLChannel = _ptrCocoaRender->CreateNSGLChannel(streamId, zOrder, left, top, right, bottom);
- }
-
- return nsOpenGLChannel;
-
-}
-
-int32_t
-VideoRenderMacCocoaImpl::DeleteIncomingRenderStream(const uint32_t streamId)
-{
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
- CriticalSectionScoped cs(&_renderMacCocoaCritsect);
- _ptrCocoaRender->DeleteNSGLChannel(streamId);
-
- return 0;
-}
-
-int32_t
-VideoRenderMacCocoaImpl::GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const
-{
- return _ptrCocoaRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom);
-}
-
-int32_t
-VideoRenderMacCocoaImpl::StartRender()
-{
- return _ptrCocoaRender->StartRender();
-}
-
-int32_t
-VideoRenderMacCocoaImpl::StopRender()
-{
- return _ptrCocoaRender->StopRender();
-}
-
-VideoRenderType
-VideoRenderMacCocoaImpl::RenderType()
-{
- return kRenderCocoa;
-}
-
-RawVideoType
-VideoRenderMacCocoaImpl::PerferedVideoType()
-{
- return kVideoI420;
-}
-
-bool
-VideoRenderMacCocoaImpl::FullScreen()
-{
- return false;
-}
-
-int32_t
-VideoRenderMacCocoaImpl::GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const
-{
- totalGraphicsMemory = 0;
- availableGraphicsMemory = 0;
- return 0;
-}
-
-int32_t
-VideoRenderMacCocoaImpl::GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const
-{
- CriticalSectionScoped cs(&_renderMacCocoaCritsect);
- NSScreen* mainScreen = [NSScreen mainScreen];
-
- NSRect frame = [mainScreen frame];
-
- screenWidth = frame.size.width;
- screenHeight = frame.size.height;
- return 0;
-}
-
-uint32_t
-VideoRenderMacCocoaImpl::RenderFrameRate(const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_renderMacCocoaCritsect);
- return 0;
-}
-
-int32_t
-VideoRenderMacCocoaImpl::SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return 0;
-}
-
-int32_t VideoRenderMacCocoaImpl::ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return 0;
-}
-
-int32_t
-VideoRenderMacCocoaImpl::SetTransparentBackground(const bool enable)
-{
- return 0;
-}
-
-int32_t VideoRenderMacCocoaImpl::SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return _ptrCocoaRender->SetText(textId, text, textLength, textColorRef, backgroundColorRef, left, top, right, bottom);
-}
-
-int32_t VideoRenderMacCocoaImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- return 0;
-}
-
-int32_t VideoRenderMacCocoaImpl::FullScreenRender(void* window, const bool enable)
-{
- return -1;
-}
-
-} // namespace webrtc
-
-#endif // COCOA_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.h b/chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.h
deleted file mode 100644
index 457557dad64..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.h
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-#if defined(COCOA_RENDERING)
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
-
-#import <Cocoa/Cocoa.h>
-#import <OpenGL/OpenGL.h>
-#import <OpenGL/glext.h>
-#import <OpenGL/glu.h>
-#include <QuickTime/QuickTime.h>
-#include <list>
-#include <map>
-#include <memory>
-
-#include "webrtc/base/thread_annotations.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-#import "webrtc/modules/video_render/mac/cocoa_full_screen_window.h"
-#import "webrtc/modules/video_render/mac/cocoa_render_view.h"
-
-class Trace;
-
-namespace rtc {
-class PlatformThread;
-} // namespace rtc
-
-namespace webrtc {
-class EventTimerWrapper;
-class VideoRenderNSOpenGL;
-class CriticalSectionWrapper;
-
-class VideoChannelNSOpenGL : public VideoRenderCallback {
-public:
- VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner);
- virtual ~VideoChannelNSOpenGL();
-
- // A new frame is delivered
- virtual int DeliverFrame(const VideoFrame& videoFrame);
-
- // Called when the incoming frame size and/or number of streams in mix
- // changes.
- virtual int FrameSizeChange(int width, int height, int numberOfStreams);
-
- virtual int UpdateSize(int width, int height);
-
- // Setup
- int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
- int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
-
- // Called when it's time to render the last frame for the channel
- int RenderOffScreenBuffer();
-
- // Returns true if a new buffer has been delivered to the texture
- int IsUpdated(bool& isUpdated);
- virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
-
- // ********** new module functions ************ //
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame);
-
- // ********** new module helper functions ***** //
- int ChangeContext(NSOpenGLContext *nsglContext);
- int32_t GetChannelProperties(float& left,
- float& top,
- float& right,
- float& bottom);
-
-private:
-
- NSOpenGLContext* _nsglContext;
- const int _id;
- VideoRenderNSOpenGL* _owner;
- int32_t _width;
- int32_t _height;
- float _startWidth;
- float _startHeight;
- float _stopWidth;
- float _stopHeight;
- int _stretchedWidth;
- int _stretchedHeight;
- int _oldStretchedHeight;
- int _oldStretchedWidth;
- unsigned char* _buffer;
- size_t _bufferSize;
- size_t _incomingBufferSize;
- bool _bufferIsUpdated;
- int _numberOfStreams;
- GLenum _pixelFormat;
- GLenum _pixelDataType;
- unsigned int _texture;
-};
-
-class VideoRenderNSOpenGL
-{
-
-public: // methods
- VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId);
- ~VideoRenderNSOpenGL();
-
- static int GetOpenGLVersion(int& nsglMajor, int& nsglMinor);
-
- // Allocates textures
- int Init();
- VideoChannelNSOpenGL* CreateNSGLChannel(int streamID, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
- VideoChannelNSOpenGL* ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
- int DeleteNSGLChannel(int channel);
- int DeleteAllNSGLChannels();
- int StopThread();
- bool IsFullScreen();
- bool HasChannels();
- bool HasChannel(int channel);
- int GetChannels(std::list<int>& channelList);
- void LockAGLCntx() EXCLUSIVE_LOCK_FUNCTION(_nsglContextCritSec);
- void UnlockAGLCntx() UNLOCK_FUNCTION(_nsglContextCritSec);
-
- // ********** new module functions ************ //
- int ChangeWindow(CocoaRenderView* newWindowRef);
- int32_t StartRender();
- int32_t StopRender();
- int32_t DeleteNSGLChannel(const uint32_t streamID);
- int32_t GetChannelProperties(const uint16_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom);
-
- int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- // ********** new module helper functions ***** //
- int configureNSOpenGLEngine();
- int configureNSOpenGLView();
- int setRenderTargetWindow();
- int setRenderTargetFullScreen();
-
-protected: // methods
- static bool ScreenUpdateThreadProc(void* obj);
- bool ScreenUpdateProcess();
- int GetWindowRect(Rect& rect);
-
-private: // methods
-
- int CreateMixingContext();
- int RenderOffScreenBuffers();
- int DisplayBuffers();
-
-private: // variables
-
-
- CocoaRenderView* _windowRef;
- bool _fullScreen;
- int _id;
- CriticalSectionWrapper& _nsglContextCritSec;
- // TODO(pbos): Remove unique_ptr and use PlatformThread directly.
- std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
- EventTimerWrapper* _screenUpdateEvent;
- NSOpenGLContext* _nsglContext;
- NSOpenGLContext* _nsglFullScreenContext;
- CocoaFullScreenWindow* _fullScreenWindow;
- Rect _windowRect; // The size of the window
- int _windowWidth;
- int _windowHeight;
- std::map<int, VideoChannelNSOpenGL*> _nsglChannels;
- std::multimap<int, int> _zOrderToChannel;
- bool _renderingIsPaused;
- NSView* _windowRefSuperView;
- NSRect _windowRefSuperViewFrame;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
-#endif // COCOA_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.mm b/chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.mm
deleted file mode 100644
index b7683a96af4..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/mac/video_render_nsopengl.mm
+++ /dev/null
@@ -1,1247 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-#if defined(COCOA_RENDERING)
-
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_render/mac/video_render_nsopengl.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-VideoChannelNSOpenGL::VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner) :
-_nsglContext( nsglContext),
-_id( iId),
-_owner( owner),
-_width( 0),
-_height( 0),
-_startWidth( 0.0f),
-_startHeight( 0.0f),
-_stopWidth( 0.0f),
-_stopHeight( 0.0f),
-_stretchedWidth( 0),
-_stretchedHeight( 0),
-_oldStretchedHeight( 0),
-_oldStretchedWidth( 0),
-_buffer( 0),
-_bufferSize( 0),
-_incomingBufferSize( 0),
-_bufferIsUpdated( false),
-_numberOfStreams( 0),
-_pixelFormat( GL_RGBA),
-_pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
-_texture( 0)
-{
-
-}
-
-VideoChannelNSOpenGL::~VideoChannelNSOpenGL()
-{
- if (_buffer)
- {
- delete [] _buffer;
- _buffer = NULL;
- }
-
- if (_texture != 0)
- {
- [_nsglContext makeCurrentContext];
- glDeleteTextures(1, (const GLuint*) &_texture);
- _texture = 0;
- }
-}
-
-int VideoChannelNSOpenGL::ChangeContext(NSOpenGLContext *nsglContext)
-{
- _owner->LockAGLCntx();
-
- _nsglContext = nsglContext;
- [_nsglContext makeCurrentContext];
-
- _owner->UnlockAGLCntx();
- return 0;
-
-}
-
-int32_t VideoChannelNSOpenGL::GetChannelProperties(float& left, float& top,
- float& right, float& bottom)
-{
-
- _owner->LockAGLCntx();
-
- left = _startWidth;
- top = _startHeight;
- right = _stopWidth;
- bottom = _stopHeight;
-
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int32_t VideoChannelNSOpenGL::RenderFrame(const uint32_t /*streamId*/,
- const VideoFrame& videoFrame) {
- _owner->LockAGLCntx();
-
- if(_width != videoFrame.width() ||
- _height != videoFrame.height()) {
- if(FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
- _owner->UnlockAGLCntx();
- return -1;
- }
- }
- int ret = DeliverFrame(videoFrame);
-
- _owner->UnlockAGLCntx();
- return ret;
-}
-
-int VideoChannelNSOpenGL::UpdateSize(int width, int height)
-{
- _owner->LockAGLCntx();
- _width = width;
- _height = height;
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelNSOpenGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
-{
-
- _owner->LockAGLCntx();
- _stretchedHeight = stretchHeight;
- _stretchedWidth = stretchWidth;
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelNSOpenGL::FrameSizeChange(int width, int height, int numberOfStreams)
-{
- // We got a new frame size from VideoAPI, prepare the buffer
-
- _owner->LockAGLCntx();
-
- if (width == _width && _height == height)
- {
- // We already have a correct buffer size
- _numberOfStreams = numberOfStreams;
- _owner->UnlockAGLCntx();
- return 0;
- }
-
- _width = width;
- _height = height;
-
- // Delete the old buffer, create a new one with correct size.
- if (_buffer)
- {
- delete [] _buffer;
- _bufferSize = 0;
- }
-
- _incomingBufferSize = CalcBufferSize(kI420, _width, _height);
- _bufferSize = CalcBufferSize(kARGB, _width, _height);
- _buffer = new unsigned char [_bufferSize];
- memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
-
- [_nsglContext makeCurrentContext];
-
- if(glIsTexture(_texture))
- {
- glDeleteTextures(1, (const GLuint*) &_texture);
- _texture = 0;
- }
-
- // Create a new texture
- glGenTextures(1, (GLuint *) &_texture);
-
- GLenum glErr = glGetError();
-
- if (glErr != GL_NO_ERROR)
- {
-
- }
-
- glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
-
- GLint texSize;
- glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);
-
- if (texSize < _width || texSize < _height)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- // Set up th texture type and size
- glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
- 0, // level
- GL_RGBA, // internal format
- _width, // width
- _height, // height
- 0, // border 0/1 = off/on
- _pixelFormat, // format, GL_RGBA
- _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
- _buffer); // pixel data
-
- glErr = glGetError();
- if (glErr != GL_NO_ERROR)
- {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelNSOpenGL::DeliverFrame(const VideoFrame& videoFrame) {
- _owner->LockAGLCntx();
-
- if (_texture == 0) {
- _owner->UnlockAGLCntx();
- return 0;
- }
-
- if (CalcBufferSize(kI420, videoFrame.width(), videoFrame.height()) !=
- _incomingBufferSize) {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- // Using the VideoFrame for YV12: YV12 is YVU; I420 assumes
- // YUV.
- // TODO(mikhal) : Use appropriate functionality.
- // TODO(wu): See if we are using glTexSubImage2D correctly.
- int rgbRet = ConvertFromYV12(videoFrame, kBGRA, 0, _buffer);
- if (rgbRet < 0) {
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- [_nsglContext makeCurrentContext];
-
- // Make sure this texture is the active one
- glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
- GLenum glErr = glGetError();
- if (glErr != GL_NO_ERROR) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "ERROR %d while calling glBindTexture", glErr);
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
- 0, // Level, not use
- 0, // start point x, (low left of pic)
- 0, // start point y,
- _width, // width
- _height, // height
- _pixelFormat, // pictue format for _buffer
- _pixelDataType, // data type of _buffer
- (const GLvoid*) _buffer); // the pixel data
-
- glErr = glGetError();
- if (glErr != GL_NO_ERROR) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "ERROR %d while calling glTexSubImage2d", glErr);
- _owner->UnlockAGLCntx();
- return -1;
- }
-
- _bufferIsUpdated = true;
-
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelNSOpenGL::RenderOffScreenBuffer()
-{
-
- _owner->LockAGLCntx();
-
- if (_texture == 0)
- {
- _owner->UnlockAGLCntx();
- return 0;
- }
-
- // if(_fullscreen)
- // {
- // NSRect mainDisplayRect = [[NSScreen mainScreen] frame];
- // _width = mainDisplayRect.size.width;
- // _height = mainDisplayRect.size.height;
- // glViewport(0, 0, mainDisplayRect.size.width, mainDisplayRect.size.height);
- // float newX = mainDisplayRect.size.width/_width;
- // float newY = mainDisplayRect.size.height/_height;
-
- // convert from 0.0 <= size <= 1.0 to
- // open gl world -1.0 < size < 1.0
- GLfloat xStart = 2.0f * _startWidth - 1.0f;
- GLfloat xStop = 2.0f * _stopWidth - 1.0f;
- GLfloat yStart = 1.0f - 2.0f * _stopHeight;
- GLfloat yStop = 1.0f - 2.0f * _startHeight;
-
- [_nsglContext makeCurrentContext];
-
- glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
- _oldStretchedHeight = _stretchedHeight;
- _oldStretchedWidth = _stretchedWidth;
-
- glLoadIdentity();
- glEnable(GL_TEXTURE_RECTANGLE_EXT);
- glBegin(GL_POLYGON);
- {
- glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
- glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
- glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
- glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
- }
- glEnd();
-
- glDisable(GL_TEXTURE_RECTANGLE_EXT);
-
- _bufferIsUpdated = false;
-
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelNSOpenGL::IsUpdated(bool& isUpdated)
-{
- _owner->LockAGLCntx();
-
- isUpdated = _bufferIsUpdated;
-
- _owner->UnlockAGLCntx();
- return 0;
-}
-
-int VideoChannelNSOpenGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight)
-{
- _owner->LockAGLCntx();
-
- _startWidth = startWidth;
- _stopWidth = stopWidth;
- _startHeight = startHeight;
- _stopHeight = stopHeight;
-
- int oldWidth = _width;
- int oldHeight = _height;
- int oldNumberOfStreams = _numberOfStreams;
-
- _width = 0;
- _height = 0;
-
- int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams);
-
- _owner->UnlockAGLCntx();
- return retVal;
-}
-
-int VideoChannelNSOpenGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/)
-{
- return -1;
-}
-
-/*
- *
- * VideoRenderNSOpenGL
- *
- */
-
-VideoRenderNSOpenGL::VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId) :
-_windowRef( (CocoaRenderView*)windowRef),
-_fullScreen( fullScreen),
-_id( iId),
-_nsglContextCritSec( *CriticalSectionWrapper::CreateCriticalSection()),
-_screenUpdateEvent(EventTimerWrapper::Create()),
-_nsglContext( 0),
-_nsglFullScreenContext( 0),
-_fullScreenWindow( nil),
-_windowRect( ),
-_windowWidth( 0),
-_windowHeight( 0),
-_nsglChannels( ),
-_zOrderToChannel( ),
-_renderingIsPaused (FALSE),
-_windowRefSuperView(NULL),
-_windowRefSuperViewFrame(NSMakeRect(0,0,0,0))
-{
- _screenUpdateThread.reset(new rtc::PlatformThread(
- ScreenUpdateThreadProc, this, "ScreenUpdateNSOpenGL"));
-}
-
-int VideoRenderNSOpenGL::ChangeWindow(CocoaRenderView* newWindowRef)
-{
-
- LockAGLCntx();
-
- _windowRef = newWindowRef;
-
- if(CreateMixingContext() == -1)
- {
- UnlockAGLCntx();
- return -1;
- }
-
- int error = 0;
- std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
- while (it!= _nsglChannels.end())
- {
- error |= (it->second)->ChangeContext(_nsglContext);
- it++;
- }
- if(error != 0)
- {
- UnlockAGLCntx();
- return -1;
- }
-
- UnlockAGLCntx();
- return 0;
-}
-
-/* Check if the thread and event already exist.
- * If so then they will simply be restarted
- * If not then create them and continue
- */
-int32_t VideoRenderNSOpenGL::StartRender()
-{
-
- LockAGLCntx();
-
- const unsigned int MONITOR_FREQ = 60;
- if(TRUE == _renderingIsPaused)
- {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "Restarting screenUpdateThread");
-
- // we already have the thread. Most likely StopRender() was called and they were paused
- _screenUpdateThread->Start();
- if (FALSE ==
- _screenUpdateEvent->StartTimer(true, 1000 / MONITOR_FREQ)) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "Failed to restart screenUpdateThread or screenUpdateEvent");
- UnlockAGLCntx();
- return -1;
- }
-
- _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
-
- UnlockAGLCntx();
- return 0;
- }
-
-
- if (!_screenUpdateThread)
- {
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "failed start screenUpdateThread");
- UnlockAGLCntx();
- return -1;
- }
-
-
- UnlockAGLCntx();
- return 0;
-}
-int32_t VideoRenderNSOpenGL::StopRender()
-{
-
- LockAGLCntx();
-
- /* The code below is functional
- * but it pauses for several seconds
- */
-
- // pause the update thread and the event timer
- if(!_screenUpdateThread || !_screenUpdateEvent)
- {
- _renderingIsPaused = TRUE;
-
- UnlockAGLCntx();
- return 0;
- }
-
- _screenUpdateThread->Stop();
- if (FALSE == _screenUpdateEvent->StopTimer()) {
- _renderingIsPaused = FALSE;
-
- UnlockAGLCntx();
- return -1;
- }
-
- _renderingIsPaused = TRUE;
-
- UnlockAGLCntx();
- return 0;
-}
-
-int VideoRenderNSOpenGL::configureNSOpenGLView()
-{
- return 0;
-
-}
-
-int VideoRenderNSOpenGL::configureNSOpenGLEngine()
-{
-
- LockAGLCntx();
-
- // Disable not needed functionality to increase performance
- glDisable(GL_DITHER);
- glDisable(GL_ALPHA_TEST);
- glDisable(GL_STENCIL_TEST);
- glDisable(GL_FOG);
- glDisable(GL_TEXTURE_2D);
- glPixelZoom(1.0, 1.0);
- glDisable(GL_BLEND);
- glDisable(GL_DEPTH_TEST);
- glDepthMask(GL_FALSE);
- glDisable(GL_CULL_FACE);
-
- // Set texture parameters
- glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0);
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
- glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
- glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
- glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
-
- if (GetWindowRect(_windowRect) == -1)
- {
- UnlockAGLCntx();
- return true;
- }
-
- if (_windowWidth != (_windowRect.right - _windowRect.left)
- || _windowHeight != (_windowRect.bottom - _windowRect.top))
- {
- _windowWidth = _windowRect.right - _windowRect.left;
- _windowHeight = _windowRect.bottom - _windowRect.top;
- }
- glViewport(0, 0, _windowWidth, _windowHeight);
-
- // Synchronize buffer swaps with vertical refresh rate
- GLint swapInt = 1;
- [_nsglContext setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
-
- UnlockAGLCntx();
- return 0;
-}
-
-int VideoRenderNSOpenGL::setRenderTargetWindow()
-{
- LockAGLCntx();
-
-
- GLuint attribs[] =
- {
- NSOpenGLPFAColorSize, 24,
- NSOpenGLPFAAlphaSize, 8,
- NSOpenGLPFADepthSize, 16,
- NSOpenGLPFAAccelerated,
- 0
- };
-
- NSOpenGLPixelFormat* fmt = [[[NSOpenGLPixelFormat alloc] initWithAttributes:
- (NSOpenGLPixelFormatAttribute*) attribs] autorelease];
-
- if(_windowRef)
- {
- [_windowRef initCocoaRenderView:fmt];
- }
- else
- {
- UnlockAGLCntx();
- return -1;
- }
-
- _nsglContext = [_windowRef nsOpenGLContext];
- [_nsglContext makeCurrentContext];
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
-
- DisplayBuffers();
-
- UnlockAGLCntx();
- return 0;
-}
-
-int VideoRenderNSOpenGL::setRenderTargetFullScreen()
-{
- LockAGLCntx();
-
-
- GLuint attribs[] =
- {
- NSOpenGLPFAColorSize, 24,
- NSOpenGLPFAAlphaSize, 8,
- NSOpenGLPFADepthSize, 16,
- NSOpenGLPFAAccelerated,
- 0
- };
-
- NSOpenGLPixelFormat* fmt = [[[NSOpenGLPixelFormat alloc] initWithAttributes:
- (NSOpenGLPixelFormatAttribute*) attribs] autorelease];
-
- // Store original superview and frame for use when exiting full screens
- _windowRefSuperViewFrame = [_windowRef frame];
- _windowRefSuperView = [_windowRef superview];
-
-
- // create new fullscreen window
- NSRect screenRect = [[NSScreen mainScreen]frame];
- [_windowRef setFrame:screenRect];
- [_windowRef setBounds:screenRect];
-
-
- _fullScreenWindow = [[CocoaFullScreenWindow alloc]init];
- [_fullScreenWindow grabFullScreen];
- [[[_fullScreenWindow window] contentView] addSubview:_windowRef];
-
- if(_windowRef)
- {
- [_windowRef initCocoaRenderViewFullScreen:fmt];
- }
- else
- {
- UnlockAGLCntx();
- return -1;
- }
-
- _nsglContext = [_windowRef nsOpenGLContext];
- [_nsglContext makeCurrentContext];
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
- DisplayBuffers();
-
- UnlockAGLCntx();
- return 0;
-}
-
-VideoRenderNSOpenGL::~VideoRenderNSOpenGL()
-{
-
- if(_fullScreen)
- {
- if(_fullScreenWindow)
- {
- // Detach CocoaRenderView from full screen view back to
- // it's original parent.
- [_windowRef removeFromSuperview];
- if(_windowRefSuperView)
- {
- [_windowRefSuperView addSubview:_windowRef];
- [_windowRef setFrame:_windowRefSuperViewFrame];
- }
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, 0, "%s:%d Attempting to release fullscreen window", __FUNCTION__, __LINE__);
- [_fullScreenWindow releaseFullScreen];
-
- }
- }
-
- // Signal event to exit thread, then delete it
- rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
-
- if (tmpPtr)
- {
- _screenUpdateEvent->Set();
- _screenUpdateEvent->StopTimer();
-
- tmpPtr->Stop();
- delete tmpPtr;
- delete _screenUpdateEvent;
- _screenUpdateEvent = NULL;
- }
-
- if (_nsglContext != 0)
- {
- [_nsglContext makeCurrentContext];
- _nsglContext = nil;
- }
-
- // Delete all channels
- std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
- while (it!= _nsglChannels.end())
- {
- delete it->second;
- _nsglChannels.erase(it);
- it = _nsglChannels.begin();
- }
- _nsglChannels.clear();
-
- // Clean the zOrder map
- std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
- while(zIt != _zOrderToChannel.end())
- {
- _zOrderToChannel.erase(zIt);
- zIt = _zOrderToChannel.begin();
- }
- _zOrderToChannel.clear();
-
-}
-
-/* static */
-int VideoRenderNSOpenGL::GetOpenGLVersion(int& /*nsglMajor*/, int& /*nsglMinor*/)
-{
- return -1;
-}
-
-int VideoRenderNSOpenGL::Init()
-{
-
- LockAGLCntx();
- if (!_screenUpdateThread)
- {
- UnlockAGLCntx();
- return -1;
- }
-
- _screenUpdateThread->Start();
- _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
-
- // Start the event triggering the render process
- unsigned int monitorFreq = 60;
- _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);
-
- if (CreateMixingContext() == -1)
- {
- UnlockAGLCntx();
- return -1;
- }
-
- UnlockAGLCntx();
- return 0;
-}
-
-VideoChannelNSOpenGL* VideoRenderNSOpenGL::CreateNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
-{
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- if (HasChannel(channel))
- {
- return NULL;
- }
-
- if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
- {
-
- }
-
- VideoChannelNSOpenGL* newAGLChannel = new VideoChannelNSOpenGL(_nsglContext, _id, this);
- if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
- {
- if (newAGLChannel)
- {
- delete newAGLChannel;
- newAGLChannel = NULL;
- }
-
- return NULL;
- }
-
- _nsglChannels[channel] = newAGLChannel;
- _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
-
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s successfully created NSGL channel number %d", __FUNCTION__, channel);
-
- return newAGLChannel;
-}
-
-int VideoRenderNSOpenGL::DeleteAllNSGLChannels()
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- std::map<int, VideoChannelNSOpenGL*>::iterator it;
- it = _nsglChannels.begin();
-
- while (it != _nsglChannels.end())
- {
- VideoChannelNSOpenGL* channel = it->second;
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Deleting channel %d", __FUNCTION__, channel);
- delete channel;
- it++;
- }
- _nsglChannels.clear();
- return 0;
-}
-
-int32_t VideoRenderNSOpenGL::DeleteNSGLChannel(const uint32_t channel)
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- std::map<int, VideoChannelNSOpenGL*>::iterator it;
- it = _nsglChannels.find(channel);
- if (it != _nsglChannels.end())
- {
- delete it->second;
- _nsglChannels.erase(it);
- }
- else
- {
- return -1;
- }
-
- std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
- while( zIt != _zOrderToChannel.end())
- {
- if (zIt->second == (int)channel)
- {
- _zOrderToChannel.erase(zIt);
- break;
- }
- zIt++;
- }
-
- return 0;
-}
-
-int32_t VideoRenderNSOpenGL::GetChannelProperties(const uint16_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom)
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- bool channelFound = false;
-
- // Loop through all channels until we find a match.
- // From that, get zorder.
- // From that, get T, L, R, B
- for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
- rIt != _zOrderToChannel.rend();
- rIt++)
- {
- if(streamId == rIt->second)
- {
- channelFound = true;
-
- zOrder = rIt->second;
-
- std::map<int, VideoChannelNSOpenGL*>::iterator rIt = _nsglChannels.find(streamId);
- VideoChannelNSOpenGL* tempChannel = rIt->second;
-
- if(-1 == tempChannel->GetChannelProperties(left, top, right, bottom) )
- {
- return -1;
- }
- break;
- }
- }
-
- if(false == channelFound)
- {
-
- return -1;
- }
-
- return 0;
-}
-
-int VideoRenderNSOpenGL::StopThread()
-{
-
- rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
- "%s Stopping thread ", __FUNCTION__, tmpPtr);
-
- if (tmpPtr)
- {
- _screenUpdateEvent->Set();
- tmpPtr->Stop();
- delete tmpPtr;
- }
-
- delete _screenUpdateEvent;
- _screenUpdateEvent = NULL;
-
- return 0;
-}
-
-bool VideoRenderNSOpenGL::IsFullScreen()
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
- return _fullScreen;
-}
-
-bool VideoRenderNSOpenGL::HasChannels()
-{
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- if (_nsglChannels.begin() != _nsglChannels.end())
- {
- return true;
- }
- return false;
-}
-
-bool VideoRenderNSOpenGL::HasChannel(int channel)
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channel);
-
- if (it != _nsglChannels.end())
- {
- return true;
- }
- return false;
-}
-
-int VideoRenderNSOpenGL::GetChannels(std::list<int>& channelList)
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
-
- while (it != _nsglChannels.end())
- {
- channelList.push_back(it->first);
- it++;
- }
-
- return 0;
-}
-
-VideoChannelNSOpenGL* VideoRenderNSOpenGL::ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channel);
-
- if (it != _nsglChannels.end())
- {
- VideoChannelNSOpenGL* aglChannel = it->second;
- if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s failed to set stream settings: channel %d. channel=%d zOrder=%d startWidth=%d startHeight=%d stopWidth=%d stopHeight=%d",
- __FUNCTION__, channel, zOrder, startWidth, startHeight, stopWidth, stopHeight);
- return NULL;
- }
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Configuring channel %d. channel=%d zOrder=%d startWidth=%d startHeight=%d stopWidth=%d stopHeight=%d",
- __FUNCTION__, channel, zOrder, startWidth, startHeight, stopWidth, stopHeight);
-
- std::multimap<int, int>::iterator it = _zOrderToChannel.begin();
- while(it != _zOrderToChannel.end())
- {
- if (it->second == channel)
- {
- if (it->first != zOrder)
- {
- _zOrderToChannel.erase(it);
- _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
- }
- break;
- }
- it++;
- }
- return aglChannel;
- }
-
- return NULL;
-}
-
-/*
- *
- * Rendering process
- *
- */
-
-bool VideoRenderNSOpenGL::ScreenUpdateThreadProc(void* obj)
-{
- return static_cast<VideoRenderNSOpenGL*>(obj)->ScreenUpdateProcess();
-}
-
-bool VideoRenderNSOpenGL::ScreenUpdateProcess()
-{
-
- _screenUpdateEvent->Wait(10);
- LockAGLCntx();
-
- if (!_screenUpdateThread)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s no screen update thread", __FUNCTION__);
- UnlockAGLCntx();
- return false;
- }
-
- [_nsglContext makeCurrentContext];
-
- if (GetWindowRect(_windowRect) == -1)
- {
- UnlockAGLCntx();
- return true;
- }
-
- if (_windowWidth != (_windowRect.right - _windowRect.left)
- || _windowHeight != (_windowRect.bottom - _windowRect.top))
- {
- _windowWidth = _windowRect.right - _windowRect.left;
- _windowHeight = _windowRect.bottom - _windowRect.top;
- glViewport(0, 0, _windowWidth, _windowHeight);
- }
-
- // Check if there are any updated buffers
- bool updated = false;
- std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
- while (it != _nsglChannels.end())
- {
-
- VideoChannelNSOpenGL* aglChannel = it->second;
- aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
- aglChannel->IsUpdated(updated);
- if (updated)
- {
- break;
- }
- it++;
- }
-
- if (updated)
- {
-
- // At least on buffers is updated, we need to repaint the texture
- if (RenderOffScreenBuffers() != -1)
- {
- UnlockAGLCntx();
- return true;
- }
- }
- // }
- UnlockAGLCntx();
- return true;
-}
-
-/*
- *
- * Functions for creating mixing buffers and screen settings
- *
- */
-
-int VideoRenderNSOpenGL::CreateMixingContext()
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- if(_fullScreen)
- {
- if(-1 == setRenderTargetFullScreen())
- {
- return -1;
- }
- }
- else
- {
-
- if(-1 == setRenderTargetWindow())
- {
- return -1;
- }
- }
-
- configureNSOpenGLEngine();
-
- DisplayBuffers();
-
- GLenum glErr = glGetError();
- if (glErr)
- {
- }
-
- return 0;
-}
-
-/*
- *
- * Rendering functions
- *
- */
-
-int VideoRenderNSOpenGL::RenderOffScreenBuffers()
-{
- LockAGLCntx();
-
- // Get the current window size, it might have changed since last render.
- if (GetWindowRect(_windowRect) == -1)
- {
- UnlockAGLCntx();
- return -1;
- }
-
- [_nsglContext makeCurrentContext];
- glClear(GL_COLOR_BUFFER_BIT);
-
- // Loop through all channels starting highest zOrder ending with lowest.
- for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
- rIt != _zOrderToChannel.rend();
- rIt++)
- {
- int channelId = rIt->second;
- std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channelId);
-
- VideoChannelNSOpenGL* aglChannel = it->second;
-
- aglChannel->RenderOffScreenBuffer();
- }
-
- DisplayBuffers();
-
- UnlockAGLCntx();
- return 0;
-}
-
-/*
- *
- * Help functions
- *
- * All help functions assumes external protections
- *
- */
-
-int VideoRenderNSOpenGL::DisplayBuffers()
-{
-
- LockAGLCntx();
-
- glFinish();
- [_nsglContext flushBuffer];
-
- WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s glFinish and [_nsglContext flushBuffer]", __FUNCTION__);
-
- UnlockAGLCntx();
- return 0;
-}
-
-int VideoRenderNSOpenGL::GetWindowRect(Rect& rect)
-{
-
- CriticalSectionScoped cs(&_nsglContextCritSec);
-
- if (_windowRef)
- {
- if(_fullScreen)
- {
- NSRect mainDisplayRect = [[NSScreen mainScreen] frame];
- rect.bottom = 0;
- rect.left = 0;
- rect.right = mainDisplayRect.size.width;
- rect.top = mainDisplayRect.size.height;
- }
- else
- {
- rect.top = [_windowRef frame].origin.y;
- rect.left = [_windowRef frame].origin.x;
- rect.bottom = [_windowRef frame].origin.y + [_windowRef frame].size.height;
- rect.right = [_windowRef frame].origin.x + [_windowRef frame].size.width;
- }
-
- return 0;
- }
- else
- {
- return -1;
- }
-}
-
-int32_t VideoRenderNSOpenGL::SetText(const uint8_t /*textId*/,
- const uint8_t* /*text*/,
- const int32_t /*textLength*/,
- const uint32_t /*textColorRef*/,
- const uint32_t /*backgroundColorRef*/,
- const float /*left*/,
- const float /*top*/,
- const float /*right*/,
- const float /*bottom*/)
-{
-
- return 0;
-
-}
-
-void VideoRenderNSOpenGL::LockAGLCntx()
-{
- _nsglContextCritSec.Enter();
-}
-void VideoRenderNSOpenGL::UnlockAGLCntx()
-{
- _nsglContextCritSec.Leave();
-}
-
-/*
-
- bool VideoRenderNSOpenGL::SetFullScreen(bool fullscreen)
- {
- NSRect mainDisplayRect, viewRect;
-
- // Create a screen-sized window on the display you want to take over
- // Note, mainDisplayRect has a non-zero origin if the key window is on a secondary display
- mainDisplayRect = [[NSScreen mainScreen] frame];
- fullScreenWindow = [[NSWindow alloc] initWithContentRect:mainDisplayRect styleMask:NSBorderlessWindowMask
- backing:NSBackingStoreBuffered defer:YES];
-
- // Set the window level to be above the menu bar
- [fullScreenWindow setLevel:NSMainMenuWindowLevel+1];
-
- // Perform any other window configuration you desire
- [fullScreenWindow setOpaque:YES];
- [fullScreenWindow setHidesOnDeactivate:YES];
-
- // Create a view with a double-buffered OpenGL context and attach it to the window
- // By specifying the non-fullscreen context as the shareContext, we automatically inherit the OpenGL objects (textures, etc) it has defined
- viewRect = NSMakeRect(0.0, 0.0, mainDisplayRect.size.width, mainDisplayRect.size.height);
- fullScreenView = [[MyOpenGLView alloc] initWithFrame:viewRect shareContext:[openGLView openGLContext]];
- [fullScreenWindow setContentView:fullScreenView];
-
- // Show the window
- [fullScreenWindow makeKeyAndOrderFront:self];
-
- // Set the scene with the full-screen viewport and viewing transformation
- [scene setViewportRect:viewRect];
-
- // Assign the view's MainController to self
- [fullScreenView setMainController:self];
-
- if (!isAnimating) {
- // Mark the view as needing drawing to initalize its contents
- [fullScreenView setNeedsDisplay:YES];
- }
- else {
- // Start playing the animation
- [fullScreenView startAnimation];
- }
-
- }
-
-
-
- */
-
-
-} // namespace webrtc
-
-#endif // COCOA_RENDERING
diff --git a/chromium/third_party/webrtc/modules/video_render/test/testAPI/renderStartImage.bmp b/chromium/third_party/webrtc/modules/video_render/test/testAPI/renderStartImage.bmp
deleted file mode 100644
index c443a58f6cb..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/test/testAPI/renderStartImage.bmp
+++ /dev/null
Binary files differ
diff --git a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.cc b/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.cc
deleted file mode 100644
index cea2f6b56fe..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.cc
+++ /dev/null
@@ -1,645 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_render/test/testAPI/testAPI.h"
-
-#include <stdio.h>
-
-#if defined(_WIN32)
-#include <tchar.h>
-#include <windows.h>
-#include <assert.h>
-#include <fstream>
-#include <iostream>
-#include <string>
-#include <windows.h>
-#include <ddraw.h>
-
-#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
-
-#include <X11/Xlib.h>
-#include <X11/Xutil.h>
-#include <iostream>
-#include <sys/time.h>
-
-#endif
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/utility/include/process_thread.h"
-#include "webrtc/modules/video_render/video_render.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-#include "webrtc/system_wrappers/include/sleep.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-using namespace webrtc;
-
-void GetTestVideoFrame(VideoFrame* frame, uint8_t startColor);
-int TestSingleStream(VideoRender* renderModule);
-int TestFullscreenStream(VideoRender* &renderModule,
- void* window,
- const VideoRenderType videoRenderType);
-int TestBitmapText(VideoRender* renderModule);
-int TestMultipleStreams(VideoRender* renderModule);
-int TestExternalRender(VideoRender* renderModule);
-
-#define TEST_FRAME_RATE 30
-#define TEST_TIME_SECOND 5
-#define TEST_FRAME_NUM (TEST_FRAME_RATE*TEST_TIME_SECOND)
-#define TEST_STREAM0_START_COLOR 0
-#define TEST_STREAM1_START_COLOR 64
-#define TEST_STREAM2_START_COLOR 128
-#define TEST_STREAM3_START_COLOR 192
-
-#if defined(WEBRTC_LINUX)
-
-#define GET_TIME_IN_MS timeGetTime()
-
-unsigned long timeGetTime()
-{
- struct timeval tv;
- struct timezone tz;
- unsigned long val;
-
- gettimeofday(&tv, &tz);
- val= tv.tv_sec*1000+ tv.tv_usec/1000;
- return(val);
-}
-
-#elif defined(WEBRTC_MAC)
-
-#include <unistd.h>
-
-#define GET_TIME_IN_MS timeGetTime()
-
-unsigned long timeGetTime()
-{
- return 0;
-}
-
-#else
-
-#define GET_TIME_IN_MS ::timeGetTime()
-
-#endif
-
-using namespace std;
-
-#if defined(_WIN32)
-LRESULT CALLBACK WebRtcWinProc( HWND hWnd,UINT uMsg,WPARAM wParam,LPARAM lParam)
-{
- switch(uMsg)
- {
- case WM_DESTROY:
- break;
- case WM_COMMAND:
- break;
- }
- return DefWindowProc(hWnd,uMsg,wParam,lParam);
-}
-
-int WebRtcCreateWindow(HWND &hwndMain,int winNum, int width, int height)
-{
- HINSTANCE hinst = GetModuleHandle(0);
- WNDCLASSEX wcx;
- wcx.hInstance = hinst;
- wcx.lpszClassName = TEXT("VideoRenderTest");
- wcx.lpfnWndProc = (WNDPROC)WebRtcWinProc;
- wcx.style = CS_DBLCLKS;
- wcx.hIcon = LoadIcon (NULL, IDI_APPLICATION);
- wcx.hIconSm = LoadIcon (NULL, IDI_APPLICATION);
- wcx.hCursor = LoadCursor (NULL, IDC_ARROW);
- wcx.lpszMenuName = NULL;
- wcx.cbSize = sizeof (WNDCLASSEX);
- wcx.cbClsExtra = 0;
- wcx.cbWndExtra = 0;
- wcx.hbrBackground = GetSysColorBrush(COLOR_3DFACE);
-
- // Register our window class with the operating system.
- // If there is an error, exit program.
- if ( !RegisterClassEx (&wcx) )
- {
- MessageBox( 0, TEXT("Failed to register window class!"),TEXT("Error!"), MB_OK|MB_ICONERROR );
- return 0;
- }
-
- // Create the main window.
- hwndMain = CreateWindowEx(
- 0, // no extended styles
- TEXT("VideoRenderTest"), // class name
- TEXT("VideoRenderTest Window"), // window name
- WS_OVERLAPPED |WS_THICKFRAME, // overlapped window
- 800, // horizontal position
- 0, // vertical position
- width, // width
- height, // height
- (HWND) NULL, // no parent or owner window
- (HMENU) NULL, // class menu used
- hinst, // instance handle
- NULL); // no window creation data
-
- if (!hwndMain)
- return -1;
-
- // Show the window using the flag specified by the program
- // that started the application, and send the application
- // a WM_PAINT message.
-
- ShowWindow(hwndMain, SW_SHOWDEFAULT);
- UpdateWindow(hwndMain);
- return 0;
-}
-
-#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
-
-int WebRtcCreateWindow(Window *outWindow, Display **outDisplay, int winNum, int width, int height) // unsigned char* title, int titleLength)
-
-{
- int screen, xpos = 10, ypos = 10;
- XEvent evnt;
- XSetWindowAttributes xswa; // window attribute struct
- XVisualInfo vinfo; // screen visual info struct
- unsigned long mask; // attribute mask
-
- // get connection handle to xserver
- Display* _display = XOpenDisplay( NULL );
-
- // get screen number
- screen = DefaultScreen(_display);
-
- // put desired visual info for the screen in vinfo
- if( XMatchVisualInfo(_display, screen, 24, TrueColor, &vinfo) != 0 )
- {
- //printf( "Screen visual info match!\n" );
- }
-
- // set window attributes
- xswa.colormap = XCreateColormap(_display, DefaultRootWindow(_display), vinfo.visual, AllocNone);
- xswa.event_mask = StructureNotifyMask | ExposureMask;
- xswa.background_pixel = 0;
- xswa.border_pixel = 0;
-
- // value mask for attributes
- mask = CWBackPixel | CWBorderPixel | CWColormap | CWEventMask;
-
- switch( winNum )
- {
- case 0:
- xpos = 200;
- ypos = 200;
- break;
- case 1:
- xpos = 300;
- ypos = 200;
- break;
- default:
- break;
- }
-
- // create a subwindow for parent (defroot)
- Window _window = XCreateWindow(_display, DefaultRootWindow(_display),
- xpos, ypos,
- width,
- height,
- 0, vinfo.depth,
- InputOutput,
- vinfo.visual,
- mask, &xswa);
-
- // Set window name
- if( winNum == 0 )
- {
- XStoreName(_display, _window, "VE MM Local Window");
- XSetIconName(_display, _window, "VE MM Local Window");
- }
- else if( winNum == 1 )
- {
- XStoreName(_display, _window, "VE MM Remote Window");
- XSetIconName(_display, _window, "VE MM Remote Window");
- }
-
- // make x report events for mask
- XSelectInput(_display, _window, StructureNotifyMask);
-
- // map the window to the display
- XMapWindow(_display, _window);
-
- // wait for map event
- do
- {
- XNextEvent(_display, &evnt);
- }
- while (evnt.type != MapNotify || evnt.xmap.event != _window);
-
- *outWindow = _window;
- *outDisplay = _display;
-
- return 0;
-}
-#endif // WEBRTC_LINUX
-
-// Note: Mac code is in testApi_mac.mm.
-
-class MyRenderCallback: public VideoRenderCallback
-{
-public:
- MyRenderCallback() :
- _cnt(0)
- {
- }
- ;
- ~MyRenderCallback()
- {
- }
- ;
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame) {
- _cnt++;
- if (_cnt % 100 == 0)
- {
- printf("Render callback %d \n",_cnt);
- }
- return 0;
- }
- int32_t _cnt;
-};
-
-void GetTestVideoFrame(VideoFrame* frame, uint8_t startColor) {
- // changing color
- static uint8_t color = startColor;
-
- memset(frame->buffer(kYPlane), color, frame->allocated_size(kYPlane));
- memset(frame->buffer(kUPlane), color, frame->allocated_size(kUPlane));
- memset(frame->buffer(kVPlane), color, frame->allocated_size(kVPlane));
-
- ++color;
-}
-
-int TestSingleStream(VideoRender* renderModule) {
- int error = 0;
- // Add settings for a stream to render
- printf("Add stream 0 to entire window\n");
- const int streamId0 = 0;
- VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
- assert(renderCallback0 != NULL);
-
- printf("Start render\n");
- error = renderModule->StartRender(streamId0);
- if (error != 0) {
- // TODO(phoglund): This test will not work if compiled in release mode.
- // This rather silly construct here is to avoid compilation errors when
- // compiling in release. Release => no asserts => unused 'error' variable.
- assert(false);
- }
-
- // Loop through an I420 file and render each frame
- const int width = 352;
- const int half_width = (width + 1) / 2;
- const int height = 288;
-
- VideoFrame videoFrame0;
- videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
-
- const uint32_t renderDelayMs = 500;
-
- for (int i=0; i<TEST_FRAME_NUM; i++) {
- GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
- // Render this frame with the specified delay
- videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp()
- + renderDelayMs);
- renderCallback0->RenderFrame(streamId0, videoFrame0);
- SleepMs(1000/TEST_FRAME_RATE);
- }
-
-
- // Shut down
- printf("Closing...\n");
- error = renderModule->StopRender(streamId0);
- assert(error == 0);
-
- error = renderModule->DeleteIncomingRenderStream(streamId0);
- assert(error == 0);
-
- return 0;
-}
-
-int TestFullscreenStream(VideoRender* &renderModule,
- void* window,
- const VideoRenderType videoRenderType) {
- VideoRender::DestroyVideoRender(renderModule);
- renderModule = VideoRender::CreateVideoRender(12345, window, true, videoRenderType);
-
- TestSingleStream(renderModule);
-
- VideoRender::DestroyVideoRender(renderModule);
- renderModule = VideoRender::CreateVideoRender(12345, window, false, videoRenderType);
-
- return 0;
-}
-
-int TestBitmapText(VideoRender* renderModule) {
-#if defined(WIN32)
-
- int error = 0;
- // Add settings for a stream to render
- printf("Add stream 0 to entire window\n");
- const int streamId0 = 0;
- VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
- assert(renderCallback0 != NULL);
-
- printf("Adding Bitmap\n");
- DDCOLORKEY ColorKey; // black
- ColorKey.dwColorSpaceHighValue = RGB(0, 0, 0);
- ColorKey.dwColorSpaceLowValue = RGB(0, 0, 0);
- HBITMAP hbm = (HBITMAP)LoadImage(NULL,
- (LPCTSTR)_T("renderStartImage.bmp"),
- IMAGE_BITMAP, 0, 0, LR_LOADFROMFILE);
- renderModule->SetBitmap(hbm, 0, &ColorKey, 0.0f, 0.0f, 0.3f,
- 0.3f);
-
- printf("Adding Text\n");
- renderModule->SetText(1, (uint8_t*) "WebRtc Render Demo App", 20,
- RGB(255, 0, 0), RGB(0, 0, 0), 0.25f, 0.1f, 1.0f,
- 1.0f);
-
- printf("Start render\n");
- error = renderModule->StartRender(streamId0);
- assert(error == 0);
-
- // Loop through an I420 file and render each frame
- const int width = 352;
- const int half_width = (width + 1) / 2;
- const int height = 288;
-
- VideoFrame videoFrame0;
- videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
-
- const uint32_t renderDelayMs = 500;
-
- for (int i=0; i<TEST_FRAME_NUM; i++) {
- GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
- // Render this frame with the specified delay
- videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
- renderDelayMs);
- renderCallback0->RenderFrame(streamId0, videoFrame0);
- SleepMs(1000/TEST_FRAME_RATE);
- }
- // Sleep and let all frames be rendered before closing
- SleepMs(renderDelayMs*2);
-
-
- // Shut down
- printf("Closing...\n");
- ColorKey.dwColorSpaceHighValue = RGB(0,0,0);
- ColorKey.dwColorSpaceLowValue = RGB(0,0,0);
- renderModule->SetBitmap(NULL, 0, &ColorKey, 0.0f, 0.0f, 0.0f, 0.0f);
- renderModule->SetText(1, NULL, 20, RGB(255,255,255),
- RGB(0,0,0), 0.0f, 0.0f, 0.0f, 0.0f);
-
- error = renderModule->StopRender(streamId0);
- assert(error == 0);
-
- error = renderModule->DeleteIncomingRenderStream(streamId0);
- assert(error == 0);
-#endif
-
- return 0;
-}
-
-int TestMultipleStreams(VideoRender* renderModule) {
- int error = 0;
-
- // Add settings for a stream to render
- printf("Add stream 0\n");
- const int streamId0 = 0;
- VideoRenderCallback* renderCallback0 =
- renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 0.45f, 0.45f);
- assert(renderCallback0 != NULL);
- printf("Add stream 1\n");
- const int streamId1 = 1;
- VideoRenderCallback* renderCallback1 =
- renderModule->AddIncomingRenderStream(streamId1, 0, 0.55f, 0.0f, 1.0f, 0.45f);
- assert(renderCallback1 != NULL);
- printf("Add stream 2\n");
- const int streamId2 = 2;
- VideoRenderCallback* renderCallback2 =
- renderModule->AddIncomingRenderStream(streamId2, 0, 0.0f, 0.55f, 0.45f, 1.0f);
- assert(renderCallback2 != NULL);
- printf("Add stream 3\n");
- const int streamId3 = 3;
- VideoRenderCallback* renderCallback3 =
- renderModule->AddIncomingRenderStream(streamId3, 0, 0.55f, 0.55f, 1.0f, 1.0f);
- assert(renderCallback3 != NULL);
- error = renderModule->StartRender(streamId0);
- if (error != 0) {
- // TODO(phoglund): This test will not work if compiled in release mode.
- // This rather silly construct here is to avoid compilation errors when
- // compiling in release. Release => no asserts => unused 'error' variable.
- assert(false);
- }
- error = renderModule->StartRender(streamId1);
- assert(error == 0);
- error = renderModule->StartRender(streamId2);
- assert(error == 0);
- error = renderModule->StartRender(streamId3);
- assert(error == 0);
-
- // Loop through an I420 file and render each frame
- const int width = 352;
- const int half_width = (width + 1) / 2;
- const int height = 288;
-
- VideoFrame videoFrame0;
- videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
- VideoFrame videoFrame1;
- videoFrame1.CreateEmptyFrame(width, height, width, half_width, half_width);
- VideoFrame videoFrame2;
- videoFrame2.CreateEmptyFrame(width, height, width, half_width, half_width);
- VideoFrame videoFrame3;
- videoFrame3.CreateEmptyFrame(width, height, width, half_width, half_width);
-
- const uint32_t renderDelayMs = 500;
-
- // Render frames with the specified delay.
- for (int i=0; i<TEST_FRAME_NUM; i++) {
- GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
-
- videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
- renderDelayMs);
- renderCallback0->RenderFrame(streamId0, videoFrame0);
-
- GetTestVideoFrame(&videoFrame1, TEST_STREAM1_START_COLOR);
- videoFrame1.set_render_time_ms(TickTime::MillisecondTimestamp() +
- renderDelayMs);
- renderCallback1->RenderFrame(streamId1, videoFrame1);
-
- GetTestVideoFrame(&videoFrame2, TEST_STREAM2_START_COLOR);
- videoFrame2.set_render_time_ms(TickTime::MillisecondTimestamp() +
- renderDelayMs);
- renderCallback2->RenderFrame(streamId2, videoFrame2);
-
- GetTestVideoFrame(&videoFrame3, TEST_STREAM3_START_COLOR);
- videoFrame3.set_render_time_ms(TickTime::MillisecondTimestamp() +
- renderDelayMs);
- renderCallback3->RenderFrame(streamId3, videoFrame3);
-
- SleepMs(1000/TEST_FRAME_RATE);
- }
-
- // Shut down
- printf("Closing...\n");
- error = renderModule->StopRender(streamId0);
- assert(error == 0);
- error = renderModule->DeleteIncomingRenderStream(streamId0);
- assert(error == 0);
- error = renderModule->StopRender(streamId1);
- assert(error == 0);
- error = renderModule->DeleteIncomingRenderStream(streamId1);
- assert(error == 0);
- error = renderModule->StopRender(streamId2);
- assert(error == 0);
- error = renderModule->DeleteIncomingRenderStream(streamId2);
- assert(error == 0);
- error = renderModule->StopRender(streamId3);
- assert(error == 0);
- error = renderModule->DeleteIncomingRenderStream(streamId3);
- assert(error == 0);
-
- return 0;
-}
-
-int TestExternalRender(VideoRender* renderModule) {
- int error = 0;
- MyRenderCallback *externalRender = new MyRenderCallback();
-
- const int streamId0 = 0;
- VideoRenderCallback* renderCallback0 =
- renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f,
- 1.0f, 1.0f);
- assert(renderCallback0 != NULL);
- error = renderModule->AddExternalRenderCallback(streamId0, externalRender);
- if (error != 0) {
- // TODO(phoglund): This test will not work if compiled in release mode.
- // This rather silly construct here is to avoid compilation errors when
- // compiling in release. Release => no asserts => unused 'error' variable.
- assert(false);
- }
-
- error = renderModule->StartRender(streamId0);
- assert(error == 0);
-
- const int width = 352;
- const int half_width = (width + 1) / 2;
- const int height = 288;
- VideoFrame videoFrame0;
- videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
-
- const uint32_t renderDelayMs = 500;
- int frameCount = TEST_FRAME_NUM;
- for (int i=0; i<frameCount; i++) {
- videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
- renderDelayMs);
- renderCallback0->RenderFrame(streamId0, videoFrame0);
- SleepMs(33);
- }
-
- // Sleep and let all frames be rendered before closing
- SleepMs(2*renderDelayMs);
-
- // Shut down
- printf("Closing...\n");
- error = renderModule->StopRender(streamId0);
- assert(error == 0);
- error = renderModule->DeleteIncomingRenderStream(streamId0);
- assert(error == 0);
- assert(frameCount == externalRender->_cnt);
-
- delete externalRender;
- externalRender = NULL;
-
- return 0;
-}
-
-void RunVideoRenderTests(void* window, VideoRenderType windowType) {
- int myId = 12345;
-
- // Create the render module
- printf("Create render module\n");
- VideoRender* renderModule = NULL;
- renderModule = VideoRender::CreateVideoRender(myId,
- window,
- false,
- windowType);
- assert(renderModule != NULL);
-
- // ##### Test single stream rendering ####
- printf("#### TestSingleStream ####\n");
- if (TestSingleStream(renderModule) != 0) {
- printf ("TestSingleStream failed\n");
- }
-
- // ##### Test fullscreen rendering ####
- printf("#### TestFullscreenStream ####\n");
- if (TestFullscreenStream(renderModule, window, windowType) != 0) {
- printf ("TestFullscreenStream failed\n");
- }
-
- // ##### Test bitmap and text ####
- printf("#### TestBitmapText ####\n");
- if (TestBitmapText(renderModule) != 0) {
- printf ("TestBitmapText failed\n");
- }
-
- // ##### Test multiple streams ####
- printf("#### TestMultipleStreams ####\n");
- if (TestMultipleStreams(renderModule) != 0) {
- printf ("TestMultipleStreams failed\n");
- }
-
- // ##### Test multiple streams ####
- printf("#### TestExternalRender ####\n");
- if (TestExternalRender(renderModule) != 0) {
- printf ("TestExternalRender failed\n");
- }
-
- delete renderModule;
- renderModule = NULL;
-
- printf("VideoRender unit tests passed.\n");
-}
-
-// Note: The Mac main is implemented in testApi_mac.mm.
-#if defined(_WIN32)
-int _tmain(int argc, _TCHAR* argv[])
-#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
-int main(int argc, char* argv[])
-#endif
-#if !defined(WEBRTC_MAC) && !defined(WEBRTC_ANDROID)
-{
- // Create a window for testing.
- void* window = NULL;
-#if defined (_WIN32)
- HWND testHwnd;
- WebRtcCreateWindow(testHwnd, 0, 352, 288);
- window = (void*)testHwnd;
- VideoRenderType windowType = kRenderWindows;
-#elif defined(WEBRTC_LINUX)
- Window testWindow;
- Display* display;
- WebRtcCreateWindow(&testWindow, &display, 0, 352, 288);
- VideoRenderType windowType = kRenderX11;
- window = (void*)testWindow;
-#endif // WEBRTC_LINUX
-
- RunVideoRenderTests(window, windowType);
- return 0;
-}
-#endif // !WEBRTC_MAC
diff --git a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.h b/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.h
deleted file mode 100644
index 0655a5b4343..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI.h
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
-
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-void RunVideoRenderTests(void* window, webrtc::VideoRenderType windowType);
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
diff --git a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm b/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm
deleted file mode 100644
index dfee4c72983..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testAPI.h"
-
-#include <iostream>
-
-#import <Foundation/Foundation.h>
-#import <Cocoa/Cocoa.h>
-#import <AppKit/AppKit.h>
-#import <QTKit/QTKit.h>
-#include <sys/time.h>
-
-#import "webrtc/modules/video_render/mac/cocoa_render_view.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/include/module_common_types.h"
-#include "webrtc/modules/utility/include/process_thread.h"
-#include "webrtc/modules/video_render/video_render.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-using namespace webrtc;
-
-int WebRtcCreateWindow(CocoaRenderView*& cocoaRenderer, int winNum, int width, int height)
-{
- // In Cocoa, rendering is not done directly to a window like in Windows and Linux.
- // It is rendererd to a Subclass of NSOpenGLView
-
- // create cocoa container window
- NSRect outWindowFrame = NSMakeRect(200, 800, width + 20, height + 20);
- NSWindow* outWindow = [[NSWindow alloc] initWithContentRect:outWindowFrame
- styleMask:NSTitledWindowMask
- backing:NSBackingStoreBuffered
- defer:NO];
- [outWindow orderOut:nil];
- [outWindow setTitle:@"Cocoa Renderer"];
- [outWindow setBackgroundColor:[NSColor blueColor]];
-
- // create renderer and attach to window
- NSRect cocoaRendererFrame = NSMakeRect(10, 10, width, height);
- cocoaRenderer = [[CocoaRenderView alloc] initWithFrame:cocoaRendererFrame];
- [[outWindow contentView] addSubview:(NSView*)cocoaRenderer];
-
- [outWindow makeKeyAndOrderFront:NSApp];
-
- return 0;
-}
-
-int main (int argc, const char * argv[]) {
- NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
- [NSApplication sharedApplication];
-
- CocoaRenderView* testWindow;
- WebRtcCreateWindow(testWindow, 0, 352, 288);
- VideoRenderType windowType = kRenderCocoa;
- void* window = (void*)testWindow;
-
- RunVideoRenderTests(window, windowType);
-
- [pool release];
-}
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render.gypi b/chromium/third_party/webrtc/modules/video_render/video_render.gypi
deleted file mode 100644
index e8cc03a4b02..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/video_render.gypi
+++ /dev/null
@@ -1,218 +0,0 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-{
- 'targets': [
- {
- # Note this library is missing an implementation for the video render.
- # For that targets must link with 'video_render' or
- # 'video_render_module_internal_impl' if they want to compile and use
- # the internal render as the default renderer.
- 'target_name': 'video_render_module',
- 'type': 'static_library',
- 'dependencies': [
- 'webrtc_utility',
- '<(webrtc_root)/common.gyp:webrtc_common',
- '<(webrtc_root)/common_video/common_video.gyp:common_video',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
- ],
- 'sources': [
- 'external/video_render_external_impl.cc',
- 'external/video_render_external_impl.h',
- 'i_video_render.h',
- 'video_render.h',
- 'video_render_defines.h',
- 'video_render_impl.h',
- ],
- },
- {
- # Default video_render_module implementation that only supports external
- # renders.
- 'target_name': 'video_render',
- 'type': 'static_library',
- 'dependencies': [
- 'video_render_module',
- ],
- 'sources': [
- 'video_render_impl.cc',
- ],
- },
- ], # targets
-
- 'conditions': [
- ['build_with_chromium==0', {
- 'targets': [
- {
- # video_render_module implementation that supports the internal
- # video_render implementation.
- 'target_name': 'video_render_module_internal_impl',
- 'type': 'static_library',
- 'dependencies': [
- '<(webrtc_root)/common.gyp:webrtc_common',
- 'video_render_module',
- ],
- 'sources': [
- 'video_render_internal_impl.cc',
- ],
- # TODO(andrew): with the proper suffix, these files will be excluded
- # automatically.
- 'conditions': [
- ['OS=="android"', {
- 'sources': [
- 'android/video_render_android_impl.h',
- 'android/video_render_android_native_opengl2.h',
- 'android/video_render_android_surface_view.h',
- 'android/video_render_opengles20.h',
- 'android/video_render_android_impl.cc',
- 'android/video_render_android_native_opengl2.cc',
- 'android/video_render_android_surface_view.cc',
- 'android/video_render_opengles20.cc',
- ],
- 'link_settings': {
- 'libraries': [
- '-lGLESv2',
- ],
- },
- }],
- ['OS=="ios"', {
- 'sources': [
- # iOS
- 'ios/open_gles20.h',
- 'ios/open_gles20.mm',
- 'ios/video_render_ios_channel.h',
- 'ios/video_render_ios_channel.mm',
- 'ios/video_render_ios_gles20.h',
- 'ios/video_render_ios_gles20.mm',
- 'ios/video_render_ios_impl.h',
- 'ios/video_render_ios_impl.mm',
- 'ios/video_render_ios_view.h',
- 'ios/video_render_ios_view.mm',
- ],
- 'xcode_settings': {
- 'CLANG_ENABLE_OBJC_ARC': 'YES',
- },
- 'all_dependent_settings': {
- 'xcode_settings': {
- 'OTHER_LDFLAGS': [
- '-framework OpenGLES',
- '-framework QuartzCore',
- '-framework UIKit',
- ],
- },
- },
- }],
- ['OS=="linux"', {
- 'sources': [
- 'linux/video_render_linux_impl.h',
- 'linux/video_x11_channel.h',
- 'linux/video_x11_render.h',
- 'linux/video_render_linux_impl.cc',
- 'linux/video_x11_channel.cc',
- 'linux/video_x11_render.cc',
- ],
- 'link_settings': {
- 'libraries': [
- '-lXext',
- ],
- },
- }],
- ['OS=="mac"', {
- 'sources': [
- 'mac/cocoa_full_screen_window.h',
- 'mac/cocoa_render_view.h',
- 'mac/video_render_agl.h',
- 'mac/video_render_mac_carbon_impl.h',
- 'mac/video_render_mac_cocoa_impl.h',
- 'mac/video_render_nsopengl.h',
- 'mac/video_render_nsopengl.mm',
- 'mac/video_render_mac_cocoa_impl.mm',
- 'mac/video_render_agl.cc',
- 'mac/video_render_mac_carbon_impl.cc',
- 'mac/cocoa_render_view.mm',
- 'mac/cocoa_full_screen_window.mm',
- ],
- }],
- ['OS=="win"', {
- 'sources': [
- 'windows/i_video_render_win.h',
- 'windows/video_render_direct3d9.h',
- 'windows/video_render_windows_impl.h',
- 'windows/video_render_direct3d9.cc',
- 'windows/video_render_windows_impl.cc',
- ],
- 'include_dirs': [
- '<(directx_sdk_path)/Include',
- ],
- }],
- ['OS=="win" and clang==1', {
- 'msvs_settings': {
- 'VCCLCompilerTool': {
- 'AdditionalOptions': [
- # Disable warnings failing when compiling with Clang on Windows.
- # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
- '-Wno-comment',
- '-Wno-reorder',
- '-Wno-unused-value',
- '-Wno-unused-private-field',
- ],
- },
- },
- }],
- ] # conditions
- },
- ],
- }], # build_with_chromium==0
- ['include_tests==1 and OS!="ios"', {
- 'targets': [
- {
- # Does not compile on iOS: webrtc:4755.
- 'target_name': 'video_render_tests',
- 'type': 'executable',
- 'dependencies': [
- 'video_render_module_internal_impl',
- 'webrtc_utility',
- '<(webrtc_root)/common.gyp:webrtc_common',
- '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
- '<(webrtc_root)/common_video/common_video.gyp:common_video',
- ],
- 'sources': [
- 'test/testAPI/testAPI.cc',
- 'test/testAPI/testAPI.h',
- 'test/testAPI/testAPI_android.cc',
- 'test/testAPI/testAPI_mac.mm',
- ],
- 'conditions': [
- ['OS=="mac" or OS=="linux"', {
- 'cflags': [
- '-Wno-write-strings',
- ],
- 'ldflags': [
- '-lpthread -lm',
- ],
- }],
- ['OS=="linux"', {
- 'link_settings': {
- 'libraries': [
- '-lX11',
- ],
- },
- }],
- ['OS=="mac"', {
- 'xcode_settings': {
- 'OTHER_LDFLAGS': [
- '-framework Foundation -framework AppKit -framework Cocoa -framework OpenGL',
- ],
- },
- }],
- ] # conditions
- }, # video_render_module_test
- ], # targets
- }], # include_tests==1 and OS!=ios
- ], # conditions
-}
-
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render.h b/chromium/third_party/webrtc/modules/video_render/video_render.h
deleted file mode 100644
index 84c9536e35c..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/video_render.h
+++ /dev/null
@@ -1,268 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
-
-/*
- * video_render.h
- *
- * This header file together with module.h and module_common_types.h
- * contains all of the APIs that are needed for using the video render
- * module class.
- *
- */
-
-#include "webrtc/modules/include/module.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-namespace webrtc {
-
-// Class definitions
-class VideoRender: public Module
-{
-public:
- /*
- * Create a video render module object
- *
- * id - unique identifier of this video render module object
- * window - pointer to the window to render to
- * fullscreen - true if this is a fullscreen renderer
- * videoRenderType - type of renderer to create
- */
- static VideoRender
- * CreateVideoRender(
- const int32_t id,
- void* window,
- const bool fullscreen,
- const VideoRenderType videoRenderType =
- kRenderDefault);
-
- /*
- * Destroy a video render module object
- *
- * module - object to destroy
- */
- static void DestroyVideoRender(VideoRender* module);
-
- int64_t TimeUntilNextProcess() override = 0;
- void Process() override = 0;
-
- /**************************************************************************
- *
- * Window functions
- *
- ***************************************************************************/
-
- /*
- * Get window for this renderer
- */
- virtual void* Window() = 0;
-
- /*
- * Change render window
- *
- * window - the new render window, assuming same type as originally created.
- */
- virtual int32_t ChangeWindow(void* window) = 0;
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- /*
- * Add incoming render stream
- *
- * streamID - id of the stream to add
- * zOrder - relative render order for the streams, 0 = on top
- * left - position of the stream in the window, [0.0f, 1.0f]
- * top - position of the stream in the window, [0.0f, 1.0f]
- * right - position of the stream in the window, [0.0f, 1.0f]
- * bottom - position of the stream in the window, [0.0f, 1.0f]
- *
- * Return - callback class to use for delivering new frames to render.
- */
- virtual VideoRenderCallback
- * AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right, const float bottom) = 0;
- /*
- * Delete incoming render stream
- *
- * streamID - id of the stream to add
- */
- virtual int32_t
- DeleteIncomingRenderStream(const uint32_t streamId) = 0;
-
- /*
- * Add incoming render callback, used for external rendering
- *
- * streamID - id of the stream the callback is used for
- * renderObject - the VideoRenderCallback to use for this stream, NULL to remove
- *
- * Return - callback class to use for delivering new frames to render.
- */
- virtual int32_t
- AddExternalRenderCallback(const uint32_t streamId,
- VideoRenderCallback* renderObject) = 0;
-
- /*
- * Get the porperties for an incoming render stream
- *
- * streamID - [in] id of the stream to get properties for
- * zOrder - [out] relative render order for the streams, 0 = on top
- * left - [out] position of the stream in the window, [0.0f, 1.0f]
- * top - [out] position of the stream in the window, [0.0f, 1.0f]
- * right - [out] position of the stream in the window, [0.0f, 1.0f]
- * bottom - [out] position of the stream in the window, [0.0f, 1.0f]
- */
- virtual int32_t
- GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const = 0;
- /*
- * The incoming frame rate to the module, not the rate rendered in the window.
- */
- virtual uint32_t
- GetIncomingFrameRate(const uint32_t streamId) = 0;
-
- /*
- * Returns the number of incoming streams added to this render module
- */
- virtual uint32_t GetNumIncomingRenderStreams() const = 0;
-
- /*
- * Returns true if this render module has the streamId added, false otherwise.
- */
- virtual bool
- HasIncomingRenderStream(const uint32_t streamId) const = 0;
-
- /*
- * Registers a callback to get raw images in the same time as sent
- * to the renderer. To be used for external rendering.
- */
- virtual int32_t
- RegisterRawFrameCallback(const uint32_t streamId,
- VideoRenderCallback* callbackObj) = 0;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- /*
- * Starts rendering the specified stream
- */
- virtual int32_t StartRender(const uint32_t streamId) = 0;
-
- /*
- * Stops the renderer
- */
- virtual int32_t StopRender(const uint32_t streamId) = 0;
-
- /*
- * Resets the renderer
- * No streams are removed. The state should be as after AddStream was called.
- */
- virtual int32_t ResetRender() = 0;
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- /*
- * Returns the preferred render video type
- */
- virtual RawVideoType PreferredVideoType() const = 0;
-
- /*
- * Returns true if the renderer is in fullscreen mode, otherwise false.
- */
- virtual bool IsFullScreen() = 0;
-
- /*
- * Gets screen resolution in pixels
- */
- virtual int32_t
- GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const = 0;
-
- /*
- * Get the actual render rate for this stream. I.e rendered frame rate,
- * not frames delivered to the renderer.
- */
- virtual uint32_t RenderFrameRate(const uint32_t streamId) = 0;
-
- /*
- * Set cropping of incoming stream
- */
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- /*
- * re-configure renderer
- */
-
- // Set the expected time needed by the graphics card or external renderer,
- // i.e. frames will be released for rendering |delay_ms| before set render
- // time in the video frame.
- virtual int32_t SetExpectedRenderDelay(uint32_t stream_id,
- int32_t delay_ms) = 0;
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- virtual int32_t SetTransparentBackground(const bool enable) = 0;
-
- virtual int32_t FullScreenRender(void* window, const bool enable) = 0;
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom) = 0;
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float right, const float bottom) = 0;
-
- /*
- * Set a start image. The image is rendered before the first image has been delivered
- */
- virtual int32_t SetStartImage(const uint32_t streamId,
- const VideoFrame& videoFrame) = 0;
-
- /*
- * Set a timout image. The image is rendered if no videoframe has been delivered
- */
- virtual int32_t SetTimeoutImage(const uint32_t streamId,
- const VideoFrame& videoFrame,
- const uint32_t timeout) = 0;
-};
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_defines.h b/chromium/third_party/webrtc/modules/video_render/video_render_defines.h
deleted file mode 100644
index 999707cb6e5..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/video_render_defines.h
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/common_video/include/incoming_video_stream.h"
-#include "webrtc/modules/include/module_common_types.h"
-
-namespace webrtc
-{
-// Defines
-#ifndef NULL
-#define NULL 0
-#endif
-
-// Enums
-enum VideoRenderType
-{
- kRenderExternal = 0, // External
- kRenderWindows = 1, // Windows
- kRenderCocoa = 2, // Mac
- kRenderCarbon = 3,
- kRenderiOS = 4, // iPhone
- kRenderAndroid = 5, // Android
- kRenderX11 = 6, // Linux
- kRenderDefault
-};
-
-// Runtime errors
-enum VideoRenderError
-{
- kRenderShutDown = 0,
- kRenderPerformanceAlarm = 1
-};
-
-// Feedback class to be implemented by module user
-class VideoRenderFeedback
-{
-public:
- virtual void OnRenderError(const int32_t streamId,
- const VideoRenderError error) = 0;
-
-protected:
- virtual ~VideoRenderFeedback()
- {
- }
-};
-
-// Mobile enums
-enum StretchMode
-{
- kStretchToInsideEdge = 1,
- kStretchToOutsideEdge = 2,
- kStretchMatchWidth = 3,
- kStretchMatchHeight = 4,
- kStretchNone = 5
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_impl.cc b/chromium/third_party/webrtc/modules/video_render/video_render_impl.cc
deleted file mode 100644
index 75403f8dd53..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/video_render_impl.cc
+++ /dev/null
@@ -1,602 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <assert.h>
-
-#include "webrtc/common_video/include/incoming_video_stream.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_render/external/video_render_external_impl.h"
-#include "webrtc/modules/video_render/i_video_render.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-#include "webrtc/modules/video_render/video_render_impl.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-VideoRender*
-VideoRender::CreateVideoRender(const int32_t id,
- void* window,
- const bool fullscreen,
- const VideoRenderType videoRenderType/*=kRenderDefault*/)
-{
- VideoRenderType resultVideoRenderType = videoRenderType;
- if (videoRenderType == kRenderDefault)
- {
- resultVideoRenderType = kRenderExternal;
- }
- return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
- fullscreen);
-}
-
-void VideoRender::DestroyVideoRender(
- VideoRender* module)
-{
- if (module)
- {
- delete module;
- }
-}
-
-ModuleVideoRenderImpl::ModuleVideoRenderImpl(
- const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
- _id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
- _ptrWindow(window), _fullScreen(fullscreen), _ptrRenderer(NULL)
-{
-
- // Create platform specific renderer
- switch (videoRenderType)
- {
- case kRenderExternal:
- {
- VideoRenderExternalImpl* ptrRenderer(NULL);
- ptrRenderer = new VideoRenderExternalImpl(_id, videoRenderType,
- window, _fullScreen);
- if (ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
- }
- }
- break;
- default:
- // Error...
- break;
- }
- if (_ptrRenderer)
- {
- if (_ptrRenderer->Init() == -1)
- {
- }
- }
-}
-
-ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
-{
- delete &_moduleCrit;
-
- for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
- it != _streamRenderMap.end();
- ++it) {
- delete it->second;
- }
-
- // Delete platform specific renderer
- if (_ptrRenderer)
- {
- VideoRenderType videoRenderType = _ptrRenderer->RenderType();
-
- switch (videoRenderType)
- {
- case kRenderExternal:
- {
- VideoRenderExternalImpl
- * ptrRenderer =
- reinterpret_cast<VideoRenderExternalImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-
- default:
- // Error...
- break;
- }
- }
-}
-
-int64_t ModuleVideoRenderImpl::TimeUntilNextProcess()
-{
- // Not used
- return 50;
-}
-void ModuleVideoRenderImpl::Process() {}
-
-void*
-ModuleVideoRenderImpl::Window()
-{
- CriticalSectionScoped cs(&_moduleCrit);
- return _ptrWindow;
-}
-
-int32_t ModuleVideoRenderImpl::ChangeWindow(void* window)
-{
- return -1;
-}
-
-int32_t ModuleVideoRenderImpl::Id()
-{
- CriticalSectionScoped cs(&_moduleCrit);
- return _id;
-}
-
-uint32_t ModuleVideoRenderImpl::GetIncomingFrameRate(const uint32_t streamId) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- IncomingVideoStreamMap::iterator it = _streamRenderMap.find(streamId);
-
- if (it == _streamRenderMap.end()) {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: stream doesn't exist",
- __FUNCTION__);
- return 0;
- }
- assert(it->second != NULL);
- return it->second->IncomingRate();
-}
-
-VideoRenderCallback*
-ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return NULL;
- }
-
- if (_streamRenderMap.find(streamId) != _streamRenderMap.end()) {
- // The stream already exists...
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream already exists", __FUNCTION__);
- return NULL;
- }
-
- VideoRenderCallback* ptrRenderCallback =
- _ptrRenderer->AddIncomingRenderStream(streamId, zOrder, left, top,
- right, bottom);
- if (ptrRenderCallback == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Can't create incoming stream in renderer",
- __FUNCTION__);
- return NULL;
- }
-
- // Create platform independant code
- IncomingVideoStream* ptrIncomingStream =
- new IncomingVideoStream(streamId, false);
- ptrIncomingStream->SetRenderCallback(ptrRenderCallback);
- VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();
-
- // Store the stream
- _streamRenderMap[streamId] = ptrIncomingStream;
-
- return moduleCallback;
-}
-
-int32_t ModuleVideoRenderImpl::DeleteIncomingRenderStream(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
- if (item == _streamRenderMap.end())
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
-
- delete item->second;
-
- _ptrRenderer->DeleteIncomingRenderStream(streamId);
-
- _streamRenderMap.erase(item);
-
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::AddExternalRenderCallback(
- const uint32_t streamId,
- VideoRenderCallback* renderObject) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
-
- if (item == _streamRenderMap.end())
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
-
- if (item->second == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not get stream", __FUNCTION__);
- return -1;
- }
- item->second->SetExternalCallback(renderObject);
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- return _ptrRenderer->GetIncomingRenderStreamProperties(streamId, zOrder,
- left, top, right,
- bottom);
-}
-
-uint32_t ModuleVideoRenderImpl::GetNumIncomingRenderStreams() const
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- return static_cast<uint32_t>(_streamRenderMap.size());
-}
-
-bool ModuleVideoRenderImpl::HasIncomingRenderStream(
- const uint32_t streamId) const {
- CriticalSectionScoped cs(&_moduleCrit);
-
- return _streamRenderMap.find(streamId) != _streamRenderMap.end();
-}
-
-int32_t ModuleVideoRenderImpl::RegisterRawFrameCallback(
- const uint32_t streamId,
- VideoRenderCallback* callbackObj) {
- return -1;
-}
-
-int32_t ModuleVideoRenderImpl::StartRender(const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- // Start the stream
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
-
- if (item == _streamRenderMap.end())
- {
- return -1;
- }
-
- if (item->second->Start() == -1)
- {
- return -1;
- }
-
- // Start the HW renderer
- if (_ptrRenderer->StartRender() == -1)
- {
- return -1;
- }
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::StopRender(const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s(%d): No renderer", __FUNCTION__, streamId);
- return -1;
- }
-
- // Stop the incoming stream
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
-
- if (item == _streamRenderMap.end())
- {
- return -1;
- }
-
- if (item->second->Stop() == -1)
- {
- return -1;
- }
-
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::ResetRender()
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- int32_t ret = 0;
- // Loop through all incoming streams and reset them
- for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
- it != _streamRenderMap.end();
- ++it) {
- if (it->second->Reset() == -1)
- ret = -1;
- }
- return ret;
-}
-
-RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (_ptrRenderer == NULL)
- {
- return kVideoI420;
- }
-
- return _ptrRenderer->PerferedVideoType();
-}
-
-bool ModuleVideoRenderImpl::IsFullScreen()
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->FullScreen();
-}
-
-int32_t ModuleVideoRenderImpl::GetScreenResolution(
- uint32_t& screenWidth,
- uint32_t& screenHeight) const
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->GetScreenResolution(screenWidth, screenHeight);
-}
-
-uint32_t ModuleVideoRenderImpl::RenderFrameRate(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->RenderFrameRate(streamId);
-}
-
-int32_t ModuleVideoRenderImpl::SetStreamCropping(
- const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->SetStreamCropping(streamId, left, top, right, bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetTransparentBackground(const bool enable)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->SetTransparentBackground(enable);
-}
-
-int32_t ModuleVideoRenderImpl::FullScreenRender(void* window, const bool enable)
-{
- return -1;
-}
-
-int32_t ModuleVideoRenderImpl::SetText(
- const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
- return _ptrRenderer->SetText(textId, text, textLength, textColorRef,
- backgroundColorRef, left, top, right, bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
- return _ptrRenderer->SetBitmap(bitMap, pictureId, colorKey, left, top,
- right, bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetExpectedRenderDelay(
- uint32_t stream_id, int32_t delay_ms) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
-
- IncomingVideoStreamMap::const_iterator item =
- _streamRenderMap.find(stream_id);
- if (item == _streamRenderMap.end()) {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s(%u, %d): stream doesn't exist", __FUNCTION__, stream_id,
- delay_ms);
- return -1;
- }
-
- assert(item->second != NULL);
- return item->second->SetExpectedRenderDelay(delay_ms);
-}
-
-int32_t ModuleVideoRenderImpl::ConfigureRenderer(
- const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->ConfigureRenderer(streamId, zOrder, left, top, right,
- bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetStartImage(const uint32_t streamId,
- const VideoFrame& videoFrame) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- IncomingVideoStreamMap::const_iterator item =
- _streamRenderMap.find(streamId);
- if (item == _streamRenderMap.end())
- {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
- assert (item->second != NULL);
- item->second->SetStartImage(videoFrame);
- return 0;
-
-}
-
-int32_t ModuleVideoRenderImpl::SetTimeoutImage(const uint32_t streamId,
- const VideoFrame& videoFrame,
- const uint32_t timeout) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- IncomingVideoStreamMap::const_iterator item =
- _streamRenderMap.find(streamId);
- if (item == _streamRenderMap.end())
- {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
- assert(item->second != NULL);
- item->second->SetTimeoutImage(videoFrame, timeout);
- return 0;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_impl.h b/chromium/third_party/webrtc/modules/video_render/video_render_impl.h
deleted file mode 100644
index 12244a60b82..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/video_render_impl.h
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
-
-#include <map>
-
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_render/video_render.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-class IncomingVideoStream;
-class IVideoRender;
-
-// Class definitions
-class ModuleVideoRenderImpl: public VideoRender
-{
-public:
- /*
- * VideoRenderer constructor/destructor
- */
- ModuleVideoRenderImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window, const bool fullscreen);
-
- virtual ~ModuleVideoRenderImpl();
-
- virtual int64_t TimeUntilNextProcess();
- virtual void Process();
-
- /*
- * Returns the render window
- */
- virtual void* Window();
-
- /*
- * Change render window
- */
- virtual int32_t ChangeWindow(void* window);
-
- /*
- * Returns module id
- */
- int32_t Id();
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- /*
- * Add incoming render stream
- */
- virtual VideoRenderCallback
- * AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right, const float bottom);
- /*
- * Delete incoming render stream
- */
- virtual int32_t
- DeleteIncomingRenderStream(const uint32_t streamId);
-
- /*
- * Add incoming render callback, used for external rendering
- */
- virtual int32_t
- AddExternalRenderCallback(const uint32_t streamId,
- VideoRenderCallback* renderObject);
-
- /*
- * Get the porperties for an incoming render stream
- */
- virtual int32_t
- GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const;
- /*
- * Incoming frame rate for the specified stream.
- */
- virtual uint32_t GetIncomingFrameRate(const uint32_t streamId);
-
- /*
- * Returns the number of incoming streams added to this render module
- */
- virtual uint32_t GetNumIncomingRenderStreams() const;
-
- /*
- * Returns true if this render module has the streamId added, false otherwise.
- */
- virtual bool HasIncomingRenderStream(const uint32_t streamId) const;
-
- /*
- *
- */
- virtual int32_t
- RegisterRawFrameCallback(const uint32_t streamId,
- VideoRenderCallback* callbackObj);
-
- virtual int32_t SetExpectedRenderDelay(uint32_t stream_id,
- int32_t delay_ms);
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- /*
- * Starts rendering the specified stream
- */
- virtual int32_t StartRender(const uint32_t streamId);
-
- /*
- * Stops the renderer
- */
- virtual int32_t StopRender(const uint32_t streamId);
-
- /*
- * Sets the renderer in start state, no streams removed.
- */
- virtual int32_t ResetRender();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- /*
- * Returns the prefered render video type
- */
- virtual RawVideoType PreferredVideoType() const;
-
- /*
- * Returns true if the renderer is in fullscreen mode, otherwise false.
- */
- virtual bool IsFullScreen();
-
- /*
- * Gets screen resolution in pixels
- */
- virtual int32_t
- GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const;
-
- /*
- * Get the actual render rate for this stream. I.e rendered frame rate,
- * not frames delivered to the renderer.
- */
- virtual uint32_t RenderFrameRate(const uint32_t streamId);
-
- /*
- * Set cropping of incoming stream
- */
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t FullScreenRender(void* window, const bool enable);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetStartImage(const uint32_t streamId,
- const VideoFrame& videoFrame);
-
- virtual int32_t SetTimeoutImage(const uint32_t streamId,
- const VideoFrame& videoFrame,
- const uint32_t timeout);
-
-private:
- int32_t _id;
- CriticalSectionWrapper& _moduleCrit;
- void* _ptrWindow;
- bool _fullScreen;
-
- IVideoRender* _ptrRenderer;
- typedef std::map<uint32_t, IncomingVideoStream*> IncomingVideoStreamMap;
- IncomingVideoStreamMap _streamRenderMap;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_internal.h b/chromium/third_party/webrtc/modules/video_render/video_render_internal.h
deleted file mode 100644
index 0508c1a7087..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/video_render_internal.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
-
-#ifdef ANDROID
-#include <jni.h>
-
-namespace webrtc {
-
-// In order to be able to use the internal webrtc video render
-// for android, the jvm objects must be set via this method.
-int32_t SetRenderAndroidVM(JavaVM* javaVM);
-
-} // namespace webrtc
-
-#endif // ANDROID
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_internal_impl.cc b/chromium/third_party/webrtc/modules/video_render/video_render_internal_impl.cc
deleted file mode 100644
index 2090fce5f86..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/video_render_internal_impl.cc
+++ /dev/null
@@ -1,825 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <assert.h>
-
-#include "webrtc/common_video/include/incoming_video_stream.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_render/i_video_render.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-#include "webrtc/modules/video_render/video_render_impl.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-#if defined (_WIN32)
-#include "webrtc/modules/video_render/windows/video_render_windows_impl.h"
-#define STANDARD_RENDERING kRenderWindows
-
-// WEBRTC_IOS should go before WEBRTC_MAC because WEBRTC_MAC
-// gets defined if WEBRTC_IOS is defined
-#elif defined(WEBRTC_IOS)
-#define STANDARD_RENDERING kRenderiOS
-#include "webrtc/modules/video_render/ios/video_render_ios_impl.h"
-#elif defined(WEBRTC_MAC)
-#if defined(COCOA_RENDERING)
-#define STANDARD_RENDERING kRenderCocoa
-#include "webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h"
-#elif defined(CARBON_RENDERING)
-#define STANDARD_RENDERING kRenderCarbon
-#include "webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h"
-#endif
-
-#elif defined(WEBRTC_ANDROID)
-#include "webrtc/modules/video_render/android/video_render_android_impl.h"
-#include "webrtc/modules/video_render/android/video_render_android_native_opengl2.h"
-#include "webrtc/modules/video_render/android/video_render_android_surface_view.h"
-#define STANDARD_RENDERING kRenderAndroid
-
-#elif defined(WEBRTC_LINUX)
-#include "webrtc/modules/video_render/linux/video_render_linux_impl.h"
-#define STANDARD_RENDERING kRenderX11
-
-#else
-//Other platforms
-#endif
-
-// For external rendering
-#include "webrtc/modules/video_render/external/video_render_external_impl.h"
-#ifndef STANDARD_RENDERING
-#define STANDARD_RENDERING kRenderExternal
-#endif // STANDARD_RENDERING
-
-namespace webrtc {
-
-VideoRender*
-VideoRender::CreateVideoRender(const int32_t id,
- void* window,
- const bool fullscreen,
- const VideoRenderType videoRenderType/*=kRenderDefault*/)
-{
- VideoRenderType resultVideoRenderType = videoRenderType;
- if (videoRenderType == kRenderDefault)
- {
- resultVideoRenderType = STANDARD_RENDERING;
- }
- return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
- fullscreen);
-}
-
-void VideoRender::DestroyVideoRender(
- VideoRender* module)
-{
- if (module)
- {
- delete module;
- }
-}
-
-ModuleVideoRenderImpl::ModuleVideoRenderImpl(
- const int32_t id,
- const VideoRenderType videoRenderType,
- void* window,
- const bool fullscreen) :
- _id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
- _ptrWindow(window), _fullScreen(fullscreen), _ptrRenderer(NULL)
-{
-
- // Create platform specific renderer
- switch (videoRenderType)
- {
-#if defined(_WIN32)
- case kRenderWindows:
- {
- VideoRenderWindowsImpl* ptrRenderer;
- ptrRenderer = new VideoRenderWindowsImpl(_id, videoRenderType, window, _fullScreen);
- if (ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- }
- }
- break;
-
-#elif defined(WEBRTC_IOS)
- case kRenderiOS:
- {
- VideoRenderIosImpl* ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
- if(ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- }
- }
- break;
-
-#elif defined(WEBRTC_MAC)
-
-#if defined(COCOA_RENDERING)
- case kRenderCocoa:
- {
- VideoRenderMacCocoaImpl* ptrRenderer = new VideoRenderMacCocoaImpl(_id, videoRenderType, window, _fullScreen);
- if(ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- }
- }
-
- break;
-#elif defined(CARBON_RENDERING)
- case kRenderCarbon:
- {
- VideoRenderMacCarbonImpl* ptrRenderer = new VideoRenderMacCarbonImpl(_id, videoRenderType, window, _fullScreen);
- if(ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- }
- }
- break;
-#endif
-
-#elif defined(WEBRTC_ANDROID)
- case kRenderAndroid:
- {
- if(AndroidNativeOpenGl2Renderer::UseOpenGL2(window))
- {
- AndroidNativeOpenGl2Renderer* ptrRenderer = NULL;
- ptrRenderer = new AndroidNativeOpenGl2Renderer(_id, videoRenderType, window, _fullScreen);
- if (ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
- }
- }
- else
- {
- AndroidSurfaceViewRenderer* ptrRenderer = NULL;
- ptrRenderer = new AndroidSurfaceViewRenderer(_id, videoRenderType, window, _fullScreen);
- if (ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
- }
- }
-
- }
- break;
-#elif defined(WEBRTC_LINUX)
- case kRenderX11:
- {
- VideoRenderLinuxImpl* ptrRenderer = NULL;
- ptrRenderer = new VideoRenderLinuxImpl(_id, videoRenderType, window, _fullScreen);
- if ( ptrRenderer )
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
- }
- }
- break;
-
-#else
- // Other platforms
-#endif
- case kRenderExternal:
- {
- VideoRenderExternalImpl* ptrRenderer(NULL);
- ptrRenderer = new VideoRenderExternalImpl(_id, videoRenderType,
- window, _fullScreen);
- if (ptrRenderer)
- {
- _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
- }
- }
- break;
- default:
- // Error...
- break;
- }
- if (_ptrRenderer)
- {
- if (_ptrRenderer->Init() == -1)
- {
- }
- }
-}
-
-ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
-{
- delete &_moduleCrit;
-
- for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
- it != _streamRenderMap.end();
- ++it) {
- delete it->second;
- }
-
- // Delete platform specific renderer
- if (_ptrRenderer)
- {
- VideoRenderType videoRenderType = _ptrRenderer->RenderType();
-
- switch (videoRenderType)
- {
- case kRenderExternal:
- {
- VideoRenderExternalImpl
- * ptrRenderer =
- reinterpret_cast<VideoRenderExternalImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#if defined(_WIN32)
- case kRenderWindows:
- {
- VideoRenderWindowsImpl* ptrRenderer = reinterpret_cast<VideoRenderWindowsImpl*>(_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#elif defined(WEBRTC_IOS)
- case kRenderiOS:
- {
- VideoRenderIosImpl* ptrRenderer = reinterpret_cast<VideoRenderIosImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#elif defined(WEBRTC_MAC)
-
-#if defined(COCOA_RENDERING)
- case kRenderCocoa:
- {
- VideoRenderMacCocoaImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCocoaImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#elif defined(CARBON_RENDERING)
- case kRenderCarbon:
- {
- VideoRenderMacCarbonImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCarbonImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#endif
-
-#elif defined(WEBRTC_ANDROID)
- case kRenderAndroid:
- {
- VideoRenderAndroid* ptrRenderer = reinterpret_cast<VideoRenderAndroid*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-
-#elif defined(WEBRTC_LINUX)
- case kRenderX11:
- {
- VideoRenderLinuxImpl* ptrRenderer = reinterpret_cast<VideoRenderLinuxImpl*> (_ptrRenderer);
- _ptrRenderer = NULL;
- delete ptrRenderer;
- }
- break;
-#else
- //other platforms
-#endif
-
- default:
- // Error...
- break;
- }
- }
-}
-
-int64_t ModuleVideoRenderImpl::TimeUntilNextProcess()
-{
- // Not used
- return 50;
-}
-void ModuleVideoRenderImpl::Process() {}
-
-void*
-ModuleVideoRenderImpl::Window()
-{
- CriticalSectionScoped cs(&_moduleCrit);
- return _ptrWindow;
-}
-
-int32_t ModuleVideoRenderImpl::ChangeWindow(void* window)
-{
-
- CriticalSectionScoped cs(&_moduleCrit);
-
-#if defined(WEBRTC_IOS) // WEBRTC_IOS must go before WEBRTC_MAC
- _ptrRenderer = NULL;
- delete _ptrRenderer;
-
- VideoRenderIosImpl* ptrRenderer;
- ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
- if (!ptrRenderer)
- {
- return -1;
- }
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- return _ptrRenderer->ChangeWindow(window);
-#elif defined(WEBRTC_MAC)
-
- _ptrRenderer = NULL;
- delete _ptrRenderer;
-
-#if defined(COCOA_RENDERING)
- VideoRenderMacCocoaImpl* ptrRenderer;
- ptrRenderer = new VideoRenderMacCocoaImpl(_id, kRenderCocoa, window, _fullScreen);
-#elif defined(CARBON_RENDERING)
- VideoRenderMacCarbonImpl* ptrRenderer;
- ptrRenderer = new VideoRenderMacCarbonImpl(_id, kRenderCarbon, window, _fullScreen);
-#endif
- if (!ptrRenderer)
- {
- return -1;
- }
- _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
- return _ptrRenderer->ChangeWindow(window);
-
-#else
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
- return _ptrRenderer->ChangeWindow(window);
-
-#endif
-}
-
-int32_t ModuleVideoRenderImpl::Id()
-{
- CriticalSectionScoped cs(&_moduleCrit);
- return _id;
-}
-
-uint32_t ModuleVideoRenderImpl::GetIncomingFrameRate(const uint32_t streamId) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- IncomingVideoStreamMap::iterator it = _streamRenderMap.find(streamId);
-
- if (it == _streamRenderMap.end()) {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError,
- kTraceVideoRenderer,
- _id,
- "%s: stream doesn't exist",
- __FUNCTION__);
- return 0;
- }
- assert(it->second != NULL);
- return it->second->IncomingRate();
-}
-
-VideoRenderCallback*
-ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return NULL;
- }
-
- if (_streamRenderMap.find(streamId) != _streamRenderMap.end()) {
- // The stream already exists...
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream already exists", __FUNCTION__);
- return NULL;
- }
-
- VideoRenderCallback* ptrRenderCallback =
- _ptrRenderer->AddIncomingRenderStream(streamId, zOrder, left, top,
- right, bottom);
- if (ptrRenderCallback == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: Can't create incoming stream in renderer",
- __FUNCTION__);
- return NULL;
- }
-
- // Create platform independant code
- IncomingVideoStream* ptrIncomingStream =
- new IncomingVideoStream(streamId, false);
- ptrIncomingStream->SetRenderCallback(ptrRenderCallback);
- VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();
-
- // Store the stream
- _streamRenderMap[streamId] = ptrIncomingStream;
-
- return moduleCallback;
-}
-
-int32_t ModuleVideoRenderImpl::DeleteIncomingRenderStream(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
- if (item == _streamRenderMap.end())
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
-
- delete item->second;
-
- _ptrRenderer->DeleteIncomingRenderStream(streamId);
-
- _streamRenderMap.erase(item);
-
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::AddExternalRenderCallback(
- const uint32_t streamId,
- VideoRenderCallback* renderObject) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
-
- if (item == _streamRenderMap.end())
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
-
- if (item->second == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: could not get stream", __FUNCTION__);
- return -1;
- }
- item->second->SetExternalCallback(renderObject);
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- return _ptrRenderer->GetIncomingRenderStreamProperties(streamId, zOrder,
- left, top, right,
- bottom);
-}
-
-uint32_t ModuleVideoRenderImpl::GetNumIncomingRenderStreams() const
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- return static_cast<uint32_t>(_streamRenderMap.size());
-}
-
-bool ModuleVideoRenderImpl::HasIncomingRenderStream(
- const uint32_t streamId) const {
- CriticalSectionScoped cs(&_moduleCrit);
-
- return _streamRenderMap.find(streamId) != _streamRenderMap.end();
-}
-
-int32_t ModuleVideoRenderImpl::RegisterRawFrameCallback(
- const uint32_t streamId,
- VideoRenderCallback* callbackObj) {
- return -1;
-}
-
-int32_t ModuleVideoRenderImpl::StartRender(const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- // Start the stream
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
-
- if (item == _streamRenderMap.end())
- {
- return -1;
- }
-
- if (item->second->Start() == -1)
- {
- return -1;
- }
-
- // Start the HW renderer
- if (_ptrRenderer->StartRender() == -1)
- {
- return -1;
- }
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::StopRender(const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s(%d): No renderer", __FUNCTION__, streamId);
- return -1;
- }
-
- // Stop the incoming stream
- IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
-
- if (item == _streamRenderMap.end())
- {
- return -1;
- }
-
- if (item->second->Stop() == -1)
- {
- return -1;
- }
-
- return 0;
-}
-
-int32_t ModuleVideoRenderImpl::ResetRender()
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- int32_t ret = 0;
- // Loop through all incoming streams and reset them
- for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
- it != _streamRenderMap.end();
- ++it) {
- if (it->second->Reset() == -1)
- ret = -1;
- }
- return ret;
-}
-
-RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (_ptrRenderer == NULL)
- {
- return kVideoI420;
- }
-
- return _ptrRenderer->PerferedVideoType();
-}
-
-bool ModuleVideoRenderImpl::IsFullScreen()
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->FullScreen();
-}
-
-int32_t ModuleVideoRenderImpl::GetScreenResolution(
- uint32_t& screenWidth,
- uint32_t& screenHeight) const
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->GetScreenResolution(screenWidth, screenHeight);
-}
-
-uint32_t ModuleVideoRenderImpl::RenderFrameRate(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->RenderFrameRate(streamId);
-}
-
-int32_t ModuleVideoRenderImpl::SetStreamCropping(
- const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->SetStreamCropping(streamId, left, top, right, bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetTransparentBackground(const bool enable)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->SetTransparentBackground(enable);
-}
-
-int32_t ModuleVideoRenderImpl::FullScreenRender(void* window, const bool enable)
-{
- return -1;
-}
-
-int32_t ModuleVideoRenderImpl::SetText(
- const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
- return _ptrRenderer->SetText(textId, text, textLength, textColorRef,
- backgroundColorRef, left, top, right, bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
- return _ptrRenderer->SetBitmap(bitMap, pictureId, colorKey, left, top,
- right, bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetExpectedRenderDelay(
- uint32_t stream_id, int32_t delay_ms) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
-
- IncomingVideoStreamMap::const_iterator item =
- _streamRenderMap.find(stream_id);
- if (item == _streamRenderMap.end()) {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s(%u, %d): stream doesn't exist", __FUNCTION__, stream_id,
- delay_ms);
- return -1;
- }
-
- assert(item->second != NULL);
- return item->second->SetExpectedRenderDelay(delay_ms);
-}
-
-int32_t ModuleVideoRenderImpl::ConfigureRenderer(
- const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return false;
- }
- return _ptrRenderer->ConfigureRenderer(streamId, zOrder, left, top, right,
- bottom);
-}
-
-int32_t ModuleVideoRenderImpl::SetStartImage(const uint32_t streamId,
- const VideoFrame& videoFrame) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- IncomingVideoStreamMap::const_iterator item =
- _streamRenderMap.find(streamId);
- if (item == _streamRenderMap.end())
- {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
- assert (item->second != NULL);
- item->second->SetStartImage(videoFrame);
- return 0;
-
-}
-
-int32_t ModuleVideoRenderImpl::SetTimeoutImage(const uint32_t streamId,
- const VideoFrame& videoFrame,
- const uint32_t timeout) {
- CriticalSectionScoped cs(&_moduleCrit);
-
- if (!_ptrRenderer)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: No renderer", __FUNCTION__);
- return -1;
- }
-
- IncomingVideoStreamMap::const_iterator item =
- _streamRenderMap.find(streamId);
- if (item == _streamRenderMap.end())
- {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return -1;
- }
- assert(item->second != NULL);
- item->second->SetTimeoutImage(videoFrame, timeout);
- return 0;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/windows/i_video_render_win.h b/chromium/third_party/webrtc/modules/video_render/windows/i_video_render_win.h
deleted file mode 100644
index 6dbb4fd3cb9..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/windows/i_video_render_win.h
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
-
-#include "webrtc/modules/video_render/video_render.h"
-
-namespace webrtc {
-
-// Class definitions
-class IVideoRenderWin
-{
-public:
- /**************************************************************************
- *
- * Constructor/destructor
- *
- ***************************************************************************/
- virtual ~IVideoRenderWin()
- {
- };
-
- virtual int32_t Init() = 0;
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- virtual VideoRenderCallback
- * CreateChannel(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- virtual int32_t DeleteChannel(const uint32_t streamId) = 0;
-
- virtual int32_t GetStreamSettings(const uint32_t channel,
- const uint16_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) = 0;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender() = 0;
-
- virtual int32_t StopRender() = 0;
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual bool IsFullScreen() = 0;
-
- virtual int32_t SetCropping(const uint32_t channel,
- const uint16_t streamId,
- const float left, const float top,
- const float right, const float bottom) = 0;
-
- virtual int32_t ConfigureRenderer(const uint32_t channel,
- const uint16_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom) = 0;
-
- virtual int32_t SetTransparentBackground(const bool enable) = 0;
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t colorText,
- const uint32_t colorBg,
- const float left, const float top,
- const float rigth, const float bottom) = 0;
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom) = 0;
-
- virtual int32_t ChangeWindow(void* window) = 0;
-
- virtual int32_t GetGraphicsMemory(uint64_t& totalMemory,
- uint64_t& availableMemory) = 0;
-
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.cc b/chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.cc
deleted file mode 100644
index b59b944e483..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.cc
+++ /dev/null
@@ -1,1160 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Own include file
-#include "webrtc/modules/video_render/windows/video_render_direct3d9.h"
-
-// System include files
-#include <windows.h>
-
-// WebRtc include files
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/event_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-// A structure for our custom vertex type
-struct CUSTOMVERTEX
-{
- FLOAT x, y, z;
- DWORD color; // The vertex color
- FLOAT u, v;
-};
-
-// Our custom FVF, which describes our custom vertex structure
-#define D3DFVF_CUSTOMVERTEX (D3DFVF_XYZ|D3DFVF_DIFFUSE|D3DFVF_TEX1)
-
-/*
- *
- * D3D9Channel
- *
- */
-D3D9Channel::D3D9Channel(LPDIRECT3DDEVICE9 pd3DDevice,
- CriticalSectionWrapper* critSect,
- Trace* trace) :
- _width(0),
- _height(0),
- _pd3dDevice(pd3DDevice),
- _pTexture(NULL),
- _bufferIsUpdated(false),
- _critSect(critSect),
- _streamId(0),
- _zOrder(0),
- _startWidth(0),
- _startHeight(0),
- _stopWidth(0),
- _stopHeight(0)
-{
-
-}
-
-D3D9Channel::~D3D9Channel()
-{
- //release the texture
- if (_pTexture != NULL)
- {
- _pTexture->Release();
- _pTexture = NULL;
- }
-}
-
-void D3D9Channel::SetStreamSettings(uint16_t streamId,
- uint32_t zOrder,
- float startWidth,
- float startHeight,
- float stopWidth,
- float stopHeight)
-{
- _streamId = streamId;
- _zOrder = zOrder;
- _startWidth = startWidth;
- _startHeight = startHeight;
- _stopWidth = stopWidth;
- _stopHeight = stopHeight;
-}
-
-int D3D9Channel::GetStreamSettings(uint16_t streamId,
- uint32_t& zOrder,
- float& startWidth,
- float& startHeight,
- float& stopWidth,
- float& stopHeight)
-{
- streamId = _streamId;
- zOrder = _zOrder;
- startWidth = _startWidth;
- startHeight = _startHeight;
- stopWidth = _stopWidth;
- stopHeight = _stopHeight;
- return 0;
-}
-
-int D3D9Channel::GetTextureWidth()
-{
- return _width;
-}
-
-int D3D9Channel::GetTextureHeight()
-{
- return _height;
-}
-
-// Called from video engine when a the frame size changed
-int D3D9Channel::FrameSizeChange(int width, int height, int numberOfStreams)
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
- "FrameSizeChange, wifth: %d, height: %d, streams: %d", width,
- height, numberOfStreams);
-
- CriticalSectionScoped cs(_critSect);
- _width = width;
- _height = height;
-
- //clean the previous texture
- if (_pTexture != NULL)
- {
- _pTexture->Release();
- _pTexture = NULL;
- }
-
- HRESULT ret = E_POINTER;
-
- if (_pd3dDevice)
- ret = _pd3dDevice->CreateTexture(_width, _height, 1, 0, D3DFMT_A8R8G8B8,
- D3DPOOL_MANAGED, &_pTexture, NULL);
-
- if (FAILED(ret))
- {
- _pTexture = NULL;
- return -1;
- }
-
- return 0;
-}
-
-int32_t D3D9Channel::RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame) {
- CriticalSectionScoped cs(_critSect);
- if (_width != videoFrame.width() || _height != videoFrame.height())
- {
- if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1)
- {
- return -1;
- }
- }
- return DeliverFrame(videoFrame);
-}
-
-// Called from video engine when a new frame should be rendered.
-int D3D9Channel::DeliverFrame(const VideoFrame& videoFrame) {
- WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
- "DeliverFrame to D3D9Channel");
-
- CriticalSectionScoped cs(_critSect);
-
- // FIXME if _bufferIsUpdated is still true (not be renderred), do we want to
- // update the texture? probably not
- if (_bufferIsUpdated) {
- WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
- "Last frame hasn't been rendered yet. Drop this frame.");
- return -1;
- }
-
- if (!_pd3dDevice) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "D3D for rendering not initialized.");
- return -1;
- }
-
- if (!_pTexture) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Texture for rendering not initialized.");
- return -1;
- }
-
- D3DLOCKED_RECT lr;
-
- if (FAILED(_pTexture->LockRect(0, &lr, NULL, 0))) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Failed to lock a texture in D3D9 Channel.");
- return -1;
- }
- UCHAR* pRect = (UCHAR*) lr.pBits;
-
- ConvertFromI420(videoFrame, kARGB, 0, pRect);
-
- if (FAILED(_pTexture->UnlockRect(0))) {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Failed to unlock a texture in D3D9 Channel.");
- return -1;
- }
-
- _bufferIsUpdated = true;
- return 0;
-}
-
-// Called by d3d channel owner to indicate the frame/texture has been rendered off
-int D3D9Channel::RenderOffFrame()
-{
- WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
- "Frame has been rendered to the screen.");
- CriticalSectionScoped cs(_critSect);
- _bufferIsUpdated = false;
- return 0;
-}
-
-// Called by d3d channel owner to check if the texture is updated
-int D3D9Channel::IsUpdated(bool& isUpdated)
-{
- CriticalSectionScoped cs(_critSect);
- isUpdated = _bufferIsUpdated;
- return 0;
-}
-
-// Called by d3d channel owner to get the texture
-LPDIRECT3DTEXTURE9 D3D9Channel::GetTexture()
-{
- CriticalSectionScoped cs(_critSect);
- return _pTexture;
-}
-
-int D3D9Channel::ReleaseTexture()
-{
- CriticalSectionScoped cs(_critSect);
-
- //release the texture
- if (_pTexture != NULL)
- {
- _pTexture->Release();
- _pTexture = NULL;
- }
- _pd3dDevice = NULL;
- return 0;
-}
-
-int D3D9Channel::RecreateTexture(LPDIRECT3DDEVICE9 pd3DDevice)
-{
- CriticalSectionScoped cs(_critSect);
-
- _pd3dDevice = pd3DDevice;
-
- if (_pTexture != NULL)
- {
- _pTexture->Release();
- _pTexture = NULL;
- }
-
- HRESULT ret;
-
- ret = _pd3dDevice->CreateTexture(_width, _height, 1, 0, D3DFMT_A8R8G8B8,
- D3DPOOL_MANAGED, &_pTexture, NULL);
-
- if (FAILED(ret))
- {
- _pTexture = NULL;
- return -1;
- }
-
- return 0;
-}
-
-/*
- *
- * VideoRenderDirect3D9
- *
- */
-VideoRenderDirect3D9::VideoRenderDirect3D9(Trace* trace,
- HWND hWnd,
- bool fullScreen) :
- _refD3DCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
- _trace(trace),
- _hWnd(hWnd),
- _fullScreen(fullScreen),
- _pTextureLogo(NULL),
- _pVB(NULL),
- _pd3dDevice(NULL),
- _pD3D(NULL),
- _d3dChannels(),
- _d3dZorder(),
- _screenUpdateEvent(NULL),
- _logoLeft(0),
- _logoTop(0),
- _logoRight(0),
- _logoBottom(0),
- _pd3dSurface(NULL),
- _totalMemory(0),
- _availableMemory(0)
-{
- _screenUpdateThread.reset(new rtc::PlatformThread(
- ScreenUpdateThreadProc, this, "ScreenUpdateThread"));
- _screenUpdateEvent = EventTimerWrapper::Create();
- SetRect(&_originalHwndRect, 0, 0, 0, 0);
-}
-
-VideoRenderDirect3D9::~VideoRenderDirect3D9()
-{
- //NOTE: we should not enter CriticalSection in here!
-
- // Signal event to exit thread, then delete it
- rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
- if (tmpPtr)
- {
- _screenUpdateEvent->Set();
- _screenUpdateEvent->StopTimer();
-
- tmpPtr->Stop();
- delete tmpPtr;
- }
- delete _screenUpdateEvent;
-
- //close d3d device
- CloseDevice();
-
- // Delete all channels
- std::map<int, D3D9Channel*>::iterator it = _d3dChannels.begin();
- while (it != _d3dChannels.end())
- {
- delete it->second;
- it = _d3dChannels.erase(it);
- }
- // Clean the zOrder map
- _d3dZorder.clear();
-
- if (_fullScreen)
- {
- // restore hwnd to original size and position
- ::SetWindowPos(_hWnd, HWND_NOTOPMOST, _originalHwndRect.left,
- _originalHwndRect.top, _originalHwndRect.right
- - _originalHwndRect.left,
- _originalHwndRect.bottom - _originalHwndRect.top,
- SWP_FRAMECHANGED);
- ::RedrawWindow(_hWnd, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
- | RDW_ERASE);
- ::RedrawWindow(NULL, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
- | RDW_ERASE);
- }
-
- delete &_refD3DCritsect;
-}
-
-DWORD VideoRenderDirect3D9::GetVertexProcessingCaps()
-{
- D3DCAPS9 caps;
- DWORD dwVertexProcessing = D3DCREATE_SOFTWARE_VERTEXPROCESSING;
- if (SUCCEEDED(_pD3D->GetDeviceCaps(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL,
- &caps)))
- {
- if ((caps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT)
- == D3DDEVCAPS_HWTRANSFORMANDLIGHT)
- {
- dwVertexProcessing = D3DCREATE_HARDWARE_VERTEXPROCESSING;
- }
- }
- return dwVertexProcessing;
-}
-
-int VideoRenderDirect3D9::InitializeD3D(HWND hWnd,
- D3DPRESENT_PARAMETERS* pd3dpp)
-{
- // initialize Direct3D
- if (NULL == (_pD3D = Direct3DCreate9(D3D_SDK_VERSION)))
- {
- return -1;
- }
-
- // determine what type of vertex processing to use based on the device capabilities
- DWORD dwVertexProcessing = GetVertexProcessingCaps();
-
- // get the display mode
- D3DDISPLAYMODE d3ddm;
- _pD3D->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &d3ddm);
- pd3dpp->BackBufferFormat = d3ddm.Format;
-
- // create the D3D device
- if (FAILED(_pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, hWnd,
- dwVertexProcessing | D3DCREATE_MULTITHREADED
- | D3DCREATE_FPU_PRESERVE, pd3dpp,
- &_pd3dDevice)))
- {
- //try the ref device
- if (FAILED(_pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_REF,
- hWnd, dwVertexProcessing
- | D3DCREATE_MULTITHREADED
- | D3DCREATE_FPU_PRESERVE,
- pd3dpp, &_pd3dDevice)))
- {
- return -1;
- }
- }
-
- return 0;
-}
-
-int VideoRenderDirect3D9::ResetDevice()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
- "VideoRenderDirect3D9::ResetDevice");
-
- CriticalSectionScoped cs(&_refD3DCritsect);
-
- //release the channel texture
- std::map<int, D3D9Channel*>::iterator it;
- it = _d3dChannels.begin();
- while (it != _d3dChannels.end())
- {
- if (it->second)
- {
- it->second->ReleaseTexture();
- }
- it++;
- }
-
- //close d3d device
- if (CloseDevice() != 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "VideoRenderDirect3D9::ResetDevice failed to CloseDevice");
- return -1;
- }
-
- //reinit d3d device
- if (InitDevice() != 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "VideoRenderDirect3D9::ResetDevice failed to InitDevice");
- return -1;
- }
-
- //recreate channel texture
- it = _d3dChannels.begin();
- while (it != _d3dChannels.end())
- {
- if (it->second)
- {
- it->second->RecreateTexture(_pd3dDevice);
- }
- it++;
- }
-
- return 0;
-}
-
-int VideoRenderDirect3D9::InitDevice()
-{
- // Set up the structure used to create the D3DDevice
- ZeroMemory(&_d3dpp, sizeof(_d3dpp));
- _d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
- _d3dpp.BackBufferFormat = D3DFMT_A8R8G8B8;
- if (GetWindowRect(_hWnd, &_originalHwndRect) == 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "VideoRenderDirect3D9::InitDevice Could not get window size");
- return -1;
- }
- if (!_fullScreen)
- {
- _winWidth = _originalHwndRect.right - _originalHwndRect.left;
- _winHeight = _originalHwndRect.bottom - _originalHwndRect.top;
- _d3dpp.Windowed = TRUE;
- _d3dpp.BackBufferHeight = 0;
- _d3dpp.BackBufferWidth = 0;
- }
- else
- {
- _winWidth = (LONG) ::GetSystemMetrics(SM_CXSCREEN);
- _winHeight = (LONG) ::GetSystemMetrics(SM_CYSCREEN);
- _d3dpp.Windowed = FALSE;
- _d3dpp.BackBufferWidth = _winWidth;
- _d3dpp.BackBufferHeight = _winHeight;
- _d3dpp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
- }
-
- if (InitializeD3D(_hWnd, &_d3dpp) == -1)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "VideoRenderDirect3D9::InitDevice failed in InitializeD3D");
- return -1;
- }
-
- // Turn off culling, so we see the front and back of the triangle
- _pd3dDevice->SetRenderState(D3DRS_CULLMODE, D3DCULL_NONE);
-
- // Turn off D3D lighting, since we are providing our own vertex colors
- _pd3dDevice->SetRenderState(D3DRS_LIGHTING, FALSE);
-
- // Settings for alpha blending
- _pd3dDevice->SetRenderState(D3DRS_ALPHABLENDENABLE, TRUE);
- _pd3dDevice->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_SRCALPHA);
- _pd3dDevice->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCALPHA);
-
- _pd3dDevice->SetSamplerState( 0, D3DSAMP_MINFILTER, D3DTEXF_LINEAR );
- _pd3dDevice->SetSamplerState( 0, D3DSAMP_MAGFILTER, D3DTEXF_LINEAR );
- _pd3dDevice->SetSamplerState( 0, D3DSAMP_MIPFILTER, D3DTEXF_LINEAR );
-
- // Initialize Vertices
- CUSTOMVERTEX Vertices[] = {
- //front
- { -1.0f, -1.0f, 0.0f, 0xffffffff, 0, 1 }, { -1.0f, 1.0f, 0.0f,
- 0xffffffff, 0, 0 },
- { 1.0f, -1.0f, 0.0f, 0xffffffff, 1, 1 }, { 1.0f, 1.0f, 0.0f,
- 0xffffffff, 1, 0 } };
-
- // Create the vertex buffer.
- if (FAILED(_pd3dDevice->CreateVertexBuffer(sizeof(Vertices), 0,
- D3DFVF_CUSTOMVERTEX,
- D3DPOOL_DEFAULT, &_pVB, NULL )))
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Failed to create the vertex buffer.");
- return -1;
- }
-
- // Now we fill the vertex buffer.
- VOID* pVertices;
- if (FAILED(_pVB->Lock(0, sizeof(Vertices), (void**) &pVertices, 0)))
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Failed to lock the vertex buffer.");
- return -1;
- }
- memcpy(pVertices, Vertices, sizeof(Vertices));
- _pVB->Unlock();
-
- return 0;
-}
-
-int32_t VideoRenderDirect3D9::Init()
-{
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
- "VideoRenderDirect3D9::Init");
-
- CriticalSectionScoped cs(&_refD3DCritsect);
-
- // Start rendering thread...
- if (!_screenUpdateThread)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Thread not created");
- return -1;
- }
- _screenUpdateThread->Start();
- _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
-
- // Start the event triggering the render process
- unsigned int monitorFreq = 60;
- DEVMODE dm;
- // initialize the DEVMODE structure
- ZeroMemory(&dm, sizeof(dm));
- dm.dmSize = sizeof(dm);
- if (0 != EnumDisplaySettings(NULL, ENUM_CURRENT_SETTINGS, &dm))
- {
- monitorFreq = dm.dmDisplayFrequency;
- }
- _screenUpdateEvent->StartTimer(true, 1000 / monitorFreq);
-
- return InitDevice();
-}
-
-int32_t VideoRenderDirect3D9::ChangeWindow(void* window)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
- return -1;
-}
-
-int VideoRenderDirect3D9::UpdateRenderSurface()
-{
- CriticalSectionScoped cs(&_refD3DCritsect);
-
- // Check if there are any updated buffers
- bool updated = false;
- std::map<int, D3D9Channel*>::iterator it;
- it = _d3dChannels.begin();
- while (it != _d3dChannels.end())
- {
-
- D3D9Channel* channel = it->second;
- channel->IsUpdated(updated);
- if (updated)
- {
- break;
- }
- it++;
- }
- //nothing is updated, continue
- if (!updated)
- return -1;
-
- // Clear the backbuffer to a black color
- _pd3dDevice->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0), 1.0f,
- 0);
-
- // Begin the scene
- if (SUCCEEDED(_pd3dDevice->BeginScene()))
- {
- _pd3dDevice->SetStreamSource(0, _pVB, 0, sizeof(CUSTOMVERTEX));
- _pd3dDevice->SetFVF(D3DFVF_CUSTOMVERTEX);
-
- //draw all the channels
- //get texture from the channels
- LPDIRECT3DTEXTURE9 textureFromChannel = NULL;
- DWORD textureWidth, textureHeight;
-
- std::multimap<int, unsigned int>::reverse_iterator it;
- it = _d3dZorder.rbegin();
- while (it != _d3dZorder.rend())
- {
- // loop through all channels and streams in Z order
- int channel = it->second & 0x0000ffff;
-
- std::map<int, D3D9Channel*>::iterator ddIt;
- ddIt = _d3dChannels.find(channel);
- if (ddIt != _d3dChannels.end())
- {
- // found the channel
- D3D9Channel* channelObj = ddIt->second;
- if (channelObj)
- {
- textureFromChannel = channelObj->GetTexture();
- textureWidth = channelObj->GetTextureWidth();
- textureHeight = channelObj->GetTextureHeight();
-
- uint32_t zOrder;
- float startWidth, startHeight, stopWidth, stopHeight;
- channelObj->GetStreamSettings(0, zOrder, startWidth,
- startHeight, stopWidth,
- stopHeight);
-
- //draw the video stream
- UpdateVerticeBuffer(_pVB, 0, startWidth, startHeight,
- stopWidth, stopHeight);
- _pd3dDevice->SetTexture(0, textureFromChannel);
- _pd3dDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
-
- //Notice channel that this frame as been rendered
- channelObj->RenderOffFrame();
- }
- }
- it++;
- }
-
- //draw the logo
- if (_pTextureLogo)
- {
- UpdateVerticeBuffer(_pVB, 0, _logoLeft, _logoTop, _logoRight,
- _logoBottom);
- _pd3dDevice->SetTexture(0, _pTextureLogo);
- _pd3dDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
- }
-
- // End the scene
- _pd3dDevice->EndScene();
- }
-
- // Present the backbuffer contents to the display
- _pd3dDevice->Present(NULL, NULL, NULL, NULL );
-
- return 0;
-}
-
-//set the alpha value of the pixal with a particular colorkey as 0
-int VideoRenderDirect3D9::SetTransparentColor(LPDIRECT3DTEXTURE9 pTexture,
- DDCOLORKEY* transparentColorKey,
- DWORD width,
- DWORD height)
-{
- D3DLOCKED_RECT lr;
- if (!pTexture)
- return -1;
-
- CriticalSectionScoped cs(&_refD3DCritsect);
- if (SUCCEEDED(pTexture->LockRect(0, &lr, NULL, D3DLOCK_DISCARD)))
- {
- for (DWORD y = 0; y < height; y++)
- {
- DWORD dwOffset = y * width;
-
- for (DWORD x = 0; x < width; x)
- {
- DWORD temp = ((DWORD*) lr.pBits)[dwOffset + x];
- if ((temp & 0x00FFFFFF)
- == transparentColorKey->dwColorSpaceLowValue)
- {
- temp &= 0x00FFFFFF;
- }
- else
- {
- temp |= 0xFF000000;
- }
- ((DWORD*) lr.pBits)[dwOffset + x] = temp;
- x++;
- }
- }
- pTexture->UnlockRect(0);
- return 0;
- }
- return -1;
-}
-
-/*
- *
- * Rendering process
- *
- */
-bool VideoRenderDirect3D9::ScreenUpdateThreadProc(void* obj)
-{
- return static_cast<VideoRenderDirect3D9*> (obj)->ScreenUpdateProcess();
-}
-
-bool VideoRenderDirect3D9::ScreenUpdateProcess()
-{
- _screenUpdateEvent->Wait(100);
-
- if (!_screenUpdateThread)
- {
- //stop the thread
- return false;
- }
- if (!_pd3dDevice)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "d3dDevice not created.");
- return true;
- }
-
- HRESULT hr = _pd3dDevice->TestCooperativeLevel();
-
- if (SUCCEEDED(hr))
- {
- UpdateRenderSurface();
- }
-
- if (hr == D3DERR_DEVICELOST)
- {
- //Device is lost and cannot be reset yet
-
- }
- else if (hr == D3DERR_DEVICENOTRESET)
- {
- //Lost but we can reset it now
- //Note: the standard way is to call Reset, however for some reason doesn't work here.
- //so we will release the device and create it again.
- ResetDevice();
- }
-
- return true;
-}
-
-int VideoRenderDirect3D9::CloseDevice()
-{
- CriticalSectionScoped cs(&_refD3DCritsect);
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
- "VideoRenderDirect3D9::CloseDevice");
-
- if (_pTextureLogo != NULL)
- {
- _pTextureLogo->Release();
- _pTextureLogo = NULL;
- }
-
- if (_pVB != NULL)
- {
- _pVB->Release();
- _pVB = NULL;
- }
-
- if (_pd3dDevice != NULL)
- {
- _pd3dDevice->Release();
- _pd3dDevice = NULL;
- }
-
- if (_pD3D != NULL)
- {
- _pD3D->Release();
- _pD3D = NULL;
- }
-
- if (_pd3dSurface != NULL)
- _pd3dSurface->Release();
- return 0;
-}
-
-D3D9Channel* VideoRenderDirect3D9::GetD3DChannel(int channel)
-{
- std::map<int, D3D9Channel*>::iterator ddIt;
- ddIt = _d3dChannels.find(channel & 0x0000ffff);
- D3D9Channel* ddobj = NULL;
- if (ddIt != _d3dChannels.end())
- {
- ddobj = ddIt->second;
- }
- if (ddobj == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Direct3D render failed to find channel");
- return NULL;
- }
- return ddobj;
-}
-
-int32_t VideoRenderDirect3D9::DeleteChannel(const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_refD3DCritsect);
-
-
- std::multimap<int, unsigned int>::iterator it;
- it = _d3dZorder.begin();
- while (it != _d3dZorder.end())
- {
- if ((streamId & 0x0000ffff) == (it->second & 0x0000ffff))
- {
- it = _d3dZorder.erase(it);
- break;
- }
- it++;
- }
-
- std::map<int, D3D9Channel*>::iterator ddIt;
- ddIt = _d3dChannels.find(streamId & 0x0000ffff);
- if (ddIt != _d3dChannels.end())
- {
- delete ddIt->second;
- _d3dChannels.erase(ddIt);
- return 0;
- }
- return -1;
-}
-
-VideoRenderCallback* VideoRenderDirect3D9::CreateChannel(const uint32_t channel,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_refD3DCritsect);
-
- //FIXME this should be done in VideoAPIWindows? stop the frame deliver first
- //remove the old channel
- DeleteChannel(channel);
-
- D3D9Channel* d3dChannel = new D3D9Channel(_pd3dDevice,
- &_refD3DCritsect, _trace);
- d3dChannel->SetStreamSettings(0, zOrder, left, top, right, bottom);
-
- // store channel
- _d3dChannels[channel & 0x0000ffff] = d3dChannel;
-
- // store Z order
- // default streamID is 0
- _d3dZorder.insert(
- std::pair<int, unsigned int>(zOrder, channel & 0x0000ffff));
-
- return d3dChannel;
-}
-
-int32_t VideoRenderDirect3D9::GetStreamSettings(const uint32_t channel,
- const uint16_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom)
-{
- std::map<int, D3D9Channel*>::iterator ddIt;
- ddIt = _d3dChannels.find(channel & 0x0000ffff);
- D3D9Channel* ddobj = NULL;
- if (ddIt != _d3dChannels.end())
- {
- ddobj = ddIt->second;
- }
- if (ddobj == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Direct3D render failed to find channel");
- return -1;
- }
- // Only allow one stream per channel, demuxing is
- return ddobj->GetStreamSettings(0, zOrder, left, top, right, bottom);
-}
-
-int VideoRenderDirect3D9::UpdateVerticeBuffer(LPDIRECT3DVERTEXBUFFER9 pVB,
- int offset,
- float startWidth,
- float startHeight,
- float stopWidth,
- float stopHeight)
-{
- if (pVB == NULL)
- return -1;
-
- float left, right, top, bottom;
-
- //update the vertice buffer
- //0,1 => -1,1
- left = startWidth * 2 - 1;
- right = stopWidth * 2 - 1;
-
- //0,1 => 1,-1
- top = 1 - startHeight * 2;
- bottom = 1 - stopHeight * 2;
-
- CUSTOMVERTEX newVertices[] = {
- //logo
- { left, bottom, 0.0f, 0xffffffff, 0, 1 }, { left, top, 0.0f,
- 0xffffffff, 0, 0 },
- { right, bottom, 0.0f, 0xffffffff, 1, 1 }, { right, top, 0.0f,
- 0xffffffff, 1, 0 }, };
- // Now we fill the vertex buffer.
- VOID* pVertices;
- if (FAILED(pVB->Lock(sizeof(CUSTOMVERTEX) * offset, sizeof(newVertices),
- (void**) &pVertices, 0)))
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Failed to lock the vertex buffer.");
- return -1;
- }
- memcpy(pVertices, newVertices, sizeof(newVertices));
- pVB->Unlock();
-
- return 0;
-}
-
-int32_t VideoRenderDirect3D9::StartRender()
-{
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
- return 0;
-}
-
-int32_t VideoRenderDirect3D9::StopRender()
-{
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
- return 0;
-}
-
-bool VideoRenderDirect3D9::IsFullScreen()
-{
- return _fullScreen;
-}
-
-int32_t VideoRenderDirect3D9::SetCropping(const uint32_t channel,
- const uint16_t streamId,
- const float left, const float top,
- const float right, const float bottom)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
- return 0;
-}
-
-int32_t VideoRenderDirect3D9::SetTransparentBackground(
- const bool enable)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
- return 0;
-}
-
-int32_t VideoRenderDirect3D9::SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t colorText,
- const uint32_t colorBg,
- const float left, const float top,
- const float rigth, const float bottom)
-{
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
- return 0;
-}
-
-int32_t VideoRenderDirect3D9::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom)
-{
- if (!bitMap)
- {
- if (_pTextureLogo != NULL)
- {
- _pTextureLogo->Release();
- _pTextureLogo = NULL;
- }
- WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "Remove bitmap.");
- return 0;
- }
-
- // sanity
- if (left > 1.0f || left < 0.0f ||
- top > 1.0f || top < 0.0f ||
- right > 1.0f || right < 0.0f ||
- bottom > 1.0f || bottom < 0.0f)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Direct3D SetBitmap invalid parameter");
- return -1;
- }
-
- if ((bottom <= top) || (right <= left))
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Direct3D SetBitmap invalid parameter");
- return -1;
- }
-
- CriticalSectionScoped cs(&_refD3DCritsect);
-
- unsigned char* srcPtr;
- HGDIOBJ oldhand;
- BITMAPINFO pbi;
- BITMAP bmap;
- HDC hdcNew;
- hdcNew = CreateCompatibleDC(0);
- // Fill out the BITMAP structure.
- GetObject((HBITMAP)bitMap, sizeof(bmap), &bmap);
- //Select the bitmap handle into the new device context.
- oldhand = SelectObject(hdcNew, (HGDIOBJ) bitMap);
- // we are done with this object
- DeleteObject(oldhand);
- pbi.bmiHeader.biSize = 40;
- pbi.bmiHeader.biWidth = bmap.bmWidth;
- pbi.bmiHeader.biHeight = bmap.bmHeight;
- pbi.bmiHeader.biPlanes = 1;
- pbi.bmiHeader.biBitCount = bmap.bmBitsPixel;
- pbi.bmiHeader.biCompression = BI_RGB;
- pbi.bmiHeader.biSizeImage = bmap.bmWidth * bmap.bmHeight * 3;
- srcPtr = new unsigned char[bmap.bmWidth * bmap.bmHeight * 4];
- // the original un-stretched image in RGB24
- int pixelHeight = GetDIBits(hdcNew, (HBITMAP)bitMap, 0, bmap.bmHeight, srcPtr, &pbi,
- DIB_RGB_COLORS);
- if (pixelHeight == 0)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Direct3D failed to GetDIBits in SetBitmap");
- delete[] srcPtr;
- return -1;
- }
- DeleteDC(hdcNew);
- if (pbi.bmiHeader.biBitCount != 24 && pbi.bmiHeader.biBitCount != 32)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Direct3D failed to SetBitmap invalid bit depth");
- delete[] srcPtr;
- return -1;
- }
-
- HRESULT ret;
- //release the previous logo texture
- if (_pTextureLogo != NULL)
- {
- _pTextureLogo->Release();
- _pTextureLogo = NULL;
- }
- ret = _pd3dDevice->CreateTexture(bmap.bmWidth, bmap.bmHeight, 1, 0,
- D3DFMT_A8R8G8B8, D3DPOOL_MANAGED,
- &_pTextureLogo, NULL);
- if (FAILED(ret))
- {
- _pTextureLogo = NULL;
- delete[] srcPtr;
- return -1;
- }
- if (!_pTextureLogo)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Texture for rendering not initialized.");
- delete[] srcPtr;
- return -1;
- }
-
- D3DLOCKED_RECT lr;
- if (FAILED(_pTextureLogo->LockRect(0, &lr, NULL, 0)))
- {
- delete[] srcPtr;
- return -1;
- }
- unsigned char* dstPtr = (UCHAR*) lr.pBits;
- int pitch = bmap.bmWidth * 4;
-
- if (pbi.bmiHeader.biBitCount == 24)
- {
- ConvertRGB24ToARGB(srcPtr, dstPtr, bmap.bmWidth, bmap.bmHeight, 0);
- }
- else
- {
- unsigned char* srcTmp = srcPtr + (bmap.bmWidth * 4) * (bmap.bmHeight - 1);
- for (int i = 0; i < bmap.bmHeight; ++i)
- {
- memcpy(dstPtr, srcTmp, bmap.bmWidth * 4);
- srcTmp -= bmap.bmWidth * 4;
- dstPtr += pitch;
- }
- }
-
- delete[] srcPtr;
- if (FAILED(_pTextureLogo->UnlockRect(0)))
- {
- return -1;
- }
-
- if (colorKey)
- {
- DDCOLORKEY* ddColorKey =
- static_cast<DDCOLORKEY*> (const_cast<void*> (colorKey));
- SetTransparentColor(_pTextureLogo, ddColorKey, bmap.bmWidth,
- bmap.bmHeight);
- }
-
- //update the vertice buffer
- //0,1 => -1,1
- _logoLeft = left;
- _logoRight = right;
-
- //0,1 => 1,-1
- _logoTop = top;
- _logoBottom = bottom;
-
- return 0;
-
-}
-
-int32_t VideoRenderDirect3D9::GetGraphicsMemory(uint64_t& totalMemory,
- uint64_t& availableMemory)
-{
- totalMemory = _totalMemory;
- availableMemory = _availableMemory;
- return 0;
-}
-
-int32_t VideoRenderDirect3D9::ConfigureRenderer(const uint32_t channel,
- const uint16_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- std::map<int, D3D9Channel*>::iterator ddIt;
- ddIt = _d3dChannels.find(channel & 0x0000ffff);
- D3D9Channel* ddobj = NULL;
- if (ddIt != _d3dChannels.end())
- {
- ddobj = ddIt->second;
- }
- if (ddobj == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
- "Direct3D render failed to find channel");
- return -1;
- }
- // Only allow one stream per channel, demuxing is
- ddobj->SetStreamSettings(0, zOrder, left, top, right, bottom);
-
- return 0;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.h b/chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.h
deleted file mode 100644
index eaa8c147e2e..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/windows/video_render_direct3d9.h
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
-
-#include <memory>
-
-#include "webrtc/modules/video_render/windows/i_video_render_win.h"
-
-#include <d3d9.h>
-#include <ddraw.h>
-
-#include <Map>
-
-// Added
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-
-#pragma comment(lib, "d3d9.lib") // located in DirectX SDK
-
-namespace webrtc {
-class CriticalSectionWrapper;
-class EventTimerWrapper;
-class Trace;
-
-class D3D9Channel: public VideoRenderCallback
-{
-public:
- D3D9Channel(LPDIRECT3DDEVICE9 pd3DDevice,
- CriticalSectionWrapper* critSect, Trace* trace);
-
- virtual ~D3D9Channel();
-
- // Inherited from VideoRencerCallback, called from VideoAPI class.
- // Called when the incomming frame size and/or number of streams in mix changes
- virtual int FrameSizeChange(int width, int height, int numberOfStreams);
-
- // A new frame is delivered.
- virtual int DeliverFrame(const VideoFrame& videoFrame);
- virtual int32_t RenderFrame(const uint32_t streamId,
- const VideoFrame& videoFrame);
-
- // Called to check if the video frame is updated.
- int IsUpdated(bool& isUpdated);
- // Called after the video frame has been render to the screen
- int RenderOffFrame();
- // Called to get the texture that contains the video frame
- LPDIRECT3DTEXTURE9 GetTexture();
- // Called to get the texture(video frame) size
- int GetTextureWidth();
- int GetTextureHeight();
- //
- void SetStreamSettings(uint16_t streamId,
- uint32_t zOrder,
- float startWidth,
- float startHeight,
- float stopWidth,
- float stopHeight);
- int GetStreamSettings(uint16_t streamId,
- uint32_t& zOrder,
- float& startWidth,
- float& startHeight,
- float& stopWidth,
- float& stopHeight);
-
- int ReleaseTexture();
- int RecreateTexture(LPDIRECT3DDEVICE9 pd3DDevice);
-
-protected:
-
-private:
- //critical section passed from the owner
- CriticalSectionWrapper* _critSect;
- LPDIRECT3DDEVICE9 _pd3dDevice;
- LPDIRECT3DTEXTURE9 _pTexture;
-
- bool _bufferIsUpdated;
- // the frame size
- int _width;
- int _height;
- //sream settings
- //TODO support multiple streams in one channel
- uint16_t _streamId;
- uint32_t _zOrder;
- float _startWidth;
- float _startHeight;
- float _stopWidth;
- float _stopHeight;
-};
-
-class VideoRenderDirect3D9: IVideoRenderWin
-{
-public:
- VideoRenderDirect3D9(Trace* trace, HWND hWnd, bool fullScreen);
- ~VideoRenderDirect3D9();
-
-public:
- //IVideoRenderWin
-
- /**************************************************************************
- *
- * Init
- *
- ***************************************************************************/
- virtual int32_t Init();
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
- virtual VideoRenderCallback
- * CreateChannel(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
-
- virtual int32_t DeleteChannel(const uint32_t streamId);
-
- virtual int32_t GetStreamSettings(const uint32_t channel,
- const uint16_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom);
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender();
- virtual int32_t StopRender();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual bool IsFullScreen();
-
- virtual int32_t SetCropping(const uint32_t channel,
- const uint16_t streamId,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t ConfigureRenderer(const uint32_t channel,
- const uint16_t streamId,
- const unsigned int zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t ChangeWindow(void* window);
-
- virtual int32_t GetGraphicsMemory(uint64_t& totalMemory,
- uint64_t& availableMemory);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t colorText,
- const uint32_t colorBg,
- const float left, const float top,
- const float rigth, const float bottom);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom);
-
-public:
- // Get a channel by channel id
- D3D9Channel* GetD3DChannel(int channel);
- int UpdateRenderSurface();
-
-protected:
- // The thread rendering the screen
- static bool ScreenUpdateThreadProc(void* obj);
- bool ScreenUpdateProcess();
-
-private:
- // Init/close the d3d device
- int InitDevice();
- int CloseDevice();
-
- // Transparent related functions
- int SetTransparentColor(LPDIRECT3DTEXTURE9 pTexture,
- DDCOLORKEY* transparentColorKey,
- DWORD width,
- DWORD height);
-
- CriticalSectionWrapper& _refD3DCritsect;
- Trace* _trace;
- // TODO(pbos): Remove unique_ptr and use PlatformThread directly.
- std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
- EventTimerWrapper* _screenUpdateEvent;
-
- HWND _hWnd;
- bool _fullScreen;
- RECT _originalHwndRect;
- //FIXME we probably don't need this since all the information can be get from _d3dChannels
- int _channel;
- //Window size
- UINT _winWidth;
- UINT _winHeight;
-
- // Device
- LPDIRECT3D9 _pD3D; // Used to create the D3DDevice
- LPDIRECT3DDEVICE9 _pd3dDevice; // Our rendering device
- LPDIRECT3DVERTEXBUFFER9 _pVB; // Buffer to hold Vertices
- LPDIRECT3DTEXTURE9 _pTextureLogo;
-
- std::map<int, D3D9Channel*> _d3dChannels;
- std::multimap<int, unsigned int> _d3dZorder;
-
- // The position where the logo will be placed
- float _logoLeft;
- float _logoTop;
- float _logoRight;
- float _logoBottom;
-
- typedef HRESULT (WINAPI *DIRECT3DCREATE9EX)(UINT SDKVersion, IDirect3D9Ex**);
- LPDIRECT3DSURFACE9 _pd3dSurface;
-
- DWORD GetVertexProcessingCaps();
- int InitializeD3D(HWND hWnd, D3DPRESENT_PARAMETERS* pd3dpp);
-
- D3DPRESENT_PARAMETERS _d3dpp;
- int ResetDevice();
-
- int UpdateVerticeBuffer(LPDIRECT3DVERTEXBUFFER9 pVB, int offset,
- float startWidth, float startHeight,
- float stopWidth, float stopHeight);
-
- //code for providing graphics settings
- DWORD _totalMemory;
- DWORD _availableMemory;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.cc b/chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.cc
deleted file mode 100644
index 042d7fdfa33..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.cc
+++ /dev/null
@@ -1,337 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/video_render/windows/video_render_windows_impl.h"
-
-#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/trace.h"
-#ifdef DIRECT3D9_RENDERING
-#include "webrtc/modules/video_render/windows/video_render_direct3d9.h"
-#endif
-
-#include <tchar.h>
-
-namespace webrtc {
-
-VideoRenderWindowsImpl::VideoRenderWindowsImpl(const int32_t id,
- const VideoRenderType videoRenderType, void* window, const bool fullscreen)
- : _renderWindowsCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
- _prtWindow(window),
- _fullscreen(fullscreen),
- _renderMethod(kVideoRenderWinD3D9),
- _ptrRendererWin(NULL) {
-}
-
-VideoRenderWindowsImpl::~VideoRenderWindowsImpl()
-{
- delete &_renderWindowsCritsect;
- if (_ptrRendererWin)
- {
- delete _ptrRendererWin;
- _ptrRendererWin = NULL;
- }
-}
-
-int32_t VideoRenderWindowsImpl::Init()
-{
- // Create the win renderer
- switch (_renderMethod)
- {
- case kVideoRenderWinD3D9:
- {
-#ifdef DIRECT3D9_RENDERING
- VideoRenderDirect3D9* ptrRenderer;
- ptrRenderer = new VideoRenderDirect3D9(NULL, (HWND) _prtWindow, _fullscreen);
- if (ptrRenderer == NULL)
- {
- break;
- }
- _ptrRendererWin = reinterpret_cast<IVideoRenderWin*>(ptrRenderer);
-#else
- return NULL;
-#endif //DIRECT3D9_RENDERING
- }
- break;
- default:
- break;
- }
-
- //Init renderer
- if (_ptrRendererWin)
- return _ptrRendererWin->Init();
- else
- return -1;
-}
-
-int32_t VideoRenderWindowsImpl::ChangeWindow(void* window)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- if (!_ptrRendererWin)
- {
- return -1;
- }
- else
- {
- return _ptrRendererWin->ChangeWindow(window);
- }
-}
-
-VideoRenderCallback*
-VideoRenderWindowsImpl::AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- VideoRenderCallback* renderCallback = NULL;
-
- if (!_ptrRendererWin)
- {
- }
- else
- {
- renderCallback = _ptrRendererWin->CreateChannel(streamId, zOrder, left,
- top, right, bottom);
- }
-
- return renderCallback;
-}
-
-int32_t VideoRenderWindowsImpl::DeleteIncomingRenderStream(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->DeleteChannel(streamId);
- }
- return error;
-}
-
-int32_t VideoRenderWindowsImpl::GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- zOrder = 0;
- left = 0;
- top = 0;
- right = 0;
- bottom = 0;
-
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->GetStreamSettings(streamId, 0, zOrder, left,
- top, right, bottom);
- }
- return error;
-}
-
-int32_t VideoRenderWindowsImpl::StartRender()
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->StartRender();
- }
- return error;
-}
-
-int32_t VideoRenderWindowsImpl::StopRender()
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->StopRender();
- }
- return error;
-}
-
-VideoRenderType VideoRenderWindowsImpl::RenderType()
-{
- return kRenderWindows;
-}
-
-RawVideoType VideoRenderWindowsImpl::PerferedVideoType()
-{
- return kVideoI420;
-}
-
-bool VideoRenderWindowsImpl::FullScreen()
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- bool fullscreen = false;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- fullscreen = _ptrRendererWin->IsFullScreen();
- }
- return fullscreen;
-}
-
-int32_t VideoRenderWindowsImpl::GetGraphicsMemory(
- uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const
-{
- if (_ptrRendererWin)
- {
- return _ptrRendererWin->GetGraphicsMemory(totalGraphicsMemory,
- availableGraphicsMemory);
- }
-
- totalGraphicsMemory = 0;
- availableGraphicsMemory = 0;
- return -1;
-}
-
-int32_t VideoRenderWindowsImpl::GetScreenResolution(
- uint32_t& screenWidth,
- uint32_t& screenHeight) const
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- screenWidth = 0;
- screenHeight = 0;
- return 0;
-}
-
-uint32_t VideoRenderWindowsImpl::RenderFrameRate(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- return 0;
-}
-
-int32_t VideoRenderWindowsImpl::SetStreamCropping(
- const uint32_t streamId,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->SetCropping(streamId, 0, left, top, right,
- bottom);
- }
- return error;
-}
-
-int32_t VideoRenderWindowsImpl::ConfigureRenderer(
- const uint32_t streamId,
- const unsigned int zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->ConfigureRenderer(streamId, 0, zOrder, left,
- top, right, bottom);
- }
-
- return error;
-}
-
-int32_t VideoRenderWindowsImpl::SetTransparentBackground(
- const bool enable)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->SetTransparentBackground(enable);
- }
- return error;
-}
-
-int32_t VideoRenderWindowsImpl::SetText(
- const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left,
- const float top,
- const float right,
- const float bottom)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->SetText(textId, text, textLength,
- textColorRef, backgroundColorRef,
- left, top, right, bottom);
- }
- return error;
-}
-
-int32_t VideoRenderWindowsImpl::SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom)
-{
- CriticalSectionScoped cs(&_renderWindowsCritsect);
- int32_t error = -1;
- if (!_ptrRendererWin)
- {
- }
- else
- {
- error = _ptrRendererWin->SetBitmap(bitMap, pictureId, colorKey, left,
- top, right, bottom);
- }
- return error;
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.h b/chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.h
deleted file mode 100644
index aaa3f81fc7e..00000000000
--- a/chromium/third_party/webrtc/modules/video_render/windows/video_render_windows_impl.h
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
-#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
-
-#include <Winerror.h>
-#include <dxdiag.h>
-
-#include "webrtc/modules/video_render/i_video_render.h"
-#include "webrtc/modules/video_render/windows/i_video_render_win.h"
-
-namespace webrtc {
-class CriticalSectionWrapper;
-
-#define EXPAND(x) x, sizeof(x)/sizeof(TCHAR)
-
-enum VideoRenderWinMethod {
- kVideoRenderWinD3D9 = 0,
-};
-
-// Class definitions
-class VideoRenderWindowsImpl: IVideoRender
-{
-public:
- /*
- * Constructor/destructor
- */
-
- VideoRenderWindowsImpl(const int32_t id,
- const VideoRenderType videoRenderType,
- void* window, const bool fullscreen);
-
- virtual ~VideoRenderWindowsImpl();
-
- virtual int32_t Init();
-
- virtual int32_t ChangeWindow(void* window);
-
- /**************************************************************************
- *
- * Incoming Streams
- *
- ***************************************************************************/
-
- virtual VideoRenderCallback
- * AddIncomingRenderStream(const uint32_t streamId,
- const uint32_t zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t
- DeleteIncomingRenderStream(const uint32_t streamId);
-
- virtual int32_t
- GetIncomingRenderStreamProperties(const uint32_t streamId,
- uint32_t& zOrder,
- float& left, float& top,
- float& right, float& bottom) const;
-
- /**************************************************************************
- *
- * Start/Stop
- *
- ***************************************************************************/
-
- virtual int32_t StartRender();
-
- virtual int32_t StopRender();
-
- /**************************************************************************
- *
- * Properties
- *
- ***************************************************************************/
-
- virtual VideoRenderType RenderType();
-
- virtual RawVideoType PerferedVideoType();
-
- virtual bool FullScreen();
-
- virtual int32_t
- GetGraphicsMemory(uint64_t& totalGraphicsMemory,
- uint64_t& availableGraphicsMemory) const;
-
- virtual int32_t
- GetScreenResolution(uint32_t& screenWidth,
- uint32_t& screenHeight) const;
-
- virtual uint32_t RenderFrameRate(const uint32_t streamId);
-
- virtual int32_t SetStreamCropping(const uint32_t streamId,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t ConfigureRenderer(const uint32_t streamId,
- const unsigned int zOrder,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetTransparentBackground(const bool enable);
-
- virtual int32_t SetText(const uint8_t textId,
- const uint8_t* text,
- const int32_t textLength,
- const uint32_t textColorRef,
- const uint32_t backgroundColorRef,
- const float left, const float top,
- const float right, const float bottom);
-
- virtual int32_t SetBitmap(const void* bitMap,
- const uint8_t pictureId,
- const void* colorKey,
- const float left, const float top,
- const float right, const float bottom);
-
-private:
- CriticalSectionWrapper& _renderWindowsCritsect;
-
- void* _prtWindow;
- bool _fullscreen;
-
- VideoRenderWinMethod _renderMethod;
- IVideoRenderWin* _ptrRendererWin;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
diff --git a/chromium/third_party/webrtc/p2p/base/asyncstuntcpsocket.h b/chromium/third_party/webrtc/p2p/base/asyncstuntcpsocket.h
index 3a15d4a399f..065b4c259b8 100644
--- a/chromium/third_party/webrtc/p2p/base/asyncstuntcpsocket.h
+++ b/chromium/third_party/webrtc/p2p/base/asyncstuntcpsocket.h
@@ -12,7 +12,7 @@
#define WEBRTC_P2P_BASE_ASYNCSTUNTCPSOCKET_H_
#include "webrtc/base/asynctcpsocket.h"
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/socketfactory.h"
namespace cricket {
diff --git a/chromium/third_party/webrtc/p2p/base/asyncstuntcpsocket_unittest.cc b/chromium/third_party/webrtc/p2p/base/asyncstuntcpsocket_unittest.cc
index 22c1b26903d..5929d1f84da 100644
--- a/chromium/third_party/webrtc/p2p/base/asyncstuntcpsocket_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/asyncstuntcpsocket_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/p2p/base/asyncstuntcpsocket.h"
#include "webrtc/base/asyncsocket.h"
#include "webrtc/base/gunit.h"
@@ -122,11 +124,11 @@ class AsyncStunTCPSocketTest : public testing::Test,
return ret;
}
- rtc::scoped_ptr<rtc::VirtualSocketServer> vss_;
+ std::unique_ptr<rtc::VirtualSocketServer> vss_;
rtc::SocketServerScope ss_scope_;
- rtc::scoped_ptr<AsyncStunTCPSocket> send_socket_;
- rtc::scoped_ptr<AsyncStunTCPSocket> recv_socket_;
- rtc::scoped_ptr<rtc::AsyncPacketSocket> listen_socket_;
+ std::unique_ptr<AsyncStunTCPSocket> send_socket_;
+ std::unique_ptr<AsyncStunTCPSocket> recv_socket_;
+ std::unique_ptr<rtc::AsyncPacketSocket> listen_socket_;
std::list<std::string> recv_packets_;
};
diff --git a/chromium/third_party/webrtc/p2p/base/basicpacketsocketfactory.cc b/chromium/third_party/webrtc/p2p/base/basicpacketsocketfactory.cc
index 697518da9d0..a05f9df8dfc 100644
--- a/chromium/third_party/webrtc/p2p/base/basicpacketsocketfactory.cc
+++ b/chromium/third_party/webrtc/p2p/base/basicpacketsocketfactory.cc
@@ -17,7 +17,6 @@
#include "webrtc/base/logging.h"
#include "webrtc/base/nethelpers.h"
#include "webrtc/base/physicalsocketserver.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socketadapters.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/thread.h"
diff --git a/chromium/third_party/webrtc/p2p/base/candidate.h b/chromium/third_party/webrtc/p2p/base/candidate.h
index 4ab1b6f2356..b918344740a 100644
--- a/chromium/third_party/webrtc/p2p/base/candidate.h
+++ b/chromium/third_party/webrtc/p2p/base/candidate.h
@@ -241,6 +241,19 @@ class Candidate {
(256 - component_);
}
+ bool operator==(const Candidate& o) const {
+ return id_ == o.id_ && component_ == o.component_ &&
+ protocol_ == o.protocol_ && relay_protocol_ == o.relay_protocol_ &&
+ address_ == o.address_ && priority_ == o.priority_ &&
+ username_ == o.username_ && password_ == o.password_ &&
+ type_ == o.type_ && network_name_ == o.network_name_ &&
+ network_type_ == o.network_type_ && generation_ == o.generation_ &&
+ foundation_ == o.foundation_ &&
+ related_address_ == o.related_address_ && tcptype_ == o.tcptype_ &&
+ transport_name_ == o.transport_name_ && network_id_ == o.network_id_;
+ }
+ bool operator!=(const Candidate& o) const { return !(*this == o); }
+
private:
std::string ToStringInternal(bool sensitive) const {
std::ostringstream ost;
@@ -249,7 +262,8 @@ class Candidate {
ost << "Cand[" << transport_name_ << ":" << foundation_ << ":" << component_
<< ":" << protocol_ << ":" << priority_ << ":" << address << ":"
<< type_ << ":" << related_address_ << ":" << username_ << ":"
- << password_ << ":" << network_id_ << ":" << network_cost_ << "]";
+ << password_ << ":" << network_id_ << ":" << network_cost_ << ":"
+ << generation_ << "]";
return ost.str();
}
diff --git a/chromium/third_party/webrtc/p2p/base/dtlstransport.h b/chromium/third_party/webrtc/p2p/base/dtlstransport.h
index 276b05f786f..a4bf383f0a6 100644
--- a/chromium/third_party/webrtc/p2p/base/dtlstransport.h
+++ b/chromium/third_party/webrtc/p2p/base/dtlstransport.h
@@ -11,6 +11,8 @@
#ifndef WEBRTC_P2P_BASE_DTLSTRANSPORT_H_
#define WEBRTC_P2P_BASE_DTLSTRANSPORT_H_
+#include <memory>
+
#include "webrtc/p2p/base/dtlstransportchannel.h"
#include "webrtc/p2p/base/transport.h"
@@ -64,27 +66,11 @@ class DtlsTransport : public Base {
rtc::SSLFingerprint* local_fp =
Base::local_description()->identity_fingerprint.get();
- if (local_fp) {
- // Sanity check local fingerprint.
- if (certificate_) {
- rtc::scoped_ptr<rtc::SSLFingerprint> local_fp_tmp(
- rtc::SSLFingerprint::Create(local_fp->algorithm,
- certificate_->identity()));
- ASSERT(local_fp_tmp.get() != NULL);
- if (!(*local_fp_tmp == *local_fp)) {
- std::ostringstream desc;
- desc << "Local fingerprint does not match identity. Expected: ";
- desc << local_fp_tmp->ToString();
- desc << " Got: " << local_fp->ToString();
- return BadTransportDescription(desc.str(), error_desc);
- }
- } else {
- return BadTransportDescription(
- "Local fingerprint provided but no identity available.",
- error_desc);
- }
- } else {
+ if (!local_fp) {
certificate_ = nullptr;
+ } else if (!Base::VerifyCertificateFingerprint(certificate_.get(), local_fp,
+ error_desc)) {
+ return false;
}
if (!channel->SetLocalCertificate(certificate_)) {
@@ -103,96 +89,23 @@ class DtlsTransport : public Base {
"transport descriptions are negotiated";
return BadTransportDescription(msg, error_desc);
}
-
rtc::SSLFingerprint* local_fp =
Base::local_description()->identity_fingerprint.get();
rtc::SSLFingerprint* remote_fp =
Base::remote_description()->identity_fingerprint.get();
-
if (remote_fp && local_fp) {
remote_fingerprint_.reset(new rtc::SSLFingerprint(*remote_fp));
-
- // From RFC 4145, section-4.1, The following are the values that the
- // 'setup' attribute can take in an offer/answer exchange:
- // Offer Answer
- // ________________
- // active passive / holdconn
- // passive active / holdconn
- // actpass active / passive / holdconn
- // holdconn holdconn
- //
- // Set the role that is most conformant with RFC 5763, Section 5, bullet 1
- // The endpoint MUST use the setup attribute defined in [RFC4145].
- // The endpoint that is the offerer MUST use the setup attribute
- // value of setup:actpass and be prepared to receive a client_hello
- // before it receives the answer. The answerer MUST use either a
- // setup attribute value of setup:active or setup:passive. Note that
- // if the answerer uses setup:passive, then the DTLS handshake will
- // not begin until the answerer is received, which adds additional
- // latency. setup:active allows the answer and the DTLS handshake to
- // occur in parallel. Thus, setup:active is RECOMMENDED. Whichever
- // party is active MUST initiate a DTLS handshake by sending a
- // ClientHello over each flow (host/port quartet).
- // IOW - actpass and passive modes should be treated as server and
- // active as client.
- ConnectionRole local_connection_role =
- Base::local_description()->connection_role;
- ConnectionRole remote_connection_role =
- Base::remote_description()->connection_role;
-
- bool is_remote_server = false;
- if (local_role == CA_OFFER) {
- if (local_connection_role != CONNECTIONROLE_ACTPASS) {
- return BadTransportDescription(
- "Offerer must use actpass value for setup attribute.",
- error_desc);
- }
-
- if (remote_connection_role == CONNECTIONROLE_ACTIVE ||
- remote_connection_role == CONNECTIONROLE_PASSIVE ||
- remote_connection_role == CONNECTIONROLE_NONE) {
- is_remote_server = (remote_connection_role == CONNECTIONROLE_PASSIVE);
- } else {
- const std::string msg =
- "Answerer must use either active or passive value "
- "for setup attribute.";
- return BadTransportDescription(msg, error_desc);
- }
- // If remote is NONE or ACTIVE it will act as client.
- } else {
- if (remote_connection_role != CONNECTIONROLE_ACTPASS &&
- remote_connection_role != CONNECTIONROLE_NONE) {
- return BadTransportDescription(
- "Offerer must use actpass value for setup attribute.",
- error_desc);
- }
-
- if (local_connection_role == CONNECTIONROLE_ACTIVE ||
- local_connection_role == CONNECTIONROLE_PASSIVE) {
- is_remote_server = (local_connection_role == CONNECTIONROLE_ACTIVE);
- } else {
- const std::string msg =
- "Answerer must use either active or passive value "
- "for setup attribute.";
- return BadTransportDescription(msg, error_desc);
- }
-
- // If local is passive, local will act as server.
+ if (!Base::NegotiateRole(local_role, &secure_role_, error_desc)) {
+ return false;
}
-
- secure_role_ = is_remote_server ? rtc::SSL_CLIENT :
- rtc::SSL_SERVER;
-
} else if (local_fp && (local_role == CA_ANSWER)) {
return BadTransportDescription(
"Local fingerprint supplied when caller didn't offer DTLS.",
error_desc);
} else {
// We are not doing DTLS
- remote_fingerprint_.reset(new rtc::SSLFingerprint(
- "", NULL, 0));
+ remote_fingerprint_.reset(new rtc::SSLFingerprint("", nullptr, 0));
}
-
// Now run the negotiation for the base class.
return Base::NegotiateTransportDescription(local_role, error_desc);
}
@@ -242,7 +155,7 @@ class DtlsTransport : public Base {
rtc::scoped_refptr<rtc::RTCCertificate> certificate_;
rtc::SSLRole secure_role_;
rtc::SSLProtocolVersion ssl_max_version_;
- rtc::scoped_ptr<rtc::SSLFingerprint> remote_fingerprint_;
+ std::unique_ptr<rtc::SSLFingerprint> remote_fingerprint_;
};
} // namespace cricket
diff --git a/chromium/third_party/webrtc/p2p/base/dtlstransportchannel.cc b/chromium/third_party/webrtc/p2p/base/dtlstransportchannel.cc
index 88a11928cc2..1e3df13493f 100644
--- a/chromium/third_party/webrtc/p2p/base/dtlstransportchannel.cc
+++ b/chromium/third_party/webrtc/p2p/base/dtlstransportchannel.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <utility>
#include "webrtc/p2p/base/dtlstransportchannel.h"
@@ -36,6 +37,13 @@ static bool IsDtlsPacket(const char* data, size_t len) {
const uint8_t* u = reinterpret_cast<const uint8_t*>(data);
return (len >= kDtlsRecordHeaderLen && (u[0] > 19 && u[0] < 64));
}
+static bool IsDtlsClientHelloPacket(const char* data, size_t len) {
+ if (!IsDtlsPacket(data, len)) {
+ return false;
+ }
+ const uint8_t* u = reinterpret_cast<const uint8_t*>(data);
+ return len > 17 && u[0] == 22 && u[13] == 1;
+}
static bool IsRtpPacket(const char* data, size_t len) {
const uint8_t* u = reinterpret_cast<const uint8_t*>(data);
return (len >= kMinRtpPacketLen && (u[0] & 0xC0) == 0x80);
@@ -250,7 +258,7 @@ bool DtlsTransportChannelWrapper::SetRemoteFingerprint(
return true;
}
-rtc::scoped_ptr<rtc::SSLCertificate>
+std::unique_ptr<rtc::SSLCertificate>
DtlsTransportChannelWrapper::GetRemoteSSLCertificate() const {
if (!dtls_) {
return nullptr;
@@ -469,15 +477,18 @@ void DtlsTransportChannelWrapper::OnReadPacket(
switch (dtls_state()) {
case DTLS_TRANSPORT_NEW:
if (dtls_) {
- // Drop packets received before DTLS has actually started.
- LOG_J(LS_INFO, this) << "Dropping packet received before DTLS started.";
+ LOG_J(LS_INFO, this) << "Packet received before DTLS started.";
+ } else {
+ LOG_J(LS_WARNING, this) << "Packet received before we know if we are "
+ << "doing DTLS or not.";
+ }
+ // Cache a client hello packet received before DTLS has actually started.
+ if (IsDtlsClientHelloPacket(data, size)) {
+ LOG_J(LS_INFO, this) << "Caching DTLS ClientHello packet until DTLS is "
+ << "started.";
+ cached_client_hello_.SetData(data, size);
} else {
- // Currently drop the packet, but we might in future
- // decide to take this as evidence that the other
- // side is ready to do DTLS and start the handshake
- // on our end.
- LOG_J(LS_WARNING, this) << "Received packet before we know if we are "
- << "doing DTLS or not; dropping.";
+ LOG_J(LS_INFO, this) << "Not a DTLS ClientHello packet; dropping.";
}
break;
@@ -576,6 +587,21 @@ bool DtlsTransportChannelWrapper::MaybeStartDtls() {
LOG_J(LS_INFO, this)
<< "DtlsTransportChannelWrapper: Started DTLS handshake";
set_dtls_state(DTLS_TRANSPORT_CONNECTING);
+ // Now that the handshake has started, we can process a cached ClientHello
+ // (if one exists).
+ if (cached_client_hello_.size()) {
+ if (ssl_role_ == rtc::SSL_SERVER) {
+ LOG_J(LS_INFO, this) << "Handling cached DTLS ClientHello packet.";
+ if (!HandleDtlsPacket(cached_client_hello_.data<char>(),
+ cached_client_hello_.size())) {
+ LOG_J(LS_ERROR, this) << "Failed to handle DTLS packet.";
+ }
+ } else {
+ LOG_J(LS_WARNING, this) << "Discarding cached DTLS ClientHello packet "
+ << "because we don't have the server role.";
+ }
+ cached_client_hello_.Clear();
+ }
}
return true;
}
diff --git a/chromium/third_party/webrtc/p2p/base/dtlstransportchannel.h b/chromium/third_party/webrtc/p2p/base/dtlstransportchannel.h
index b6c3cfd2510..c5f55469f29 100644
--- a/chromium/third_party/webrtc/p2p/base/dtlstransportchannel.h
+++ b/chromium/third_party/webrtc/p2p/base/dtlstransportchannel.h
@@ -11,13 +11,14 @@
#ifndef WEBRTC_P2P_BASE_DTLSTRANSPORTCHANNEL_H_
#define WEBRTC_P2P_BASE_DTLSTRANSPORTCHANNEL_H_
+#include <memory>
#include <string>
#include <vector>
#include "webrtc/p2p/base/transportchannelimpl.h"
#include "webrtc/base/buffer.h"
#include "webrtc/base/bufferqueue.h"
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/sslstreamadapter.h"
#include "webrtc/base/stream.h"
@@ -137,7 +138,7 @@ class DtlsTransportChannelWrapper : public TransportChannelImpl {
// Once DTLS has been established, this method retrieves the certificate in
// use by the remote peer, for use in external identity verification.
- rtc::scoped_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate() const override;
+ std::unique_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate() const override;
// Once DTLS has established (i.e., this channel is writable), this method
// extracts the keys negotiated during the DTLS handshake, for use in external
@@ -226,7 +227,7 @@ class DtlsTransportChannelWrapper : public TransportChannelImpl {
rtc::Thread* worker_thread_; // Everything should occur on this thread.
// Underlying channel, not owned by this class.
TransportChannelImpl* const channel_;
- rtc::scoped_ptr<rtc::SSLStreamAdapter> dtls_; // The DTLS stream
+ std::unique_ptr<rtc::SSLStreamAdapter> dtls_; // The DTLS stream
StreamInterfaceChannel* downward_; // Wrapper for channel_, owned by dtls_.
std::vector<int> srtp_ciphers_; // SRTP ciphers to use with DTLS.
bool dtls_active_ = false;
@@ -236,6 +237,12 @@ class DtlsTransportChannelWrapper : public TransportChannelImpl {
rtc::Buffer remote_fingerprint_value_;
std::string remote_fingerprint_algorithm_;
+ // Cached DTLS ClientHello packet that was received before we started the
+ // DTLS handshake. This could happen if the hello was received before the
+ // transport channel became writable, or before a remote fingerprint was
+ // received.
+ rtc::Buffer cached_client_hello_;
+
RTC_DISALLOW_COPY_AND_ASSIGN(DtlsTransportChannelWrapper);
};
diff --git a/chromium/third_party/webrtc/p2p/base/dtlstransportchannel_unittest.cc b/chromium/third_party/webrtc/p2p/base/dtlstransportchannel_unittest.cc
index 7643016b6a5..486b51aec97 100644
--- a/chromium/third_party/webrtc/p2p/base/dtlstransportchannel_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/dtlstransportchannel_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <set>
#include "webrtc/p2p/base/dtlstransport.h"
@@ -16,7 +17,6 @@
#include "webrtc/base/dscp.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/helpers.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/sslidentity.h"
#include "webrtc/base/sslstreamadapter.h"
@@ -33,28 +33,37 @@ static const char kIcePwd1[] = "TESTICEPWD00000000000001";
static const size_t kPacketNumOffset = 8;
static const size_t kPacketHeaderLen = 12;
static const int kFakePacketId = 0x1234;
+static const int kTimeout = 10000;
static bool IsRtpLeadByte(uint8_t b) {
return ((b & 0xC0) == 0x80);
}
+cricket::TransportDescription MakeTransportDescription(
+ const rtc::scoped_refptr<rtc::RTCCertificate>& cert,
+ cricket::ConnectionRole role) {
+ std::unique_ptr<rtc::SSLFingerprint> fingerprint;
+ if (cert) {
+ std::string digest_algorithm;
+ cert->ssl_certificate().GetSignatureDigestAlgorithm(&digest_algorithm);
+ fingerprint.reset(
+ rtc::SSLFingerprint::Create(digest_algorithm, cert->identity()));
+ }
+ return cricket::TransportDescription(std::vector<std::string>(), kIceUfrag1,
+ kIcePwd1, cricket::ICEMODE_FULL, role,
+ fingerprint.get());
+}
+
using cricket::ConnectionRole;
enum Flags { NF_REOFFER = 0x1, NF_EXPECT_FAILURE = 0x2 };
class DtlsTestClient : public sigslot::has_slots<> {
public:
- DtlsTestClient(const std::string& name)
- : name_(name),
- packet_size_(0),
- use_dtls_srtp_(false),
- ssl_max_version_(rtc::SSL_PROTOCOL_DTLS_12),
- negotiated_dtls_(false),
- received_dtls_client_hello_(false),
- received_dtls_server_hello_(false) {}
+ DtlsTestClient(const std::string& name) : name_(name) {}
void CreateCertificate(rtc::KeyType key_type) {
certificate_ =
- rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::SSLIdentity>(
+ rtc::RTCCertificate::Create(std::unique_ptr<rtc::SSLIdentity>(
rtc::SSLIdentity::Generate(name_, key_type)));
}
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate() {
@@ -122,8 +131,8 @@ class DtlsTestClient : public sigslot::has_slots<> {
ConnectionRole local_role,
ConnectionRole remote_role,
int flags) {
- rtc::scoped_ptr<rtc::SSLFingerprint> local_fingerprint;
- rtc::scoped_ptr<rtc::SSLFingerprint> remote_fingerprint;
+ std::unique_ptr<rtc::SSLFingerprint> local_fingerprint;
+ std::unique_ptr<rtc::SSLFingerprint> remote_fingerprint;
if (local_cert) {
std::string digest_algorithm;
ASSERT_TRUE(local_cert->ssl_certificate().GetSignatureDigestAlgorithm(
@@ -185,9 +194,8 @@ class DtlsTestClient : public sigslot::has_slots<> {
negotiated_dtls_ = (local_cert && remote_cert);
}
- bool Connect(DtlsTestClient* peer) {
- transport_->ConnectChannels();
- transport_->SetDestination(peer->transport_.get());
+ bool Connect(DtlsTestClient* peer, bool asymmetric) {
+ transport_->SetDestination(peer->transport_.get(), asymmetric);
return true;
}
@@ -203,13 +211,29 @@ class DtlsTestClient : public sigslot::has_slots<> {
return true;
}
+ bool all_raw_channels_writable() const {
+ if (channels_.empty()) {
+ return false;
+ }
+ for (cricket::DtlsTransportChannelWrapper* channel : channels_) {
+ if (!channel->channel()->writable()) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ int received_dtls_client_hellos() const {
+ return received_dtls_client_hellos_;
+ }
+
void CheckRole(rtc::SSLRole role) {
if (role == rtc::SSL_CLIENT) {
- ASSERT_FALSE(received_dtls_client_hello_);
- ASSERT_TRUE(received_dtls_server_hello_);
+ ASSERT_EQ(0, received_dtls_client_hellos_);
+ ASSERT_GT(received_dtls_server_hellos_, 0);
} else {
- ASSERT_TRUE(received_dtls_client_hello_);
- ASSERT_FALSE(received_dtls_server_hello_);
+ ASSERT_GT(received_dtls_client_hellos_, 0);
+ ASSERT_EQ(0, received_dtls_server_hellos_);
}
}
@@ -248,7 +272,7 @@ class DtlsTestClient : public sigslot::has_slots<> {
void SendPackets(size_t channel, size_t size, size_t count, bool srtp) {
ASSERT(channel < channels_.size());
- rtc::scoped_ptr<char[]> packet(new char[size]);
+ std::unique_ptr<char[]> packet(new char[size]);
size_t sent = 0;
do {
// Fill the packet with a known value and a sequence number to check
@@ -272,7 +296,7 @@ class DtlsTestClient : public sigslot::has_slots<> {
int SendInvalidSrtpPacket(size_t channel, size_t size) {
ASSERT(channel < channels_.size());
- rtc::scoped_ptr<char[]> packet(new char[size]);
+ std::unique_ptr<char[]> packet(new char[size]);
// Fill the packet with 0 to form an invalid SRTP packet.
memset(packet.get(), 0, size);
@@ -358,20 +382,18 @@ class DtlsTestClient : public sigslot::has_slots<> {
// Look at the handshake packets to see what role we played.
// Check that non-handshake packets are DTLS data or SRTP bypass.
- if (negotiated_dtls_) {
- if (data[0] == 22 && size > 17) {
- if (data[13] == 1) {
- received_dtls_client_hello_ = true;
- } else if (data[13] == 2) {
- received_dtls_server_hello_ = true;
- }
- } else if (!(data[0] >= 20 && data[0] <= 22)) {
- ASSERT_TRUE(data[0] == 23 || IsRtpLeadByte(data[0]));
- if (data[0] == 23) {
- ASSERT_TRUE(VerifyEncryptedPacket(data, size));
- } else if (IsRtpLeadByte(data[0])) {
- ASSERT_TRUE(VerifyPacket(data, size, NULL));
- }
+ if (data[0] == 22 && size > 17) {
+ if (data[13] == 1) {
+ ++received_dtls_client_hellos_;
+ } else if (data[13] == 2) {
+ ++received_dtls_server_hellos_;
+ }
+ } else if (negotiated_dtls_ && !(data[0] >= 20 && data[0] <= 22)) {
+ ASSERT_TRUE(data[0] == 23 || IsRtpLeadByte(data[0]));
+ if (data[0] == 23) {
+ ASSERT_TRUE(VerifyEncryptedPacket(data, size));
+ } else if (IsRtpLeadByte(data[0])) {
+ ASSERT_TRUE(VerifyPacket(data, size, NULL));
}
}
}
@@ -379,15 +401,15 @@ class DtlsTestClient : public sigslot::has_slots<> {
private:
std::string name_;
rtc::scoped_refptr<rtc::RTCCertificate> certificate_;
- rtc::scoped_ptr<cricket::FakeTransport> transport_;
+ std::unique_ptr<cricket::FakeTransport> transport_;
std::vector<cricket::DtlsTransportChannelWrapper*> channels_;
- size_t packet_size_;
+ size_t packet_size_ = 0u;
std::set<int> received_;
- bool use_dtls_srtp_;
- rtc::SSLProtocolVersion ssl_max_version_;
- bool negotiated_dtls_;
- bool received_dtls_client_hello_;
- bool received_dtls_server_hello_;
+ bool use_dtls_srtp_ = false;
+ rtc::SSLProtocolVersion ssl_max_version_ = rtc::SSL_PROTOCOL_DTLS_12;
+ bool negotiated_dtls_ = false;
+ int received_dtls_client_hellos_ = 0;
+ int received_dtls_server_hellos_ = 0;
rtc::SentPacket sent_packet_;
};
@@ -437,14 +459,14 @@ class DtlsTransportChannelTest : public testing::Test {
bool Connect(ConnectionRole client1_role, ConnectionRole client2_role) {
Negotiate(client1_role, client2_role);
- bool rv = client1_.Connect(&client2_);
+ bool rv = client1_.Connect(&client2_, false);
EXPECT_TRUE(rv);
if (!rv)
return false;
EXPECT_TRUE_WAIT(
client1_.all_channels_writable() && client2_.all_channels_writable(),
- 10000);
+ kTimeout);
if (!client1_.all_channels_writable() || !client2_.all_channels_writable())
return false;
@@ -535,7 +557,7 @@ class DtlsTransportChannelTest : public testing::Test {
LOG(LS_INFO) << "Expect packets, size=" << size;
client2_.ExpectPackets(channel, size);
client1_.SendPackets(channel, size, count, srtp);
- EXPECT_EQ_WAIT(count, client2_.NumPacketsReceived(), 10000);
+ EXPECT_EQ_WAIT(count, client2_.NumPacketsReceived(), kTimeout);
}
protected:
@@ -828,11 +850,11 @@ TEST_F(DtlsTransportChannelTest, TestRenegotiateBeforeConnect) {
Renegotiate(&client1_, cricket::CONNECTIONROLE_ACTPASS,
cricket::CONNECTIONROLE_ACTIVE, NF_REOFFER);
- bool rv = client1_.Connect(&client2_);
+ bool rv = client1_.Connect(&client2_, false);
EXPECT_TRUE(rv);
EXPECT_TRUE_WAIT(
client1_.all_channels_writable() && client2_.all_channels_writable(),
- 10000);
+ kTimeout);
TestTransfer(0, 1000, 100, true);
TestTransfer(1, 1000, 100, true);
@@ -846,8 +868,8 @@ TEST_F(DtlsTransportChannelTest, TestCertificatesBeforeConnect) {
rtc::scoped_refptr<rtc::RTCCertificate> certificate1;
rtc::scoped_refptr<rtc::RTCCertificate> certificate2;
- rtc::scoped_ptr<rtc::SSLCertificate> remote_cert1;
- rtc::scoped_ptr<rtc::SSLCertificate> remote_cert2;
+ std::unique_ptr<rtc::SSLCertificate> remote_cert1;
+ std::unique_ptr<rtc::SSLCertificate> remote_cert2;
// After negotiation, each side has a distinct local certificate, but still no
// remote certificate, because connection has not yet occurred.
@@ -875,14 +897,80 @@ TEST_F(DtlsTransportChannelTest, TestCertificatesAfterConnect) {
certificate2->ssl_certificate().ToPEMString());
// Each side's remote certificate is the other side's local certificate.
- rtc::scoped_ptr<rtc::SSLCertificate> remote_cert1 =
+ std::unique_ptr<rtc::SSLCertificate> remote_cert1 =
client1_.transport()->GetRemoteSSLCertificate();
ASSERT_TRUE(remote_cert1);
ASSERT_EQ(remote_cert1->ToPEMString(),
certificate2->ssl_certificate().ToPEMString());
- rtc::scoped_ptr<rtc::SSLCertificate> remote_cert2 =
+ std::unique_ptr<rtc::SSLCertificate> remote_cert2 =
client2_.transport()->GetRemoteSSLCertificate();
ASSERT_TRUE(remote_cert2);
ASSERT_EQ(remote_cert2->ToPEMString(),
certificate1->ssl_certificate().ToPEMString());
}
+
+// Test that DTLS completes promptly if a ClientHello is received before the
+// transport channel is writable (allowing a ServerHello to be sent).
+TEST_F(DtlsTransportChannelTest, TestReceiveClientHelloBeforeWritable) {
+ MAYBE_SKIP_TEST(HaveDtls);
+ PrepareDtls(true, true, rtc::KT_DEFAULT);
+ // Exchange transport descriptions.
+ Negotiate(cricket::CONNECTIONROLE_ACTPASS, cricket::CONNECTIONROLE_ACTIVE);
+
+ // Make client2_ writable, but not client1_.
+ EXPECT_TRUE(client2_.Connect(&client1_, true));
+ EXPECT_TRUE_WAIT(client2_.all_raw_channels_writable(), kTimeout);
+
+ // Expect a DTLS ClientHello to be sent even while client1_ isn't writable.
+ EXPECT_EQ_WAIT(1, client1_.received_dtls_client_hellos(), kTimeout);
+ EXPECT_FALSE(client1_.all_raw_channels_writable());
+
+ // Now make client1_ writable and expect the handshake to complete
+ // without client2_ needing to retransmit the ClientHello.
+ EXPECT_TRUE(client1_.Connect(&client2_, true));
+ EXPECT_TRUE_WAIT(
+ client1_.all_channels_writable() && client2_.all_channels_writable(),
+ kTimeout);
+ EXPECT_EQ(1, client1_.received_dtls_client_hellos());
+}
+
+// Test that DTLS completes promptly if a ClientHello is received before the
+// transport channel has a remote fingerprint (allowing a ServerHello to be
+// sent).
+TEST_F(DtlsTransportChannelTest,
+ TestReceiveClientHelloBeforeRemoteFingerprint) {
+ MAYBE_SKIP_TEST(HaveDtls);
+ PrepareDtls(true, true, rtc::KT_DEFAULT);
+ client1_.SetupChannels(channel_ct_, cricket::ICEROLE_CONTROLLING);
+ client2_.SetupChannels(channel_ct_, cricket::ICEROLE_CONTROLLED);
+
+ // Make client2_ writable and give it local/remote certs, but don't yet give
+ // client1_ a remote fingerprint.
+ client1_.transport()->SetLocalTransportDescription(
+ MakeTransportDescription(client1_.certificate(),
+ cricket::CONNECTIONROLE_ACTPASS),
+ cricket::CA_OFFER, nullptr);
+ client2_.Negotiate(&client1_, cricket::CA_ANSWER,
+ cricket::CONNECTIONROLE_ACTIVE,
+ cricket::CONNECTIONROLE_ACTPASS, 0);
+ EXPECT_TRUE(client2_.Connect(&client1_, true));
+ EXPECT_TRUE_WAIT(client2_.all_raw_channels_writable(), kTimeout);
+
+ // Expect a DTLS ClientHello to be sent even while client1_ doesn't have a
+ // remote fingerprint.
+ EXPECT_EQ_WAIT(1, client1_.received_dtls_client_hellos(), kTimeout);
+ EXPECT_FALSE(client1_.all_raw_channels_writable());
+
+ // Now make give client1_ its remote fingerprint and make it writable, and
+ // expect the handshake to complete without client2_ needing to retransmit
+ // the ClientHello.
+ client1_.transport()->SetRemoteTransportDescription(
+ MakeTransportDescription(client2_.certificate(),
+ cricket::CONNECTIONROLE_ACTIVE),
+ cricket::CA_ANSWER, nullptr);
+ EXPECT_TRUE(client1_.Connect(&client2_, true));
+ EXPECT_TRUE_WAIT(
+ client1_.all_channels_writable() && client2_.all_channels_writable(),
+ kTimeout);
+ EXPECT_EQ(1, client1_.received_dtls_client_hellos());
+}
diff --git a/chromium/third_party/webrtc/p2p/client/fakeportallocator.h b/chromium/third_party/webrtc/p2p/base/fakeportallocator.h
index fb188261a27..6730d1ebb82 100644
--- a/chromium/third_party/webrtc/p2p/client/fakeportallocator.h
+++ b/chromium/third_party/webrtc/p2p/base/fakeportallocator.h
@@ -8,14 +8,16 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_P2P_CLIENT_FAKEPORTALLOCATOR_H_
-#define WEBRTC_P2P_CLIENT_FAKEPORTALLOCATOR_H_
+#ifndef WEBRTC_P2P_BASE_FAKEPORTALLOCATOR_H_
+#define WEBRTC_P2P_BASE_FAKEPORTALLOCATOR_H_
+#include <memory>
#include <string>
+#include <vector>
+
#include "webrtc/p2p/base/basicpacketsocketfactory.h"
#include "webrtc/p2p/base/portallocator.h"
#include "webrtc/p2p/base/udpport.h"
-#include "webrtc/base/scoped_ptr.h"
namespace rtc {
class SocketFactory;
@@ -82,24 +84,31 @@ class TestUDPPort : public UDPPort {
class FakePortAllocatorSession : public PortAllocatorSession {
public:
- FakePortAllocatorSession(rtc::Thread* worker_thread,
+ FakePortAllocatorSession(PortAllocator* allocator,
+ rtc::Thread* worker_thread,
rtc::PacketSocketFactory* factory,
const std::string& content_name,
int component,
const std::string& ice_ufrag,
const std::string& ice_pwd)
- : PortAllocatorSession(content_name, component, ice_ufrag, ice_pwd,
- cricket::kDefaultPortAllocatorFlags),
+ : PortAllocatorSession(content_name,
+ component,
+ ice_ufrag,
+ ice_pwd,
+ allocator->flags()),
worker_thread_(worker_thread),
factory_(factory),
- network_("network", "unittest",
- rtc::IPAddress(INADDR_LOOPBACK), 8),
- port_(), running_(false),
- port_config_count_(0) {
+ network_("network", "unittest", rtc::IPAddress(INADDR_LOOPBACK), 8),
+ port_(),
+ running_(false),
+ port_config_count_(0),
+ stun_servers_(allocator->stun_servers()),
+ turn_servers_(allocator->turn_servers()),
+ candidate_filter_(allocator->candidate_filter()) {
network_.AddIP(rtc::IPAddress(INADDR_LOOPBACK));
}
- virtual void StartGettingPorts() {
+ void StartGettingPorts() override {
if (!port_) {
port_.reset(TestUDPPort::Create(worker_thread_, factory_, &network_,
network_.GetBestIP(), 0, 0, username(),
@@ -110,32 +119,70 @@ class FakePortAllocatorSession : public PortAllocatorSession {
running_ = true;
}
- virtual void StopGettingPorts() { running_ = false; }
- virtual bool IsGettingPorts() { return running_; }
- virtual void ClearGettingPorts() {}
+ void StopGettingPorts() override { running_ = false; }
+ bool IsGettingPorts() override { return running_; }
+ void ClearGettingPorts() override {}
+ std::vector<PortInterface*> ReadyPorts() const override {
+ return ready_ports_;
+ }
+ std::vector<Candidate> ReadyCandidates() const override {
+ return candidates_;
+ }
+ bool CandidatesAllocationDone() const override { return allocation_done_; }
int port_config_count() { return port_config_count_; }
+ const ServerAddresses& stun_servers() const { return stun_servers_; }
+
+ const std::vector<RelayServerConfig>& turn_servers() const {
+ return turn_servers_;
+ }
+
+ uint32_t candidate_filter() const { return candidate_filter_; }
+
void AddPort(cricket::Port* port) {
- port->set_component(component_);
- port->set_generation(0);
- port->SignalPortComplete.connect(
- this, &FakePortAllocatorSession::OnPortComplete);
+ port->set_component(component());
+ port->set_generation(generation());
+ port->SignalPortComplete.connect(this,
+ &FakePortAllocatorSession::OnPortComplete);
port->PrepareAddress();
+ ready_ports_.push_back(port);
SignalPortReady(this, port);
}
void OnPortComplete(cricket::Port* port) {
- SignalCandidatesReady(this, port->Candidates());
+ const std::vector<Candidate>& candidates = port->Candidates();
+ candidates_.insert(candidates_.end(), candidates.begin(), candidates.end());
+ SignalCandidatesReady(this, candidates);
+
+ allocation_done_ = true;
SignalCandidatesAllocationDone(this);
}
+ int transport_info_update_count() const {
+ return transport_info_update_count_;
+ }
+
+ protected:
+ void UpdateIceParametersInternal() override {
+ // Since this class is a fake and this method only is overridden for tests,
+ // we don't need to actually update the transport info.
+ ++transport_info_update_count_;
+ }
+
private:
rtc::Thread* worker_thread_;
rtc::PacketSocketFactory* factory_;
rtc::Network network_;
- rtc::scoped_ptr<cricket::Port> port_;
+ std::unique_ptr<cricket::Port> port_;
bool running_;
int port_config_count_;
+ std::vector<Candidate> candidates_;
+ std::vector<PortInterface*> ready_ports_;
+ bool allocation_done_ = false;
+ ServerAddresses stun_servers_;
+ std::vector<RelayServerConfig> turn_servers_;
+ uint32_t candidate_filter_;
+ int transport_info_update_count_ = 0;
};
class FakePortAllocator : public cricket::PortAllocator {
@@ -144,44 +191,29 @@ class FakePortAllocator : public cricket::PortAllocator {
rtc::PacketSocketFactory* factory)
: worker_thread_(worker_thread), factory_(factory) {
if (factory_ == NULL) {
- owned_factory_.reset(new rtc::BasicPacketSocketFactory(
- worker_thread_));
+ owned_factory_.reset(new rtc::BasicPacketSocketFactory(worker_thread_));
factory_ = owned_factory_.get();
}
}
- void SetIceServers(
- const ServerAddresses& stun_servers,
- const std::vector<RelayServerConfig>& turn_servers) override {
- stun_servers_ = stun_servers;
- turn_servers_ = turn_servers;
- }
-
void SetNetworkIgnoreMask(int network_ignore_mask) override {}
- const ServerAddresses& stun_servers() const { return stun_servers_; }
-
- const std::vector<RelayServerConfig>& turn_servers() const {
- return turn_servers_;
- }
-
- virtual cricket::PortAllocatorSession* CreateSessionInternal(
+ cricket::PortAllocatorSession* CreateSessionInternal(
const std::string& content_name,
int component,
const std::string& ice_ufrag,
const std::string& ice_pwd) override {
- return new FakePortAllocatorSession(
- worker_thread_, factory_, content_name, component, ice_ufrag, ice_pwd);
+ return new FakePortAllocatorSession(this, worker_thread_, factory_,
+ content_name, component, ice_ufrag,
+ ice_pwd);
}
private:
rtc::Thread* worker_thread_;
rtc::PacketSocketFactory* factory_;
- rtc::scoped_ptr<rtc::BasicPacketSocketFactory> owned_factory_;
- ServerAddresses stun_servers_;
- std::vector<RelayServerConfig> turn_servers_;
+ std::unique_ptr<rtc::BasicPacketSocketFactory> owned_factory_;
};
} // namespace cricket
-#endif // WEBRTC_P2P_CLIENT_FAKEPORTALLOCATOR_H_
+#endif // WEBRTC_P2P_BASE_FAKEPORTALLOCATOR_H_
diff --git a/chromium/third_party/webrtc/p2p/base/faketransportcontroller.h b/chromium/third_party/webrtc/p2p/base/faketransportcontroller.h
index 2e0c9a97fad..321537dd59b 100644
--- a/chromium/third_party/webrtc/p2p/base/faketransportcontroller.h
+++ b/chromium/third_party/webrtc/p2p/base/faketransportcontroller.h
@@ -12,6 +12,7 @@
#define WEBRTC_P2P_BASE_FAKETRANSPORTCONTROLLER_H_
#include <map>
+#include <memory>
#include <string>
#include <vector>
@@ -140,20 +141,22 @@ class FakeTransportChannel : public TransportChannelImpl,
void SetWritable(bool writable) { set_writable(writable); }
- void SetDestination(FakeTransportChannel* dest) {
+ // Simulates the two transport channels connecting to each other.
+ // If |asymmetric| is true this method only affects this FakeTransportChannel.
+ // If false, it affects |dest| as well.
+ void SetDestination(FakeTransportChannel* dest, bool asymmetric = false) {
if (state_ == STATE_CONNECTING && dest) {
// This simulates the delivery of candidates.
dest_ = dest;
- dest_->dest_ = this;
if (local_cert_ && dest_->local_cert_) {
do_dtls_ = true;
- dest_->do_dtls_ = true;
NegotiateSrtpCiphers();
}
state_ = STATE_CONNECTED;
- dest_->state_ = STATE_CONNECTED;
set_writable(true);
- dest_->set_writable(true);
+ if (!asymmetric) {
+ dest->SetDestination(this, true);
+ }
} else if (state_ == STATE_CONNECTED && !dest) {
// Simulates loss of connectivity, by asymmetrically forgetting dest_.
dest_ = nullptr;
@@ -206,7 +209,7 @@ class FakeTransportChannel : public TransportChannelImpl,
} else {
rtc::Thread::Current()->Send(this, 0, packet);
}
- rtc::SentPacket sent_packet(options.packet_id, rtc::Time64());
+ rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis());
SignalSentPacket(this, sent_packet);
return static_cast<int>(len);
}
@@ -230,7 +233,7 @@ class FakeTransportChannel : public TransportChannelImpl,
}
bool SetLocalCertificate(
- const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) {
+ const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) override {
local_cert_ = certificate;
return true;
}
@@ -256,13 +259,13 @@ class FakeTransportChannel : public TransportChannelImpl,
bool GetSslCipherSuite(int* cipher_suite) override { return false; }
- rtc::scoped_refptr<rtc::RTCCertificate> GetLocalCertificate() const {
+ rtc::scoped_refptr<rtc::RTCCertificate> GetLocalCertificate() const override {
return local_cert_;
}
- rtc::scoped_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate()
+ std::unique_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate()
const override {
- return remote_cert_ ? rtc::scoped_ptr<rtc::SSLCertificate>(
+ return remote_cert_ ? std::unique_ptr<rtc::SSLCertificate>(
remote_cert_->GetReference())
: nullptr;
}
@@ -281,20 +284,6 @@ class FakeTransportChannel : public TransportChannelImpl,
return false;
}
- void NegotiateSrtpCiphers() {
- for (std::vector<int>::const_iterator it1 = srtp_ciphers_.begin();
- it1 != srtp_ciphers_.end(); ++it1) {
- for (std::vector<int>::const_iterator it2 = dest_->srtp_ciphers_.begin();
- it2 != dest_->srtp_ciphers_.end(); ++it2) {
- if (*it1 == *it2) {
- chosen_crypto_suite_ = *it1;
- dest_->chosen_crypto_suite_ = *it2;
- return;
- }
- }
- }
- }
-
bool GetStats(ConnectionInfos* infos) override {
ConnectionInfo info;
infos->clear();
@@ -310,6 +299,19 @@ class FakeTransportChannel : public TransportChannelImpl,
}
private:
+ void NegotiateSrtpCiphers() {
+ for (std::vector<int>::const_iterator it1 = srtp_ciphers_.begin();
+ it1 != srtp_ciphers_.end(); ++it1) {
+ for (std::vector<int>::const_iterator it2 = dest_->srtp_ciphers_.begin();
+ it2 != dest_->srtp_ciphers_.end(); ++it2) {
+ if (*it1 == *it2) {
+ chosen_crypto_suite_ = *it1;
+ return;
+ }
+ }
+ }
+ }
+
enum State { STATE_INIT, STATE_CONNECTING, STATE_CONNECTED };
FakeTransportChannel* dest_ = nullptr;
State state_ = STATE_INIT;
@@ -358,11 +360,14 @@ class FakeTransport : public Transport {
// If async, will send packets by "Post"-ing to message queue instead of
// synchronously "Send"-ing.
void SetAsync(bool async) { async_ = async; }
- void SetDestination(FakeTransport* dest) {
+
+ // If |asymmetric| is true, only set the destination for this transport, and
+ // not |dest|.
+ void SetDestination(FakeTransport* dest, bool asymmetric = false) {
dest_ = dest;
for (const auto& kv : channels_) {
kv.second->SetLocalCertificate(certificate_);
- SetChannelDestination(kv.first, kv.second);
+ SetChannelDestination(kv.first, kv.second, asymmetric);
}
}
@@ -405,6 +410,8 @@ class FakeTransport : public Transport {
using Transport::local_description;
using Transport::remote_description;
+ using Transport::VerifyCertificateFingerprint;
+ using Transport::NegotiateRole;
protected:
TransportChannelImpl* CreateTransportChannel(int component) override {
@@ -414,7 +421,7 @@ class FakeTransport : public Transport {
FakeTransportChannel* channel = new FakeTransportChannel(name(), component);
channel->set_ssl_max_protocol_version(ssl_max_version_);
channel->SetAsync(async_);
- SetChannelDestination(component, channel);
+ SetChannelDestination(component, channel, false);
channels_[component] = channel;
return channel;
}
@@ -430,15 +437,17 @@ class FakeTransport : public Transport {
return (it != channels_.end()) ? it->second : nullptr;
}
- void SetChannelDestination(int component, FakeTransportChannel* channel) {
+ void SetChannelDestination(int component,
+ FakeTransportChannel* channel,
+ bool asymmetric) {
FakeTransportChannel* dest_channel = nullptr;
if (dest_) {
dest_channel = dest_->GetFakeChannel(component);
- if (dest_channel) {
+ if (dest_channel && !asymmetric) {
dest_channel->SetLocalCertificate(dest_->certificate_);
}
}
- channel->SetDestination(dest_channel);
+ channel->SetDestination(dest_channel, asymmetric);
}
// Note, this is distinct from the Channel map owned by Transport.
@@ -502,22 +511,22 @@ class FakeTransportController : public TransportController {
SetIceRole(role);
}
- FakeTransport* GetTransport_w(const std::string& transport_name) {
+ FakeTransport* GetTransport_n(const std::string& transport_name) {
return static_cast<FakeTransport*>(
- TransportController::GetTransport_w(transport_name));
+ TransportController::GetTransport_n(transport_name));
}
void Connect(FakeTransportController* dest) {
- worker_thread()->Invoke<void>(
- rtc::Bind(&FakeTransportController::Connect_w, this, dest));
+ network_thread()->Invoke<void>(
+ rtc::Bind(&FakeTransportController::Connect_n, this, dest));
}
- TransportChannel* CreateTransportChannel_w(const std::string& transport_name,
+ TransportChannel* CreateTransportChannel_n(const std::string& transport_name,
int component) override {
if (fail_create_channel_) {
return nullptr;
}
- return TransportController::CreateTransportChannel_w(transport_name,
+ return TransportController::CreateTransportChannel_n(transport_name,
component);
}
@@ -538,23 +547,34 @@ class FakeTransportController : public TransportController {
}
protected:
- Transport* CreateTransport_w(const std::string& transport_name) override {
+ Transport* CreateTransport_n(const std::string& transport_name) override {
return new FakeTransport(transport_name);
}
- void Connect_w(FakeTransportController* dest) {
+ void Connect_n(FakeTransportController* dest) {
// Simulate the exchange of candidates.
- ConnectChannels_w();
- dest->ConnectChannels_w();
+ ConnectChannels_n();
+ dest->ConnectChannels_n();
for (auto& kv : transports()) {
FakeTransport* transport = static_cast<FakeTransport*>(kv.second);
- transport->SetDestination(dest->GetTransport_w(kv.first));
+ transport->SetDestination(dest->GetTransport_n(kv.first));
}
}
- void ConnectChannels_w() {
+ void ConnectChannels_n() {
+ TransportDescription faketransport_desc(
+ std::vector<std::string>(),
+ rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH),
+ rtc::CreateRandomString(cricket::ICE_PWD_LENGTH), cricket::ICEMODE_FULL,
+ cricket::CONNECTIONROLE_NONE, nullptr);
for (auto& kv : transports()) {
FakeTransport* transport = static_cast<FakeTransport*>(kv.second);
+ // Set local transport description for FakeTransport before connecting.
+ // Otherwise, the RTC_CHECK in Transport.ConnectChannel will fail.
+ if (!transport->local_description()) {
+ transport->SetLocalTransportDescription(faketransport_desc,
+ cricket::CA_OFFER, nullptr);
+ }
transport->ConnectChannels();
transport->MaybeStartGathering();
}
diff --git a/chromium/third_party/webrtc/p2p/base/p2ptransport.h b/chromium/third_party/webrtc/p2p/base/p2ptransport.h
index 0f965b4cdc1..d4da224f979 100644
--- a/chromium/third_party/webrtc/p2p/base/p2ptransport.h
+++ b/chromium/third_party/webrtc/p2p/base/p2ptransport.h
@@ -12,6 +12,8 @@
#define WEBRTC_P2P_BASE_P2PTRANSPORT_H_
#include <string>
+
+#include "webrtc/base/constructormagic.h"
#include "webrtc/p2p/base/transport.h"
namespace cricket {
diff --git a/chromium/third_party/webrtc/p2p/base/p2ptransportchannel.cc b/chromium/third_party/webrtc/p2p/base/p2ptransportchannel.cc
index 66532411b29..2801c446461 100644
--- a/chromium/third_party/webrtc/p2p/base/p2ptransportchannel.cc
+++ b/chromium/third_party/webrtc/p2p/base/p2ptransportchannel.cc
@@ -261,30 +261,27 @@ P2PTransportChannel::P2PTransportChannel(const std::string& transport_name,
P2PTransportChannel::~P2PTransportChannel() {
ASSERT(worker_thread_ == rtc::Thread::Current());
-
- for (size_t i = 0; i < allocator_sessions_.size(); ++i)
- delete allocator_sessions_[i];
}
// Add the allocator session to our list so that we know which sessions
// are still active.
-void P2PTransportChannel::AddAllocatorSession(PortAllocatorSession* session) {
+void P2PTransportChannel::AddAllocatorSession(
+ std::unique_ptr<PortAllocatorSession> session) {
ASSERT(worker_thread_ == rtc::Thread::Current());
session->set_generation(static_cast<uint32_t>(allocator_sessions_.size()));
- allocator_sessions_.push_back(session);
+ session->SignalPortReady.connect(this, &P2PTransportChannel::OnPortReady);
+ session->SignalCandidatesReady.connect(
+ this, &P2PTransportChannel::OnCandidatesReady);
+ session->SignalCandidatesAllocationDone.connect(
+ this, &P2PTransportChannel::OnCandidatesAllocationDone);
// We now only want to apply new candidates that we receive to the ports
// created by this new session because these are replacing those of the
// previous sessions.
ports_.clear();
- session->SignalPortReady.connect(this, &P2PTransportChannel::OnPortReady);
- session->SignalCandidatesReady.connect(
- this, &P2PTransportChannel::OnCandidatesReady);
- session->SignalCandidatesAllocationDone.connect(
- this, &P2PTransportChannel::OnCandidatesAllocationDone);
- session->StartGettingPorts();
+ allocator_sessions_.push_back(std::move(session));
}
void P2PTransportChannel::AddConnection(Connection* connection) {
@@ -390,10 +387,15 @@ void P2PTransportChannel::SetRemoteIceCredentials(const std::string& ice_ufrag,
candidate.set_password(ice_pwd);
}
}
- // We need to update the credentials for any peer reflexive candidates.
+ // We need to update the credentials and generation for any peer reflexive
+ // candidates.
for (Connection* conn : connections_) {
- conn->MaybeSetRemoteIceCredentials(ice_ufrag, ice_pwd);
+ conn->MaybeSetRemoteIceCredentialsAndGeneration(
+ ice_ufrag, ice_pwd,
+ static_cast<int>(remote_ice_parameters_.size() - 1));
}
+ // Updating the remote ICE candidate generation could change the sort order.
+ RequestSort();
}
void P2PTransportChannel::SetRemoteIceMode(IceMode mode) {
@@ -467,9 +469,28 @@ void P2PTransportChannel::MaybeStartGathering() {
gathering_state_ = kIceGatheringGathering;
SignalGatheringState(this);
}
- // Time for a new allocator
- AddAllocatorSession(allocator_->CreateSession(
- SessionId(), transport_name(), component(), ice_ufrag_, ice_pwd_));
+ // Time for a new allocator.
+ std::unique_ptr<PortAllocatorSession> pooled_session =
+ allocator_->TakePooledSession(transport_name(), component(), ice_ufrag_,
+ ice_pwd_);
+ if (pooled_session) {
+ AddAllocatorSession(std::move(pooled_session));
+ PortAllocatorSession* raw_pooled_session =
+ allocator_sessions_.back().get();
+ // Process the pooled session's existing candidates/ports, if they exist.
+ OnCandidatesReady(raw_pooled_session,
+ raw_pooled_session->ReadyCandidates());
+ for (PortInterface* port : allocator_sessions_.back()->ReadyPorts()) {
+ OnPortReady(raw_pooled_session, port);
+ }
+ if (allocator_sessions_.back()->CandidatesAllocationDone()) {
+ OnCandidatesAllocationDone(raw_pooled_session);
+ }
+ } else {
+ AddAllocatorSession(allocator_->CreateSession(
+ SessionId(), transport_name(), component(), ice_ufrag_, ice_pwd_));
+ allocator_sessions_.back()->StartGettingPorts();
+ }
}
}
@@ -1034,7 +1055,7 @@ rtc::DiffServCodePoint P2PTransportChannel::DefaultDscpValue() const {
// Monitor connection states.
void P2PTransportChannel::UpdateConnectionStates() {
- int64_t now = rtc::Time64();
+ int64_t now = rtc::TimeMillis();
// We need to copy the list of connections since some may delete themselves
// when we call UpdateState.
@@ -1208,7 +1229,7 @@ void P2PTransportChannel::MaybeStopPortAllocatorSessions() {
return;
}
- for (PortAllocatorSession* session : allocator_sessions_) {
+ for (const auto& session : allocator_sessions_) {
if (!session->IsGettingPorts()) {
continue;
}
@@ -1279,7 +1300,7 @@ void P2PTransportChannel::OnCheckAndPing() {
// When the best connection is either not receiving or not writable,
// switch to weak ping interval.
int ping_interval = weak() ? weak_ping_interval_ : STRONG_PING_INTERVAL;
- if (rtc::Time64() >= last_ping_sent_ms_ + ping_interval) {
+ if (rtc::TimeMillis() >= last_ping_sent_ms_ + ping_interval) {
Connection* conn = FindNextPingableConnection();
if (conn) {
PingConnection(conn);
@@ -1338,7 +1359,7 @@ bool P2PTransportChannel::IsPingable(Connection* conn, int64_t now) {
// ping target to become writable instead. See the big comment in
// CompareConnections.
Connection* P2PTransportChannel::FindNextPingableConnection() {
- int64_t now = rtc::Time64();
+ int64_t now = rtc::TimeMillis();
Connection* conn_to_ping = nullptr;
if (best_connection_ && best_connection_->connected() &&
best_connection_->writable() &&
@@ -1379,7 +1400,7 @@ void P2PTransportChannel::PingConnection(Connection* conn) {
use_candidate = best_connection_->writable();
}
conn->set_use_candidate_attr(use_candidate);
- last_ping_sent_ms_ = rtc::Time64();
+ last_ping_sent_ms_ = rtc::TimeMillis();
conn->Ping(last_ping_sent_ms_);
}
@@ -1400,10 +1421,14 @@ void P2PTransportChannel::OnConnectionStateChange(Connection* connection) {
}
// May stop the allocator session when at least one connection becomes
- // strongly connected after starting to get ports. It is not enough to check
+ // strongly connected after starting to get ports and the local candidate of
+ // the connection is at the latest generation. It is not enough to check
// that the connection becomes weakly connected because the connection may be
// changing from (writable, receiving) to (writable, not receiving).
- if (!connection->weak()) {
+ bool strongly_connected = !connection->weak();
+ bool latest_generation = connection->local_candidate().generation() >=
+ allocator_session()->generation();
+ if (strongly_connected && latest_generation) {
MaybeStopPortAllocatorSessions();
}
diff --git a/chromium/third_party/webrtc/p2p/base/p2ptransportchannel.h b/chromium/third_party/webrtc/p2p/base/p2ptransportchannel.h
index 4a53d755af5..b002c8bb9f3 100644
--- a/chromium/third_party/webrtc/p2p/base/p2ptransportchannel.h
+++ b/chromium/third_party/webrtc/p2p/base/p2ptransportchannel.h
@@ -21,9 +21,12 @@
#define WEBRTC_P2P_BASE_P2PTRANSPORTCHANNEL_H_
#include <map>
+#include <memory>
#include <set>
#include <string>
#include <vector>
+
+#include "webrtc/base/constructormagic.h"
#include "webrtc/p2p/base/candidate.h"
#include "webrtc/p2p/base/candidatepairinterface.h"
#include "webrtc/p2p/base/p2ptransport.h"
@@ -144,7 +147,7 @@ class P2PTransportChannel : public TransportChannelImpl,
return nullptr;
}
- rtc::scoped_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate()
+ std::unique_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate()
const override {
return nullptr;
}
@@ -186,7 +189,7 @@ class P2PTransportChannel : public TransportChannelImpl,
// Public for unit tests.
PortAllocatorSession* allocator_session() {
- return allocator_sessions_.back();
+ return allocator_sessions_.back().get();
}
// Public for unit tests.
@@ -224,7 +227,7 @@ class P2PTransportChannel : public TransportChannelImpl,
PortInterface* origin_port);
bool IsPingable(Connection* conn, int64_t now);
void PingConnection(Connection* conn);
- void AddAllocatorSession(PortAllocatorSession* session);
+ void AddAllocatorSession(std::unique_ptr<PortAllocatorSession> session);
void AddConnection(Connection* connection);
void OnPortReady(PortAllocatorSession *session, PortInterface* port);
@@ -292,7 +295,7 @@ class P2PTransportChannel : public TransportChannelImpl,
rtc::Thread* worker_thread_;
bool incoming_only_;
int error_;
- std::vector<PortAllocatorSession*> allocator_sessions_;
+ std::vector<std::unique_ptr<PortAllocatorSession>> allocator_sessions_;
std::vector<PortInterface *> ports_;
// |connections_| is a sorted list with the first one always be the
diff --git a/chromium/third_party/webrtc/p2p/base/p2ptransportchannel_unittest.cc b/chromium/third_party/webrtc/p2p/base/p2ptransportchannel_unittest.cc
index 87e92a5b30d..54ab3196cbc 100644
--- a/chromium/third_party/webrtc/p2p/base/p2ptransportchannel_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/p2ptransportchannel_unittest.cc
@@ -8,12 +8,15 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <algorithm>
+#include <memory>
+
+#include "webrtc/p2p/base/fakeportallocator.h"
#include "webrtc/p2p/base/p2ptransportchannel.h"
#include "webrtc/p2p/base/testrelayserver.h"
#include "webrtc/p2p/base/teststunserver.h"
#include "webrtc/p2p/base/testturnserver.h"
#include "webrtc/p2p/client/basicportallocator.h"
-#include "webrtc/p2p/client/fakeportallocator.h"
#include "webrtc/base/dscp.h"
#include "webrtc/base/fakenetwork.h"
#include "webrtc/base/firewallsocketserver.h"
@@ -211,7 +214,7 @@ class P2PTransportChannelTestBase : public testing::Test,
std::string name_; // TODO - Currently not used.
std::list<std::string> ch_packets_;
- rtc::scoped_ptr<cricket::P2PTransportChannel> ch_;
+ std::unique_ptr<cricket::P2PTransportChannel> ch_;
};
struct CandidatesData : public rtc::MessageData {
@@ -255,7 +258,7 @@ class P2PTransportChannelTestBase : public testing::Test,
}
rtc::FakeNetworkManager network_manager_;
- rtc::scoped_ptr<cricket::BasicPortAllocator> allocator_;
+ std::unique_ptr<cricket::BasicPortAllocator> allocator_;
ChannelData cd1_;
ChannelData cd2_;
cricket::IceRole role_;
@@ -285,6 +288,8 @@ class P2PTransportChannelTestBase : public testing::Test,
1, cricket::ICE_CANDIDATE_COMPONENT_DEFAULT,
ice_ufrag_ep2_cd1_ch, ice_pwd_ep2_cd1_ch,
ice_ufrag_ep1_cd1_ch, ice_pwd_ep1_cd1_ch));
+ ep1_.cd1_.ch_->MaybeStartGathering();
+ ep2_.cd1_.ch_->MaybeStartGathering();
if (num == 2) {
std::string ice_ufrag_ep1_cd2_ch = kIceUfrag[2];
std::string ice_pwd_ep1_cd2_ch = kIcePwd[2];
@@ -298,6 +303,8 @@ class P2PTransportChannelTestBase : public testing::Test,
1, cricket::ICE_CANDIDATE_COMPONENT_DEFAULT,
ice_ufrag_ep2_cd2_ch, ice_pwd_ep2_cd2_ch,
ice_ufrag_ep1_cd2_ch, ice_pwd_ep1_cd2_ch));
+ ep1_.cd2_.ch_->MaybeStartGathering();
+ ep2_.cd2_.ch_->MaybeStartGathering();
}
}
cricket::P2PTransportChannel* CreateChannel(
@@ -326,7 +333,6 @@ class P2PTransportChannelTestBase : public testing::Test,
channel->SetIceRole(GetEndpoint(endpoint)->ice_role());
channel->SetIceTiebreaker(GetEndpoint(endpoint)->GetIceTiebreaker());
channel->Connect();
- channel->MaybeStartGathering();
return channel;
}
void DestroyChannels() {
@@ -504,7 +510,7 @@ class P2PTransportChannelTestBase : public testing::Test,
}
void Test(const Result& expected) {
- int64_t connect_start = rtc::Time64();
+ int64_t connect_start = rtc::TimeMillis();
int64_t connect_time;
// Create the channels and wait for them to connect.
@@ -517,7 +523,7 @@ class P2PTransportChannelTestBase : public testing::Test,
ep2_ch1()->writable(),
expected.connect_wait,
1000);
- connect_time = rtc::Time64() - connect_start;
+ connect_time = rtc::TimeMillis() - connect_start;
if (connect_time < expected.connect_wait) {
LOG(LS_INFO) << "Connect time: " << connect_time << " ms";
} else {
@@ -529,7 +535,7 @@ class P2PTransportChannelTestBase : public testing::Test,
// This may take up to 2 seconds.
if (ep1_ch1()->best_connection() &&
ep2_ch1()->best_connection()) {
- int64_t converge_start = rtc::Time64();
+ int64_t converge_start = rtc::TimeMillis();
int64_t converge_time;
int64_t converge_wait = 2000;
EXPECT_TRUE_WAIT_MARGIN(CheckCandidate1(expected), converge_wait,
@@ -547,7 +553,7 @@ class P2PTransportChannelTestBase : public testing::Test,
// For verbose
ExpectCandidate2(expected);
- converge_time = rtc::Time64() - converge_start;
+ converge_time = rtc::TimeMillis() - converge_start;
if (converge_time < converge_wait) {
LOG(LS_INFO) << "Converge time: " << converge_time << " ms";
} else {
@@ -702,7 +708,7 @@ class P2PTransportChannelTestBase : public testing::Test,
void OnMessage(rtc::Message* msg) {
switch (msg->message_id) {
case MSG_ADD_CANDIDATES: {
- rtc::scoped_ptr<CandidatesData> data(
+ std::unique_ptr<CandidatesData> data(
static_cast<CandidatesData*>(msg->pdata));
cricket::P2PTransportChannel* rch = GetRemoteChannel(data->channel);
for (auto& c : data->candidates) {
@@ -717,7 +723,7 @@ class P2PTransportChannelTestBase : public testing::Test,
break;
}
case MSG_REMOVE_CANDIDATES: {
- rtc::scoped_ptr<CandidatesData> data(
+ std::unique_ptr<CandidatesData> data(
static_cast<CandidatesData*>(msg->pdata));
cricket::P2PTransportChannel* rch = GetRemoteChannel(data->channel);
for (cricket::Candidate& c : data->candidates) {
@@ -797,12 +803,12 @@ class P2PTransportChannelTestBase : public testing::Test,
private:
rtc::Thread* main_;
- rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
- rtc::scoped_ptr<rtc::VirtualSocketServer> vss_;
- rtc::scoped_ptr<rtc::NATSocketServer> nss_;
- rtc::scoped_ptr<rtc::FirewallSocketServer> ss_;
+ std::unique_ptr<rtc::PhysicalSocketServer> pss_;
+ std::unique_ptr<rtc::VirtualSocketServer> vss_;
+ std::unique_ptr<rtc::NATSocketServer> nss_;
+ std::unique_ptr<rtc::FirewallSocketServer> ss_;
rtc::SocketServerScope ss_scope_;
- rtc::scoped_ptr<cricket::TestStunServer> stun_server_;
+ std::unique_ptr<cricket::TestStunServer> stun_server_;
cricket::TestTurnServer turn_server_;
cricket::TestRelayServer relay_server_;
rtc::SocksProxyServer socks_server1_;
@@ -1193,7 +1199,7 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignaling) {
set_clear_remote_candidates_ufrag_pwd(false);
CreateChannels(1);
// Only have remote credentials come in for ep2, not ep1.
- ep2_ch1()->SetRemoteIceCredentials(kIceUfrag[3], kIcePwd[3]);
+ ep2_ch1()->SetRemoteIceCredentials(kIceUfrag[0], kIcePwd[0]);
// Pause sending ep2's candidates to ep1 until ep1 receives the peer reflexive
// candidate.
@@ -1209,15 +1215,23 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignaling) {
EXPECT_EQ(kIceUfrag[1],
ep1_ch1()->best_connection()->remote_candidate().username());
EXPECT_EQ("", ep1_ch1()->best_connection()->remote_candidate().password());
+ // Because we don't have ICE credentials yet, we don't know the generation.
+ EXPECT_EQ(0u, ep1_ch1()->best_connection()->remote_candidate().generation());
EXPECT_TRUE(nullptr == ep1_ch1()->FindNextPingableConnection());
+ // Add two sets of remote ICE credentials, so that the ones used by the
+ // candidate will be generation 1 instead of 0.
+ ep1_ch1()->SetRemoteIceCredentials(kIceUfrag[3], kIcePwd[3]);
ep1_ch1()->SetRemoteIceCredentials(kIceUfrag[1], kIcePwd[1]);
- ResumeCandidates(1);
-
+ // After setting the remote ICE credentials, the password and generation
+ // of the peer reflexive candidate should be updated.
EXPECT_EQ(kIcePwd[1],
ep1_ch1()->best_connection()->remote_candidate().password());
+ EXPECT_EQ(1u, ep1_ch1()->best_connection()->remote_candidate().generation());
EXPECT_TRUE(nullptr != ep1_ch1()->FindNextPingableConnection());
+ ResumeCandidates(1);
+
WAIT(ep2_ch1()->best_connection() != NULL, 2000);
// Verify ep1's best connection is updated to use the 'local' candidate.
EXPECT_EQ_WAIT(
@@ -1237,7 +1251,7 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignalingWithNAT) {
set_clear_remote_candidates_ufrag_pwd(false);
CreateChannels(1);
// Only have remote credentials come in for ep2, not ep1.
- ep2_ch1()->SetRemoteIceCredentials(kIceUfrag[3], kIcePwd[3]);
+ ep2_ch1()->SetRemoteIceCredentials(kIceUfrag[0], kIcePwd[0]);
// Pause sending ep2's candidates to ep1 until ep1 receives the peer reflexive
// candidate.
PauseCandidates(1);
@@ -1251,14 +1265,21 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignalingWithNAT) {
EXPECT_EQ(kIceUfrag[1],
ep1_ch1()->best_connection()->remote_candidate().username());
EXPECT_EQ("", ep1_ch1()->best_connection()->remote_candidate().password());
+ // Because we don't have ICE credentials yet, we don't know the generation.
+ EXPECT_EQ(0u, ep1_ch1()->best_connection()->remote_candidate().generation());
EXPECT_TRUE(nullptr == ep1_ch1()->FindNextPingableConnection());
+ // Add two sets of remote ICE credentials, so that the ones used by the
+ // candidate will be generation 1 instead of 0.
+ ep1_ch1()->SetRemoteIceCredentials(kIceUfrag[3], kIcePwd[3]);
ep1_ch1()->SetRemoteIceCredentials(kIceUfrag[1], kIcePwd[1]);
- ResumeCandidates(1);
-
+ // After setting the remote ICE credentials, the password and generation
+ // of the peer reflexive candidate should be updated.
EXPECT_EQ(kIcePwd[1],
ep1_ch1()->best_connection()->remote_candidate().password());
- EXPECT_TRUE(nullptr != ep1_ch1()->FindNextPingableConnection());
+ EXPECT_EQ(1u, ep1_ch1()->best_connection()->remote_candidate().generation());
+
+ ResumeCandidates(1);
const cricket::Connection* best_connection = NULL;
WAIT((best_connection = ep2_ch1()->best_connection()) != NULL, 2000);
@@ -1530,6 +1551,92 @@ TEST_F(P2PTransportChannelTest, TestContinualGathering) {
DestroyChannels();
}
+// Test that a connection succeeds when the P2PTransportChannel uses a pooled
+// PortAllocatorSession that has not yet finished gathering candidates.
+TEST_F(P2PTransportChannelTest, TestUsingPooledSessionBeforeDoneGathering) {
+ ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags,
+ kDefaultPortAllocatorFlags);
+ // First create a pooled session for each endpoint.
+ auto& allocator_1 = GetEndpoint(0)->allocator_;
+ auto& allocator_2 = GetEndpoint(1)->allocator_;
+ int pool_size = 1;
+ allocator_1->SetConfiguration(allocator_1->stun_servers(),
+ allocator_1->turn_servers(), pool_size);
+ allocator_2->SetConfiguration(allocator_2->stun_servers(),
+ allocator_2->turn_servers(), pool_size);
+ const cricket::PortAllocatorSession* pooled_session_1 =
+ allocator_1->GetPooledSession();
+ const cricket::PortAllocatorSession* pooled_session_2 =
+ allocator_2->GetPooledSession();
+ ASSERT_NE(nullptr, pooled_session_1);
+ ASSERT_NE(nullptr, pooled_session_2);
+ // Sanity check that pooled sessions haven't gathered anything yet.
+ EXPECT_TRUE(pooled_session_1->ReadyPorts().empty());
+ EXPECT_TRUE(pooled_session_1->ReadyCandidates().empty());
+ EXPECT_TRUE(pooled_session_2->ReadyPorts().empty());
+ EXPECT_TRUE(pooled_session_2->ReadyCandidates().empty());
+ // Now let the endpoints connect and try exchanging some data.
+ CreateChannels(1);
+ EXPECT_TRUE_WAIT_MARGIN(ep1_ch1() != NULL && ep2_ch1() != NULL &&
+ ep1_ch1()->receiving() && ep1_ch1()->writable() &&
+ ep2_ch1()->receiving() && ep2_ch1()->writable(),
+ 1000, 1000);
+ TestSendRecv(1);
+ // Make sure the P2PTransportChannels are actually using ports from the
+ // pooled sessions.
+ auto pooled_ports_1 = pooled_session_1->ReadyPorts();
+ auto pooled_ports_2 = pooled_session_2->ReadyPorts();
+ EXPECT_NE(pooled_ports_1.end(),
+ std::find(pooled_ports_1.begin(), pooled_ports_1.end(),
+ ep1_ch1()->best_connection()->port()));
+ EXPECT_NE(pooled_ports_2.end(),
+ std::find(pooled_ports_2.begin(), pooled_ports_2.end(),
+ ep2_ch1()->best_connection()->port()));
+}
+
+// Test that a connection succeeds when the P2PTransportChannel uses a pooled
+// PortAllocatorSession that already finished gathering candidates.
+TEST_F(P2PTransportChannelTest, TestUsingPooledSessionAfterDoneGathering) {
+ ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags,
+ kDefaultPortAllocatorFlags);
+ // First create a pooled session for each endpoint.
+ auto& allocator_1 = GetEndpoint(0)->allocator_;
+ auto& allocator_2 = GetEndpoint(1)->allocator_;
+ int pool_size = 1;
+ allocator_1->SetConfiguration(allocator_1->stun_servers(),
+ allocator_1->turn_servers(), pool_size);
+ allocator_2->SetConfiguration(allocator_2->stun_servers(),
+ allocator_2->turn_servers(), pool_size);
+ const cricket::PortAllocatorSession* pooled_session_1 =
+ allocator_1->GetPooledSession();
+ const cricket::PortAllocatorSession* pooled_session_2 =
+ allocator_2->GetPooledSession();
+ ASSERT_NE(nullptr, pooled_session_1);
+ ASSERT_NE(nullptr, pooled_session_2);
+ // Wait for the pooled sessions to finish gathering before the
+ // P2PTransportChannels try to use them.
+ EXPECT_TRUE_WAIT(pooled_session_1->CandidatesAllocationDone() &&
+ pooled_session_2->CandidatesAllocationDone(),
+ kDefaultTimeout);
+ // Now let the endpoints connect and try exchanging some data.
+ CreateChannels(1);
+ EXPECT_TRUE_WAIT_MARGIN(ep1_ch1() != NULL && ep2_ch1() != NULL &&
+ ep1_ch1()->receiving() && ep1_ch1()->writable() &&
+ ep2_ch1()->receiving() && ep2_ch1()->writable(),
+ 1000, 1000);
+ TestSendRecv(1);
+ // Make sure the P2PTransportChannels are actually using ports from the
+ // pooled sessions.
+ auto pooled_ports_1 = pooled_session_1->ReadyPorts();
+ auto pooled_ports_2 = pooled_session_2->ReadyPorts();
+ EXPECT_NE(pooled_ports_1.end(),
+ std::find(pooled_ports_1.begin(), pooled_ports_1.end(),
+ ep1_ch1()->best_connection()->port()));
+ EXPECT_NE(pooled_ports_2.end(),
+ std::find(pooled_ports_2.begin(), pooled_ports_2.end(),
+ ep2_ch1()->best_connection()->port()));
+}
+
// Test what happens when we have 2 users behind the same NAT. This can lead
// to interesting behavior because the STUN server will only give out the
// address of the outermost NAT.
@@ -1979,8 +2086,8 @@ class P2PTransportChannelPingTest : public testing::Test,
void reset_channel_ready_to_send() { channel_ready_to_send_ = false; }
private:
- rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
- rtc::scoped_ptr<rtc::VirtualSocketServer> vss_;
+ std::unique_ptr<rtc::PhysicalSocketServer> pss_;
+ std::unique_ptr<rtc::VirtualSocketServer> vss_;
rtc::SocketServerScope ss_scope_;
cricket::CandidatePairInterface* last_selected_candidate_pair_ = nullptr;
int last_sent_packet_id_ = -1;
@@ -2532,9 +2639,10 @@ TEST_F(P2PTransportChannelPingTest, TestDeleteConnectionsIfAllWriteTimedout) {
EXPECT_TRUE_WAIT(ch.connections().empty(), 1000);
}
-// Test that after a port allocator session is started, it will be stopped
-// when a new connection becomes writable and receiving. Also test that this
-// holds even if the transport channel did not lose the writability.
+// Tests that after a port allocator session is started, it will be stopped
+// when a new connection becomes writable and receiving. Also tests that if a
+// connection belonging to an old session becomes writable, it won't stop
+// the current port allocator session.
TEST_F(P2PTransportChannelPingTest, TestStopPortAllocatorSessions) {
cricket::FakePortAllocator pa(rtc::Thread::Current(), nullptr);
cricket::P2PTransportChannel ch("test channel", 1, &pa);
@@ -2548,11 +2656,17 @@ TEST_F(P2PTransportChannelPingTest, TestStopPortAllocatorSessions) {
conn1->ReceivedPingResponse(); // Becomes writable and receiving
EXPECT_TRUE(!ch.allocator_session()->IsGettingPorts());
- // Restart gathering even if the transport channel is still writable.
- // It should stop getting ports after a new connection becomes strongly
- // connected.
+ // Start a new session. Even though conn1, which belongs to an older
+ // session, becomes unwritable and writable again, it should not stop the
+ // current session.
ch.SetIceCredentials(kIceUfrag[1], kIcePwd[1]);
ch.MaybeStartGathering();
+ conn1->Prune();
+ conn1->ReceivedPingResponse();
+ EXPECT_TRUE(ch.allocator_session()->IsGettingPorts());
+
+ // But if a new connection created from the new session becomes writable,
+ // it will stop the current session.
ch.AddRemoteCandidate(CreateHostCandidate("2.2.2.2", 2, 100));
cricket::Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2);
ASSERT_TRUE(conn2 != nullptr);
@@ -2625,10 +2739,10 @@ class P2PTransportChannelMostLikelyToWorkFirstTest
}
private:
- rtc::scoped_ptr<cricket::BasicPortAllocator> allocator_;
+ std::unique_ptr<cricket::BasicPortAllocator> allocator_;
rtc::FakeNetworkManager network_manager_;
cricket::TestTurnServer turn_server_;
- rtc::scoped_ptr<cricket::P2PTransportChannel> channel_;
+ std::unique_ptr<cricket::P2PTransportChannel> channel_;
};
// Test that Relay/Relay connections will be pinged first when no other
diff --git a/chromium/third_party/webrtc/p2p/base/packetsocketfactory.h b/chromium/third_party/webrtc/p2p/base/packetsocketfactory.h
index 54037241b0a..290d9ca844b 100644
--- a/chromium/third_party/webrtc/p2p/base/packetsocketfactory.h
+++ b/chromium/third_party/webrtc/p2p/base/packetsocketfactory.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_P2P_BASE_PACKETSOCKETFACTORY_H_
#define WEBRTC_P2P_BASE_PACKETSOCKETFACTORY_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/proxyinfo.h"
namespace rtc {
diff --git a/chromium/third_party/webrtc/p2p/base/port.cc b/chromium/third_party/webrtc/p2p/base/port.cc
index d26a9302a8a..fbc64f2e0a7 100644
--- a/chromium/third_party/webrtc/p2p/base/port.cc
+++ b/chromium/third_party/webrtc/p2p/base/port.cc
@@ -21,7 +21,6 @@
#include "webrtc/base/logging.h"
#include "webrtc/base/messagedigest.h"
#include "webrtc/base/network.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/stringencode.h"
#include "webrtc/base/stringutils.h"
@@ -220,6 +219,19 @@ Port::~Port() {
delete list[i];
}
+void Port::SetIceParameters(int component,
+ const std::string& username_fragment,
+ const std::string& password) {
+ component_ = component;
+ ice_username_fragment_ = username_fragment;
+ password_ = password;
+ for (Candidate& c : candidates_) {
+ c.set_component(component);
+ c.set_username(username_fragment);
+ c.set_password(password);
+ }
+}
+
Connection* Port::GetConnection(const rtc::SocketAddress& remote_addr) {
AddressMap::const_iterator iter = connections_.find(remote_addr);
if (iter != connections_.end())
@@ -278,7 +290,7 @@ void Port::OnReadPacket(
// If this is an authenticated STUN request, then signal unknown address and
// send back a proper binding response.
- rtc::scoped_ptr<IceMessage> msg;
+ std::unique_ptr<IceMessage> msg;
std::string remote_username;
if (!GetStunMessage(data, size, addr, &msg, &remote_username)) {
LOG_J(LS_ERROR, this) << "Received non-STUN packet from unknown address ("
@@ -325,7 +337,7 @@ size_t Port::AddPrflxCandidate(const Candidate& local) {
bool Port::GetStunMessage(const char* data,
size_t size,
const rtc::SocketAddress& addr,
- rtc::scoped_ptr<IceMessage>* out_msg,
+ std::unique_ptr<IceMessage>* out_msg,
std::string* out_username) {
// NOTE: This could clearly be optimized to avoid allocating any memory.
// However, at the data rates we'll be looking at on the client side,
@@ -342,7 +354,7 @@ bool Port::GetStunMessage(const char* data,
// Parse the request message. If the packet is not a complete and correct
// STUN message, then ignore it.
- rtc::scoped_ptr<IceMessage> stun_msg(new IceMessage());
+ std::unique_ptr<IceMessage> stun_msg(new IceMessage());
rtc::ByteBufferReader buf(data, size);
if (!stun_msg->Read(&buf) || (buf.Length() > 0)) {
return false;
@@ -790,14 +802,14 @@ Connection::Connection(Port* port,
last_ping_received_(0),
last_data_received_(0),
last_ping_response_received_(0),
- recv_rate_tracker_(100u, 10u),
- send_rate_tracker_(100u, 10u),
+ recv_rate_tracker_(100, 10u),
+ send_rate_tracker_(100, 10u),
sent_packets_discarded_(0),
sent_packets_total_(0),
reported_(false),
state_(STATE_WAITING),
receiving_timeout_(WEAK_CONNECTION_RECEIVE_TIMEOUT),
- time_created_ms_(rtc::Time64()) {
+ time_created_ms_(rtc::TimeMillis()) {
// All of our connections start in WAITING state.
// TODO(mallinath) - Start connections from STATE_FROZEN.
// Wire up to send stun packets
@@ -895,14 +907,14 @@ void Connection::OnSendStunPacket(const void* data, size_t size,
void Connection::OnReadPacket(
const char* data, size_t size, const rtc::PacketTime& packet_time) {
- rtc::scoped_ptr<IceMessage> msg;
+ std::unique_ptr<IceMessage> msg;
std::string remote_ufrag;
const rtc::SocketAddress& addr(remote_candidate_.address());
if (!port_->GetStunMessage(data, size, addr, &msg, &remote_ufrag)) {
// The packet did not parse as a valid STUN message
// This is a data packet, pass it along.
set_receiving(true);
- last_data_received_ = rtc::Time64();
+ last_data_received_ = rtc::TimeMillis();
recv_rate_tracker_.AddSamples(size);
SignalReadPacket(this, data, size, packet_time);
@@ -1117,7 +1129,7 @@ void Connection::Ping(int64_t now) {
void Connection::ReceivedPing() {
set_receiving(true);
- last_ping_received_ = rtc::Time64();
+ last_ping_received_ = rtc::TimeMillis();
}
void Connection::ReceivedPingResponse() {
@@ -1130,7 +1142,7 @@ void Connection::ReceivedPingResponse() {
set_write_state(STATE_WRITABLE);
set_state(STATE_SUCCEEDED);
pings_since_last_response_.clear();
- last_ping_response_received_ = rtc::Time64();
+ last_ping_response_received_ = rtc::TimeMillis();
}
bool Connection::dead(int64_t now) const {
@@ -1298,12 +1310,22 @@ void Connection::HandleRoleConflictFromPeer() {
port_->SignalRoleConflict(port_);
}
-void Connection::MaybeSetRemoteIceCredentials(const std::string& ice_ufrag,
- const std::string& ice_pwd) {
+void Connection::MaybeSetRemoteIceCredentialsAndGeneration(
+ const std::string& ice_ufrag,
+ const std::string& ice_pwd,
+ int generation) {
if (remote_candidate_.username() == ice_ufrag &&
remote_candidate_.password().empty()) {
remote_candidate_.set_password(ice_pwd);
}
+ // TODO(deadbeef): A value of '0' for the generation is used for both
+ // generation 0 and "generation unknown". It should be changed to an
+ // rtc::Optional to fix this.
+ if (remote_candidate_.username() == ice_ufrag &&
+ remote_candidate_.password() == ice_pwd &&
+ remote_candidate_.generation() == 0) {
+ remote_candidate_.set_generation(generation);
+ }
}
void Connection::MaybeUpdatePeerReflexiveCandidate(
diff --git a/chromium/third_party/webrtc/p2p/base/port.h b/chromium/third_party/webrtc/p2p/base/port.h
index 7c4468d2e6b..937f3cd0284 100644
--- a/chromium/third_party/webrtc/p2p/base/port.h
+++ b/chromium/third_party/webrtc/p2p/base/port.h
@@ -12,6 +12,7 @@
#define WEBRTC_P2P_BASE_PORT_H_
#include <map>
+#include <memory>
#include <set>
#include <string>
#include <vector>
@@ -106,6 +107,11 @@ struct ProtocolAddress {
: address(a), proto(p), secure(false) { }
ProtocolAddress(const rtc::SocketAddress& a, ProtocolType p, bool sec)
: address(a), proto(p), secure(sec) { }
+
+ bool operator==(const ProtocolAddress& o) const {
+ return address == o.address && proto == o.proto && secure == o.secure;
+ }
+ bool operator!=(const ProtocolAddress& o) const { return !(*this == o); }
};
typedef std::set<rtc::SocketAddress> ServerAddresses;
@@ -175,23 +181,16 @@ class Port : public PortInterface, public rtc::MessageHandler,
uint32_t generation() { return generation_; }
void set_generation(uint32_t generation) { generation_ = generation; }
- // ICE requires a single username/password per content/media line. So the
- // |ice_username_fragment_| of the ports that belongs to the same content will
- // be the same. However this causes a small complication with our relay
- // server, which expects different username for RTP and RTCP.
- //
- // To resolve this problem, we implemented the username_fragment(),
- // which returns a different username (calculated from
- // |ice_username_fragment_|) for RTCP in the case of ICEPROTO_GOOGLE. And the
- // username_fragment() simply returns |ice_username_fragment_| when running
- // in ICEPROTO_RFC5245.
- //
- // As a result the ICEPROTO_GOOGLE will use different usernames for RTP and
- // RTCP. And the ICEPROTO_RFC5245 will use same username for both RTP and
- // RTCP.
const std::string username_fragment() const;
const std::string& password() const { return password_; }
+ // May be called when this port was initially created by a pooled
+ // PortAllocatorSession, and is now being assigned to an ICE transport.
+ // Updates the information for candidates as well.
+ void SetIceParameters(int component,
+ const std::string& username_fragment,
+ const std::string& password);
+
// Fired when candidates are discovered by the port. When all candidates
// are discovered that belong to port SignalAddressReady is fired.
sigslot::signal2<Port*, const Candidate&> SignalCandidateReady;
@@ -336,7 +335,7 @@ class Port : public PortInterface, public rtc::MessageHandler,
bool GetStunMessage(const char* data,
size_t size,
const rtc::SocketAddress& addr,
- rtc::scoped_ptr<IceMessage>* out_msg,
+ std::unique_ptr<IceMessage>* out_msg,
std::string* out_username);
// Checks if the address in addr is compatible with the port's ip.
@@ -574,10 +573,14 @@ class Connection : public CandidatePairInterface,
uint32_t ComputeNetworkCost() const;
- // Update the ICE password of the remote candidate if |ice_ufrag| matches
- // the candidate's ufrag, and the candidate's passwrod has not been set.
- void MaybeSetRemoteIceCredentials(const std::string& ice_ufrag,
- const std::string& ice_pwd);
+ // Update the ICE password and/or generation of the remote candidate if a
+ // ufrag in |remote_ice_parameters| matches the candidate's ufrag, and the
+ // candidate's password and/or ufrag has not been set.
+ // |remote_ice_parameters| should be a list of known ICE parameters ordered
+ // by generation.
+ void MaybeSetRemoteIceCredentialsAndGeneration(const std::string& ice_ufrag,
+ const std::string& ice_pwd,
+ int generation);
// If |remote_candidate_| is peer reflexive and is equivalent to
// |new_candidate| except the type, update |remote_candidate_| to
diff --git a/chromium/third_party/webrtc/p2p/base/port_unittest.cc b/chromium/third_party/webrtc/p2p/base/port_unittest.cc
index fc49f20a5d7..efc26094017 100644
--- a/chromium/third_party/webrtc/p2p/base/port_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/port_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/p2p/base/basicpacketsocketfactory.h"
#include "webrtc/p2p/base/relayport.h"
#include "webrtc/p2p/base/stunport.h"
@@ -26,7 +28,6 @@
#include "webrtc/base/natserver.h"
#include "webrtc/base/natsocketfactory.h"
#include "webrtc/base/physicalsocketserver.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socketaddress.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/stringutils.h"
@@ -43,7 +44,6 @@ using rtc::NAT_ADDR_RESTRICTED;
using rtc::NAT_PORT_RESTRICTED;
using rtc::NAT_SYMMETRIC;
using rtc::PacketSocketFactory;
-using rtc::scoped_ptr;
using rtc::Socket;
using rtc::SocketAddress;
using namespace cricket;
@@ -212,8 +212,8 @@ class TestPort : public Port {
const rtc::SentPacket& sent_packet) {
PortInterface::SignalSentPacket(sent_packet);
}
- rtc::scoped_ptr<Buffer> last_stun_buf_;
- rtc::scoped_ptr<IceMessage> last_stun_msg_;
+ std::unique_ptr<Buffer> last_stun_buf_;
+ std::unique_ptr<IceMessage> last_stun_msg_;
int type_preference_ = 0;
};
@@ -344,12 +344,12 @@ class TestChannel : public sigslot::has_slots<> {
}
IceMode ice_mode_;
- rtc::scoped_ptr<Port> port_;
+ std::unique_ptr<Port> port_;
int complete_count_;
Connection* conn_;
SocketAddress remote_address_;
- rtc::scoped_ptr<StunMessage> remote_request_;
+ std::unique_ptr<StunMessage> remote_request_;
std::string remote_frag_;
bool nominated_;
bool connection_ready_to_send_ = false;
@@ -764,18 +764,18 @@ class PortTest : public testing::Test, public sigslot::has_slots<> {
private:
rtc::Thread* main_;
- rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
- rtc::scoped_ptr<rtc::VirtualSocketServer> ss_;
+ std::unique_ptr<rtc::PhysicalSocketServer> pss_;
+ std::unique_ptr<rtc::VirtualSocketServer> ss_;
rtc::SocketServerScope ss_scope_;
rtc::Network network_;
rtc::BasicPacketSocketFactory socket_factory_;
- rtc::scoped_ptr<rtc::NATServer> nat_server1_;
- rtc::scoped_ptr<rtc::NATServer> nat_server2_;
+ std::unique_ptr<rtc::NATServer> nat_server1_;
+ std::unique_ptr<rtc::NATServer> nat_server2_;
rtc::NATSocketFactory nat_factory1_;
rtc::NATSocketFactory nat_factory2_;
rtc::BasicPacketSocketFactory nat_socket_factory1_;
rtc::BasicPacketSocketFactory nat_socket_factory2_;
- scoped_ptr<TestStunServer> stun_server_;
+ std::unique_ptr<TestStunServer> stun_server_;
TestTurnServer turn_server_;
TestRelayServer relay_server_;
std::string username_;
@@ -965,7 +965,7 @@ class FakePacketSocketFactory : public rtc::PacketSocketFactory {
void set_next_client_tcp_socket(AsyncPacketSocket* next_client_tcp_socket) {
next_client_tcp_socket_ = next_client_tcp_socket;
}
- rtc::AsyncResolverInterface* CreateAsyncResolver() {
+ rtc::AsyncResolverInterface* CreateAsyncResolver() override {
return NULL;
}
@@ -1217,7 +1217,7 @@ TEST_F(PortTest, TestTcpNeverConnect) {
ch1.Start();
ASSERT_EQ_WAIT(1, ch1.complete_count(), kTimeout);
- rtc::scoped_ptr<rtc::AsyncSocket> server(
+ std::unique_ptr<rtc::AsyncSocket> server(
vss()->CreateAsyncSocket(kLocalAddr2.family(), SOCK_STREAM));
// Bind but not listen.
EXPECT_EQ(0, server->Bind(kLocalAddr2));
@@ -1268,9 +1268,9 @@ TEST_F(PortTest, TestConnectionDead) {
ASSERT_EQ_WAIT(1, ch2.complete_count(), kTimeout);
// Test case that the connection has never received anything.
- int64_t before_created = rtc::Time64();
+ int64_t before_created = rtc::TimeMillis();
ch1.CreateConnection(GetCandidate(port2));
- int64_t after_created = rtc::Time64();
+ int64_t after_created = rtc::TimeMillis();
Connection* conn = ch1.conn();
ASSERT(conn != nullptr);
// It is not dead if it is after MIN_CONNECTION_LIFETIME but not pruned.
@@ -1291,9 +1291,9 @@ TEST_F(PortTest, TestConnectionDead) {
ch1.CreateConnection(GetCandidate(port2));
conn = ch1.conn();
ASSERT(conn != nullptr);
- int64_t before_last_receiving = rtc::Time64();
+ int64_t before_last_receiving = rtc::TimeMillis();
conn->ReceivedPing();
- int64_t after_last_receiving = rtc::Time64();
+ int64_t after_last_receiving = rtc::TimeMillis();
// The connection will be dead after DEAD_CONNECTION_RECEIVE_TIMEOUT
conn->UpdateState(
before_last_receiving + DEAD_CONNECTION_RECEIVE_TIMEOUT - 1);
@@ -1322,7 +1322,7 @@ TEST_F(PortTest, TestLocalToLocalStandard) {
// should remain equal to the request generated by the port and role of port
// must be in controlling.
TEST_F(PortTest, TestLoopbackCal) {
- rtc::scoped_ptr<TestPort> lport(
+ std::unique_ptr<TestPort> lport(
CreateTestPort(kLocalAddr1, "lfrag", "lpass"));
lport->SetIceRole(cricket::ICEROLE_CONTROLLING);
lport->SetIceTiebreaker(kTiebreaker1);
@@ -1354,7 +1354,7 @@ TEST_F(PortTest, TestLoopbackCal) {
ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, 1000);
msg = lport->last_stun_msg();
EXPECT_EQ(STUN_BINDING_REQUEST, msg->type());
- rtc::scoped_ptr<IceMessage> modified_req(
+ std::unique_ptr<IceMessage> modified_req(
CreateStunMessage(STUN_BINDING_REQUEST));
const StunByteStringAttribute* username_attr = msg->GetByteString(
STUN_ATTR_USERNAME);
@@ -1368,7 +1368,7 @@ TEST_F(PortTest, TestLoopbackCal) {
modified_req->AddFingerprint();
lport->Reset();
- rtc::scoped_ptr<ByteBufferWriter> buf(new ByteBufferWriter());
+ std::unique_ptr<ByteBufferWriter> buf(new ByteBufferWriter());
WriteStunMessage(modified_req.get(), buf.get());
conn1->OnReadPacket(buf->Data(), buf->Length(), rtc::PacketTime());
ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, 1000);
@@ -1382,11 +1382,11 @@ TEST_F(PortTest, TestLoopbackCal) {
// value of tiebreaker, when it receives ping request from |rport| it will
// send role conflict signal.
TEST_F(PortTest, TestIceRoleConflict) {
- rtc::scoped_ptr<TestPort> lport(
+ std::unique_ptr<TestPort> lport(
CreateTestPort(kLocalAddr1, "lfrag", "lpass"));
lport->SetIceRole(cricket::ICEROLE_CONTROLLING);
lport->SetIceTiebreaker(kTiebreaker1);
- rtc::scoped_ptr<TestPort> rport(
+ std::unique_ptr<TestPort> rport(
CreateTestPort(kLocalAddr2, "rfrag", "rpass"));
rport->SetIceRole(cricket::ICEROLE_CONTROLLING);
rport->SetIceTiebreaker(kTiebreaker2);
@@ -1430,8 +1430,7 @@ TEST_F(PortTest, TestDelayedBindingUdp) {
FakePacketSocketFactory socket_factory;
socket_factory.set_next_udp_socket(socket);
- scoped_ptr<UDPPort> port(
- CreateUdpPort(kLocalAddr1, &socket_factory));
+ std::unique_ptr<UDPPort> port(CreateUdpPort(kLocalAddr1, &socket_factory));
socket->set_state(AsyncPacketSocket::STATE_BINDING);
port->PrepareAddress();
@@ -1447,8 +1446,7 @@ TEST_F(PortTest, TestDelayedBindingTcp) {
FakePacketSocketFactory socket_factory;
socket_factory.set_next_server_tcp_socket(socket);
- scoped_ptr<TCPPort> port(
- CreateTcpPort(kLocalAddr1, &socket_factory));
+ std::unique_ptr<TCPPort> port(CreateTcpPort(kLocalAddr1, &socket_factory));
socket->set_state(AsyncPacketSocket::STATE_BINDING);
port->PrepareAddress();
@@ -1461,7 +1459,7 @@ TEST_F(PortTest, TestDelayedBindingTcp) {
void PortTest::TestCrossFamilyPorts(int type) {
FakePacketSocketFactory factory;
- scoped_ptr<Port> ports[4];
+ std::unique_ptr<Port> ports[4];
SocketAddress addresses[4] = {SocketAddress("192.168.1.3", 0),
SocketAddress("192.168.1.4", 0),
SocketAddress("2001:db8::1", 0),
@@ -1531,7 +1529,7 @@ void PortTest::ExpectPortsCanConnect(bool can_connect, Port* p1, Port* p2) {
TEST_F(PortTest, TestUdpV6CrossTypePorts) {
FakePacketSocketFactory factory;
- scoped_ptr<Port> ports[4];
+ std::unique_ptr<Port> ports[4];
SocketAddress addresses[4] = {SocketAddress("2001:db8::1", 0),
SocketAddress("fe80::1", 0),
SocketAddress("fe80::2", 0),
@@ -1564,23 +1562,23 @@ TEST_F(PortTest, TestUdpV6CrossTypePorts) {
// get through DefaultDscpValue.
TEST_F(PortTest, TestDefaultDscpValue) {
int dscp;
- rtc::scoped_ptr<UDPPort> udpport(CreateUdpPort(kLocalAddr1));
+ std::unique_ptr<UDPPort> udpport(CreateUdpPort(kLocalAddr1));
EXPECT_EQ(0, udpport->SetOption(rtc::Socket::OPT_DSCP,
rtc::DSCP_CS6));
EXPECT_EQ(0, udpport->GetOption(rtc::Socket::OPT_DSCP, &dscp));
- rtc::scoped_ptr<TCPPort> tcpport(CreateTcpPort(kLocalAddr1));
+ std::unique_ptr<TCPPort> tcpport(CreateTcpPort(kLocalAddr1));
EXPECT_EQ(0, tcpport->SetOption(rtc::Socket::OPT_DSCP,
rtc::DSCP_AF31));
EXPECT_EQ(0, tcpport->GetOption(rtc::Socket::OPT_DSCP, &dscp));
EXPECT_EQ(rtc::DSCP_AF31, dscp);
- rtc::scoped_ptr<StunPort> stunport(
+ std::unique_ptr<StunPort> stunport(
CreateStunPort(kLocalAddr1, nat_socket_factory1()));
EXPECT_EQ(0, stunport->SetOption(rtc::Socket::OPT_DSCP,
rtc::DSCP_AF41));
EXPECT_EQ(0, stunport->GetOption(rtc::Socket::OPT_DSCP, &dscp));
EXPECT_EQ(rtc::DSCP_AF41, dscp);
- rtc::scoped_ptr<TurnPort> turnport1(CreateTurnPort(
- kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP));
+ std::unique_ptr<TurnPort> turnport1(
+ CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP));
// Socket is created in PrepareAddress.
turnport1->PrepareAddress();
EXPECT_EQ(0, turnport1->SetOption(rtc::Socket::OPT_DSCP,
@@ -1588,8 +1586,8 @@ TEST_F(PortTest, TestDefaultDscpValue) {
EXPECT_EQ(0, turnport1->GetOption(rtc::Socket::OPT_DSCP, &dscp));
EXPECT_EQ(rtc::DSCP_CS7, dscp);
// This will verify correct value returned without the socket.
- rtc::scoped_ptr<TurnPort> turnport2(CreateTurnPort(
- kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP));
+ std::unique_ptr<TurnPort> turnport2(
+ CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP));
EXPECT_EQ(0, turnport2->SetOption(rtc::Socket::OPT_DSCP,
rtc::DSCP_CS6));
EXPECT_EQ(0, turnport2->GetOption(rtc::Socket::OPT_DSCP, &dscp));
@@ -1598,9 +1596,9 @@ TEST_F(PortTest, TestDefaultDscpValue) {
// Test sending STUN messages.
TEST_F(PortTest, TestSendStunMessage) {
- rtc::scoped_ptr<TestPort> lport(
+ std::unique_ptr<TestPort> lport(
CreateTestPort(kLocalAddr1, "lfrag", "lpass"));
- rtc::scoped_ptr<TestPort> rport(
+ std::unique_ptr<TestPort> rport(
CreateTestPort(kLocalAddr2, "rfrag", "rpass"));
lport->SetIceRole(cricket::ICEROLE_CONTROLLING);
lport->SetIceTiebreaker(kTiebreaker1);
@@ -1647,7 +1645,7 @@ TEST_F(PortTest, TestSendStunMessage) {
ASSERT_TRUE(msg->GetUInt32(STUN_ATTR_RETRANSMIT_COUNT) == NULL);
// Save a copy of the BINDING-REQUEST for use below.
- rtc::scoped_ptr<IceMessage> request(CopyStunMessage(msg));
+ std::unique_ptr<IceMessage> request(CopyStunMessage(msg));
// Respond with a BINDING-RESPONSE.
rport->SendBindingResponse(request.get(), lport->Candidates()[0].address());
@@ -1738,9 +1736,9 @@ TEST_F(PortTest, TestSendStunMessage) {
}
TEST_F(PortTest, TestUseCandidateAttribute) {
- rtc::scoped_ptr<TestPort> lport(
+ std::unique_ptr<TestPort> lport(
CreateTestPort(kLocalAddr1, "lfrag", "lpass"));
- rtc::scoped_ptr<TestPort> rport(
+ std::unique_ptr<TestPort> rport(
CreateTestPort(kLocalAddr2, "rfrag", "rpass"));
lport->SetIceRole(cricket::ICEROLE_CONTROLLING);
lport->SetIceTiebreaker(kTiebreaker1);
@@ -1765,11 +1763,11 @@ TEST_F(PortTest, TestUseCandidateAttribute) {
}
TEST_F(PortTest, TestNetworkInfoAttribute) {
- rtc::scoped_ptr<TestPort> lport(
+ std::unique_ptr<TestPort> lport(
CreateTestPort(kLocalAddr1, "lfrag", "lpass"));
// Set the network type for rport to be cellular so its cost will be 999.
SetNetworkType(rtc::ADAPTER_TYPE_CELLULAR);
- rtc::scoped_ptr<TestPort> rport(
+ std::unique_ptr<TestPort> rport(
CreateTestPort(kLocalAddr2, "rfrag", "rpass"));
lport->SetIceRole(cricket::ICEROLE_CONTROLLING);
lport->SetIceTiebreaker(kTiebreaker1);
@@ -1812,11 +1810,10 @@ TEST_F(PortTest, TestNetworkInfoAttribute) {
// Test handling STUN messages.
TEST_F(PortTest, TestHandleStunMessage) {
// Our port will act as the "remote" port.
- rtc::scoped_ptr<TestPort> port(
- CreateTestPort(kLocalAddr2, "rfrag", "rpass"));
+ std::unique_ptr<TestPort> port(CreateTestPort(kLocalAddr2, "rfrag", "rpass"));
- rtc::scoped_ptr<IceMessage> in_msg, out_msg;
- rtc::scoped_ptr<ByteBufferWriter> buf(new ByteBufferWriter());
+ std::unique_ptr<IceMessage> in_msg, out_msg;
+ std::unique_ptr<ByteBufferWriter> buf(new ByteBufferWriter());
rtc::SocketAddress addr(kLocalAddr1);
std::string username;
@@ -1862,11 +1859,10 @@ TEST_F(PortTest, TestHandleStunMessage) {
// Tests handling of ICE binding requests with missing or incorrect usernames.
TEST_F(PortTest, TestHandleStunMessageBadUsername) {
- rtc::scoped_ptr<TestPort> port(
- CreateTestPort(kLocalAddr2, "rfrag", "rpass"));
+ std::unique_ptr<TestPort> port(CreateTestPort(kLocalAddr2, "rfrag", "rpass"));
- rtc::scoped_ptr<IceMessage> in_msg, out_msg;
- rtc::scoped_ptr<ByteBufferWriter> buf(new ByteBufferWriter());
+ std::unique_ptr<IceMessage> in_msg, out_msg;
+ std::unique_ptr<ByteBufferWriter> buf(new ByteBufferWriter());
rtc::SocketAddress addr(kLocalAddr1);
std::string username;
@@ -1931,11 +1927,10 @@ TEST_F(PortTest, TestHandleStunMessageBadUsername) {
// Test handling STUN messages with missing or malformed M-I.
TEST_F(PortTest, TestHandleStunMessageBadMessageIntegrity) {
// Our port will act as the "remote" port.
- rtc::scoped_ptr<TestPort> port(
- CreateTestPort(kLocalAddr2, "rfrag", "rpass"));
+ std::unique_ptr<TestPort> port(CreateTestPort(kLocalAddr2, "rfrag", "rpass"));
- rtc::scoped_ptr<IceMessage> in_msg, out_msg;
- rtc::scoped_ptr<ByteBufferWriter> buf(new ByteBufferWriter());
+ std::unique_ptr<IceMessage> in_msg, out_msg;
+ std::unique_ptr<ByteBufferWriter> buf(new ByteBufferWriter());
rtc::SocketAddress addr(kLocalAddr1);
std::string username;
@@ -1972,11 +1967,10 @@ TEST_F(PortTest, TestHandleStunMessageBadMessageIntegrity) {
// Test handling STUN messages with missing or malformed FINGERPRINT.
TEST_F(PortTest, TestHandleStunMessageBadFingerprint) {
// Our port will act as the "remote" port.
- rtc::scoped_ptr<TestPort> port(
- CreateTestPort(kLocalAddr2, "rfrag", "rpass"));
+ std::unique_ptr<TestPort> port(CreateTestPort(kLocalAddr2, "rfrag", "rpass"));
- rtc::scoped_ptr<IceMessage> in_msg, out_msg;
- rtc::scoped_ptr<ByteBufferWriter> buf(new ByteBufferWriter());
+ std::unique_ptr<IceMessage> in_msg, out_msg;
+ std::unique_ptr<ByteBufferWriter> buf(new ByteBufferWriter());
rtc::SocketAddress addr(kLocalAddr1);
std::string username;
@@ -2038,14 +2032,14 @@ TEST_F(PortTest, TestHandleStunMessageBadFingerprint) {
// Test handling of STUN binding indication messages . STUN binding
// indications are allowed only to the connection which is in read mode.
TEST_F(PortTest, TestHandleStunBindingIndication) {
- rtc::scoped_ptr<TestPort> lport(
+ std::unique_ptr<TestPort> lport(
CreateTestPort(kLocalAddr2, "lfrag", "lpass"));
lport->SetIceRole(cricket::ICEROLE_CONTROLLING);
lport->SetIceTiebreaker(kTiebreaker1);
// Verifying encoding and decoding STUN indication message.
- rtc::scoped_ptr<IceMessage> in_msg, out_msg;
- rtc::scoped_ptr<ByteBufferWriter> buf(new ByteBufferWriter());
+ std::unique_ptr<IceMessage> in_msg, out_msg;
+ std::unique_ptr<ByteBufferWriter> buf(new ByteBufferWriter());
rtc::SocketAddress addr(kLocalAddr1);
std::string username;
@@ -2060,7 +2054,7 @@ TEST_F(PortTest, TestHandleStunBindingIndication) {
// Verify connection can handle STUN indication and updates
// last_ping_received.
- rtc::scoped_ptr<TestPort> rport(
+ std::unique_ptr<TestPort> rport(
CreateTestPort(kLocalAddr2, "rfrag", "rpass"));
rport->SetIceRole(cricket::ICEROLE_CONTROLLED);
rport->SetIceTiebreaker(kTiebreaker2);
@@ -2096,8 +2090,7 @@ TEST_F(PortTest, TestHandleStunBindingIndication) {
}
TEST_F(PortTest, TestComputeCandidatePriority) {
- rtc::scoped_ptr<TestPort> port(
- CreateTestPort(kLocalAddr1, "name", "pass"));
+ std::unique_ptr<TestPort> port(CreateTestPort(kLocalAddr1, "name", "pass"));
port->set_type_preference(90);
port->set_component(177);
port->AddCandidateAddress(SocketAddress("192.168.1.4", 1234));
@@ -2134,7 +2127,7 @@ TEST_F(PortTest, TestComputeCandidatePriority) {
// In the case of shared socket, one port may be shared by local and stun.
// Test that candidates with different types will have different foundation.
TEST_F(PortTest, TestFoundation) {
- rtc::scoped_ptr<TestPort> testport(
+ std::unique_ptr<TestPort> testport(
CreateTestPort(kLocalAddr1, "name", "pass"));
testport->AddCandidateAddress(kLocalAddr1, kLocalAddr1,
LOCAL_PORT_TYPE,
@@ -2148,21 +2141,21 @@ TEST_F(PortTest, TestFoundation) {
// This test verifies the foundation of different types of ICE candidates.
TEST_F(PortTest, TestCandidateFoundation) {
- rtc::scoped_ptr<rtc::NATServer> nat_server(
+ std::unique_ptr<rtc::NATServer> nat_server(
CreateNatServer(kNatAddr1, NAT_OPEN_CONE));
- rtc::scoped_ptr<UDPPort> udpport1(CreateUdpPort(kLocalAddr1));
+ std::unique_ptr<UDPPort> udpport1(CreateUdpPort(kLocalAddr1));
udpport1->PrepareAddress();
- rtc::scoped_ptr<UDPPort> udpport2(CreateUdpPort(kLocalAddr1));
+ std::unique_ptr<UDPPort> udpport2(CreateUdpPort(kLocalAddr1));
udpport2->PrepareAddress();
EXPECT_EQ(udpport1->Candidates()[0].foundation(),
udpport2->Candidates()[0].foundation());
- rtc::scoped_ptr<TCPPort> tcpport1(CreateTcpPort(kLocalAddr1));
+ std::unique_ptr<TCPPort> tcpport1(CreateTcpPort(kLocalAddr1));
tcpport1->PrepareAddress();
- rtc::scoped_ptr<TCPPort> tcpport2(CreateTcpPort(kLocalAddr1));
+ std::unique_ptr<TCPPort> tcpport2(CreateTcpPort(kLocalAddr1));
tcpport2->PrepareAddress();
EXPECT_EQ(tcpport1->Candidates()[0].foundation(),
tcpport2->Candidates()[0].foundation());
- rtc::scoped_ptr<Port> stunport(
+ std::unique_ptr<Port> stunport(
CreateStunPort(kLocalAddr1, nat_socket_factory1()));
stunport->PrepareAddress();
ASSERT_EQ_WAIT(1U, stunport->Candidates().size(), kTimeout);
@@ -2175,8 +2168,7 @@ TEST_F(PortTest, TestCandidateFoundation) {
EXPECT_NE(udpport2->Candidates()[0].foundation(),
stunport->Candidates()[0].foundation());
// Verify GTURN candidate foundation.
- rtc::scoped_ptr<RelayPort> relayport(
- CreateGturnPort(kLocalAddr1));
+ std::unique_ptr<RelayPort> relayport(CreateGturnPort(kLocalAddr1));
relayport->AddServerAddress(
cricket::ProtocolAddress(kRelayUdpIntAddr, cricket::PROTO_UDP));
relayport->PrepareAddress();
@@ -2186,8 +2178,8 @@ TEST_F(PortTest, TestCandidateFoundation) {
EXPECT_NE(udpport2->Candidates()[0].foundation(),
relayport->Candidates()[0].foundation());
// Verifying TURN candidate foundation.
- rtc::scoped_ptr<Port> turnport1(CreateTurnPort(
- kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP));
+ std::unique_ptr<Port> turnport1(
+ CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP));
turnport1->PrepareAddress();
ASSERT_EQ_WAIT(1U, turnport1->Candidates().size(), kTimeout);
EXPECT_NE(udpport1->Candidates()[0].foundation(),
@@ -2196,8 +2188,8 @@ TEST_F(PortTest, TestCandidateFoundation) {
turnport1->Candidates()[0].foundation());
EXPECT_NE(stunport->Candidates()[0].foundation(),
turnport1->Candidates()[0].foundation());
- rtc::scoped_ptr<Port> turnport2(CreateTurnPort(
- kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP));
+ std::unique_ptr<Port> turnport2(
+ CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP));
turnport2->PrepareAddress();
ASSERT_EQ_WAIT(1U, turnport2->Candidates().size(), kTimeout);
EXPECT_EQ(turnport1->Candidates()[0].foundation(),
@@ -2208,9 +2200,9 @@ TEST_F(PortTest, TestCandidateFoundation) {
SocketAddress kTurnUdpExtAddr2("99.99.98.5", 0);
TestTurnServer turn_server2(
rtc::Thread::Current(), kTurnUdpIntAddr2, kTurnUdpExtAddr2);
- rtc::scoped_ptr<Port> turnport3(CreateTurnPort(
- kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP,
- kTurnUdpIntAddr2));
+ std::unique_ptr<Port> turnport3(
+ CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP,
+ kTurnUdpIntAddr2));
turnport3->PrepareAddress();
ASSERT_EQ_WAIT(1U, turnport3->Candidates().size(), kTimeout);
EXPECT_NE(turnport3->Candidates()[0].foundation(),
@@ -2220,7 +2212,7 @@ TEST_F(PortTest, TestCandidateFoundation) {
// different foundations if their relay protocols are different.
TestTurnServer turn_server3(rtc::Thread::Current(), kTurnTcpIntAddr,
kTurnUdpExtAddr, PROTO_TCP);
- rtc::scoped_ptr<Port> turnport4(
+ std::unique_ptr<Port> turnport4(
CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_TCP, PROTO_UDP));
turnport4->PrepareAddress();
ASSERT_EQ_WAIT(1U, turnport4->Candidates().size(), kTimeout);
@@ -2231,16 +2223,16 @@ TEST_F(PortTest, TestCandidateFoundation) {
// This test verifies the related addresses of different types of
// ICE candiates.
TEST_F(PortTest, TestCandidateRelatedAddress) {
- rtc::scoped_ptr<rtc::NATServer> nat_server(
+ std::unique_ptr<rtc::NATServer> nat_server(
CreateNatServer(kNatAddr1, NAT_OPEN_CONE));
- rtc::scoped_ptr<UDPPort> udpport(CreateUdpPort(kLocalAddr1));
+ std::unique_ptr<UDPPort> udpport(CreateUdpPort(kLocalAddr1));
udpport->PrepareAddress();
// For UDPPort, related address will be empty.
EXPECT_TRUE(udpport->Candidates()[0].related_address().IsNil());
// Testing related address for stun candidates.
// For stun candidate related address must be equal to the base
// socket address.
- rtc::scoped_ptr<StunPort> stunport(
+ std::unique_ptr<StunPort> stunport(
CreateStunPort(kLocalAddr1, nat_socket_factory1()));
stunport->PrepareAddress();
ASSERT_EQ_WAIT(1U, stunport->Candidates().size(), kTimeout);
@@ -2253,8 +2245,7 @@ TEST_F(PortTest, TestCandidateRelatedAddress) {
// Verifying the related address for the GTURN candidates.
// NOTE: In case of GTURN related address will be equal to the mapped
// address, but address(mapped) will not be XOR.
- rtc::scoped_ptr<RelayPort> relayport(
- CreateGturnPort(kLocalAddr1));
+ std::unique_ptr<RelayPort> relayport(CreateGturnPort(kLocalAddr1));
relayport->AddServerAddress(
cricket::ProtocolAddress(kRelayUdpIntAddr, cricket::PROTO_UDP));
relayport->PrepareAddress();
@@ -2264,8 +2255,8 @@ TEST_F(PortTest, TestCandidateRelatedAddress) {
relayport->Candidates()[0].related_address());
// Verifying the related address for TURN candidate.
// For TURN related address must be equal to the mapped address.
- rtc::scoped_ptr<Port> turnport(CreateTurnPort(
- kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP));
+ std::unique_ptr<Port> turnport(
+ CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP));
turnport->PrepareAddress();
ASSERT_EQ_WAIT(1U, turnport->Candidates().size(), kTimeout);
EXPECT_EQ(kTurnUdpExtAddr.ipaddr(),
@@ -2285,10 +2276,10 @@ TEST_F(PortTest, TestCandidatePriority) {
// Test the Connection priority is calculated correctly.
TEST_F(PortTest, TestConnectionPriority) {
- rtc::scoped_ptr<TestPort> lport(
+ std::unique_ptr<TestPort> lport(
CreateTestPort(kLocalAddr1, "lfrag", "lpass"));
lport->set_type_preference(cricket::ICE_TYPE_PREFERENCE_HOST);
- rtc::scoped_ptr<TestPort> rport(
+ std::unique_ptr<TestPort> rport(
CreateTestPort(kLocalAddr2, "rfrag", "rpass"));
rport->set_type_preference(cricket::ICE_TYPE_PREFERENCE_RELAY);
lport->set_component(123);
@@ -2428,9 +2419,9 @@ TEST_F(PortTest, TestIceLiteConnectivity) {
kLocalAddr1, "lfrag", "lpass",
cricket::ICEROLE_CONTROLLING, kTiebreaker1);
- rtc::scoped_ptr<TestPort> ice_lite_port(CreateTestPort(
- kLocalAddr2, "rfrag", "rpass",
- cricket::ICEROLE_CONTROLLED, kTiebreaker2));
+ std::unique_ptr<TestPort> ice_lite_port(
+ CreateTestPort(kLocalAddr2, "rfrag", "rpass", cricket::ICEROLE_CONTROLLED,
+ kTiebreaker2));
// Setup TestChannel. This behaves like FULL mode client.
TestChannel ch1(ice_full_port);
ch1.SetIceMode(ICEMODE_FULL);
@@ -2462,7 +2453,7 @@ TEST_F(PortTest, TestIceLiteConnectivity) {
// But we need a connection to send a response message.
ice_lite_port->CreateConnection(
ice_full_port->Candidates()[0], cricket::Port::ORIGIN_MESSAGE);
- rtc::scoped_ptr<IceMessage> request(CopyStunMessage(msg));
+ std::unique_ptr<IceMessage> request(CopyStunMessage(msg));
ice_lite_port->SendBindingResponse(
request.get(), ice_full_port->Candidates()[0].address());
@@ -2576,22 +2567,39 @@ TEST_F(PortTest, TestControlledToControllingNotDestroyed) {
}
TEST_F(PortTest, TestSupportsProtocol) {
- rtc::scoped_ptr<Port> udp_port(CreateUdpPort(kLocalAddr1));
+ std::unique_ptr<Port> udp_port(CreateUdpPort(kLocalAddr1));
EXPECT_TRUE(udp_port->SupportsProtocol(UDP_PROTOCOL_NAME));
EXPECT_FALSE(udp_port->SupportsProtocol(TCP_PROTOCOL_NAME));
- rtc::scoped_ptr<Port> stun_port(
+ std::unique_ptr<Port> stun_port(
CreateStunPort(kLocalAddr1, nat_socket_factory1()));
EXPECT_TRUE(stun_port->SupportsProtocol(UDP_PROTOCOL_NAME));
EXPECT_FALSE(stun_port->SupportsProtocol(TCP_PROTOCOL_NAME));
- rtc::scoped_ptr<Port> tcp_port(CreateTcpPort(kLocalAddr1));
+ std::unique_ptr<Port> tcp_port(CreateTcpPort(kLocalAddr1));
EXPECT_TRUE(tcp_port->SupportsProtocol(TCP_PROTOCOL_NAME));
EXPECT_TRUE(tcp_port->SupportsProtocol(SSLTCP_PROTOCOL_NAME));
EXPECT_FALSE(tcp_port->SupportsProtocol(UDP_PROTOCOL_NAME));
- rtc::scoped_ptr<Port> turn_port(
+ std::unique_ptr<Port> turn_port(
CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP));
EXPECT_TRUE(turn_port->SupportsProtocol(UDP_PROTOCOL_NAME));
EXPECT_FALSE(turn_port->SupportsProtocol(TCP_PROTOCOL_NAME));
}
+
+// Test that SetIceParameters updates the component, ufrag and password
+// on both the port itself and its candidates.
+TEST_F(PortTest, TestSetIceParameters) {
+ std::unique_ptr<TestPort> port(
+ CreateTestPort(kLocalAddr1, "ufrag1", "password1"));
+ port->PrepareAddress();
+ EXPECT_EQ(1UL, port->Candidates().size());
+ port->SetIceParameters(1, "ufrag2", "password2");
+ EXPECT_EQ(1, port->component());
+ EXPECT_EQ("ufrag2", port->username_fragment());
+ EXPECT_EQ("password2", port->password());
+ const Candidate& candidate = port->Candidates()[0];
+ EXPECT_EQ(1, candidate.component());
+ EXPECT_EQ("ufrag2", candidate.username());
+ EXPECT_EQ("password2", candidate.password());
+}
diff --git a/chromium/third_party/webrtc/p2p/base/portallocator.cc b/chromium/third_party/webrtc/p2p/base/portallocator.cc
index 5c4243abf6b..fdf213b3119 100644
--- a/chromium/third_party/webrtc/p2p/base/portallocator.cc
+++ b/chromium/third_party/webrtc/p2p/base/portallocator.cc
@@ -18,23 +18,85 @@ PortAllocatorSession::PortAllocatorSession(const std::string& content_name,
const std::string& ice_ufrag,
const std::string& ice_pwd,
uint32_t flags)
- : content_name_(content_name),
- component_(component),
- flags_(flags),
+ : flags_(flags),
generation_(0),
+ content_name_(content_name),
+ component_(component),
ice_ufrag_(ice_ufrag),
ice_pwd_(ice_pwd) {
- RTC_DCHECK(!ice_ufrag.empty());
- RTC_DCHECK(!ice_pwd.empty());
+ // Pooled sessions are allowed to be created with empty content name,
+ // component, ufrag and password.
+ RTC_DCHECK(ice_ufrag.empty() == ice_pwd.empty());
}
-PortAllocatorSession* PortAllocator::CreateSession(
+void PortAllocator::SetConfiguration(
+ const ServerAddresses& stun_servers,
+ const std::vector<RelayServerConfig>& turn_servers,
+ int candidate_pool_size) {
+ bool ice_servers_changed =
+ (stun_servers != stun_servers_ || turn_servers != turn_servers_);
+ stun_servers_ = stun_servers;
+ turn_servers_ = turn_servers;
+
+ // If ICE servers changed, throw away any existing pooled sessions and create
+ // new ones.
+ if (ice_servers_changed) {
+ pooled_sessions_.clear();
+ allocated_pooled_session_count_ = 0;
+ }
+
+ // If |size| is less than the number of allocated sessions, get rid of the
+ // extras.
+ while (allocated_pooled_session_count_ > candidate_pool_size &&
+ !pooled_sessions_.empty()) {
+ pooled_sessions_.front().reset(nullptr);
+ pooled_sessions_.pop_front();
+ --allocated_pooled_session_count_;
+ }
+ // If |size| is greater than the number of allocated sessions, create new
+ // sessions.
+ while (allocated_pooled_session_count_ < candidate_pool_size) {
+ PortAllocatorSession* pooled_session = CreateSessionInternal("", 0, "", "");
+ pooled_session->StartGettingPorts();
+ pooled_sessions_.push_back(
+ std::unique_ptr<PortAllocatorSession>(pooled_session));
+ ++allocated_pooled_session_count_;
+ }
+ target_pooled_session_count_ = candidate_pool_size;
+}
+
+std::unique_ptr<PortAllocatorSession> PortAllocator::CreateSession(
const std::string& sid,
const std::string& content_name,
int component,
const std::string& ice_ufrag,
const std::string& ice_pwd) {
- return CreateSessionInternal(content_name, component, ice_ufrag, ice_pwd);
+ return std::unique_ptr<PortAllocatorSession>(
+ CreateSessionInternal(content_name, component, ice_ufrag, ice_pwd));
+}
+
+std::unique_ptr<PortAllocatorSession> PortAllocator::TakePooledSession(
+ const std::string& content_name,
+ int component,
+ const std::string& ice_ufrag,
+ const std::string& ice_pwd) {
+ RTC_DCHECK(!ice_ufrag.empty());
+ RTC_DCHECK(!ice_pwd.empty());
+ if (pooled_sessions_.empty()) {
+ return nullptr;
+ }
+ std::unique_ptr<PortAllocatorSession> ret =
+ std::move(pooled_sessions_.front());
+ ret->SetIceParameters(content_name, component, ice_ufrag, ice_pwd);
+ pooled_sessions_.pop_front();
+ return ret;
+}
+
+const PortAllocatorSession* PortAllocator::GetPooledSession() const {
+ if (pooled_sessions_.empty()) {
+ return nullptr;
+ }
+ return pooled_sessions_.front().get();
}
} // namespace cricket
diff --git a/chromium/third_party/webrtc/p2p/base/portallocator.h b/chromium/third_party/webrtc/p2p/base/portallocator.h
index 6fb79b065e6..879657081a5 100644
--- a/chromium/third_party/webrtc/p2p/base/portallocator.h
+++ b/chromium/third_party/webrtc/p2p/base/portallocator.h
@@ -11,6 +11,8 @@
#ifndef WEBRTC_P2P_BASE_PORTALLOCATOR_H_
#define WEBRTC_P2P_BASE_PORTALLOCATOR_H_
+#include <deque>
+#include <memory>
#include <string>
#include <vector>
@@ -19,6 +21,7 @@
#include "webrtc/base/helpers.h"
#include "webrtc/base/proxyinfo.h"
#include "webrtc/base/sigslot.h"
+#include "webrtc/base/thread.h"
namespace cricket {
@@ -82,6 +85,11 @@ struct RelayCredentials {
RelayCredentials(const std::string& username, const std::string& password)
: username(username), password(password) {}
+ bool operator==(const RelayCredentials& o) const {
+ return username == o.username && password == o.password;
+ }
+ bool operator!=(const RelayCredentials& o) const { return !(*this == o); }
+
std::string username;
std::string password;
};
@@ -89,7 +97,7 @@ struct RelayCredentials {
typedef std::vector<ProtocolAddress> PortList;
// TODO(deadbeef): Rename to TurnServerConfig.
struct RelayServerConfig {
- RelayServerConfig(RelayType type) : type(type), priority(0) {}
+ RelayServerConfig(RelayType type) : type(type) {}
RelayServerConfig(const std::string& address,
int port,
@@ -102,10 +110,16 @@ struct RelayServerConfig {
ProtocolAddress(rtc::SocketAddress(address, port), proto, secure));
}
+ bool operator==(const RelayServerConfig& o) const {
+ return type == o.type && ports == o.ports && credentials == o.credentials &&
+ priority == o.priority;
+ }
+ bool operator!=(const RelayServerConfig& o) const { return !(*this == o); }
+
RelayType type;
PortList ports;
RelayCredentials credentials;
- int priority;
+ int priority = 0;
};
class PortAllocatorSession : public sigslot::has_slots<> {
@@ -124,6 +138,9 @@ class PortAllocatorSession : public sigslot::has_slots<> {
void set_flags(uint32_t flags) { flags_ = flags; }
std::string content_name() const { return content_name_; }
int component() const { return component_; }
+ const std::string& ice_ufrag() const { return ice_ufrag_; }
+ const std::string& ice_pwd() const { return ice_pwd_; }
+ bool pooled() const { return ice_ufrag_.empty(); }
// Starts gathering STUN and Relay configurations.
virtual void StartGettingPorts() = 0;
@@ -133,6 +150,14 @@ class PortAllocatorSession : public sigslot::has_slots<> {
// Whether the process of getting ports has been stopped.
virtual bool IsGettingPorts() = 0;
+ // Another way of getting the information provided by the signals below.
+ //
+ // Ports and candidates are not guaranteed to be in the same order as the
+ // signals were emitted in.
+ virtual std::vector<PortInterface*> ReadyPorts() const = 0;
+ virtual std::vector<Candidate> ReadyCandidates() const = 0;
+ virtual bool CandidatesAllocationDone() const = 0;
+
sigslot::signal2<PortAllocatorSession*, PortInterface*> SignalPortReady;
sigslot::signal2<PortAllocatorSession*,
const std::vector<Candidate>&> SignalCandidatesReady;
@@ -142,25 +167,46 @@ class PortAllocatorSession : public sigslot::has_slots<> {
virtual void set_generation(uint32_t generation) { generation_ = generation; }
sigslot::signal1<PortAllocatorSession*> SignalDestroyed;
- const std::string& ice_ufrag() const { return ice_ufrag_; }
- const std::string& ice_pwd() const { return ice_pwd_; }
-
protected:
+ // This method is called when a pooled session (which doesn't have these
+ // properties initially) is returned by PortAllocator::TakePooledSession,
+ // and the content name, component, and ICE ufrag/pwd are updated.
+ //
+ // A subclass may need to override this method to perform additional actions,
+ // such as applying the updated information to ports and candidates.
+ virtual void UpdateIceParametersInternal() {}
+
// TODO(deadbeef): Get rid of these when everyone switches to ice_ufrag and
// ice_pwd.
const std::string& username() const { return ice_ufrag_; }
const std::string& password() const { return ice_pwd_; }
- std::string content_name_;
- int component_;
-
private:
+ void SetIceParameters(const std::string& content_name,
+ int component,
+ const std::string& ice_ufrag,
+ const std::string& ice_pwd) {
+ content_name_ = content_name;
+ component_ = component;
+ ice_ufrag_ = ice_ufrag;
+ ice_pwd_ = ice_pwd;
+ UpdateIceParametersInternal();
+ }
+
uint32_t flags_;
uint32_t generation_;
+ std::string content_name_;
+ int component_;
std::string ice_ufrag_;
std::string ice_pwd_;
+
+ // SetIceParameters is an implementation detail which only PortAllocator
+ // should be able to call.
+ friend class PortAllocator;
};
+// Note that this class should only be used on one thread.
+// This includes calling the destructor.
class PortAllocator : public sigslot::has_slots<> {
public:
PortAllocator() :
@@ -174,10 +220,25 @@ class PortAllocator : public sigslot::has_slots<> {
}
virtual ~PortAllocator() {}
- // Set STUN and TURN servers to be used in future sessions.
- virtual void SetIceServers(
- const ServerAddresses& stun_servers,
- const std::vector<RelayServerConfig>& turn_servers) = 0;
+ // Set STUN and TURN servers to be used in future sessions, and set
+ // candidate pool size, as described in JSEP.
+ //
+ // If the servers are changing and the candidate pool size is nonzero,
+ // existing pooled sessions will be destroyed and new ones created.
+ //
+ // If the servers are not changing but the candidate pool size is,
+ // pooled sessions will be either created or destroyed as necessary.
+ void SetConfiguration(const ServerAddresses& stun_servers,
+ const std::vector<RelayServerConfig>& turn_servers,
+ int candidate_pool_size);
+
+ const ServerAddresses& stun_servers() const { return stun_servers_; }
+
+ const std::vector<RelayServerConfig>& turn_servers() const {
+ return turn_servers_;
+ }
+
+ int candidate_pool_size() const { return target_pooled_session_count_; }
// Sets the network types to ignore.
// Values are defined by the AdapterType enum.
@@ -186,13 +247,27 @@ class PortAllocator : public sigslot::has_slots<> {
// loopback interfaces.
virtual void SetNetworkIgnoreMask(int network_ignore_mask) = 0;
- PortAllocatorSession* CreateSession(
+ std::unique_ptr<PortAllocatorSession> CreateSession(
const std::string& sid,
const std::string& content_name,
int component,
const std::string& ice_ufrag,
const std::string& ice_pwd);
+ // Get an available pooled session and set the transport information on it.
+ //
+ // Caller takes ownership of the returned session.
+ //
+ // If no pooled sessions are available, returns null.
+ std::unique_ptr<PortAllocatorSession> TakePooledSession(
+ const std::string& content_name,
+ int component,
+ const std::string& ice_ufrag,
+ const std::string& ice_pwd);
+
+ // Returns the next session that would be returned by TakePooledSession.
+ const PortAllocatorSession* GetPooledSession() const;
+
uint32_t flags() const { return flags_; }
void set_flags(uint32_t flags) { flags_ = flags; }
@@ -225,10 +300,9 @@ class PortAllocator : public sigslot::has_slots<> {
}
uint32_t candidate_filter() { return candidate_filter_; }
- bool set_candidate_filter(uint32_t filter) {
+ void set_candidate_filter(uint32_t filter) {
// TODO(mallinath) - Do transition check?
candidate_filter_ = filter;
- return true;
}
// Gets/Sets the Origin value used for WebRTC STUN requests.
@@ -251,6 +325,16 @@ class PortAllocator : public sigslot::has_slots<> {
bool allow_tcp_listen_;
uint32_t candidate_filter_;
std::string origin_;
+
+ private:
+ ServerAddresses stun_servers_;
+ std::vector<RelayServerConfig> turn_servers_;
+ // The last size passed into SetConfiguration.
+ int target_pooled_session_count_ = 0;
+ // This variable represents the total number of pooled sessions
+ // both owned by this class and taken by TakePooledSession.
+ int allocated_pooled_session_count_ = 0;
+ std::deque<std::unique_ptr<PortAllocatorSession>> pooled_sessions_;
};
} // namespace cricket
diff --git a/chromium/third_party/webrtc/p2p/base/portallocator_unittest.cc b/chromium/third_party/webrtc/p2p/base/portallocator_unittest.cc
new file mode 100644
index 00000000000..2e16725de48
--- /dev/null
+++ b/chromium/third_party/webrtc/p2p/base/portallocator_unittest.cc
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/p2p/base/fakeportallocator.h"
+#include "webrtc/p2p/base/portallocator.h"
+
+static const char kContentName[] = "test content";
+// Based on ICE_UFRAG_LENGTH
+static const char kIceUfrag[] = "TESTICEUFRAG0000";
+// Based on ICE_PWD_LENGTH
+static const char kIcePwd[] = "TESTICEPWD00000000000000";
+static const char kTurnUsername[] = "test";
+static const char kTurnPassword[] = "test";
+
+class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
+ public:
+ PortAllocatorTest() {
+ allocator_.reset(
+ new cricket::FakePortAllocator(rtc::Thread::Current(), nullptr));
+ }
+
+ protected:
+ void SetConfigurationWithPoolSize(int candidate_pool_size) {
+ allocator_->SetConfiguration(cricket::ServerAddresses(),
+ std::vector<cricket::RelayServerConfig>(),
+ candidate_pool_size);
+ }
+
+ const cricket::FakePortAllocatorSession* GetPooledSession() const {
+ return static_cast<const cricket::FakePortAllocatorSession*>(
+ allocator_->GetPooledSession());
+ }
+
+ std::unique_ptr<cricket::FakePortAllocatorSession> TakePooledSession() {
+ return std::unique_ptr<cricket::FakePortAllocatorSession>(
+ static_cast<cricket::FakePortAllocatorSession*>(
+ allocator_->TakePooledSession(kContentName, 0, kIceUfrag, kIcePwd)
+ .release()));
+ }
+
+ int GetAllPooledSessionsReturnCount() {
+ int count = 0;
+ while (GetPooledSession()) {
+ TakePooledSession();
+ ++count;
+ }
+ return count;
+ }
+
+ std::unique_ptr<cricket::FakePortAllocator> allocator_;
+ rtc::SocketAddress stun_server_1{"11.11.11.11", 3478};
+ rtc::SocketAddress stun_server_2{"22.22.22.22", 3478};
+ cricket::RelayServerConfig turn_server_1{"11.11.11.11", 3478,
+ kTurnUsername, kTurnPassword,
+ cricket::PROTO_UDP, false};
+ cricket::RelayServerConfig turn_server_2{"22.22.22.22", 3478,
+ kTurnUsername, kTurnPassword,
+ cricket::PROTO_UDP, false};
+};
+
+TEST_F(PortAllocatorTest, TestDefaults) {
+ EXPECT_EQ(0UL, allocator_->stun_servers().size());
+ EXPECT_EQ(0UL, allocator_->turn_servers().size());
+ EXPECT_EQ(0, allocator_->candidate_pool_size());
+ EXPECT_EQ(0, GetAllPooledSessionsReturnCount());
+}
+
+TEST_F(PortAllocatorTest, SetConfigurationUpdatesIceServers) {
+ cricket::ServerAddresses stun_servers_1 = {stun_server_1};
+ std::vector<cricket::RelayServerConfig> turn_servers_1 = {turn_server_1};
+ allocator_->SetConfiguration(stun_servers_1, turn_servers_1, 0);
+ EXPECT_EQ(stun_servers_1, allocator_->stun_servers());
+ EXPECT_EQ(turn_servers_1, allocator_->turn_servers());
+
+ // Update with a different set of servers.
+ cricket::ServerAddresses stun_servers_2 = {stun_server_2};
+ std::vector<cricket::RelayServerConfig> turn_servers_2 = {turn_server_2};
+ allocator_->SetConfiguration(stun_servers_2, turn_servers_2, 0);
+ EXPECT_EQ(stun_servers_2, allocator_->stun_servers());
+ EXPECT_EQ(turn_servers_2, allocator_->turn_servers());
+}
+
+TEST_F(PortAllocatorTest, SetConfigurationUpdatesCandidatePoolSize) {
+ SetConfigurationWithPoolSize(2);
+ EXPECT_EQ(2, allocator_->candidate_pool_size());
+ SetConfigurationWithPoolSize(3);
+ EXPECT_EQ(3, allocator_->candidate_pool_size());
+ SetConfigurationWithPoolSize(1);
+ EXPECT_EQ(1, allocator_->candidate_pool_size());
+ SetConfigurationWithPoolSize(4);
+ EXPECT_EQ(4, allocator_->candidate_pool_size());
+}
+
+// A negative pool size should just be treated as zero.
+TEST_F(PortAllocatorTest, SetConfigurationWithNegativePoolSizeDoesntCrash) {
+ SetConfigurationWithPoolSize(-1);
+ // No asserts; we're just testing that this doesn't crash.
+}
+
+// Test that if the candidate pool size is nonzero, pooled sessions are
+// created, and StartGettingPorts is called on them.
+TEST_F(PortAllocatorTest, SetConfigurationCreatesPooledSessions) {
+ SetConfigurationWithPoolSize(2);
+ auto session_1 = TakePooledSession();
+ auto session_2 = TakePooledSession();
+ ASSERT_NE(nullptr, session_1.get());
+ ASSERT_NE(nullptr, session_2.get());
+ EXPECT_EQ(1, session_1->port_config_count());
+ EXPECT_EQ(1, session_2->port_config_count());
+ EXPECT_EQ(0, GetAllPooledSessionsReturnCount());
+}
+
+// Test that if the candidate pool size is increased, pooled sessions are
+// created as necessary.
+TEST_F(PortAllocatorTest, SetConfigurationCreatesMorePooledSessions) {
+ SetConfigurationWithPoolSize(1);
+ SetConfigurationWithPoolSize(2);
+ EXPECT_EQ(2, GetAllPooledSessionsReturnCount());
+}
+
+// Test that if the candidate pool size is reduced, extra sessions are
+// destroyed.
+TEST_F(PortAllocatorTest, SetConfigurationDestroysPooledSessions) {
+ SetConfigurationWithPoolSize(2);
+ SetConfigurationWithPoolSize(1);
+ EXPECT_EQ(1, GetAllPooledSessionsReturnCount());
+}
+
+// Test that if the candidate pool size is reduced and increased, but reducing
+// didn't actually destroy any sessions (because they were already given away),
+// increasing the size to its initial value doesn't create a new session.
+TEST_F(PortAllocatorTest, SetConfigurationDoesntCreateExtraSessions) {
+ SetConfigurationWithPoolSize(1);
+ TakePooledSession();
+ SetConfigurationWithPoolSize(0);
+ SetConfigurationWithPoolSize(1);
+ EXPECT_EQ(0, GetAllPooledSessionsReturnCount());
+}
+
+// According to JSEP, exising pooled sessions should be destroyed and new
+// ones created when the ICE servers change.
+TEST_F(PortAllocatorTest,
+ SetConfigurationRecreatesPooledSessionsWhenIceServersChange) {
+ cricket::ServerAddresses stun_servers_1 = {stun_server_1};
+ std::vector<cricket::RelayServerConfig> turn_servers_1 = {turn_server_1};
+ allocator_->SetConfiguration(stun_servers_1, turn_servers_1, 1);
+ EXPECT_EQ(stun_servers_1, allocator_->stun_servers());
+ EXPECT_EQ(turn_servers_1, allocator_->turn_servers());
+
+ // Update with a different set of servers (and also change pool size).
+ cricket::ServerAddresses stun_servers_2 = {stun_server_2};
+ std::vector<cricket::RelayServerConfig> turn_servers_2 = {turn_server_2};
+ allocator_->SetConfiguration(stun_servers_2, turn_servers_2, 2);
+ EXPECT_EQ(stun_servers_2, allocator_->stun_servers());
+ EXPECT_EQ(turn_servers_2, allocator_->turn_servers());
+ auto session_1 = TakePooledSession();
+ auto session_2 = TakePooledSession();
+ ASSERT_NE(nullptr, session_1.get());
+ ASSERT_NE(nullptr, session_2.get());
+ EXPECT_EQ(stun_servers_2, session_1->stun_servers());
+ EXPECT_EQ(turn_servers_2, session_1->turn_servers());
+ EXPECT_EQ(stun_servers_2, session_2->stun_servers());
+ EXPECT_EQ(turn_servers_2, session_2->turn_servers());
+ EXPECT_EQ(0, GetAllPooledSessionsReturnCount());
+}
+
+TEST_F(PortAllocatorTest, GetPooledSessionReturnsNextSession) {
+ SetConfigurationWithPoolSize(2);
+ auto peeked_session_1 = GetPooledSession();
+ auto session_1 = TakePooledSession();
+ EXPECT_EQ(session_1.get(), peeked_session_1);
+ auto peeked_session_2 = GetPooledSession();
+ auto session_2 = TakePooledSession();
+ EXPECT_EQ(session_2.get(), peeked_session_2);
+}
+
+// Verify that subclasses of PortAllocatorSession are given a chance to update
+// ICE parameters when TakePooledSession is called, and the base class updates
+// the info itself.
+TEST_F(PortAllocatorTest, TakePooledSessionUpdatesIceParameters) {
+ SetConfigurationWithPoolSize(1);
+ auto peeked_session = GetPooledSession();
+ ASSERT_NE(nullptr, peeked_session);
+ EXPECT_EQ(0, peeked_session->transport_info_update_count());
+ std::unique_ptr<cricket::FakePortAllocatorSession> session(
+ static_cast<cricket::FakePortAllocatorSession*>(
+ allocator_->TakePooledSession(kContentName, 1, kIceUfrag, kIcePwd)
+ .release()));
+ EXPECT_EQ(1, session->transport_info_update_count());
+ EXPECT_EQ(kContentName, session->content_name());
+ EXPECT_EQ(1, session->component());
+ EXPECT_EQ(kIceUfrag, session->ice_ufrag());
+ EXPECT_EQ(kIcePwd, session->ice_pwd());
+}
diff --git a/chromium/third_party/webrtc/p2p/base/pseudotcp.cc b/chromium/third_party/webrtc/p2p/base/pseudotcp.cc
index 1dfdcbb8944..44893747cce 100644
--- a/chromium/third_party/webrtc/p2p/base/pseudotcp.cc
+++ b/chromium/third_party/webrtc/p2p/base/pseudotcp.cc
@@ -14,6 +14,7 @@
#include <stdlib.h>
#include <algorithm>
+#include <memory>
#include <set>
#include "webrtc/base/arraysize.h"
@@ -22,7 +23,6 @@
#include "webrtc/base/byteorder.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socket.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/base/timeutils.h"
@@ -203,9 +203,9 @@ void ReportStats() {
uint32_t PseudoTcp::Now() {
#if 0 // Use this to synchronize timers with logging timestamps (easier debug)
- return rtc::TimeSince(StartTime());
+ return static_cast<uint32_t>(rtc::TimeSince(StartTime()));
#else
- return rtc::Time();
+ return rtc::Time32();
#endif
}
@@ -289,7 +289,7 @@ void PseudoTcp::NotifyClock(uint32_t now) {
return;
// Check if it's time to retransmit a segment
- if (m_rto_base && (rtc::TimeDiff(m_rto_base + m_rx_rto, now) <= 0)) {
+ if (m_rto_base && (rtc::TimeDiff32(m_rto_base + m_rx_rto, now) <= 0)) {
if (m_slist.empty()) {
ASSERT(false);
} else {
@@ -320,9 +320,8 @@ void PseudoTcp::NotifyClock(uint32_t now) {
}
// Check if it's time to probe closed windows
- if ((m_snd_wnd == 0)
- && (rtc::TimeDiff(m_lastsend + m_rx_rto, now) <= 0)) {
- if (rtc::TimeDiff(now, m_lastrecv) >= 15000) {
+ if ((m_snd_wnd == 0) && (rtc::TimeDiff32(m_lastsend + m_rx_rto, now) <= 0)) {
+ if (rtc::TimeDiff32(now, m_lastrecv) >= 15000) {
closedown(ECONNABORTED);
return;
}
@@ -336,19 +335,22 @@ void PseudoTcp::NotifyClock(uint32_t now) {
}
// Check if it's time to send delayed acks
- if (m_t_ack && (rtc::TimeDiff(m_t_ack + m_ack_delay, now) <= 0)) {
+ if (m_t_ack && (rtc::TimeDiff32(m_t_ack + m_ack_delay, now) <= 0)) {
packet(m_snd_nxt, 0, 0, 0);
}
#if PSEUDO_KEEPALIVE
// Check for idle timeout
- if ((m_state == TCP_ESTABLISHED) && (TimeDiff(m_lastrecv + IDLE_TIMEOUT, now) <= 0)) {
+ if ((m_state == TCP_ESTABLISHED) &&
+ (TimeDiff32(m_lastrecv + IDLE_TIMEOUT, now) <= 0)) {
closedown(ECONNABORTED);
return;
}
// Check for ping timeout (to keep udp mapping open)
- if ((m_state == TCP_ESTABLISHED) && (TimeDiff(m_lasttraffic + (m_bOutgoing ? IDLE_PING * 3/2 : IDLE_PING), now) <= 0)) {
+ if ((m_state == TCP_ESTABLISHED) &&
+ (TimeDiff32(m_lasttraffic + (m_bOutgoing ? IDLE_PING * 3 / 2 : IDLE_PING),
+ now) <= 0)) {
packet(m_snd_nxt, 0, 0, 0);
}
#endif // PSEUDO_KEEPALIVE
@@ -518,7 +520,7 @@ IPseudoTcpNotify::WriteResult PseudoTcp::packet(uint32_t seq,
uint32_t now = Now();
- rtc::scoped_ptr<uint8_t[]> buffer(new uint8_t[MAX_PACKET]);
+ std::unique_ptr<uint8_t[]> buffer(new uint8_t[MAX_PACKET]);
long_to_bytes(m_conv, buffer.get());
long_to_bytes(seq, buffer.get() + 4);
long_to_bytes(m_rcv_nxt, buffer.get() + 8);
@@ -571,7 +573,7 @@ IPseudoTcpNotify::WriteResult PseudoTcp::packet(uint32_t seq,
}
bool PseudoTcp::parse(const uint8_t* buffer, uint32_t size) {
- if (size < 12)
+ if (size < HEADER_SIZE)
return false;
Segment seg;
@@ -621,23 +623,24 @@ bool PseudoTcp::clock_check(uint32_t now, long& nTimeout) {
nTimeout = DEFAULT_TIMEOUT;
if (m_t_ack) {
- nTimeout =
- std::min<int32_t>(nTimeout, rtc::TimeDiff(m_t_ack + m_ack_delay, now));
+ nTimeout = std::min<int32_t>(nTimeout,
+ rtc::TimeDiff32(m_t_ack + m_ack_delay, now));
}
if (m_rto_base) {
- nTimeout =
- std::min<int32_t>(nTimeout, rtc::TimeDiff(m_rto_base + m_rx_rto, now));
+ nTimeout = std::min<int32_t>(nTimeout,
+ rtc::TimeDiff32(m_rto_base + m_rx_rto, now));
}
if (m_snd_wnd == 0) {
- nTimeout =
- std::min<int32_t>(nTimeout, rtc::TimeDiff(m_lastsend + m_rx_rto, now));
+ nTimeout = std::min<int32_t>(nTimeout,
+ rtc::TimeDiff32(m_lastsend + m_rx_rto, now));
}
#if PSEUDO_KEEPALIVE
if (m_state == TCP_ESTABLISHED) {
nTimeout = std::min<int32_t>(
- nTimeout, rtc::TimeDiff(m_lasttraffic + (m_bOutgoing ? IDLE_PING * 3 / 2
- : IDLE_PING),
- now));
+ nTimeout,
+ rtc::TimeDiff32(
+ m_lasttraffic + (m_bOutgoing ? IDLE_PING * 3 / 2 : IDLE_PING),
+ now));
}
#endif // PSEUDO_KEEPALIVE
return true;
@@ -710,7 +713,7 @@ bool PseudoTcp::process(Segment& seg) {
if ((seg.ack > m_snd_una) && (seg.ack <= m_snd_nxt)) {
// Calculate round-trip time
if (seg.tsecr) {
- int32_t rtt = rtc::TimeDiff(now, seg.tsecr);
+ int32_t rtt = rtc::TimeDiff32(now, seg.tsecr);
if (rtt >= 0) {
if (m_rx_srtt == 0) {
m_rx_srtt = rtt;
@@ -1033,7 +1036,7 @@ bool PseudoTcp::transmit(const SList::iterator& seg, uint32_t now) {
void PseudoTcp::attemptSend(SendFlags sflags) {
uint32_t now = Now();
- if (rtc::TimeDiff(now, m_lastsend) > static_cast<long>(m_rx_rto)) {
+ if (rtc::TimeDiff32(now, m_lastsend) > static_cast<long>(m_rx_rto)) {
m_cwnd = m_mss;
}
diff --git a/chromium/third_party/webrtc/p2p/base/pseudotcp_unittest.cc b/chromium/third_party/webrtc/p2p/base/pseudotcp_unittest.cc
index c9ccbca1d95..a635bcef169 100644
--- a/chromium/third_party/webrtc/p2p/base/pseudotcp_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/pseudotcp_unittest.cc
@@ -208,7 +208,8 @@ class PseudoTcpTestBase : public testing::Test,
class PseudoTcpTest : public PseudoTcpTestBase {
public:
void TestTransfer(int size) {
- uint32_t start, elapsed;
+ uint32_t start;
+ int32_t elapsed;
size_t received;
// Create some dummy data to send.
send_stream_.ReserveSize(size);
@@ -220,13 +221,13 @@ class PseudoTcpTest : public PseudoTcpTestBase {
// Prepare the receive stream.
recv_stream_.ReserveSize(size);
// Connect and wait until connected.
- start = rtc::Time();
+ start = rtc::Time32();
EXPECT_EQ(0, Connect());
EXPECT_TRUE_WAIT(have_connected_, kConnectTimeoutMs);
// Sending will start from OnTcpWriteable and complete when all data has
// been received.
EXPECT_TRUE_WAIT(have_disconnected_, kTransferTimeoutMs);
- elapsed = rtc::TimeSince(start);
+ elapsed = rtc::Time32() - start;
recv_stream_.GetSize(&received);
// Ensure we closed down OK and we got the right data.
// TODO: Ensure the errors are cleared properly.
@@ -339,7 +340,7 @@ class PseudoTcpTestPingPong : public PseudoTcpTestBase {
// Prepare the receive stream.
recv_stream_.ReserveSize(size);
// Connect and wait until connected.
- start = rtc::Time();
+ start = rtc::Time32();
EXPECT_EQ(0, Connect());
EXPECT_TRUE_WAIT(have_connected_, kConnectTimeoutMs);
// Sending will start from OnTcpWriteable and stop when the required
diff --git a/chromium/third_party/webrtc/p2p/base/relayport.cc b/chromium/third_party/webrtc/p2p/base/relayport.cc
index 719604374e0..7f05e1b1ef7 100644
--- a/chromium/third_party/webrtc/p2p/base/relayport.cc
+++ b/chromium/third_party/webrtc/p2p/base/relayport.cc
@@ -779,7 +779,7 @@ AllocateRequest::AllocateRequest(RelayEntry* entry,
: StunRequest(new RelayMessage()),
entry_(entry),
connection_(connection) {
- start_time_ = rtc::Time64();
+ start_time_ = rtc::TimeMillis();
}
void AllocateRequest::Prepare(StunMessage* request) {
@@ -834,7 +834,7 @@ void AllocateRequest::OnErrorResponse(StunMessage* response) {
<< " reason='" << attr->reason() << "'";
}
- if (rtc::Time64() - start_time_ <= kRetryTimeout)
+ if (rtc::TimeMillis() - start_time_ <= kRetryTimeout)
entry_->ScheduleKeepAlive();
}
diff --git a/chromium/third_party/webrtc/p2p/base/relayport_unittest.cc b/chromium/third_party/webrtc/p2p/base/relayport_unittest.cc
index d644d67c4f3..738ea9abb21 100644
--- a/chromium/third_party/webrtc/p2p/base/relayport_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/relayport_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/p2p/base/basicpacketsocketfactory.h"
#include "webrtc/p2p/base/relayport.h"
#include "webrtc/p2p/base/relayserver.h"
@@ -15,7 +17,6 @@
#include "webrtc/base/helpers.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/physicalsocketserver.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socketadapters.h"
#include "webrtc/base/socketaddress.h"
#include "webrtc/base/ssladapter.h"
@@ -179,7 +180,7 @@ class RelayPortTest : public testing::Test,
// Create a tcp server socket that listens on the fake address so
// the relay port can attempt to connect to it.
- rtc::scoped_ptr<rtc::AsyncSocket> tcp_server_socket(
+ std::unique_ptr<rtc::AsyncSocket> tcp_server_socket(
CreateServerSocket(kRelayTcpAddr));
// Add server addresses to the relay port and let it start.
@@ -244,16 +245,15 @@ class RelayPortTest : public testing::Test,
typedef std::map<rtc::AsyncPacketSocket*, int> PacketMap;
rtc::Thread* main_;
- rtc::scoped_ptr<rtc::PhysicalSocketServer>
- physical_socket_server_;
- rtc::scoped_ptr<rtc::VirtualSocketServer> virtual_socket_server_;
+ std::unique_ptr<rtc::PhysicalSocketServer> physical_socket_server_;
+ std::unique_ptr<rtc::VirtualSocketServer> virtual_socket_server_;
rtc::SocketServerScope ss_scope_;
rtc::Network network_;
rtc::BasicPacketSocketFactory socket_factory_;
std::string username_;
std::string password_;
- rtc::scoped_ptr<cricket::RelayPort> relay_port_;
- rtc::scoped_ptr<cricket::RelayServer> relay_server_;
+ std::unique_ptr<cricket::RelayPort> relay_port_;
+ std::unique_ptr<cricket::RelayServer> relay_server_;
std::vector<cricket::ProtocolAddress> failed_connections_;
std::vector<cricket::ProtocolAddress> soft_timedout_connections_;
PacketMap received_packet_count_;
diff --git a/chromium/third_party/webrtc/p2p/base/relayserver.cc b/chromium/third_party/webrtc/p2p/base/relayserver.cc
index e098cbcd843..ebe46c48c6a 100644
--- a/chromium/third_party/webrtc/p2p/base/relayserver.cc
+++ b/chromium/third_party/webrtc/p2p/base/relayserver.cc
@@ -694,7 +694,7 @@ void RelayServerBinding::AddExternalConnection(RelayServerConnection* conn) {
}
void RelayServerBinding::NoteUsed() {
- last_used_ = rtc::Time64();
+ last_used_ = rtc::TimeMillis();
}
bool RelayServerBinding::HasMagicCookie(const char* bytes, size_t size) const {
@@ -735,7 +735,7 @@ void RelayServerBinding::OnMessage(rtc::Message *pmsg) {
// If the lifetime timeout has been exceeded, then send a signal.
// Otherwise, just keep waiting.
- if (rtc::Time64() >= last_used_ + lifetime_) {
+ if (rtc::TimeMillis() >= last_used_ + lifetime_) {
LOG(LS_INFO) << "Expiring binding " << username_;
SignalTimeout(this);
} else {
diff --git a/chromium/third_party/webrtc/p2p/base/relayserver_unittest.cc b/chromium/third_party/webrtc/p2p/base/relayserver_unittest.cc
index 3581f7153ba..1d07a07c7fb 100644
--- a/chromium/third_party/webrtc/p2p/base/relayserver_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/relayserver_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "webrtc/p2p/base/relayserver.h"
@@ -60,16 +61,14 @@ class RelayServerTest : public testing::Test {
}
void Allocate() {
- rtc::scoped_ptr<StunMessage> req(
- CreateStunMessage(STUN_ALLOCATE_REQUEST));
+ std::unique_ptr<StunMessage> req(CreateStunMessage(STUN_ALLOCATE_REQUEST));
AddUsernameAttr(req.get(), username_);
AddLifetimeAttr(req.get(), LIFETIME);
Send1(req.get());
delete Receive1();
}
void Bind() {
- rtc::scoped_ptr<StunMessage> req(
- CreateStunMessage(STUN_BINDING_REQUEST));
+ std::unique_ptr<StunMessage> req(CreateStunMessage(STUN_BINDING_REQUEST));
AddUsernameAttr(req.get(), username_);
Send2(req.get());
delete Receive1();
@@ -172,12 +171,12 @@ class RelayServerTest : public testing::Test {
msg->AddAttribute(attr);
}
- rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
- rtc::scoped_ptr<rtc::VirtualSocketServer> ss_;
+ std::unique_ptr<rtc::PhysicalSocketServer> pss_;
+ std::unique_ptr<rtc::VirtualSocketServer> ss_;
rtc::SocketServerScope ss_scope_;
- rtc::scoped_ptr<RelayServer> server_;
- rtc::scoped_ptr<rtc::TestClient> client1_;
- rtc::scoped_ptr<rtc::TestClient> client2_;
+ std::unique_ptr<RelayServer> server_;
+ std::unique_ptr<rtc::TestClient> client1_;
+ std::unique_ptr<rtc::TestClient> client2_;
std::string username_;
std::string password_;
};
@@ -190,8 +189,8 @@ TEST_F(RelayServerTest, TestBadRequest) {
// Send an allocate request without a username and verify it is rejected.
TEST_F(RelayServerTest, TestAllocateNoUsername) {
- rtc::scoped_ptr<StunMessage> req(
- CreateStunMessage(STUN_ALLOCATE_REQUEST)), res;
+ std::unique_ptr<StunMessage> req(CreateStunMessage(STUN_ALLOCATE_REQUEST)),
+ res;
Send1(req.get());
res.reset(Receive1());
@@ -209,8 +208,8 @@ TEST_F(RelayServerTest, TestAllocateNoUsername) {
// Send a binding request and verify that it is rejected.
TEST_F(RelayServerTest, TestBindingRequest) {
- rtc::scoped_ptr<StunMessage> req(
- CreateStunMessage(STUN_BINDING_REQUEST)), res;
+ std::unique_ptr<StunMessage> req(CreateStunMessage(STUN_BINDING_REQUEST)),
+ res;
AddUsernameAttr(req.get(), username_);
Send1(req.get());
@@ -229,8 +228,8 @@ TEST_F(RelayServerTest, TestBindingRequest) {
// Send an allocate request and verify that it is accepted.
TEST_F(RelayServerTest, TestAllocate) {
- rtc::scoped_ptr<StunMessage> req(
- CreateStunMessage(STUN_ALLOCATE_REQUEST)), res;
+ std::unique_ptr<StunMessage> req(CreateStunMessage(STUN_ALLOCATE_REQUEST)),
+ res;
AddUsernameAttr(req.get(), username_);
AddLifetimeAttr(req.get(), LIFETIME);
@@ -259,8 +258,8 @@ TEST_F(RelayServerTest, TestAllocate) {
TEST_F(RelayServerTest, TestReallocate) {
Allocate();
- rtc::scoped_ptr<StunMessage> req(
- CreateStunMessage(STUN_ALLOCATE_REQUEST)), res;
+ std::unique_ptr<StunMessage> req(CreateStunMessage(STUN_ALLOCATE_REQUEST)),
+ res;
AddMagicCookieAttr(req.get());
AddUsernameAttr(req.get(), username_);
@@ -289,8 +288,8 @@ TEST_F(RelayServerTest, TestReallocate) {
TEST_F(RelayServerTest, TestRemoteBind) {
Allocate();
- rtc::scoped_ptr<StunMessage> req(
- CreateStunMessage(STUN_BINDING_REQUEST)), res;
+ std::unique_ptr<StunMessage> req(CreateStunMessage(STUN_BINDING_REQUEST)),
+ res;
AddUsernameAttr(req.get(), username_);
Send2(req.get());
@@ -304,7 +303,7 @@ TEST_F(RelayServerTest, TestRemoteBind) {
ASSERT_TRUE(recv_data != NULL);
rtc::ByteBufferReader buf(recv_data->bytes(), recv_data->length());
- rtc::scoped_ptr<StunMessage> res2(new StunMessage());
+ std::unique_ptr<StunMessage> res2(new StunMessage());
EXPECT_TRUE(res2->Read(&buf));
EXPECT_EQ(STUN_BINDING_REQUEST, res2->type());
EXPECT_EQ(req->transaction_id(), res2->transaction_id());
@@ -335,8 +334,7 @@ TEST_F(RelayServerTest, TestSendRequestMissingUsername) {
Allocate();
Bind();
- rtc::scoped_ptr<StunMessage> req(
- CreateStunMessage(STUN_SEND_REQUEST)), res;
+ std::unique_ptr<StunMessage> req(CreateStunMessage(STUN_SEND_REQUEST)), res;
AddMagicCookieAttr(req.get());
Send1(req.get());
@@ -358,8 +356,7 @@ TEST_F(RelayServerTest, TestSendRequestBadUsername) {
Allocate();
Bind();
- rtc::scoped_ptr<StunMessage> req(
- CreateStunMessage(STUN_SEND_REQUEST)), res;
+ std::unique_ptr<StunMessage> req(CreateStunMessage(STUN_SEND_REQUEST)), res;
AddMagicCookieAttr(req.get());
AddUsernameAttr(req.get(), "foobarbizbaz");
@@ -383,8 +380,7 @@ TEST_F(RelayServerTest, TestSendRequestNoDestinationAddress) {
Allocate();
Bind();
- rtc::scoped_ptr<StunMessage> req(
- CreateStunMessage(STUN_SEND_REQUEST)), res;
+ std::unique_ptr<StunMessage> req(CreateStunMessage(STUN_SEND_REQUEST)), res;
AddMagicCookieAttr(req.get());
AddUsernameAttr(req.get(), username_);
@@ -407,8 +403,7 @@ TEST_F(RelayServerTest, TestSendRequestNoData) {
Allocate();
Bind();
- rtc::scoped_ptr<StunMessage> req(
- CreateStunMessage(STUN_SEND_REQUEST)), res;
+ std::unique_ptr<StunMessage> req(CreateStunMessage(STUN_SEND_REQUEST)), res;
AddMagicCookieAttr(req.get());
AddUsernameAttr(req.get(), username_);
AddDestinationAttr(req.get(), client2_addr);
@@ -432,8 +427,8 @@ TEST_F(RelayServerTest, TestSendRequestWrongType) {
Allocate();
Bind();
- rtc::scoped_ptr<StunMessage> req(
- CreateStunMessage(STUN_BINDING_REQUEST)), res;
+ std::unique_ptr<StunMessage> req(CreateStunMessage(STUN_BINDING_REQUEST)),
+ res;
AddMagicCookieAttr(req.get());
AddUsernameAttr(req.get(), username_);
@@ -458,8 +453,7 @@ TEST_F(RelayServerTest, TestSendRaw) {
Bind();
for (int i = 0; i < 10; i++) {
- rtc::scoped_ptr<StunMessage> req(
- CreateStunMessage(STUN_SEND_REQUEST)), res;
+ std::unique_ptr<StunMessage> req(CreateStunMessage(STUN_SEND_REQUEST)), res;
AddMagicCookieAttr(req.get());
AddUsernameAttr(req.get(), username_);
AddDestinationAttr(req.get(), client2_addr);
@@ -501,8 +495,7 @@ TEST_F(RelayServerTest, DISABLED_TestExpiration) {
// Wait twice the lifetime to make sure the server has expired the binding.
rtc::Thread::Current()->ProcessMessages((LIFETIME * 2) * 1000);
- rtc::scoped_ptr<StunMessage> req(
- CreateStunMessage(STUN_SEND_REQUEST)), res;
+ std::unique_ptr<StunMessage> req(CreateStunMessage(STUN_SEND_REQUEST)), res;
AddMagicCookieAttr(req.get());
AddUsernameAttr(req.get(), username_);
AddDestinationAttr(req.get(), client2_addr);
diff --git a/chromium/third_party/webrtc/p2p/base/stun.cc b/chromium/third_party/webrtc/p2p/base/stun.cc
index d0f001485de..ac3fd5f9368 100644
--- a/chromium/third_party/webrtc/p2p/base/stun.cc
+++ b/chromium/third_party/webrtc/p2p/base/stun.cc
@@ -12,12 +12,13 @@
#include <string.h>
+#include <memory>
+
#include "webrtc/base/byteorder.h"
#include "webrtc/base/common.h"
#include "webrtc/base/crc32.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/messagedigest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/stringencode.h"
using rtc::ByteBufferReader;
@@ -173,7 +174,7 @@ bool StunMessage::ValidateMessageIntegrity(const char* data, size_t size,
// Getting length of the message to calculate Message Integrity.
size_t mi_pos = current_pos;
- rtc::scoped_ptr<char[]> temp_data(new char[current_pos]);
+ std::unique_ptr<char[]> temp_data(new char[current_pos]);
memcpy(temp_data.get(), data, current_pos);
if (size > mi_pos + kStunAttributeHeaderSize + kStunMessageIntegritySize) {
// Stun message has other attributes after message integrity.
diff --git a/chromium/third_party/webrtc/p2p/base/stun_unittest.cc b/chromium/third_party/webrtc/p2p/base/stun_unittest.cc
index 2213397f2e4..d7ca9991f85 100644
--- a/chromium/third_party/webrtc/p2p/base/stun_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/stun_unittest.cc
@@ -16,7 +16,6 @@
#include "webrtc/base/gunit.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/messagedigest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socketaddress.h"
namespace cricket {
diff --git a/chromium/third_party/webrtc/p2p/base/stunport.cc b/chromium/third_party/webrtc/p2p/base/stunport.cc
index 16546faaf1b..8ed8c448872 100644
--- a/chromium/third_party/webrtc/p2p/base/stunport.cc
+++ b/chromium/third_party/webrtc/p2p/base/stunport.cc
@@ -65,7 +65,7 @@ class StunBindingRequest : public StunRequest {
}
// The keep-alive requests will be stopped after its lifetime has passed.
- if (WithinLifetime(rtc::Time64())) {
+ if (WithinLifetime(rtc::TimeMillis())) {
port_->requests_.SendDelayed(
new StunBindingRequest(port_, server_addr_, start_time_, lifetime_),
port_->stun_keepalive_delay());
@@ -85,9 +85,9 @@ class StunBindingRequest : public StunRequest {
port_->OnStunBindingOrResolveRequestFailed(server_addr_);
- int64_t now = rtc::Time64();
+ int64_t now = rtc::TimeMillis();
if (WithinLifetime(now) &&
- rtc::TimeDiff64(now, start_time_) < RETRY_TIMEOUT) {
+ rtc::TimeDiff(now, start_time_) < RETRY_TIMEOUT) {
port_->requests_.SendDelayed(
new StunBindingRequest(port_, server_addr_, start_time_, lifetime_),
port_->stun_keepalive_delay());
@@ -105,7 +105,7 @@ class StunBindingRequest : public StunRequest {
// Returns true if |now| is within the lifetime of the request (a negative
// lifetime means infinite).
bool WithinLifetime(int64_t now) const {
- return lifetime_ < 0 || rtc::TimeDiff64(now, start_time_) <= lifetime_;
+ return lifetime_ < 0 || rtc::TimeDiff(now, start_time_) <= lifetime_;
}
UDPPort* port_;
const rtc::SocketAddress server_addr_;
@@ -411,7 +411,7 @@ void UDPPort::SendStunBindingRequest(const rtc::SocketAddress& stun_addr) {
} else if (socket_->GetState() == rtc::AsyncPacketSocket::STATE_BOUND) {
// Check if |server_addr_| is compatible with the port's ip.
if (IsCompatibleAddress(stun_addr)) {
- requests_.Send(new StunBindingRequest(this, stun_addr, rtc::Time64(),
+ requests_.Send(new StunBindingRequest(this, stun_addr, rtc::TimeMillis(),
stun_keepalive_lifetime_));
} else {
// Since we can't send stun messages to the server, we should mark this
diff --git a/chromium/third_party/webrtc/p2p/base/stunport.h b/chromium/third_party/webrtc/p2p/base/stunport.h
index a0eba51cd43..cd844aa273e 100644
--- a/chromium/third_party/webrtc/p2p/base/stunport.h
+++ b/chromium/third_party/webrtc/p2p/base/stunport.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_P2P_BASE_STUNPORT_H_
#define WEBRTC_P2P_BASE_STUNPORT_H_
+#include <memory>
#include <string>
#include "webrtc/p2p/base/port.h"
@@ -224,7 +225,7 @@ class UDPPort : public Port {
StunRequestManager requests_;
rtc::AsyncPacketSocket* socket_;
int error_;
- rtc::scoped_ptr<AddressResolver> resolver_;
+ std::unique_ptr<AddressResolver> resolver_;
bool ready_;
int stun_keepalive_delay_;
int stun_keepalive_lifetime_;
diff --git a/chromium/third_party/webrtc/p2p/base/stunport_unittest.cc b/chromium/third_party/webrtc/p2p/base/stunport_unittest.cc
index 1926b932799..702ec2504e8 100644
--- a/chromium/third_party/webrtc/p2p/base/stunport_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/stunport_unittest.cc
@@ -8,13 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/p2p/base/basicpacketsocketfactory.h"
#include "webrtc/p2p/base/stunport.h"
#include "webrtc/p2p/base/teststunserver.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/helpers.h"
#include "webrtc/base/physicalsocketserver.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socketaddress.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/virtualsocketserver.h"
@@ -155,15 +156,15 @@ class StunPortTest : public testing::Test,
}
private:
- rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
- rtc::scoped_ptr<rtc::VirtualSocketServer> ss_;
+ std::unique_ptr<rtc::PhysicalSocketServer> pss_;
+ std::unique_ptr<rtc::VirtualSocketServer> ss_;
rtc::SocketServerScope ss_scope_;
rtc::Network network_;
rtc::BasicPacketSocketFactory socket_factory_;
- rtc::scoped_ptr<cricket::UDPPort> stun_port_;
- rtc::scoped_ptr<cricket::TestStunServer> stun_server_1_;
- rtc::scoped_ptr<cricket::TestStunServer> stun_server_2_;
- rtc::scoped_ptr<rtc::AsyncPacketSocket> socket_;
+ std::unique_ptr<cricket::UDPPort> stun_port_;
+ std::unique_ptr<cricket::TestStunServer> stun_server_1_;
+ std::unique_ptr<cricket::TestStunServer> stun_server_2_;
+ std::unique_ptr<rtc::AsyncPacketSocket> socket_;
bool done_;
bool error_;
int stun_keepalive_delay_;
diff --git a/chromium/third_party/webrtc/p2p/base/stunrequest.cc b/chromium/third_party/webrtc/p2p/base/stunrequest.cc
index 546dd157d99..b75bcf67cc5 100644
--- a/chromium/third_party/webrtc/p2p/base/stunrequest.cc
+++ b/chromium/third_party/webrtc/p2p/base/stunrequest.cc
@@ -11,6 +11,8 @@
#include "webrtc/p2p/base/stunrequest.h"
#include <algorithm>
+#include <memory>
+
#include "webrtc/base/common.h"
#include "webrtc/base/helpers.h"
#include "webrtc/base/logging.h"
@@ -139,7 +141,7 @@ bool StunRequestManager::CheckResponse(const char* data, size_t size) {
// Parse the STUN message and continue processing as usual.
rtc::ByteBufferReader buf(data, size);
- rtc::scoped_ptr<StunMessage> response(iter->second->msg_->CreateNew());
+ std::unique_ptr<StunMessage> response(iter->second->msg_->CreateNew());
if (!response->Read(&buf)) {
LOG(LS_WARNING) << "Failed to read STUN response " << rtc::hex_encode(id);
return false;
@@ -192,7 +194,7 @@ const StunMessage* StunRequest::msg() const {
}
int StunRequest::Elapsed() const {
- return static_cast<int>(rtc::Time64() - tstamp_);
+ return static_cast<int>(rtc::TimeMillis() - tstamp_);
}
@@ -211,7 +213,7 @@ void StunRequest::OnMessage(rtc::Message* pmsg) {
return;
}
- tstamp_ = rtc::Time64();
+ tstamp_ = rtc::TimeMillis();
rtc::ByteBufferWriter buf;
msg_->Write(&buf);
diff --git a/chromium/third_party/webrtc/p2p/base/stunrequest_unittest.cc b/chromium/third_party/webrtc/p2p/base/stunrequest_unittest.cc
index 5e4d2560021..5845a0ecbbb 100644
--- a/chromium/third_party/webrtc/p2p/base/stunrequest_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/stunrequest_unittest.cc
@@ -146,13 +146,13 @@ TEST_F(StunRequestTest, TestUnexpected) {
TEST_F(StunRequestTest, TestBackoff) {
StunMessage* req = CreateStunMessage(STUN_BINDING_REQUEST, NULL);
- int64_t start = rtc::Time64();
+ int64_t start = rtc::TimeMillis();
manager_.Send(new StunRequestThunker(req, this));
StunMessage* res = CreateStunMessage(STUN_BINDING_RESPONSE, req);
for (int i = 0; i < 9; ++i) {
while (request_count_ == i)
rtc::Thread::Current()->ProcessMessages(1);
- int64_t elapsed = rtc::Time64() - start;
+ int64_t elapsed = rtc::TimeMillis() - start;
LOG(LS_INFO) << "STUN request #" << (i + 1)
<< " sent at " << elapsed << " ms";
EXPECT_GE(TotalDelay(i + 1), elapsed);
diff --git a/chromium/third_party/webrtc/p2p/base/stunserver.h b/chromium/third_party/webrtc/p2p/base/stunserver.h
index a7eeab15445..9d1c169a507 100644
--- a/chromium/third_party/webrtc/p2p/base/stunserver.h
+++ b/chromium/third_party/webrtc/p2p/base/stunserver.h
@@ -11,9 +11,10 @@
#ifndef WEBRTC_P2P_BASE_STUNSERVER_H_
#define WEBRTC_P2P_BASE_STUNSERVER_H_
+#include <memory>
+
#include "webrtc/p2p/base/stun.h"
#include "webrtc/base/asyncudpsocket.h"
-#include "webrtc/base/scoped_ptr.h"
namespace cricket {
@@ -58,7 +59,7 @@ class StunServer : public sigslot::has_slots<> {
StunMessage* response) const;
private:
- rtc::scoped_ptr<rtc::AsyncUDPSocket> socket_;
+ std::unique_ptr<rtc::AsyncUDPSocket> socket_;
};
} // namespace cricket
diff --git a/chromium/third_party/webrtc/p2p/base/stunserver_unittest.cc b/chromium/third_party/webrtc/p2p/base/stunserver_unittest.cc
index 973ab2adfc7..e468447005a 100644
--- a/chromium/third_party/webrtc/p2p/base/stunserver_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/stunserver_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include "webrtc/p2p/base/stunserver.h"
@@ -62,11 +63,11 @@ class StunServerTest : public testing::Test {
return msg;
}
private:
- rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
- rtc::scoped_ptr<rtc::VirtualSocketServer> ss_;
+ std::unique_ptr<rtc::PhysicalSocketServer> pss_;
+ std::unique_ptr<rtc::VirtualSocketServer> ss_;
rtc::Thread worker_;
- rtc::scoped_ptr<StunServer> server_;
- rtc::scoped_ptr<rtc::TestClient> client_;
+ std::unique_ptr<StunServer> server_;
+ std::unique_ptr<rtc::TestClient> client_;
};
// Disable for TSan v2, see
diff --git a/chromium/third_party/webrtc/p2p/base/tcpport.cc b/chromium/third_party/webrtc/p2p/base/tcpport.cc
index 59c4f317d4b..5ccb8108a05 100644
--- a/chromium/third_party/webrtc/p2p/base/tcpport.cc
+++ b/chromium/third_party/webrtc/p2p/base/tcpport.cc
@@ -387,28 +387,35 @@ void TCPConnection::OnConnect(rtc::AsyncPacketSocket* socket) {
// the one we asked for. This is seen in Chrome, where TCP sockets cannot be
// given a binding address, and the platform is expected to pick the
// correct local address.
- const rtc::IPAddress& socket_ip = socket->GetLocalAddress().ipaddr();
- if (socket_ip == port()->ip() || IPIsAny(port()->ip())) {
- if (socket_ip == port()->ip()) {
- LOG_J(LS_VERBOSE, this) << "Connection established to "
- << socket->GetRemoteAddress().ToSensitiveString();
- } else {
- LOG(LS_WARNING) << "Socket is bound to a different address:"
- << socket->GetLocalAddress().ipaddr().ToString()
- << ", rather then the local port:"
- << port()->ip().ToString()
- << ". Still allowing it since it's any address"
- << ", possibly caused by multi-routes being disabled.";
- }
- set_connected(true);
- connection_pending_ = false;
+ const rtc::SocketAddress& socket_addr = socket->GetLocalAddress();
+ if (socket_addr.ipaddr() == port()->ip()) {
+ LOG_J(LS_VERBOSE, this) << "Connection established to "
+ << socket->GetRemoteAddress().ToSensitiveString();
+ } else if (IPIsAny(port()->ip())) {
+ LOG(LS_WARNING) << "Socket is bound to a different address:"
+ << socket_addr.ipaddr().ToString()
+ << ", rather then the local port:"
+ << port()->ip().ToString()
+ << ". Still allowing it since it's any address"
+ << ", possibly caused by multi-routes being disabled.";
+ } else if (socket_addr.IsLoopbackIP()) {
+ LOG(LS_WARNING) << "Socket is bound to a different address:"
+ << socket_addr.ipaddr().ToString()
+ << ", rather then the local port:"
+ << port()->ip().ToString()
+ << ". Still allowing it since it's localhost.";
} else {
LOG_J(LS_WARNING, this) << "Dropping connection as TCP socket bound to IP "
- << socket_ip.ToSensitiveString()
+ << socket_addr.ipaddr().ToSensitiveString()
<< ", different from the local candidate IP "
<< port()->ip().ToSensitiveString();
OnClose(socket, 0);
+ return;
}
+
+ // Connection is established successfully.
+ set_connected(true);
+ connection_pending_ = false;
}
void TCPConnection::OnClose(rtc::AsyncPacketSocket* socket, int error) {
diff --git a/chromium/third_party/webrtc/p2p/base/tcpport.h b/chromium/third_party/webrtc/p2p/base/tcpport.h
index cfc6245601e..77bbd09eab0 100644
--- a/chromium/third_party/webrtc/p2p/base/tcpport.h
+++ b/chromium/third_party/webrtc/p2p/base/tcpport.h
@@ -12,7 +12,9 @@
#define WEBRTC_P2P_BASE_TCPPORT_H_
#include <list>
+#include <memory>
#include <string>
+
#include "webrtc/p2p/base/port.h"
#include "webrtc/base/asyncpacketsocket.h"
@@ -164,7 +166,7 @@ class TCPConnection : public Connection {
const rtc::PacketTime& packet_time);
void OnReadyToSend(rtc::AsyncPacketSocket* socket);
- rtc::scoped_ptr<rtc::AsyncPacketSocket> socket_;
+ std::unique_ptr<rtc::AsyncPacketSocket> socket_;
int error_;
bool outgoing_;
diff --git a/chromium/third_party/webrtc/p2p/base/tcpport_unittest.cc b/chromium/third_party/webrtc/p2p/base/tcpport_unittest.cc
new file mode 100644
index 00000000000..e33dd433a7d
--- /dev/null
+++ b/chromium/third_party/webrtc/p2p/base/tcpport_unittest.cc
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/physicalsocketserver.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/virtualsocketserver.h"
+#include "webrtc/p2p/base/basicpacketsocketfactory.h"
+#include "webrtc/p2p/base/tcpport.h"
+
+using rtc::SocketAddress;
+using cricket::Connection;
+using cricket::Port;
+using cricket::TCPPort;
+using cricket::ICE_UFRAG_LENGTH;
+using cricket::ICE_PWD_LENGTH;
+
+static int kTimeout = 1000;
+static const SocketAddress kLocalAddr("11.11.11.11", 1);
+static const SocketAddress kRemoteAddr("22.22.22.22", 2);
+
+class TCPPortTest : public testing::Test, public sigslot::has_slots<> {
+ public:
+ TCPPortTest()
+ : main_(rtc::Thread::Current()),
+ pss_(new rtc::PhysicalSocketServer),
+ ss_(new rtc::VirtualSocketServer(pss_.get())),
+ ss_scope_(ss_.get()),
+ network_("unittest", "unittest", rtc::IPAddress(INADDR_ANY), 32),
+ socket_factory_(rtc::Thread::Current()),
+ username_(rtc::CreateRandomString(ICE_UFRAG_LENGTH)),
+ password_(rtc::CreateRandomString(ICE_PWD_LENGTH)) {
+ network_.AddIP(rtc::IPAddress(INADDR_ANY));
+ }
+
+ void ConnectSignalSocketCreated() {
+ ss_->SignalSocketCreated.connect(this, &TCPPortTest::OnSocketCreated);
+ }
+
+ void OnSocketCreated(rtc::VirtualSocket* socket) {
+ LOG(LS_INFO) << "socket created ";
+ socket->SignalAddressReady.connect(
+ this, &TCPPortTest::SetLocalhostAsAlternativeLocalAddress);
+ }
+
+ void SetLocalhostAsAlternativeLocalAddress(rtc::VirtualSocket* socket,
+ const SocketAddress& address) {
+ SocketAddress local_address("127.0.0.1", 2000);
+ socket->SetAlternativeLocalAddress(local_address);
+ }
+
+ TCPPort* CreateTCPPort(const SocketAddress& addr) {
+ return TCPPort::Create(main_, &socket_factory_, &network_, addr.ipaddr(), 0,
+ 0, username_, password_, true);
+ }
+
+ protected:
+ rtc::Thread* main_;
+ std::unique_ptr<rtc::PhysicalSocketServer> pss_;
+ std::unique_ptr<rtc::VirtualSocketServer> ss_;
+ rtc::SocketServerScope ss_scope_;
+ rtc::Network network_;
+ rtc::BasicPacketSocketFactory socket_factory_;
+ std::string username_;
+ std::string password_;
+};
+
+TEST_F(TCPPortTest, TestTCPPortWithLocalhostAddress) {
+ std::unique_ptr<TCPPort> lport(CreateTCPPort(kLocalAddr));
+ std::unique_ptr<TCPPort> rport(CreateTCPPort(kRemoteAddr));
+ lport->PrepareAddress();
+ rport->PrepareAddress();
+ // Start to listen to new socket creation event.
+ ConnectSignalSocketCreated();
+ Connection* conn =
+ lport->CreateConnection(rport->Candidates()[0], Port::ORIGIN_MESSAGE);
+ EXPECT_TRUE_WAIT(conn->connected(), kTimeout);
+}
diff --git a/chromium/third_party/webrtc/p2p/base/testrelayserver.h b/chromium/third_party/webrtc/p2p/base/testrelayserver.h
index 87cb9e5dc32..7bc0beead23 100644
--- a/chromium/third_party/webrtc/p2p/base/testrelayserver.h
+++ b/chromium/third_party/webrtc/p2p/base/testrelayserver.h
@@ -11,9 +11,10 @@
#ifndef WEBRTC_P2P_BASE_TESTRELAYSERVER_H_
#define WEBRTC_P2P_BASE_TESTRELAYSERVER_H_
+#include <memory>
+
#include "webrtc/p2p/base/relayserver.h"
#include "webrtc/base/asynctcpsocket.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sigslot.h"
#include "webrtc/base/socketadapters.h"
#include "webrtc/base/thread.h"
@@ -90,10 +91,10 @@ class TestRelayServer : public sigslot::has_slots<> {
}
private:
cricket::RelayServer server_;
- rtc::scoped_ptr<rtc::AsyncSocket> tcp_int_socket_;
- rtc::scoped_ptr<rtc::AsyncSocket> tcp_ext_socket_;
- rtc::scoped_ptr<rtc::AsyncSocket> ssl_int_socket_;
- rtc::scoped_ptr<rtc::AsyncSocket> ssl_ext_socket_;
+ std::unique_ptr<rtc::AsyncSocket> tcp_int_socket_;
+ std::unique_ptr<rtc::AsyncSocket> tcp_ext_socket_;
+ std::unique_ptr<rtc::AsyncSocket> ssl_int_socket_;
+ std::unique_ptr<rtc::AsyncSocket> ssl_ext_socket_;
};
} // namespace cricket
diff --git a/chromium/third_party/webrtc/p2p/base/transport.cc b/chromium/third_party/webrtc/p2p/base/transport.cc
index a1988820ed3..9688e4aedcd 100644
--- a/chromium/third_party/webrtc/p2p/base/transport.cc
+++ b/chromium/third_party/webrtc/p2p/base/transport.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <utility> // for std::pair
#include "webrtc/p2p/base/transport.h"
@@ -77,7 +78,7 @@ void Transport::SetIceRole(IceRole role) {
}
}
-rtc::scoped_ptr<rtc::SSLCertificate> Transport::GetRemoteSSLCertificate() {
+std::unique_ptr<rtc::SSLCertificate> Transport::GetRemoteSSLCertificate() {
if (channels_.empty()) {
return nullptr;
}
@@ -227,21 +228,7 @@ void Transport::ConnectChannels() {
connect_requested_ = true;
- if (!local_description_) {
- // TOOD(mallinath) : TransportDescription(TD) shouldn't be generated here.
- // As Transport must know TD is offer or answer and cricket::Transport
- // doesn't have the capability to decide it. This should be set by the
- // Session.
- // Session must generate local TD before remote candidates pushed when
- // initiate request initiated by the remote.
- LOG(LS_INFO) << "Transport::ConnectChannels: No local description has "
- << "been set. Will generate one.";
- TransportDescription desc(std::vector<std::string>(),
- rtc::CreateRandomString(ICE_UFRAG_LENGTH),
- rtc::CreateRandomString(ICE_PWD_LENGTH),
- ICEMODE_FULL, CONNECTIONROLE_NONE, nullptr);
- SetLocalTransportDescription(desc, CA_OFFER, nullptr);
- }
+ RTC_DCHECK(local_description_);
CallChannels(&TransportChannelImpl::Connect);
}
@@ -414,4 +401,107 @@ bool Transport::NegotiateTransportDescription(ContentAction local_role,
return true;
}
+bool Transport::VerifyCertificateFingerprint(
+ const rtc::RTCCertificate* certificate,
+ const rtc::SSLFingerprint* fingerprint,
+ std::string* error_desc) const {
+ if (!fingerprint) {
+ return BadTransportDescription("No fingerprint.", error_desc);
+ }
+ if (!certificate) {
+ return BadTransportDescription(
+ "Fingerprint provided but no identity available.", error_desc);
+ }
+ std::unique_ptr<rtc::SSLFingerprint> fp_tmp(rtc::SSLFingerprint::Create(
+ fingerprint->algorithm, certificate->identity()));
+ ASSERT(fp_tmp.get() != NULL);
+ if (*fp_tmp == *fingerprint) {
+ return true;
+ }
+ std::ostringstream desc;
+ desc << "Local fingerprint does not match identity. Expected: ";
+ desc << fp_tmp->ToString();
+ desc << " Got: " << fingerprint->ToString();
+ return BadTransportDescription(desc.str(), error_desc);
+}
+
+bool Transport::NegotiateRole(ContentAction local_role,
+ rtc::SSLRole* ssl_role,
+ std::string* error_desc) const {
+ RTC_DCHECK(ssl_role);
+ if (!local_description() || !remote_description()) {
+ const std::string msg =
+ "Local and Remote description must be set before "
+ "transport descriptions are negotiated";
+ return BadTransportDescription(msg, error_desc);
+ }
+
+ // From RFC 4145, section-4.1, The following are the values that the
+ // 'setup' attribute can take in an offer/answer exchange:
+ // Offer Answer
+ // ________________
+ // active passive / holdconn
+ // passive active / holdconn
+ // actpass active / passive / holdconn
+ // holdconn holdconn
+ //
+ // Set the role that is most conformant with RFC 5763, Section 5, bullet 1
+ // The endpoint MUST use the setup attribute defined in [RFC4145].
+ // The endpoint that is the offerer MUST use the setup attribute
+ // value of setup:actpass and be prepared to receive a client_hello
+ // before it receives the answer. The answerer MUST use either a
+ // setup attribute value of setup:active or setup:passive. Note that
+ // if the answerer uses setup:passive, then the DTLS handshake will
+ // not begin until the answerer is received, which adds additional
+ // latency. setup:active allows the answer and the DTLS handshake to
+ // occur in parallel. Thus, setup:active is RECOMMENDED. Whichever
+ // party is active MUST initiate a DTLS handshake by sending a
+ // ClientHello over each flow (host/port quartet).
+ // IOW - actpass and passive modes should be treated as server and
+ // active as client.
+ ConnectionRole local_connection_role = local_description()->connection_role;
+ ConnectionRole remote_connection_role = remote_description()->connection_role;
+
+ bool is_remote_server = false;
+ if (local_role == CA_OFFER) {
+ if (local_connection_role != CONNECTIONROLE_ACTPASS) {
+ return BadTransportDescription(
+ "Offerer must use actpass value for setup attribute.", error_desc);
+ }
+
+ if (remote_connection_role == CONNECTIONROLE_ACTIVE ||
+ remote_connection_role == CONNECTIONROLE_PASSIVE ||
+ remote_connection_role == CONNECTIONROLE_NONE) {
+ is_remote_server = (remote_connection_role == CONNECTIONROLE_PASSIVE);
+ } else {
+ const std::string msg =
+ "Answerer must use either active or passive value "
+ "for setup attribute.";
+ return BadTransportDescription(msg, error_desc);
+ }
+ // If remote is NONE or ACTIVE it will act as client.
+ } else {
+ if (remote_connection_role != CONNECTIONROLE_ACTPASS &&
+ remote_connection_role != CONNECTIONROLE_NONE) {
+ return BadTransportDescription(
+ "Offerer must use actpass value for setup attribute.", error_desc);
+ }
+
+ if (local_connection_role == CONNECTIONROLE_ACTIVE ||
+ local_connection_role == CONNECTIONROLE_PASSIVE) {
+ is_remote_server = (local_connection_role == CONNECTIONROLE_ACTIVE);
+ } else {
+ const std::string msg =
+ "Answerer must use either active or passive value "
+ "for setup attribute.";
+ return BadTransportDescription(msg, error_desc);
+ }
+
+ // If local is passive, local will act as server.
+ }
+
+ *ssl_role = is_remote_server ? rtc::SSL_CLIENT : rtc::SSL_SERVER;
+ return true;
+}
+
} // namespace cricket
diff --git a/chromium/third_party/webrtc/p2p/base/transport.h b/chromium/third_party/webrtc/p2p/base/transport.h
index 8b30127b7f9..e31d37a6f6a 100644
--- a/chromium/third_party/webrtc/p2p/base/transport.h
+++ b/chromium/third_party/webrtc/p2p/base/transport.h
@@ -26,8 +26,11 @@
#define WEBRTC_P2P_BASE_TRANSPORT_H_
#include <map>
+#include <memory>
#include <string>
#include <vector>
+
+#include "webrtc/base/constructormagic.h"
#include "webrtc/p2p/base/candidate.h"
#include "webrtc/p2p/base/p2pconstants.h"
#include "webrtc/p2p/base/sessiondescription.h"
@@ -214,7 +217,7 @@ class Transport : public sigslot::has_slots<> {
}
// Get a copy of the remote certificate in use by the specified channel.
- rtc::scoped_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate();
+ std::unique_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate();
// Create, destroy, and lookup the channels of this type by their components.
TransportChannelImpl* CreateChannel(int component);
@@ -312,6 +315,20 @@ class Transport : public sigslot::has_slots<> {
TransportChannelImpl* channel,
std::string* error_desc);
+ // Returns false if the certificate's identity does not match the fingerprint,
+ // or either is NULL.
+ virtual bool VerifyCertificateFingerprint(
+ const rtc::RTCCertificate* certificate,
+ const rtc::SSLFingerprint* fingerprint,
+ std::string* error_desc) const;
+
+ // Negotiates the SSL role based off the offer and answer as specified by
+ // RFC 4145, section-4.1. Returns false if the SSL role cannot be determined
+ // from the local description and remote description.
+ virtual bool NegotiateRole(ContentAction local_role,
+ rtc::SSLRole* ssl_role,
+ std::string* error_desc) const;
+
private:
// If a candidate is not acceptable, returns false and sets error.
// Call this before calling OnRemoteCandidates.
@@ -333,8 +350,8 @@ class Transport : public sigslot::has_slots<> {
uint64_t tiebreaker_ = 0;
IceMode remote_ice_mode_ = ICEMODE_FULL;
IceConfig ice_config_;
- rtc::scoped_ptr<TransportDescription> local_description_;
- rtc::scoped_ptr<TransportDescription> remote_description_;
+ std::unique_ptr<TransportDescription> local_description_;
+ std::unique_ptr<TransportDescription> remote_description_;
bool local_description_set_ = false;
bool remote_description_set_ = false;
diff --git a/chromium/third_party/webrtc/p2p/base/transport_unittest.cc b/chromium/third_party/webrtc/p2p/base/transport_unittest.cc
index 96ebc5eb506..cde60ce964a 100644
--- a/chromium/third_party/webrtc/p2p/base/transport_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/transport_unittest.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/base/fakesslidentity.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/network.h"
@@ -50,7 +52,7 @@ class TransportTest : public testing::Test,
}
protected:
- rtc::scoped_ptr<FakeTransport> transport_;
+ std::unique_ptr<FakeTransport> transport_;
FakeTransportChannel* channel_;
};
@@ -219,9 +221,194 @@ TEST_F(TransportTest, TestGetStats) {
// Note that this tests the behavior of a FakeTransportChannel.
ASSERT_EQ(1U, stats.channel_stats.size());
EXPECT_EQ(1, stats.channel_stats[0].component);
+ // Set local transport description for FakeTransport before connecting.
+ TransportDescription faketransport_desc(
+ std::vector<std::string>(),
+ rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH),
+ rtc::CreateRandomString(cricket::ICE_PWD_LENGTH), cricket::ICEMODE_FULL,
+ cricket::CONNECTIONROLE_NONE, nullptr);
+ transport_->SetLocalTransportDescription(faketransport_desc,
+ cricket::CA_OFFER, nullptr);
transport_->ConnectChannels();
EXPECT_TRUE(transport_->GetStats(&stats));
ASSERT_EQ(1U, stats.channel_stats.size());
EXPECT_EQ(1, stats.channel_stats[0].component);
}
+// Tests that VerifyCertificateFingerprint only returns true when the
+// certificate matches the fingerprint.
+TEST_F(TransportTest, TestVerifyCertificateFingerprint) {
+ std::string error_desc;
+ EXPECT_FALSE(
+ transport_->VerifyCertificateFingerprint(nullptr, nullptr, &error_desc));
+ rtc::KeyType key_types[] = {rtc::KT_RSA, rtc::KT_ECDSA};
+
+ for (auto& key_type : key_types) {
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ rtc::RTCCertificate::Create(std::unique_ptr<rtc::SSLIdentity>(
+ rtc::SSLIdentity::Generate("testing", key_type)));
+ ASSERT_NE(nullptr, certificate);
+
+ std::string digest_algorithm;
+ ASSERT_TRUE(certificate->ssl_certificate().GetSignatureDigestAlgorithm(
+ &digest_algorithm));
+ ASSERT_FALSE(digest_algorithm.empty());
+ std::unique_ptr<rtc::SSLFingerprint> good_fingerprint(
+ rtc::SSLFingerprint::Create(digest_algorithm, certificate->identity()));
+ ASSERT_NE(nullptr, good_fingerprint);
+
+ EXPECT_TRUE(transport_->VerifyCertificateFingerprint(
+ certificate.get(), good_fingerprint.get(), &error_desc));
+ EXPECT_FALSE(transport_->VerifyCertificateFingerprint(
+ certificate.get(), nullptr, &error_desc));
+ EXPECT_FALSE(transport_->VerifyCertificateFingerprint(
+ nullptr, good_fingerprint.get(), &error_desc));
+
+ rtc::SSLFingerprint bad_fingerprint = *good_fingerprint;
+ bad_fingerprint.digest.AppendData("0", 1);
+ EXPECT_FALSE(transport_->VerifyCertificateFingerprint(
+ certificate.get(), &bad_fingerprint, &error_desc));
+ }
+}
+
+// Tests that NegotiateRole sets the SSL role correctly.
+TEST_F(TransportTest, TestNegotiateRole) {
+ TransportDescription local_desc(kIceUfrag1, kIcePwd1);
+ TransportDescription remote_desc(kIceUfrag2, kIcePwd2);
+
+ struct NegotiateRoleParams {
+ cricket::ConnectionRole local_role;
+ cricket::ConnectionRole remote_role;
+ cricket::ContentAction local_action;
+ cricket::ContentAction remote_action;
+ };
+
+ rtc::SSLRole ssl_role;
+ std::string error_desc;
+
+ // Parameters which set the SSL role to SSL_CLIENT.
+ NegotiateRoleParams valid_client_params[] = {
+ {cricket::CONNECTIONROLE_ACTIVE, cricket::CONNECTIONROLE_ACTPASS,
+ cricket::CA_ANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_ACTIVE, cricket::CONNECTIONROLE_ACTPASS,
+ cricket::CA_PRANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_ACTPASS, cricket::CONNECTIONROLE_PASSIVE,
+ cricket::CA_OFFER, cricket::CA_ANSWER},
+ {cricket::CONNECTIONROLE_ACTPASS, cricket::CONNECTIONROLE_PASSIVE,
+ cricket::CA_OFFER, cricket::CA_PRANSWER}};
+
+ for (auto& param : valid_client_params) {
+ local_desc.connection_role = param.local_role;
+ remote_desc.connection_role = param.remote_role;
+
+ ASSERT_TRUE(transport_->SetRemoteTransportDescription(
+ remote_desc, param.remote_action, nullptr));
+ ASSERT_TRUE(transport_->SetLocalTransportDescription(
+ local_desc, param.local_action, nullptr));
+ EXPECT_TRUE(
+ transport_->NegotiateRole(param.local_action, &ssl_role, &error_desc));
+ EXPECT_EQ(rtc::SSL_CLIENT, ssl_role);
+ }
+
+ // Parameters which set the SSL role to SSL_SERVER.
+ NegotiateRoleParams valid_server_params[] = {
+ {cricket::CONNECTIONROLE_PASSIVE, cricket::CONNECTIONROLE_ACTPASS,
+ cricket::CA_ANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_PASSIVE, cricket::CONNECTIONROLE_ACTPASS,
+ cricket::CA_PRANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_ACTPASS, cricket::CONNECTIONROLE_ACTIVE,
+ cricket::CA_OFFER, cricket::CA_ANSWER},
+ {cricket::CONNECTIONROLE_ACTPASS, cricket::CONNECTIONROLE_ACTIVE,
+ cricket::CA_OFFER, cricket::CA_PRANSWER}};
+
+ for (auto& param : valid_server_params) {
+ local_desc.connection_role = param.local_role;
+ remote_desc.connection_role = param.remote_role;
+
+ ASSERT_TRUE(transport_->SetRemoteTransportDescription(
+ remote_desc, param.remote_action, nullptr));
+ ASSERT_TRUE(transport_->SetLocalTransportDescription(
+ local_desc, param.local_action, nullptr));
+ EXPECT_TRUE(
+ transport_->NegotiateRole(param.local_action, &ssl_role, &error_desc));
+ EXPECT_EQ(rtc::SSL_SERVER, ssl_role);
+ }
+
+ // Invalid parameters due to both peers having a duplicate role.
+ NegotiateRoleParams duplicate_params[] = {
+ {cricket::CONNECTIONROLE_ACTIVE, cricket::CONNECTIONROLE_ACTIVE,
+ cricket::CA_ANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_ACTPASS, cricket::CONNECTIONROLE_ACTPASS,
+ cricket::CA_ANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_PASSIVE, cricket::CONNECTIONROLE_PASSIVE,
+ cricket::CA_ANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_ACTIVE, cricket::CONNECTIONROLE_ACTIVE,
+ cricket::CA_PRANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_ACTPASS, cricket::CONNECTIONROLE_ACTPASS,
+ cricket::CA_PRANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_PASSIVE, cricket::CONNECTIONROLE_PASSIVE,
+ cricket::CA_PRANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_ACTIVE, cricket::CONNECTIONROLE_ACTIVE,
+ cricket::CA_OFFER, cricket::CA_ANSWER},
+ {cricket::CONNECTIONROLE_ACTPASS, cricket::CONNECTIONROLE_ACTPASS,
+ cricket::CA_OFFER, cricket::CA_ANSWER},
+ {cricket::CONNECTIONROLE_PASSIVE, cricket::CONNECTIONROLE_PASSIVE,
+ cricket::CA_OFFER, cricket::CA_ANSWER},
+ {cricket::CONNECTIONROLE_ACTIVE, cricket::CONNECTIONROLE_ACTIVE,
+ cricket::CA_OFFER, cricket::CA_PRANSWER},
+ {cricket::CONNECTIONROLE_ACTPASS, cricket::CONNECTIONROLE_ACTPASS,
+ cricket::CA_OFFER, cricket::CA_PRANSWER},
+ {cricket::CONNECTIONROLE_PASSIVE, cricket::CONNECTIONROLE_PASSIVE,
+ cricket::CA_OFFER, cricket::CA_PRANSWER}};
+
+ for (auto& param : duplicate_params) {
+ local_desc.connection_role = param.local_role;
+ remote_desc.connection_role = param.remote_role;
+
+ ASSERT_TRUE(transport_->SetRemoteTransportDescription(
+ remote_desc, param.remote_action, nullptr));
+ ASSERT_TRUE(transport_->SetLocalTransportDescription(
+ local_desc, param.local_action, nullptr));
+ EXPECT_FALSE(
+ transport_->NegotiateRole(param.local_action, &ssl_role, &error_desc));
+ }
+
+ // Invalid parameters due to the offerer not using ACTPASS.
+ NegotiateRoleParams offerer_without_actpass_params[] = {
+ {cricket::CONNECTIONROLE_ACTIVE, cricket::CONNECTIONROLE_PASSIVE,
+ cricket::CA_ANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_PASSIVE, cricket::CONNECTIONROLE_ACTIVE,
+ cricket::CA_ANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_ACTPASS, cricket::CONNECTIONROLE_PASSIVE,
+ cricket::CA_ANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_ACTIVE, cricket::CONNECTIONROLE_PASSIVE,
+ cricket::CA_PRANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_PASSIVE, cricket::CONNECTIONROLE_ACTIVE,
+ cricket::CA_PRANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_ACTPASS, cricket::CONNECTIONROLE_PASSIVE,
+ cricket::CA_PRANSWER, cricket::CA_OFFER},
+ {cricket::CONNECTIONROLE_ACTIVE, cricket::CONNECTIONROLE_PASSIVE,
+ cricket::CA_OFFER, cricket::CA_ANSWER},
+ {cricket::CONNECTIONROLE_PASSIVE, cricket::CONNECTIONROLE_ACTIVE,
+ cricket::CA_OFFER, cricket::CA_ANSWER},
+ {cricket::CONNECTIONROLE_PASSIVE, cricket::CONNECTIONROLE_ACTPASS,
+ cricket::CA_OFFER, cricket::CA_ANSWER},
+ {cricket::CONNECTIONROLE_ACTIVE, cricket::CONNECTIONROLE_PASSIVE,
+ cricket::CA_OFFER, cricket::CA_PRANSWER},
+ {cricket::CONNECTIONROLE_PASSIVE, cricket::CONNECTIONROLE_ACTIVE,
+ cricket::CA_OFFER, cricket::CA_PRANSWER},
+ {cricket::CONNECTIONROLE_PASSIVE, cricket::CONNECTIONROLE_ACTPASS,
+ cricket::CA_OFFER, cricket::CA_PRANSWER}};
+
+ for (auto& param : offerer_without_actpass_params) {
+ local_desc.connection_role = param.local_role;
+ remote_desc.connection_role = param.remote_role;
+
+ ASSERT_TRUE(transport_->SetRemoteTransportDescription(
+ remote_desc, param.remote_action, nullptr));
+ ASSERT_TRUE(transport_->SetLocalTransportDescription(
+ local_desc, param.local_action, nullptr));
+ EXPECT_FALSE(
+ transport_->NegotiateRole(param.local_action, &ssl_role, &error_desc));
+ }
+}
diff --git a/chromium/third_party/webrtc/p2p/base/transportchannel.h b/chromium/third_party/webrtc/p2p/base/transportchannel.h
index 24f90e3cf6f..87ed9fdea31 100644
--- a/chromium/third_party/webrtc/p2p/base/transportchannel.h
+++ b/chromium/third_party/webrtc/p2p/base/transportchannel.h
@@ -11,9 +11,11 @@
#ifndef WEBRTC_P2P_BASE_TRANSPORTCHANNEL_H_
#define WEBRTC_P2P_BASE_TRANSPORTCHANNEL_H_
+#include <memory>
#include <string>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/p2p/base/candidate.h"
#include "webrtc/p2p/base/candidatepairinterface.h"
#include "webrtc/p2p/base/transport.h"
@@ -130,7 +132,7 @@ class TransportChannel : public sigslot::has_slots<> {
GetLocalCertificate() const = 0;
// Gets a copy of the remote side's SSL certificate.
- virtual rtc::scoped_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate()
+ virtual std::unique_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate()
const = 0;
// Allows key material to be extracted for external encryption.
diff --git a/chromium/third_party/webrtc/p2p/base/transportchannelimpl.h b/chromium/third_party/webrtc/p2p/base/transportchannelimpl.h
index 904ebf563fd..f548aa012f8 100644
--- a/chromium/third_party/webrtc/p2p/base/transportchannelimpl.h
+++ b/chromium/third_party/webrtc/p2p/base/transportchannelimpl.h
@@ -12,6 +12,8 @@
#define WEBRTC_P2P_BASE_TRANSPORTCHANNELIMPL_H_
#include <string>
+
+#include "webrtc/base/constructormagic.h"
#include "webrtc/p2p/base/transportchannel.h"
namespace buzz { class XmlElement; }
diff --git a/chromium/third_party/webrtc/p2p/base/transportcontroller.cc b/chromium/third_party/webrtc/p2p/base/transportcontroller.cc
index 708c60f9df5..a961541dc3e 100644
--- a/chromium/third_party/webrtc/p2p/base/transportcontroller.cc
+++ b/chromium/third_party/webrtc/p2p/base/transportcontroller.cc
@@ -11,6 +11,7 @@
#include "webrtc/p2p/base/transportcontroller.h"
#include <algorithm>
+#include <memory>
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
@@ -19,6 +20,10 @@
#include "webrtc/p2p/base/p2ptransport.h"
#include "webrtc/p2p/base/port.h"
+#ifdef HAVE_QUIC
+#include "webrtc/p2p/quic/quictransport.h"
+#endif // HAVE_QUIC
+
namespace cricket {
enum {
@@ -38,59 +43,60 @@ struct CandidatesData : public rtc::MessageData {
};
TransportController::TransportController(rtc::Thread* signaling_thread,
- rtc::Thread* worker_thread,
+ rtc::Thread* network_thread,
PortAllocator* port_allocator)
: signaling_thread_(signaling_thread),
- worker_thread_(worker_thread),
+ network_thread_(network_thread),
port_allocator_(port_allocator) {}
TransportController::~TransportController() {
- worker_thread_->Invoke<void>(
- rtc::Bind(&TransportController::DestroyAllTransports_w, this));
+ network_thread_->Invoke<void>(
+ rtc::Bind(&TransportController::DestroyAllTransports_n, this));
signaling_thread_->Clear(this);
}
bool TransportController::SetSslMaxProtocolVersion(
rtc::SSLProtocolVersion version) {
- return worker_thread_->Invoke<bool>(rtc::Bind(
- &TransportController::SetSslMaxProtocolVersion_w, this, version));
+ return network_thread_->Invoke<bool>(rtc::Bind(
+ &TransportController::SetSslMaxProtocolVersion_n, this, version));
}
void TransportController::SetIceConfig(const IceConfig& config) {
- worker_thread_->Invoke<void>(
- rtc::Bind(&TransportController::SetIceConfig_w, this, config));
+ network_thread_->Invoke<void>(
+ rtc::Bind(&TransportController::SetIceConfig_n, this, config));
}
void TransportController::SetIceRole(IceRole ice_role) {
- worker_thread_->Invoke<void>(
- rtc::Bind(&TransportController::SetIceRole_w, this, ice_role));
+ network_thread_->Invoke<void>(
+ rtc::Bind(&TransportController::SetIceRole_n, this, ice_role));
}
bool TransportController::GetSslRole(const std::string& transport_name,
rtc::SSLRole* role) {
- return worker_thread_->Invoke<bool>(rtc::Bind(
- &TransportController::GetSslRole_w, this, transport_name, role));
+ return network_thread_->Invoke<bool>(rtc::Bind(
+ &TransportController::GetSslRole_n, this, transport_name, role));
}
bool TransportController::SetLocalCertificate(
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) {
- return worker_thread_->Invoke<bool>(rtc::Bind(
- &TransportController::SetLocalCertificate_w, this, certificate));
+ return network_thread_->Invoke<bool>(rtc::Bind(
+ &TransportController::SetLocalCertificate_n, this, certificate));
}
bool TransportController::GetLocalCertificate(
const std::string& transport_name,
rtc::scoped_refptr<rtc::RTCCertificate>* certificate) {
- return worker_thread_->Invoke<bool>(
- rtc::Bind(&TransportController::GetLocalCertificate_w, this,
+ return network_thread_->Invoke<bool>(
+ rtc::Bind(&TransportController::GetLocalCertificate_n, this,
transport_name, certificate));
}
-rtc::scoped_ptr<rtc::SSLCertificate>
+std::unique_ptr<rtc::SSLCertificate>
TransportController::GetRemoteSSLCertificate(
const std::string& transport_name) {
- return worker_thread_->Invoke<rtc::scoped_ptr<rtc::SSLCertificate>>(rtc::Bind(
- &TransportController::GetRemoteSSLCertificate_w, this, transport_name));
+ return network_thread_->Invoke<std::unique_ptr<rtc::SSLCertificate>>(
+ rtc::Bind(&TransportController::GetRemoteSSLCertificate_n, this,
+ transport_name));
}
bool TransportController::SetLocalTransportDescription(
@@ -98,8 +104,8 @@ bool TransportController::SetLocalTransportDescription(
const TransportDescription& tdesc,
ContentAction action,
std::string* err) {
- return worker_thread_->Invoke<bool>(
- rtc::Bind(&TransportController::SetLocalTransportDescription_w, this,
+ return network_thread_->Invoke<bool>(
+ rtc::Bind(&TransportController::SetLocalTransportDescription_n, this,
transport_name, tdesc, action, err));
}
@@ -108,48 +114,48 @@ bool TransportController::SetRemoteTransportDescription(
const TransportDescription& tdesc,
ContentAction action,
std::string* err) {
- return worker_thread_->Invoke<bool>(
- rtc::Bind(&TransportController::SetRemoteTransportDescription_w, this,
+ return network_thread_->Invoke<bool>(
+ rtc::Bind(&TransportController::SetRemoteTransportDescription_n, this,
transport_name, tdesc, action, err));
}
void TransportController::MaybeStartGathering() {
- worker_thread_->Invoke<void>(
- rtc::Bind(&TransportController::MaybeStartGathering_w, this));
+ network_thread_->Invoke<void>(
+ rtc::Bind(&TransportController::MaybeStartGathering_n, this));
}
bool TransportController::AddRemoteCandidates(const std::string& transport_name,
const Candidates& candidates,
std::string* err) {
- return worker_thread_->Invoke<bool>(
- rtc::Bind(&TransportController::AddRemoteCandidates_w, this,
+ return network_thread_->Invoke<bool>(
+ rtc::Bind(&TransportController::AddRemoteCandidates_n, this,
transport_name, candidates, err));
}
bool TransportController::RemoveRemoteCandidates(const Candidates& candidates,
std::string* err) {
- return worker_thread_->Invoke<bool>(rtc::Bind(
- &TransportController::RemoveRemoteCandidates_w, this, candidates, err));
+ return network_thread_->Invoke<bool>(rtc::Bind(
+ &TransportController::RemoveRemoteCandidates_n, this, candidates, err));
}
bool TransportController::ReadyForRemoteCandidates(
const std::string& transport_name) {
- return worker_thread_->Invoke<bool>(rtc::Bind(
- &TransportController::ReadyForRemoteCandidates_w, this, transport_name));
+ return network_thread_->Invoke<bool>(rtc::Bind(
+ &TransportController::ReadyForRemoteCandidates_n, this, transport_name));
}
bool TransportController::GetStats(const std::string& transport_name,
TransportStats* stats) {
- return worker_thread_->Invoke<bool>(
- rtc::Bind(&TransportController::GetStats_w, this, transport_name, stats));
+ return network_thread_->Invoke<bool>(
+ rtc::Bind(&TransportController::GetStats_n, this, transport_name, stats));
}
-TransportChannel* TransportController::CreateTransportChannel_w(
+TransportChannel* TransportController::CreateTransportChannel_n(
const std::string& transport_name,
int component) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
- auto it = FindChannel_w(transport_name, component);
+ auto it = FindChannel_n(transport_name, component);
if (it != channels_.end()) {
// Channel already exists; increment reference count and return.
it->AddRef();
@@ -157,34 +163,34 @@ TransportChannel* TransportController::CreateTransportChannel_w(
}
// Need to create a new channel.
- Transport* transport = GetOrCreateTransport_w(transport_name);
+ Transport* transport = GetOrCreateTransport_n(transport_name);
TransportChannelImpl* channel = transport->CreateChannel(component);
channel->SignalWritableState.connect(
- this, &TransportController::OnChannelWritableState_w);
+ this, &TransportController::OnChannelWritableState_n);
channel->SignalReceivingState.connect(
- this, &TransportController::OnChannelReceivingState_w);
+ this, &TransportController::OnChannelReceivingState_n);
channel->SignalGatheringState.connect(
- this, &TransportController::OnChannelGatheringState_w);
+ this, &TransportController::OnChannelGatheringState_n);
channel->SignalCandidateGathered.connect(
- this, &TransportController::OnChannelCandidateGathered_w);
+ this, &TransportController::OnChannelCandidateGathered_n);
channel->SignalCandidatesRemoved.connect(
- this, &TransportController::OnChannelCandidatesRemoved_w);
+ this, &TransportController::OnChannelCandidatesRemoved_n);
channel->SignalRoleConflict.connect(
- this, &TransportController::OnChannelRoleConflict_w);
+ this, &TransportController::OnChannelRoleConflict_n);
channel->SignalConnectionRemoved.connect(
- this, &TransportController::OnChannelConnectionRemoved_w);
+ this, &TransportController::OnChannelConnectionRemoved_n);
channels_.insert(channels_.end(), RefCountedChannel(channel))->AddRef();
// Adding a channel could cause aggregate state to change.
- UpdateAggregateStates_w();
+ UpdateAggregateStates_n();
return channel;
}
-void TransportController::DestroyTransportChannel_w(
+void TransportController::DestroyTransportChannel_n(
const std::string& transport_name,
int component) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
- auto it = FindChannel_w(transport_name, component);
+ auto it = FindChannel_n(transport_name, component);
if (it == channels_.end()) {
LOG(LS_WARNING) << "Attempting to delete " << transport_name
<< " TransportChannel " << component
@@ -198,15 +204,15 @@ void TransportController::DestroyTransportChannel_w(
}
channels_.erase(it);
- Transport* transport = GetTransport_w(transport_name);
+ Transport* transport = GetTransport_n(transport_name);
transport->DestroyChannel(component);
// Just as we create a Transport when its first channel is created,
// we delete it when its last channel is deleted.
if (!transport->HasChannels()) {
- DestroyTransport_w(transport_name);
+ DestroyTransport_n(transport_name);
}
// Removing a channel could cause aggregate state to change.
- UpdateAggregateStates_w();
+ UpdateAggregateStates_n();
}
const rtc::scoped_refptr<rtc::RTCCertificate>&
@@ -214,18 +220,23 @@ TransportController::certificate_for_testing() {
return certificate_;
}
-Transport* TransportController::CreateTransport_w(
+Transport* TransportController::CreateTransport_n(
const std::string& transport_name) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
+#ifdef HAVE_QUIC
+ if (quic_) {
+ return new QuicTransport(transport_name, port_allocator(), certificate_);
+ }
+#endif // HAVE_QUIC
Transport* transport = new DtlsTransport<P2PTransport>(
transport_name, port_allocator(), certificate_);
return transport;
}
-Transport* TransportController::GetTransport_w(
+Transport* TransportController::GetTransport_n(
const std::string& transport_name) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
auto iter = transports_.find(transport_name);
return (iter != transports_.end()) ? iter->second : nullptr;
@@ -268,7 +279,7 @@ void TransportController::OnMessage(rtc::Message* pmsg) {
}
std::vector<TransportController::RefCountedChannel>::iterator
-TransportController::FindChannel_w(const std::string& transport_name,
+TransportController::FindChannel_n(const std::string& transport_name,
int component) {
return std::find_if(
channels_.begin(), channels_.end(),
@@ -278,16 +289,16 @@ TransportController::FindChannel_w(const std::string& transport_name,
});
}
-Transport* TransportController::GetOrCreateTransport_w(
+Transport* TransportController::GetOrCreateTransport_n(
const std::string& transport_name) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
- Transport* transport = GetTransport_w(transport_name);
+ Transport* transport = GetTransport_n(transport_name);
if (transport) {
return transport;
}
- transport = CreateTransport_w(transport_name);
+ transport = CreateTransport_n(transport_name);
// The stuff below happens outside of CreateTransport_w so that unit tests
// can override CreateTransport_w to return a different type of transport.
transport->SetSslMaxProtocolVersion(ssl_max_version_);
@@ -302,9 +313,9 @@ Transport* TransportController::GetOrCreateTransport_w(
return transport;
}
-void TransportController::DestroyTransport_w(
+void TransportController::DestroyTransport_n(
const std::string& transport_name) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
auto iter = transports_.find(transport_name);
if (iter != transports_.end()) {
@@ -313,8 +324,8 @@ void TransportController::DestroyTransport_w(
}
}
-void TransportController::DestroyAllTransports_w() {
- RTC_DCHECK(worker_thread_->IsCurrent());
+void TransportController::DestroyAllTransports_n() {
+ RTC_DCHECK(network_thread_->IsCurrent());
for (const auto& kv : transports_) {
delete kv.second;
@@ -322,9 +333,9 @@ void TransportController::DestroyAllTransports_w() {
transports_.clear();
}
-bool TransportController::SetSslMaxProtocolVersion_w(
+bool TransportController::SetSslMaxProtocolVersion_n(
rtc::SSLProtocolVersion version) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
// Max SSL version can only be set before transports are created.
if (!transports_.empty()) {
@@ -335,27 +346,27 @@ bool TransportController::SetSslMaxProtocolVersion_w(
return true;
}
-void TransportController::SetIceConfig_w(const IceConfig& config) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+void TransportController::SetIceConfig_n(const IceConfig& config) {
+ RTC_DCHECK(network_thread_->IsCurrent());
ice_config_ = config;
for (const auto& kv : transports_) {
kv.second->SetIceConfig(ice_config_);
}
}
-void TransportController::SetIceRole_w(IceRole ice_role) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+void TransportController::SetIceRole_n(IceRole ice_role) {
+ RTC_DCHECK(network_thread_->IsCurrent());
ice_role_ = ice_role;
for (const auto& kv : transports_) {
kv.second->SetIceRole(ice_role_);
}
}
-bool TransportController::GetSslRole_w(const std::string& transport_name,
+bool TransportController::GetSslRole_n(const std::string& transport_name,
rtc::SSLRole* role) {
- RTC_DCHECK(worker_thread()->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
- Transport* t = GetTransport_w(transport_name);
+ Transport* t = GetTransport_n(transport_name);
if (!t) {
return false;
}
@@ -363,9 +374,9 @@ bool TransportController::GetSslRole_w(const std::string& transport_name,
return t->GetSslRole(role);
}
-bool TransportController::SetLocalCertificate_w(
+bool TransportController::SetLocalCertificate_n(
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
if (certificate_) {
return false;
@@ -381,12 +392,12 @@ bool TransportController::SetLocalCertificate_w(
return true;
}
-bool TransportController::GetLocalCertificate_w(
+bool TransportController::GetLocalCertificate_n(
const std::string& transport_name,
rtc::scoped_refptr<rtc::RTCCertificate>* certificate) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
- Transport* t = GetTransport_w(transport_name);
+ Transport* t = GetTransport_n(transport_name);
if (!t) {
return false;
}
@@ -394,12 +405,12 @@ bool TransportController::GetLocalCertificate_w(
return t->GetLocalCertificate(certificate);
}
-rtc::scoped_ptr<rtc::SSLCertificate>
-TransportController::GetRemoteSSLCertificate_w(
+std::unique_ptr<rtc::SSLCertificate>
+TransportController::GetRemoteSSLCertificate_n(
const std::string& transport_name) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
- Transport* t = GetTransport_w(transport_name);
+ Transport* t = GetTransport_n(transport_name);
if (!t) {
return nullptr;
}
@@ -407,14 +418,14 @@ TransportController::GetRemoteSSLCertificate_w(
return t->GetRemoteSSLCertificate();
}
-bool TransportController::SetLocalTransportDescription_w(
+bool TransportController::SetLocalTransportDescription_n(
const std::string& transport_name,
const TransportDescription& tdesc,
ContentAction action,
std::string* err) {
- RTC_DCHECK(worker_thread()->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
- Transport* transport = GetTransport_w(transport_name);
+ Transport* transport = GetTransport_n(transport_name);
if (!transport) {
// If we didn't find a transport, that's not an error;
// it could have been deleted as a result of bundling.
@@ -426,14 +437,14 @@ bool TransportController::SetLocalTransportDescription_w(
return transport->SetLocalTransportDescription(tdesc, action, err);
}
-bool TransportController::SetRemoteTransportDescription_w(
+bool TransportController::SetRemoteTransportDescription_n(
const std::string& transport_name,
const TransportDescription& tdesc,
ContentAction action,
std::string* err) {
- RTC_DCHECK(worker_thread()->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
- Transport* transport = GetTransport_w(transport_name);
+ Transport* transport = GetTransport_n(transport_name);
if (!transport) {
// If we didn't find a transport, that's not an error;
// it could have been deleted as a result of bundling.
@@ -445,19 +456,19 @@ bool TransportController::SetRemoteTransportDescription_w(
return transport->SetRemoteTransportDescription(tdesc, action, err);
}
-void TransportController::MaybeStartGathering_w() {
+void TransportController::MaybeStartGathering_n() {
for (const auto& kv : transports_) {
kv.second->MaybeStartGathering();
}
}
-bool TransportController::AddRemoteCandidates_w(
+bool TransportController::AddRemoteCandidates_n(
const std::string& transport_name,
const Candidates& candidates,
std::string* err) {
- RTC_DCHECK(worker_thread()->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
- Transport* transport = GetTransport_w(transport_name);
+ Transport* transport = GetTransport_n(transport_name);
if (!transport) {
// If we didn't find a transport, that's not an error;
// it could have been deleted as a result of bundling.
@@ -467,9 +478,9 @@ bool TransportController::AddRemoteCandidates_w(
return transport->AddRemoteCandidates(candidates, err);
}
-bool TransportController::RemoveRemoteCandidates_w(const Candidates& candidates,
+bool TransportController::RemoveRemoteCandidates_n(const Candidates& candidates,
std::string* err) {
- RTC_DCHECK(worker_thread()->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
std::map<std::string, Candidates> candidates_by_transport_name;
for (const Candidate& cand : candidates) {
RTC_DCHECK(!cand.transport_name().empty());
@@ -478,7 +489,7 @@ bool TransportController::RemoveRemoteCandidates_w(const Candidates& candidates,
bool result = true;
for (auto kv : candidates_by_transport_name) {
- Transport* transport = GetTransport_w(kv.first);
+ Transport* transport = GetTransport_n(kv.first);
if (!transport) {
// If we didn't find a transport, that's not an error;
// it could have been deleted as a result of bundling.
@@ -489,51 +500,51 @@ bool TransportController::RemoveRemoteCandidates_w(const Candidates& candidates,
return result;
}
-bool TransportController::ReadyForRemoteCandidates_w(
+bool TransportController::ReadyForRemoteCandidates_n(
const std::string& transport_name) {
- RTC_DCHECK(worker_thread()->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
- Transport* transport = GetTransport_w(transport_name);
+ Transport* transport = GetTransport_n(transport_name);
if (!transport) {
return false;
}
return transport->ready_for_remote_candidates();
}
-bool TransportController::GetStats_w(const std::string& transport_name,
+bool TransportController::GetStats_n(const std::string& transport_name,
TransportStats* stats) {
- RTC_DCHECK(worker_thread()->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
- Transport* transport = GetTransport_w(transport_name);
+ Transport* transport = GetTransport_n(transport_name);
if (!transport) {
return false;
}
return transport->GetStats(stats);
}
-void TransportController::OnChannelWritableState_w(TransportChannel* channel) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+void TransportController::OnChannelWritableState_n(TransportChannel* channel) {
+ RTC_DCHECK(network_thread_->IsCurrent());
LOG(LS_INFO) << channel->transport_name() << " TransportChannel "
<< channel->component() << " writability changed to "
<< channel->writable() << ".";
- UpdateAggregateStates_w();
+ UpdateAggregateStates_n();
}
-void TransportController::OnChannelReceivingState_w(TransportChannel* channel) {
- RTC_DCHECK(worker_thread_->IsCurrent());
- UpdateAggregateStates_w();
+void TransportController::OnChannelReceivingState_n(TransportChannel* channel) {
+ RTC_DCHECK(network_thread_->IsCurrent());
+ UpdateAggregateStates_n();
}
-void TransportController::OnChannelGatheringState_w(
+void TransportController::OnChannelGatheringState_n(
TransportChannelImpl* channel) {
- RTC_DCHECK(worker_thread_->IsCurrent());
- UpdateAggregateStates_w();
+ RTC_DCHECK(network_thread_->IsCurrent());
+ UpdateAggregateStates_n();
}
-void TransportController::OnChannelCandidateGathered_w(
+void TransportController::OnChannelCandidateGathered_n(
TransportChannelImpl* channel,
const Candidate& candidate) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
// We should never signal peer-reflexive candidates.
if (candidate.type() == PRFLX_PORT_TYPE) {
@@ -547,7 +558,7 @@ void TransportController::OnChannelCandidateGathered_w(
signaling_thread_->Post(this, MSG_CANDIDATESGATHERED, data);
}
-void TransportController::OnChannelCandidatesRemoved_w(
+void TransportController::OnChannelCandidatesRemoved_n(
TransportChannelImpl* channel,
const Candidates& candidates) {
invoker_.AsyncInvoke<void>(
@@ -562,9 +573,9 @@ void TransportController::OnChannelCandidatesRemoved(
SignalCandidatesRemoved(candidates);
}
-void TransportController::OnChannelRoleConflict_w(
+void TransportController::OnChannelRoleConflict_n(
TransportChannelImpl* channel) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
if (ice_role_switch_) {
LOG(LS_WARNING)
@@ -581,17 +592,17 @@ void TransportController::OnChannelRoleConflict_w(
}
}
-void TransportController::OnChannelConnectionRemoved_w(
+void TransportController::OnChannelConnectionRemoved_n(
TransportChannelImpl* channel) {
- RTC_DCHECK(worker_thread_->IsCurrent());
+ RTC_DCHECK(network_thread_->IsCurrent());
LOG(LS_INFO) << channel->transport_name() << " TransportChannel "
<< channel->component()
<< " connection removed. Check if state is complete.";
- UpdateAggregateStates_w();
+ UpdateAggregateStates_n();
}
-void TransportController::UpdateAggregateStates_w() {
- RTC_DCHECK(worker_thread_->IsCurrent());
+void TransportController::UpdateAggregateStates_n() {
+ RTC_DCHECK(network_thread_->IsCurrent());
IceConnectionState new_connection_state = kIceConnectionConnecting;
IceGatheringState new_gathering_state = kIceGatheringNew;
diff --git a/chromium/third_party/webrtc/p2p/base/transportcontroller.h b/chromium/third_party/webrtc/p2p/base/transportcontroller.h
index 9d06823920f..cc16a768ab9 100644
--- a/chromium/third_party/webrtc/p2p/base/transportcontroller.h
+++ b/chromium/third_party/webrtc/p2p/base/transportcontroller.h
@@ -12,6 +12,7 @@
#define WEBRTC_P2P_BASE_TRANSPORTCONTROLLER_H_
#include <map>
+#include <memory>
#include <string>
#include <vector>
@@ -31,13 +32,13 @@ class TransportController : public sigslot::has_slots<>,
public rtc::MessageHandler {
public:
TransportController(rtc::Thread* signaling_thread,
- rtc::Thread* worker_thread,
+ rtc::Thread* network_thread,
PortAllocator* port_allocator);
virtual ~TransportController();
rtc::Thread* signaling_thread() const { return signaling_thread_; }
- rtc::Thread* worker_thread() const { return worker_thread_; }
+ rtc::Thread* network_thread() const { return network_thread_; }
PortAllocator* port_allocator() const { return port_allocator_; }
@@ -59,7 +60,7 @@ class TransportController : public sigslot::has_slots<>,
const std::string& transport_name,
rtc::scoped_refptr<rtc::RTCCertificate>* certificate);
// Caller owns returned certificate
- rtc::scoped_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate(
+ std::unique_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate(
const std::string& transport_name);
bool SetLocalTransportDescription(const std::string& transport_name,
const TransportDescription& tdesc,
@@ -81,15 +82,18 @@ class TransportController : public sigslot::has_slots<>,
// Creates a channel if it doesn't exist. Otherwise, increments a reference
// count and returns an existing channel.
- virtual TransportChannel* CreateTransportChannel_w(
+ virtual TransportChannel* CreateTransportChannel_n(
const std::string& transport_name,
int component);
// Decrements a channel's reference count, and destroys the channel if
// nothing is referencing it.
- virtual void DestroyTransportChannel_w(const std::string& transport_name,
+ virtual void DestroyTransportChannel_n(const std::string& transport_name,
int component);
+ void use_quic() { quic_ = true; }
+ bool quic() const { return quic_; }
+
// All of these signals are fired on the signalling thread.
// If any transport failed => failed,
@@ -117,11 +121,11 @@ class TransportController : public sigslot::has_slots<>,
protected:
// Protected and virtual so we can override it in unit tests.
- virtual Transport* CreateTransport_w(const std::string& transport_name);
+ virtual Transport* CreateTransport_n(const std::string& transport_name);
// For unit tests
const std::map<std::string, Transport*>& transports() { return transports_; }
- Transport* GetTransport_w(const std::string& transport_name);
+ Transport* GetTransport_n(const std::string& transport_name);
private:
void OnMessage(rtc::Message* pmsg) override;
@@ -149,57 +153,57 @@ class TransportController : public sigslot::has_slots<>,
int ref_;
};
- std::vector<RefCountedChannel>::iterator FindChannel_w(
+ std::vector<RefCountedChannel>::iterator FindChannel_n(
const std::string& transport_name,
int component);
- Transport* GetOrCreateTransport_w(const std::string& transport_name);
- void DestroyTransport_w(const std::string& transport_name);
- void DestroyAllTransports_w();
+ Transport* GetOrCreateTransport_n(const std::string& transport_name);
+ void DestroyTransport_n(const std::string& transport_name);
+ void DestroyAllTransports_n();
- bool SetSslMaxProtocolVersion_w(rtc::SSLProtocolVersion version);
- void SetIceConfig_w(const IceConfig& config);
- void SetIceRole_w(IceRole ice_role);
- bool GetSslRole_w(const std::string& transport_name, rtc::SSLRole* role);
- bool SetLocalCertificate_w(
+ bool SetSslMaxProtocolVersion_n(rtc::SSLProtocolVersion version);
+ void SetIceConfig_n(const IceConfig& config);
+ void SetIceRole_n(IceRole ice_role);
+ bool GetSslRole_n(const std::string& transport_name, rtc::SSLRole* role);
+ bool SetLocalCertificate_n(
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate);
- bool GetLocalCertificate_w(
+ bool GetLocalCertificate_n(
const std::string& transport_name,
rtc::scoped_refptr<rtc::RTCCertificate>* certificate);
- rtc::scoped_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate_w(
+ std::unique_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate_n(
const std::string& transport_name);
- bool SetLocalTransportDescription_w(const std::string& transport_name,
+ bool SetLocalTransportDescription_n(const std::string& transport_name,
const TransportDescription& tdesc,
ContentAction action,
std::string* err);
- bool SetRemoteTransportDescription_w(const std::string& transport_name,
+ bool SetRemoteTransportDescription_n(const std::string& transport_name,
const TransportDescription& tdesc,
ContentAction action,
std::string* err);
- void MaybeStartGathering_w();
- bool AddRemoteCandidates_w(const std::string& transport_name,
+ void MaybeStartGathering_n();
+ bool AddRemoteCandidates_n(const std::string& transport_name,
const Candidates& candidates,
std::string* err);
- bool RemoveRemoteCandidates_w(const Candidates& candidates, std::string* err);
- bool ReadyForRemoteCandidates_w(const std::string& transport_name);
- bool GetStats_w(const std::string& transport_name, TransportStats* stats);
+ bool RemoveRemoteCandidates_n(const Candidates& candidates, std::string* err);
+ bool ReadyForRemoteCandidates_n(const std::string& transport_name);
+ bool GetStats_n(const std::string& transport_name, TransportStats* stats);
// Handlers for signals from Transport.
- void OnChannelWritableState_w(TransportChannel* channel);
- void OnChannelReceivingState_w(TransportChannel* channel);
- void OnChannelGatheringState_w(TransportChannelImpl* channel);
- void OnChannelCandidateGathered_w(TransportChannelImpl* channel,
+ void OnChannelWritableState_n(TransportChannel* channel);
+ void OnChannelReceivingState_n(TransportChannel* channel);
+ void OnChannelGatheringState_n(TransportChannelImpl* channel);
+ void OnChannelCandidateGathered_n(TransportChannelImpl* channel,
const Candidate& candidate);
void OnChannelCandidatesRemoved(const Candidates& candidates);
- void OnChannelCandidatesRemoved_w(TransportChannelImpl* channel,
+ void OnChannelCandidatesRemoved_n(TransportChannelImpl* channel,
const Candidates& candidates);
- void OnChannelRoleConflict_w(TransportChannelImpl* channel);
- void OnChannelConnectionRemoved_w(TransportChannelImpl* channel);
+ void OnChannelRoleConflict_n(TransportChannelImpl* channel);
+ void OnChannelConnectionRemoved_n(TransportChannelImpl* channel);
- void UpdateAggregateStates_w();
+ void UpdateAggregateStates_n();
rtc::Thread* const signaling_thread_ = nullptr;
- rtc::Thread* const worker_thread_ = nullptr;
+ rtc::Thread* const network_thread_ = nullptr;
typedef std::map<std::string, Transport*> TransportMap;
TransportMap transports_;
@@ -221,6 +225,8 @@ class TransportController : public sigslot::has_slots<>,
uint64_t ice_tiebreaker_ = rtc::CreateRandomId64();
rtc::scoped_refptr<rtc::RTCCertificate> certificate_;
rtc::AsyncInvoker invoker_;
+ // True if QUIC is used instead of DTLS.
+ bool quic_ = false;
};
} // namespace cricket
diff --git a/chromium/third_party/webrtc/p2p/base/transportcontroller_unittest.cc b/chromium/third_party/webrtc/p2p/base/transportcontroller_unittest.cc
index c90fd700ce3..627975c90ca 100644
--- a/chromium/third_party/webrtc/p2p/base/transportcontroller_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/transportcontroller_unittest.cc
@@ -9,19 +9,19 @@
*/
#include <map>
+#include <memory>
#include "webrtc/base/fakesslidentity.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/helpers.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sslidentity.h"
#include "webrtc/base/thread.h"
#include "webrtc/p2p/base/dtlstransportchannel.h"
+#include "webrtc/p2p/base/fakeportallocator.h"
#include "webrtc/p2p/base/faketransportcontroller.h"
#include "webrtc/p2p/base/p2ptransportchannel.h"
#include "webrtc/p2p/base/portallocator.h"
#include "webrtc/p2p/base/transportcontroller.h"
-#include "webrtc/p2p/client/fakeportallocator.h"
static const int kTimeout = 100;
static const char kIceUfrag1[] = "TESTICEUFRAG0001";
@@ -81,12 +81,12 @@ class TransportControllerTest : public testing::Test,
FakeTransportChannel* CreateChannel(const std::string& content,
int component) {
TransportChannel* channel =
- transport_controller_->CreateTransportChannel_w(content, component);
+ transport_controller_->CreateTransportChannel_n(content, component);
return static_cast<FakeTransportChannel*>(channel);
}
void DestroyChannel(const std::string& content, int component) {
- transport_controller_->DestroyTransportChannel_w(content, component);
+ transport_controller_->DestroyTransportChannel_n(content, component);
}
Candidate CreateCandidate(int component) {
@@ -177,8 +177,8 @@ class TransportControllerTest : public testing::Test,
++candidates_signal_count_;
}
- rtc::scoped_ptr<rtc::Thread> worker_thread_; // Not used for most tests.
- rtc::scoped_ptr<TransportControllerForTest> transport_controller_;
+ std::unique_ptr<rtc::Thread> worker_thread_; // Not used for most tests.
+ std::unique_ptr<TransportControllerForTest> transport_controller_;
// Information received from signals from transport controller.
IceConnectionState connection_state_ = cricket::kIceConnectionConnecting;
@@ -269,10 +269,10 @@ TEST_F(TransportControllerTest, TestGetSslRole) {
TEST_F(TransportControllerTest, TestSetAndGetLocalCertificate) {
rtc::scoped_refptr<rtc::RTCCertificate> certificate1 =
- rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::SSLIdentity>(
+ rtc::RTCCertificate::Create(std::unique_ptr<rtc::SSLIdentity>(
rtc::SSLIdentity::Generate("session1", rtc::KT_DEFAULT)));
rtc::scoped_refptr<rtc::RTCCertificate> certificate2 =
- rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::SSLIdentity>(
+ rtc::RTCCertificate::Create(std::unique_ptr<rtc::SSLIdentity>(
rtc::SSLIdentity::Generate("session2", rtc::KT_DEFAULT)));
rtc::scoped_refptr<rtc::RTCCertificate> returned_certificate;
@@ -308,7 +308,7 @@ TEST_F(TransportControllerTest, TestGetRemoteSSLCertificate) {
ASSERT_NE(nullptr, channel);
channel->SetRemoteSSLCertificate(&fake_certificate);
- rtc::scoped_ptr<rtc::SSLCertificate> returned_certificate =
+ std::unique_ptr<rtc::SSLCertificate> returned_certificate =
transport_controller_->GetRemoteSSLCertificate("audio");
EXPECT_TRUE(returned_certificate);
EXPECT_EQ(fake_certificate.ToPEMString(),
diff --git a/chromium/third_party/webrtc/p2p/base/transportdescription.h b/chromium/third_party/webrtc/p2p/base/transportdescription.h
index 003780b7679..42e45a670f9 100644
--- a/chromium/third_party/webrtc/p2p/base/transportdescription.h
+++ b/chromium/third_party/webrtc/p2p/base/transportdescription.h
@@ -12,11 +12,11 @@
#define WEBRTC_P2P_BASE_TRANSPORTDESCRIPTION_H_
#include <algorithm>
+#include <memory>
#include <string>
#include <vector>
#include "webrtc/p2p/base/p2pconstants.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sslfingerprint.h"
namespace cricket {
@@ -139,7 +139,7 @@ struct TransportDescription {
IceMode ice_mode;
ConnectionRole connection_role;
- rtc::scoped_ptr<rtc::SSLFingerprint> identity_fingerprint;
+ std::unique_ptr<rtc::SSLFingerprint> identity_fingerprint;
};
} // namespace cricket
diff --git a/chromium/third_party/webrtc/p2p/base/transportdescriptionfactory.cc b/chromium/third_party/webrtc/p2p/base/transportdescriptionfactory.cc
index 1ddf55d4a13..e57b7e3efae 100644
--- a/chromium/third_party/webrtc/p2p/base/transportdescriptionfactory.cc
+++ b/chromium/third_party/webrtc/p2p/base/transportdescriptionfactory.cc
@@ -10,6 +10,8 @@
#include "webrtc/p2p/base/transportdescriptionfactory.h"
+#include <memory>
+
#include "webrtc/p2p/base/transportdescription.h"
#include "webrtc/base/helpers.h"
#include "webrtc/base/logging.h"
@@ -25,7 +27,7 @@ TransportDescriptionFactory::TransportDescriptionFactory()
TransportDescription* TransportDescriptionFactory::CreateOffer(
const TransportOptions& options,
const TransportDescription* current_description) const {
- rtc::scoped_ptr<TransportDescription> desc(new TransportDescription());
+ std::unique_ptr<TransportDescription> desc(new TransportDescription());
// Generate the ICE credentials if we don't already have them.
if (!current_description || options.ice_restart) {
@@ -59,7 +61,7 @@ TransportDescription* TransportDescriptionFactory::CreateAnswer(
return NULL;
}
- rtc::scoped_ptr<TransportDescription> desc(new TransportDescription());
+ std::unique_ptr<TransportDescription> desc(new TransportDescription());
// Generate the ICE credentials if we don't already have them or ice is
// being restarted.
if (!current_description || options.ice_restart) {
diff --git a/chromium/third_party/webrtc/p2p/base/transportdescriptionfactory_unittest.cc b/chromium/third_party/webrtc/p2p/base/transportdescriptionfactory_unittest.cc
index 765c607a6d6..38675ba401e 100644
--- a/chromium/third_party/webrtc/p2p/base/transportdescriptionfactory_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/transportdescriptionfactory_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <string>
#include <vector>
@@ -18,7 +19,6 @@
#include "webrtc/base/gunit.h"
#include "webrtc/base/ssladapter.h"
-using rtc::scoped_ptr;
using cricket::TransportDescriptionFactory;
using cricket::TransportDescription;
using cricket::TransportOptions;
@@ -26,10 +26,10 @@ using cricket::TransportOptions;
class TransportDescriptionFactoryTest : public testing::Test {
public:
TransportDescriptionFactoryTest()
- : cert1_(rtc::RTCCertificate::Create(
- scoped_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("User1")))),
- cert2_(rtc::RTCCertificate::Create(
- scoped_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("User2")))) {}
+ : cert1_(rtc::RTCCertificate::Create(std::unique_ptr<rtc::SSLIdentity>(
+ new rtc::FakeSSLIdentity("User1")))),
+ cert2_(rtc::RTCCertificate::Create(std::unique_ptr<rtc::SSLIdentity>(
+ new rtc::FakeSSLIdentity("User2")))) {}
void CheckDesc(const TransportDescription* desc,
const std::string& opt, const std::string& ice_ufrag,
@@ -71,22 +71,20 @@ class TransportDescriptionFactoryTest : public testing::Test {
cricket::TransportOptions options;
// The initial offer / answer exchange.
- rtc::scoped_ptr<TransportDescription> offer(f1_.CreateOffer(
- options, NULL));
- rtc::scoped_ptr<TransportDescription> answer(
- f2_.CreateAnswer(offer.get(),
- options, NULL));
+ std::unique_ptr<TransportDescription> offer(f1_.CreateOffer(options, NULL));
+ std::unique_ptr<TransportDescription> answer(
+ f2_.CreateAnswer(offer.get(), options, NULL));
// Create an updated offer where we restart ice.
options.ice_restart = true;
- rtc::scoped_ptr<TransportDescription> restart_offer(f1_.CreateOffer(
- options, offer.get()));
+ std::unique_ptr<TransportDescription> restart_offer(
+ f1_.CreateOffer(options, offer.get()));
VerifyUfragAndPasswordChanged(dtls, offer.get(), restart_offer.get());
// Create a new answer. The transport ufrag and password is changed since
// |options.ice_restart == true|
- rtc::scoped_ptr<TransportDescription> restart_answer(
+ std::unique_ptr<TransportDescription> restart_answer(
f2_.CreateAnswer(restart_offer.get(), options, answer.get()));
ASSERT_TRUE(restart_answer.get() != NULL);
@@ -120,8 +118,8 @@ class TransportDescriptionFactoryTest : public testing::Test {
};
TEST_F(TransportDescriptionFactoryTest, TestOfferDefault) {
- scoped_ptr<TransportDescription> desc(f1_.CreateOffer(
- TransportOptions(), NULL));
+ std::unique_ptr<TransportDescription> desc(
+ f1_.CreateOffer(TransportOptions(), NULL));
CheckDesc(desc.get(), "", "", "", "");
}
@@ -131,8 +129,8 @@ TEST_F(TransportDescriptionFactoryTest, TestOfferDtls) {
std::string digest_alg;
ASSERT_TRUE(cert1_->ssl_certificate().GetSignatureDigestAlgorithm(
&digest_alg));
- scoped_ptr<TransportDescription> desc(f1_.CreateOffer(
- TransportOptions(), NULL));
+ std::unique_ptr<TransportDescription> desc(
+ f1_.CreateOffer(TransportOptions(), NULL));
CheckDesc(desc.get(), "", "", "", digest_alg);
// Ensure it also works with SEC_REQUIRED.
f1_.set_secure(cricket::SEC_REQUIRED);
@@ -143,8 +141,8 @@ TEST_F(TransportDescriptionFactoryTest, TestOfferDtls) {
// Test generating an offer with DTLS fails with no identity.
TEST_F(TransportDescriptionFactoryTest, TestOfferDtlsWithNoIdentity) {
f1_.set_secure(cricket::SEC_ENABLED);
- scoped_ptr<TransportDescription> desc(f1_.CreateOffer(
- TransportOptions(), NULL));
+ std::unique_ptr<TransportDescription> desc(
+ f1_.CreateOffer(TransportOptions(), NULL));
ASSERT_TRUE(desc.get() == NULL);
}
@@ -156,21 +154,21 @@ TEST_F(TransportDescriptionFactoryTest, TestOfferDtlsReofferDtls) {
std::string digest_alg;
ASSERT_TRUE(cert1_->ssl_certificate().GetSignatureDigestAlgorithm(
&digest_alg));
- scoped_ptr<TransportDescription> old_desc(f1_.CreateOffer(
- TransportOptions(), NULL));
+ std::unique_ptr<TransportDescription> old_desc(
+ f1_.CreateOffer(TransportOptions(), NULL));
ASSERT_TRUE(old_desc.get() != NULL);
- scoped_ptr<TransportDescription> desc(
+ std::unique_ptr<TransportDescription> desc(
f1_.CreateOffer(TransportOptions(), old_desc.get()));
CheckDesc(desc.get(), "",
old_desc->ice_ufrag, old_desc->ice_pwd, digest_alg);
}
TEST_F(TransportDescriptionFactoryTest, TestAnswerDefault) {
- scoped_ptr<TransportDescription> offer(f1_.CreateOffer(
- TransportOptions(), NULL));
+ std::unique_ptr<TransportDescription> offer(
+ f1_.CreateOffer(TransportOptions(), NULL));
ASSERT_TRUE(offer.get() != NULL);
- scoped_ptr<TransportDescription> desc(f2_.CreateAnswer(
- offer.get(), TransportOptions(), NULL));
+ std::unique_ptr<TransportDescription> desc(
+ f2_.CreateAnswer(offer.get(), TransportOptions(), NULL));
CheckDesc(desc.get(), "", "", "", "");
desc.reset(f2_.CreateAnswer(offer.get(), TransportOptions(),
NULL));
@@ -179,15 +177,14 @@ TEST_F(TransportDescriptionFactoryTest, TestAnswerDefault) {
// Test that we can update an answer properly; ICE credentials shouldn't change.
TEST_F(TransportDescriptionFactoryTest, TestReanswer) {
- scoped_ptr<TransportDescription> offer(
+ std::unique_ptr<TransportDescription> offer(
f1_.CreateOffer(TransportOptions(), NULL));
ASSERT_TRUE(offer.get() != NULL);
- scoped_ptr<TransportDescription> old_desc(
+ std::unique_ptr<TransportDescription> old_desc(
f2_.CreateAnswer(offer.get(), TransportOptions(), NULL));
ASSERT_TRUE(old_desc.get() != NULL);
- scoped_ptr<TransportDescription> desc(
- f2_.CreateAnswer(offer.get(), TransportOptions(),
- old_desc.get()));
+ std::unique_ptr<TransportDescription> desc(
+ f2_.CreateAnswer(offer.get(), TransportOptions(), old_desc.get()));
ASSERT_TRUE(desc.get() != NULL);
CheckDesc(desc.get(), "",
old_desc->ice_ufrag, old_desc->ice_pwd, "");
@@ -197,10 +194,10 @@ TEST_F(TransportDescriptionFactoryTest, TestReanswer) {
TEST_F(TransportDescriptionFactoryTest, TestAnswerDtlsToNoDtls) {
f1_.set_secure(cricket::SEC_ENABLED);
f1_.set_certificate(cert1_);
- scoped_ptr<TransportDescription> offer(
+ std::unique_ptr<TransportDescription> offer(
f1_.CreateOffer(TransportOptions(), NULL));
ASSERT_TRUE(offer.get() != NULL);
- scoped_ptr<TransportDescription> desc(
+ std::unique_ptr<TransportDescription> desc(
f2_.CreateAnswer(offer.get(), TransportOptions(), NULL));
CheckDesc(desc.get(), "", "", "", "");
}
@@ -210,10 +207,10 @@ TEST_F(TransportDescriptionFactoryTest, TestAnswerDtlsToNoDtls) {
TEST_F(TransportDescriptionFactoryTest, TestAnswerNoDtlsToDtls) {
f2_.set_secure(cricket::SEC_ENABLED);
f2_.set_certificate(cert2_);
- scoped_ptr<TransportDescription> offer(
+ std::unique_ptr<TransportDescription> offer(
f1_.CreateOffer(TransportOptions(), NULL));
ASSERT_TRUE(offer.get() != NULL);
- scoped_ptr<TransportDescription> desc(
+ std::unique_ptr<TransportDescription> desc(
f2_.CreateAnswer(offer.get(), TransportOptions(), NULL));
CheckDesc(desc.get(), "", "", "", "");
f2_.set_secure(cricket::SEC_REQUIRED);
@@ -236,10 +233,10 @@ TEST_F(TransportDescriptionFactoryTest, TestAnswerDtlsToDtls) {
ASSERT_TRUE(cert2_->ssl_certificate().GetSignatureDigestAlgorithm(
&digest_alg2));
- scoped_ptr<TransportDescription> offer(
+ std::unique_ptr<TransportDescription> offer(
f1_.CreateOffer(TransportOptions(), NULL));
ASSERT_TRUE(offer.get() != NULL);
- scoped_ptr<TransportDescription> desc(
+ std::unique_ptr<TransportDescription> desc(
f2_.CreateAnswer(offer.get(), TransportOptions(), NULL));
CheckDesc(desc.get(), "", "", "", digest_alg2);
f2_.set_secure(cricket::SEC_REQUIRED);
diff --git a/chromium/third_party/webrtc/p2p/base/turnport.cc b/chromium/third_party/webrtc/p2p/base/turnport.cc
index 9ea354c42cf..02553950129 100644
--- a/chromium/third_party/webrtc/p2p/base/turnport.cc
+++ b/chromium/third_party/webrtc/p2p/base/turnport.cc
@@ -533,11 +533,15 @@ int TurnPort::SendTo(const void* data, size_t size,
return static_cast<int>(size);
}
-void TurnPort::OnReadPacket(
- rtc::AsyncPacketSocket* socket, const char* data, size_t size,
- const rtc::SocketAddress& remote_addr,
- const rtc::PacketTime& packet_time) {
- ASSERT(socket == socket_);
+bool TurnPort::HandleIncomingPacket(rtc::AsyncPacketSocket* socket,
+ const char* data, size_t size,
+ const rtc::SocketAddress& remote_addr,
+ const rtc::PacketTime& packet_time) {
+ if (socket != socket_) {
+ // The packet was received on a shared socket after we've allocated a new
+ // socket for this TURN port.
+ return false;
+ }
// This is to guard against a STUN response from previous server after
// alternative server redirection. TODO(guoweis): add a unit test for this
@@ -547,19 +551,19 @@ void TurnPort::OnReadPacket(
<< remote_addr.ToString()
<< ", server_address_:"
<< server_address_.address.ToString();
- return;
+ return false;
}
// The message must be at least the size of a channel header.
if (size < TURN_CHANNEL_HEADER_SIZE) {
LOG_J(LS_WARNING, this) << "Received TURN message that was too short";
- return;
+ return false;
}
if (state_ == STATE_DISCONNECTED) {
LOG_J(LS_WARNING, this)
<< "Received TURN message while the Turn port is disconnected";
- return;
+ return false;
}
// Check the message type, to see if is a Channel Data message.
@@ -568,27 +572,41 @@ void TurnPort::OnReadPacket(
uint16_t msg_type = rtc::GetBE16(data);
if (IsTurnChannelData(msg_type)) {
HandleChannelData(msg_type, data, size, packet_time);
- } else if (msg_type == TURN_DATA_INDICATION) {
+ return true;
+
+ }
+
+ if (msg_type == TURN_DATA_INDICATION) {
HandleDataIndication(data, size, packet_time);
- } else {
- if (SharedSocket() &&
- (msg_type == STUN_BINDING_RESPONSE ||
- msg_type == STUN_BINDING_ERROR_RESPONSE)) {
- LOG_J(LS_VERBOSE, this) <<
- "Ignoring STUN binding response message on shared socket.";
- return;
- }
+ return true;
+ }
- // This must be a response for one of our requests.
- // Check success responses, but not errors, for MESSAGE-INTEGRITY.
- if (IsStunSuccessResponseType(msg_type) &&
- !StunMessage::ValidateMessageIntegrity(data, size, hash())) {
- LOG_J(LS_WARNING, this) << "Received TURN message with invalid "
- << "message integrity, msg_type=" << msg_type;
- return;
- }
- request_manager_.CheckResponse(data, size);
+ if (SharedSocket() && (msg_type == STUN_BINDING_RESPONSE ||
+ msg_type == STUN_BINDING_ERROR_RESPONSE)) {
+ LOG_J(LS_VERBOSE, this) <<
+ "Ignoring STUN binding response message on shared socket.";
+ return false;
}
+
+ // This must be a response for one of our requests.
+ // Check success responses, but not errors, for MESSAGE-INTEGRITY.
+ if (IsStunSuccessResponseType(msg_type) &&
+ !StunMessage::ValidateMessageIntegrity(data, size, hash())) {
+ LOG_J(LS_WARNING, this) << "Received TURN message with invalid "
+ << "message integrity, msg_type=" << msg_type;
+ return true;
+ }
+ request_manager_.CheckResponse(data, size);
+
+ return true;
+}
+
+void TurnPort::OnReadPacket(rtc::AsyncPacketSocket* socket,
+ const char* data,
+ size_t size,
+ const rtc::SocketAddress& remote_addr,
+ const rtc::PacketTime& packet_time) {
+ HandleIncomingPacket(socket, data, size, remote_addr, packet_time);
}
void TurnPort::OnSentPacket(rtc::AsyncPacketSocket* socket,
@@ -1011,7 +1029,7 @@ void TurnPort::OnConnectionDestroyed(Connection* conn) {
void TurnPort::ScheduleEntryDestruction(TurnEntry* entry) {
ASSERT(entry->destruction_timestamp() == 0);
- int64_t timestamp = rtc::Time64();
+ int64_t timestamp = rtc::TimeMillis();
entry->set_destruction_timestamp(timestamp);
invoker_.AsyncInvokeDelayed<void>(
thread(),
diff --git a/chromium/third_party/webrtc/p2p/base/turnport.h b/chromium/third_party/webrtc/p2p/base/turnport.h
index 797fa3f94f1..461fc1304db 100644
--- a/chromium/third_party/webrtc/p2p/base/turnport.h
+++ b/chromium/third_party/webrtc/p2p/base/turnport.h
@@ -94,13 +94,10 @@ class TurnPort : public Port {
virtual int GetOption(rtc::Socket::Option opt, int* value);
virtual int GetError();
- virtual bool HandleIncomingPacket(
- rtc::AsyncPacketSocket* socket, const char* data, size_t size,
- const rtc::SocketAddress& remote_addr,
- const rtc::PacketTime& packet_time) {
- OnReadPacket(socket, data, size, remote_addr, packet_time);
- return true;
- }
+ virtual bool HandleIncomingPacket(rtc::AsyncPacketSocket* socket,
+ const char* data, size_t size,
+ const rtc::SocketAddress& remote_addr,
+ const rtc::PacketTime& packet_time);
virtual void OnReadPacket(rtc::AsyncPacketSocket* socket,
const char* data, size_t size,
const rtc::SocketAddress& remote_addr,
diff --git a/chromium/third_party/webrtc/p2p/base/turnport_unittest.cc b/chromium/third_party/webrtc/p2p/base/turnport_unittest.cc
index 2c93ead9dfa..15a7954d6dc 100644
--- a/chromium/third_party/webrtc/p2p/base/turnport_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/base/turnport_unittest.cc
@@ -11,6 +11,8 @@
#include <dirent.h>
#endif
+#include <memory>
+
#include "webrtc/p2p/base/basicpacketsocketfactory.h"
#include "webrtc/p2p/base/p2pconstants.h"
#include "webrtc/p2p/base/portallocator.h"
@@ -26,7 +28,6 @@
#include "webrtc/base/helpers.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/physicalsocketserver.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socketaddress.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/thread.h"
@@ -507,15 +508,15 @@ class TurnPortTest : public testing::Test,
protected:
rtc::Thread* main_;
- rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
- rtc::scoped_ptr<TurnPortTestVirtualSocketServer> ss_;
+ std::unique_ptr<rtc::PhysicalSocketServer> pss_;
+ std::unique_ptr<TurnPortTestVirtualSocketServer> ss_;
rtc::SocketServerScope ss_scope_;
rtc::Network network_;
rtc::BasicPacketSocketFactory socket_factory_;
- rtc::scoped_ptr<rtc::AsyncPacketSocket> socket_;
+ std::unique_ptr<rtc::AsyncPacketSocket> socket_;
cricket::TestTurnServer turn_server_;
- rtc::scoped_ptr<TurnPort> turn_port_;
- rtc::scoped_ptr<UDPPort> udp_port_;
+ std::unique_ptr<TurnPort> turn_port_;
+ std::unique_ptr<UDPPort> udp_port_;
bool turn_ready_;
bool turn_error_;
bool turn_unknown_address_;
@@ -629,7 +630,7 @@ TEST_F(TurnPortTest, TestTurnAllocateNonceResetAfterAllocateMismatch) {
// using timestamp |ts_before| but then get an allocate mismatch error and
// receive an even newer nonce based on the system clock. |ts_before| is
// chosen so that the two NONCEs generated by the server will be different.
- int64_t ts_before = rtc::Time64() - 1;
+ int64_t ts_before = rtc::TimeMillis() - 1;
std::string first_nonce =
turn_server_.server()->SetTimestampForNextNonce(ts_before);
turn_port_->PrepareAddress();
@@ -665,6 +666,13 @@ TEST_F(TurnPortTest, TestTurnAllocateMismatch) {
// Verifies that the new port has a different address now.
EXPECT_NE(first_addr, turn_port_->socket()->GetLocalAddress());
+
+ // Verify that all packets received from the shared socket are ignored.
+ std::string test_packet = "Test packet";
+ EXPECT_FALSE(turn_port_->HandleIncomingPacket(
+ socket_.get(), test_packet.data(), test_packet.size(),
+ rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0),
+ rtc::CreatePacketTime(0)));
}
// Tests that a shared-socket-TurnPort creates its own socket after
diff --git a/chromium/third_party/webrtc/p2p/base/turnserver.cc b/chromium/third_party/webrtc/p2p/base/turnserver.cc
index 13974a0bd91..16df6f944ba 100644
--- a/chromium/third_party/webrtc/p2p/base/turnserver.cc
+++ b/chromium/third_party/webrtc/p2p/base/turnserver.cc
@@ -424,7 +424,7 @@ bool TurnServer::ValidateNonce(const std::string& nonce) const {
}
// Validate the timestamp.
- return rtc::Time64() - then < kNonceTimeout;
+ return rtc::TimeMillis() - then < kNonceTimeout;
}
TurnServerAllocation* TurnServer::FindAllocation(TurnServerConnection* conn) {
@@ -465,7 +465,7 @@ void TurnServer::SendErrorResponseWithRealmAndNonce(
TurnMessage resp;
InitErrorResponse(msg, code, reason, &resp);
- int64_t timestamp = rtc::Time64();
+ int64_t timestamp = rtc::TimeMillis();
if (ts_for_next_nonce_) {
timestamp = ts_for_next_nonce_;
ts_for_next_nonce_ = 0;
diff --git a/chromium/third_party/webrtc/p2p/base/turnserver.h b/chromium/third_party/webrtc/p2p/base/turnserver.h
index e520a9ef654..2bc3650f091 100644
--- a/chromium/third_party/webrtc/p2p/base/turnserver.h
+++ b/chromium/third_party/webrtc/p2p/base/turnserver.h
@@ -13,6 +13,7 @@
#include <list>
#include <map>
+#include <memory>
#include <set>
#include <string>
@@ -125,7 +126,7 @@ class TurnServerAllocation : public rtc::MessageHandler,
TurnServer* server_;
rtc::Thread* thread_;
TurnServerConnection conn_;
- rtc::scoped_ptr<rtc::AsyncPacketSocket> external_socket_;
+ std::unique_ptr<rtc::AsyncPacketSocket> external_socket_;
std::string key_;
std::string transaction_id_;
std::string username_;
@@ -143,6 +144,7 @@ class TurnAuthInterface {
// Return true if the given username and realm are valid, or false if not.
virtual bool GetKey(const std::string& username, const std::string& realm,
std::string* key) = 0;
+ virtual ~TurnAuthInterface() = default;
};
// An interface enables Turn Server to control redirection behavior.
@@ -269,8 +271,7 @@ class TurnServer : public sigslot::has_slots<> {
InternalSocketMap server_sockets_;
ServerSocketMap server_listen_sockets_;
- rtc::scoped_ptr<rtc::PacketSocketFactory>
- external_socket_factory_;
+ std::unique_ptr<rtc::PacketSocketFactory> external_socket_factory_;
rtc::SocketAddress external_addr_;
AllocationMap allocations_;
diff --git a/chromium/third_party/webrtc/p2p/client/basicportallocator.cc b/chromium/third_party/webrtc/p2p/client/basicportallocator.cc
index 22abf33e936..143b0361837 100644
--- a/chromium/third_party/webrtc/p2p/client/basicportallocator.cc
+++ b/chromium/third_party/webrtc/p2p/client/basicportallocator.cc
@@ -64,33 +64,26 @@ const uint32_t DISABLE_ALL_PHASES =
PORTALLOCATOR_DISABLE_STUN | PORTALLOCATOR_DISABLE_RELAY;
// BasicPortAllocator
-BasicPortAllocator::BasicPortAllocator(
- rtc::NetworkManager* network_manager,
- rtc::PacketSocketFactory* socket_factory)
- : network_manager_(network_manager),
- socket_factory_(socket_factory),
- stun_servers_() {
+BasicPortAllocator::BasicPortAllocator(rtc::NetworkManager* network_manager,
+ rtc::PacketSocketFactory* socket_factory)
+ : network_manager_(network_manager), socket_factory_(socket_factory) {
ASSERT(network_manager_ != nullptr);
ASSERT(socket_factory_ != nullptr);
Construct();
}
BasicPortAllocator::BasicPortAllocator(rtc::NetworkManager* network_manager)
- : network_manager_(network_manager),
- socket_factory_(nullptr),
- stun_servers_() {
+ : network_manager_(network_manager), socket_factory_(nullptr) {
ASSERT(network_manager_ != nullptr);
Construct();
}
-BasicPortAllocator::BasicPortAllocator(
- rtc::NetworkManager* network_manager,
- rtc::PacketSocketFactory* socket_factory,
- const ServerAddresses& stun_servers)
- : network_manager_(network_manager),
- socket_factory_(socket_factory),
- stun_servers_(stun_servers) {
+BasicPortAllocator::BasicPortAllocator(rtc::NetworkManager* network_manager,
+ rtc::PacketSocketFactory* socket_factory,
+ const ServerAddresses& stun_servers)
+ : network_manager_(network_manager), socket_factory_(socket_factory) {
ASSERT(socket_factory_ != NULL);
+ SetConfiguration(stun_servers, std::vector<RelayServerConfig>(), 0);
Construct();
}
@@ -100,10 +93,8 @@ BasicPortAllocator::BasicPortAllocator(
const rtc::SocketAddress& relay_address_udp,
const rtc::SocketAddress& relay_address_tcp,
const rtc::SocketAddress& relay_address_ssl)
- : network_manager_(network_manager),
- socket_factory_(NULL),
- stun_servers_(stun_servers) {
-
+ : network_manager_(network_manager), socket_factory_(NULL) {
+ std::vector<RelayServerConfig> turn_servers;
RelayServerConfig config(RELAY_GTURN);
if (!relay_address_udp.IsNil()) {
config.ports.push_back(ProtocolAddress(relay_address_udp, PROTO_UDP));
@@ -116,9 +107,10 @@ BasicPortAllocator::BasicPortAllocator(
}
if (!config.ports.empty()) {
- AddTurnServer(config);
+ turn_servers.push_back(config);
}
+ SetConfiguration(stun_servers, turn_servers, 0);
Construct();
}
@@ -136,6 +128,11 @@ PortAllocatorSession* BasicPortAllocator::CreateSessionInternal(
this, content_name, component, ice_ufrag, ice_pwd);
}
+void BasicPortAllocator::AddTurnServer(const RelayServerConfig& turn_server) {
+ std::vector<RelayServerConfig> new_turn_servers = turn_servers();
+ new_turn_servers.push_back(turn_server);
+ SetConfiguration(stun_servers(), new_turn_servers, candidate_pool_size());
+}
// BasicPortAllocatorSession
BasicPortAllocatorSession::BasicPortAllocatorSession(
@@ -207,6 +204,61 @@ void BasicPortAllocatorSession::ClearGettingPorts() {
sequences_[i]->Stop();
}
+std::vector<PortInterface*> BasicPortAllocatorSession::ReadyPorts() const {
+ std::vector<PortInterface*> ret;
+ for (const PortData& port : ports_) {
+ if (port.ready() || port.complete()) {
+ ret.push_back(port.port());
+ }
+ }
+ return ret;
+}
+
+std::vector<Candidate> BasicPortAllocatorSession::ReadyCandidates() const {
+ std::vector<Candidate> candidates;
+ for (const PortData& data : ports_) {
+ for (const Candidate& candidate : data.port()->Candidates()) {
+ if (!CheckCandidateFilter(candidate)) {
+ continue;
+ }
+ ProtocolType pvalue;
+ if (!StringToProto(candidate.protocol().c_str(), &pvalue) ||
+ !data.sequence()->ProtocolEnabled(pvalue)) {
+ continue;
+ }
+ candidates.push_back(candidate);
+ }
+ }
+ return candidates;
+}
+
+bool BasicPortAllocatorSession::CandidatesAllocationDone() const {
+ // Done only if all required AllocationSequence objects
+ // are created.
+ if (!allocation_sequences_created_) {
+ return false;
+ }
+
+ // Check that all port allocation sequences are complete (not running).
+ if (std::any_of(sequences_.begin(), sequences_.end(),
+ [](const AllocationSequence* sequence) {
+ return sequence->state() == AllocationSequence::kRunning;
+ })) {
+ return false;
+ }
+
+ // If all allocated ports are in complete state, session must have got all
+ // expected candidates. Session will trigger candidates allocation complete
+ // signal.
+ if (!std::all_of(ports_.begin(), ports_.end(), [](const PortData& port) {
+ return (port.complete() || port.error());
+ })) {
+ return false;
+ }
+
+ return true;
+}
+
void BasicPortAllocatorSession::OnMessage(rtc::Message *message) {
switch (message->message_id) {
case MSG_CONFIG_START:
@@ -241,6 +293,13 @@ void BasicPortAllocatorSession::OnMessage(rtc::Message *message) {
}
}
+void BasicPortAllocatorSession::UpdateIceParametersInternal() {
+ for (PortData& port : ports_) {
+ port.port()->set_content_name(content_name());
+ port.port()->SetIceParameters(component(), ice_ufrag(), ice_pwd());
+ }
+}
+
void BasicPortAllocatorSession::GetPortConfigurations() {
PortConfiguration* config = new PortConfiguration(allocator_->stun_servers(),
username(),
@@ -274,7 +333,7 @@ void BasicPortAllocatorSession::OnConfigStop() {
bool send_signal = false;
for (std::vector<PortData>::iterator it = ports_.begin();
it != ports_.end(); ++it) {
- if (!it->complete()) {
+ if (!it->complete() && !it->error()) {
// Updating port state to error, which didn't finish allocating candidates
// yet.
it->set_error();
@@ -436,12 +495,12 @@ void BasicPortAllocatorSession::AddAllocatedPort(Port* port,
LOG(LS_INFO) << "Adding allocated port for " << content_name();
port->set_content_name(content_name());
- port->set_component(component_);
+ port->set_component(component());
port->set_generation(generation());
if (allocator_->proxy().type != rtc::PROXY_NONE)
port->set_proxy(allocator_->user_agent(), allocator_->proxy());
- port->set_send_retransmit_count_attribute((allocator_->flags() &
- PORTALLOCATOR_ENABLE_STUN_RETRANSMIT_ATTRIBUTE) != 0);
+ port->set_send_retransmit_count_attribute(
+ (flags() & PORTALLOCATOR_ENABLE_STUN_RETRANSMIT_ATTRIBUTE) != 0);
// Push down the candidate_filter to individual port.
uint32_t candidate_filter = allocator_->candidate_filter();
@@ -484,8 +543,9 @@ void BasicPortAllocatorSession::OnCandidateReady(
ASSERT(data != NULL);
// Discarding any candidate signal if port allocation status is
// already in completed state.
- if (data->complete())
+ if (data->complete() || data->error()) {
return;
+ }
ProtocolType pvalue;
bool candidate_signalable = CheckCandidateFilter(c);
@@ -536,8 +596,9 @@ void BasicPortAllocatorSession::OnPortComplete(Port* port) {
ASSERT(data != NULL);
// Ignore any late signals.
- if (data->complete())
+ if (data->complete() || data->error()) {
return;
+ }
// Moving to COMPLETE state.
data->set_complete();
@@ -550,8 +611,9 @@ void BasicPortAllocatorSession::OnPortError(Port* port) {
PortData* data = FindPort(port);
ASSERT(data != NULL);
// We might have already given up on this port and stopped it.
- if (data->complete())
+ if (data->complete() || data->error()) {
return;
+ }
// SignalAddressError is currently sent from StunPort/TurnPort.
// But this signal itself is generic.
@@ -587,7 +649,7 @@ void BasicPortAllocatorSession::OnProtocolEnabled(AllocationSequence* seq,
}
}
-bool BasicPortAllocatorSession::CheckCandidateFilter(const Candidate& c) {
+bool BasicPortAllocatorSession::CheckCandidateFilter(const Candidate& c) const {
uint32_t filter = allocator_->candidate_filter();
// When binding to any address, before sending packets out, the getsockname
@@ -625,29 +687,15 @@ void BasicPortAllocatorSession::OnPortAllocationComplete(
}
void BasicPortAllocatorSession::MaybeSignalCandidatesAllocationDone() {
- // Send signal only if all required AllocationSequence objects
- // are created.
- if (!allocation_sequences_created_)
- return;
-
- // Check that all port allocation sequences are complete.
- for (std::vector<AllocationSequence*>::iterator it = sequences_.begin();
- it != sequences_.end(); ++it) {
- if ((*it)->state() == AllocationSequence::kRunning)
- return;
- }
-
- // If all allocated ports are in complete state, session must have got all
- // expected candidates. Session will trigger candidates allocation complete
- // signal.
- for (std::vector<PortData>::iterator it = ports_.begin();
- it != ports_.end(); ++it) {
- if (!it->complete())
- return;
+ if (CandidatesAllocationDone()) {
+ if (pooled()) {
+ LOG(LS_INFO) << "All candidates gathered for pooled session.";
+ } else {
+ LOG(LS_INFO) << "All candidates gathered for " << content_name() << ":"
+ << component() << ":" << generation();
+ }
+ SignalCandidatesAllocationDone(this);
}
- LOG(LS_INFO) << "All candidates gathered for " << content_name_ << ":"
- << component_ << ":" << generation();
- SignalCandidatesAllocationDone(this);
}
void BasicPortAllocatorSession::OnPortDestroyed(
@@ -1080,13 +1128,13 @@ void AllocationSequence::OnReadPacket(
// a STUN binding response, so we pass the message to TurnPort regardless of
// the message type. The TurnPort will just ignore the message since it will
// not find any request by transaction ID.
- for (std::vector<TurnPort*>::const_iterator it = turn_ports_.begin();
- it != turn_ports_.end(); ++it) {
- TurnPort* port = *it;
+ for (TurnPort* port : turn_ports_) {
if (port->server_address().address == remote_addr) {
- port->HandleIncomingPacket(socket, data, size, remote_addr, packet_time);
+ if (port->HandleIncomingPacket(socket, data, size, remote_addr,
+ packet_time)) {
+ return;
+ }
turn_port_found = true;
- break;
}
}
@@ -1097,8 +1145,9 @@ void AllocationSequence::OnReadPacket(
// the TURN server is also a STUN server.
if (!turn_port_found ||
stun_servers.find(remote_addr) != stun_servers.end()) {
- udp_port_->HandleIncomingPacket(
- socket, data, size, remote_addr, packet_time);
+ RTC_DCHECK(udp_port_->SharedSocket());
+ udp_port_->HandleIncomingPacket(socket, data, size, remote_addr,
+ packet_time);
}
}
}
diff --git a/chromium/third_party/webrtc/p2p/client/basicportallocator.h b/chromium/third_party/webrtc/p2p/client/basicportallocator.h
index ca1a23aaf2c..fd189c14e9b 100644
--- a/chromium/third_party/webrtc/p2p/client/basicportallocator.h
+++ b/chromium/third_party/webrtc/p2p/client/basicportallocator.h
@@ -11,13 +11,13 @@
#ifndef WEBRTC_P2P_CLIENT_BASICPORTALLOCATOR_H_
#define WEBRTC_P2P_CLIENT_BASICPORTALLOCATOR_H_
+#include <memory>
#include <string>
#include <vector>
#include "webrtc/p2p/base/portallocator.h"
#include "webrtc/base/messagequeue.h"
#include "webrtc/base/network.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread.h"
namespace cricket {
@@ -37,13 +37,6 @@ class BasicPortAllocator : public PortAllocator {
const rtc::SocketAddress& relay_server_ssl);
virtual ~BasicPortAllocator();
- void SetIceServers(
- const ServerAddresses& stun_servers,
- const std::vector<RelayServerConfig>& turn_servers) override {
- stun_servers_ = stun_servers;
- turn_servers_ = turn_servers;
- }
-
// Set to kDefaultNetworkIgnoreMask by default.
void SetNetworkIgnoreMask(int network_ignore_mask) override {
// TODO(phoglund): implement support for other types than loopback.
@@ -60,30 +53,20 @@ class BasicPortAllocator : public PortAllocator {
// creates its own socket factory.
rtc::PacketSocketFactory* socket_factory() { return socket_factory_; }
- const ServerAddresses& stun_servers() const {
- return stun_servers_;
- }
-
- const std::vector<RelayServerConfig>& turn_servers() const {
- return turn_servers_;
- }
- virtual void AddTurnServer(const RelayServerConfig& turn_server) {
- turn_servers_.push_back(turn_server);
- }
-
PortAllocatorSession* CreateSessionInternal(
const std::string& content_name,
int component,
const std::string& ice_ufrag,
const std::string& ice_pwd) override;
+ // Convenience method that adds a TURN server to the configuration.
+ void AddTurnServer(const RelayServerConfig& turn_server);
+
private:
void Construct();
rtc::NetworkManager* network_manager_;
rtc::PacketSocketFactory* socket_factory_;
- ServerAddresses stun_servers_;
- std::vector<RelayServerConfig> turn_servers_;
bool allow_tcp_listen_;
int network_ignore_mask_ = rtc::kDefaultNetworkIgnoreMask;
};
@@ -109,8 +92,14 @@ class BasicPortAllocatorSession : public PortAllocatorSession,
void StopGettingPorts() override;
void ClearGettingPorts() override;
bool IsGettingPorts() override { return running_; }
+ // These will all be cricket::Ports.
+ std::vector<PortInterface*> ReadyPorts() const override;
+ std::vector<Candidate> ReadyCandidates() const override;
+ bool CandidatesAllocationDone() const override;
protected:
+ void UpdateIceParametersInternal() override;
+
// Starts the process of getting the port configurations.
virtual void GetPortConfigurations();
@@ -129,13 +118,11 @@ class BasicPortAllocatorSession : public PortAllocatorSession,
: port_(port), sequence_(seq), state_(STATE_INIT) {
}
- Port* port() { return port_; }
- AllocationSequence* sequence() { return sequence_; }
+ Port* port() const { return port_; }
+ AllocationSequence* sequence() const { return sequence_; }
bool ready() const { return state_ == STATE_READY; }
- bool complete() const {
- // Returns true if candidate allocation has completed one way or another.
- return ((state_ == STATE_COMPLETE) || (state_ == STATE_ERROR));
- }
+ bool complete() const { return state_ == STATE_COMPLETE; }
+ bool error() const { return state_ == STATE_ERROR; }
void set_ready() { ASSERT(state_ == STATE_INIT); state_ = STATE_READY; }
void set_complete() {
@@ -181,11 +168,11 @@ class BasicPortAllocatorSession : public PortAllocatorSession,
PortData* FindPort(Port* port);
void GetNetworks(std::vector<rtc::Network*>* networks);
- bool CheckCandidateFilter(const Candidate& c);
+ bool CheckCandidateFilter(const Candidate& c) const;
BasicPortAllocator* allocator_;
rtc::Thread* network_thread_;
- rtc::scoped_ptr<rtc::PacketSocketFactory> owned_socket_factory_;
+ std::unique_ptr<rtc::PacketSocketFactory> owned_socket_factory_;
rtc::PacketSocketFactory* socket_factory_;
bool allocation_started_;
bool network_manager_started_;
@@ -320,7 +307,7 @@ class AllocationSequence : public rtc::MessageHandler,
State state_;
uint32_t flags_;
ProtocolList protocols_;
- rtc::scoped_ptr<rtc::AsyncPacketSocket> udp_socket_;
+ std::unique_ptr<rtc::AsyncPacketSocket> udp_socket_;
// There will be only one udp port per AllocationSequence.
UDPPort* udp_port_;
std::vector<TurnPort*> turn_ports_;
diff --git a/chromium/third_party/webrtc/p2p/client/portallocator_unittest.cc b/chromium/third_party/webrtc/p2p/client/basicportallocator_unittest.cc
index a76900ab6a8..83d904f1814 100644
--- a/chromium/third_party/webrtc/p2p/client/portallocator_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/client/basicportallocator_unittest.cc
@@ -8,6 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <algorithm>
+#include <memory>
+
#include "webrtc/p2p/base/basicpacketsocketfactory.h"
#include "webrtc/p2p/base/p2pconstants.h"
#include "webrtc/p2p/base/p2ptransportchannel.h"
@@ -40,8 +43,8 @@ static const SocketAddress kClientAddr("11.11.11.11", 0);
static const SocketAddress kLoopbackAddr("127.0.0.1", 0);
static const SocketAddress kPrivateAddr("192.168.1.11", 0);
static const SocketAddress kPrivateAddr2("192.168.1.12", 0);
-static const SocketAddress kClientIPv6Addr(
- "2401:fa00:4:1000:be30:5bff:fee5:c3", 0);
+static const SocketAddress kClientIPv6Addr("2401:fa00:4:1000:be30:5bff:fee5:c3",
+ 0);
static const SocketAddress kClientAddr2("22.22.22.22", 0);
static const SocketAddress kNatUdpAddr("77.77.77.77", rtc::NAT_SERVER_UDP_PORT);
static const SocketAddress kNatTcpAddr("77.77.77.77", rtc::NAT_SERVER_TCP_PORT);
@@ -82,20 +85,25 @@ std::ostream& operator<<(std::ostream& os, const cricket::Candidate& c) {
} // namespace cricket
-class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
+class BasicPortAllocatorTest : public testing::Test,
+ public sigslot::has_slots<> {
public:
- PortAllocatorTest()
+ BasicPortAllocatorTest()
: pss_(new rtc::PhysicalSocketServer),
vss_(new rtc::VirtualSocketServer(pss_.get())),
fss_(new rtc::FirewallSocketServer(vss_.get())),
ss_scope_(fss_.get()),
nat_factory_(vss_.get(), kNatUdpAddr, kNatTcpAddr),
nat_socket_factory_(new rtc::BasicPacketSocketFactory(&nat_factory_)),
- stun_server_(cricket::TestStunServer::Create(Thread::Current(),
- kStunAddr)),
- relay_server_(Thread::Current(), kRelayUdpIntAddr, kRelayUdpExtAddr,
- kRelayTcpIntAddr, kRelayTcpExtAddr,
- kRelaySslTcpIntAddr, kRelaySslTcpExtAddr),
+ stun_server_(
+ cricket::TestStunServer::Create(Thread::Current(), kStunAddr)),
+ relay_server_(Thread::Current(),
+ kRelayUdpIntAddr,
+ kRelayUdpExtAddr,
+ kRelayTcpIntAddr,
+ kRelayTcpExtAddr,
+ kRelaySslTcpIntAddr,
+ kRelaySslTcpExtAddr),
turn_server_(Thread::Current(), kTurnUdpIntAddr, kTurnUdpExtAddr),
candidate_allocation_done_(false) {
cricket::ServerAddresses stun_servers;
@@ -103,9 +111,8 @@ class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
// Passing the addresses of GTURN servers will enable GTURN in
// Basicportallocator.
allocator_.reset(new cricket::BasicPortAllocator(
- &network_manager_,
- stun_servers,
- kRelayUdpIntAddr, kRelayTcpIntAddr, kRelaySslTcpIntAddr));
+ &network_manager_, stun_servers, kRelayUdpIntAddr, kRelayTcpIntAddr,
+ kRelaySslTcpIntAddr));
allocator_->set_step_delay(cricket::kMinimumStepDelay);
}
@@ -176,55 +183,65 @@ class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
}
bool CreateSession(int component) {
- session_.reset(CreateSession("session", component));
- if (!session_)
+ session_ = CreateSession("session", component);
+ if (!session_) {
return false;
+ }
return true;
}
bool CreateSession(int component, const std::string& content_name) {
- session_.reset(CreateSession("session", content_name, component));
- if (!session_)
+ session_ = CreateSession("session", content_name, component);
+ if (!session_) {
return false;
+ }
return true;
}
- cricket::PortAllocatorSession* CreateSession(
- const std::string& sid, int component) {
+ std::unique_ptr<cricket::PortAllocatorSession> CreateSession(
+ const std::string& sid,
+ int component) {
return CreateSession(sid, kContentName, component);
}
- cricket::PortAllocatorSession* CreateSession(
- const std::string& sid, const std::string& content_name, int component) {
+ std::unique_ptr<cricket::PortAllocatorSession> CreateSession(
+ const std::string& sid,
+ const std::string& content_name,
+ int component) {
return CreateSession(sid, content_name, component, kIceUfrag0, kIcePwd0);
}
- cricket::PortAllocatorSession* CreateSession(
- const std::string& sid, const std::string& content_name, int component,
- const std::string& ice_ufrag, const std::string& ice_pwd) {
- cricket::PortAllocatorSession* session =
- allocator_->CreateSession(
- sid, content_name, component, ice_ufrag, ice_pwd);
+ std::unique_ptr<cricket::PortAllocatorSession> CreateSession(
+ const std::string& sid,
+ const std::string& content_name,
+ int component,
+ const std::string& ice_ufrag,
+ const std::string& ice_pwd) {
+ std::unique_ptr<cricket::PortAllocatorSession> session =
+ allocator_->CreateSession(sid, content_name, component, ice_ufrag,
+ ice_pwd);
session->SignalPortReady.connect(this,
- &PortAllocatorTest::OnPortReady);
- session->SignalCandidatesReady.connect(this,
- &PortAllocatorTest::OnCandidatesReady);
- session->SignalCandidatesAllocationDone.connect(this,
- &PortAllocatorTest::OnCandidatesAllocationDone);
+ &BasicPortAllocatorTest::OnPortReady);
+ session->SignalCandidatesReady.connect(
+ this, &BasicPortAllocatorTest::OnCandidatesReady);
+ session->SignalCandidatesAllocationDone.connect(
+ this, &BasicPortAllocatorTest::OnCandidatesAllocationDone);
return session;
}
static bool CheckCandidate(const cricket::Candidate& c,
- int component, const std::string& type,
+ int component,
+ const std::string& type,
const std::string& proto,
const SocketAddress& addr) {
return (c.component() == component && c.type() == type &&
- c.protocol() == proto && c.address().ipaddr() == addr.ipaddr() &&
- ((addr.port() == 0 && (c.address().port() != 0)) ||
- (c.address().port() == addr.port())));
+ c.protocol() == proto && c.address().ipaddr() == addr.ipaddr() &&
+ ((addr.port() == 0 && (c.address().port() != 0)) ||
+ (c.address().port() == addr.port())));
}
static bool CheckPort(const rtc::SocketAddress& addr,
- int min_port, int max_port) {
+ int min_port,
+ int max_port) {
return (addr.port() >= min_port && addr.port() <= max_port);
}
@@ -235,6 +252,7 @@ class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
ASSERT_FALSE(candidate_allocation_done_);
candidate_allocation_done_ = true;
}
+ EXPECT_TRUE(session->CandidatesAllocationDone());
}
// Check if all ports allocated have send-buffer size |expected|. If
@@ -245,11 +263,10 @@ class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
int send_buffer_size;
if (expected == -1) {
EXPECT_EQ(SOCKET_ERROR,
- (*it)->GetOption(rtc::Socket::OPT_SNDBUF,
- &send_buffer_size));
+ (*it)->GetOption(rtc::Socket::OPT_SNDBUF, &send_buffer_size));
} else {
- EXPECT_EQ(0, (*it)->GetOption(rtc::Socket::OPT_SNDBUF,
- &send_buffer_size));
+ EXPECT_EQ(0,
+ (*it)->GetOption(rtc::Socket::OPT_SNDBUF, &send_buffer_size));
ASSERT_EQ(expected, send_buffer_size);
}
}
@@ -320,14 +337,16 @@ class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
}
protected:
- cricket::BasicPortAllocator& allocator() {
- return *allocator_;
- }
+ cricket::BasicPortAllocator& allocator() { return *allocator_; }
void OnPortReady(cricket::PortAllocatorSession* ses,
cricket::PortInterface* port) {
LOG(LS_INFO) << "OnPortReady: " << port->ToString();
ports_.push_back(port);
+ // Make sure the new port is added to ReadyPorts.
+ auto ready_ports = ses->ReadyPorts();
+ EXPECT_NE(ready_ports.end(),
+ std::find(ready_ports.begin(), ready_ports.end(), port));
}
void OnCandidatesReady(cricket::PortAllocatorSession* ses,
const std::vector<cricket::Candidate>& candidates) {
@@ -335,6 +354,13 @@ class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
LOG(LS_INFO) << "OnCandidatesReady: " << candidates[i].ToString();
candidates_.push_back(candidates[i]);
}
+ // Make sure the new candidates are added to Candidates.
+ auto ses_candidates = ses->ReadyCandidates();
+ for (const cricket::Candidate& candidate : candidates) {
+ EXPECT_NE(
+ ses_candidates.end(),
+ std::find(ses_candidates.begin(), ses_candidates.end(), candidate));
+ }
}
bool HasRelayAddress(const cricket::ProtocolAddress& proto_addr) {
@@ -342,7 +368,7 @@ class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
cricket::RelayServerConfig server_config = allocator_->turn_servers()[i];
cricket::PortList::const_iterator relay_port;
for (relay_port = server_config.ports.begin();
- relay_port != server_config.ports.end(); ++relay_port) {
+ relay_port != server_config.ports.end(); ++relay_port) {
if (proto_addr.address == relay_port->address &&
proto_addr.proto == relay_port->proto)
return true;
@@ -370,26 +396,26 @@ class PortAllocatorTest : public testing::Test, public sigslot::has_slots<> {
allocator().set_step_delay(cricket::kMinimumStepDelay);
}
- rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
- rtc::scoped_ptr<rtc::VirtualSocketServer> vss_;
- rtc::scoped_ptr<rtc::FirewallSocketServer> fss_;
+ std::unique_ptr<rtc::PhysicalSocketServer> pss_;
+ std::unique_ptr<rtc::VirtualSocketServer> vss_;
+ std::unique_ptr<rtc::FirewallSocketServer> fss_;
rtc::SocketServerScope ss_scope_;
- rtc::scoped_ptr<rtc::NATServer> nat_server_;
+ std::unique_ptr<rtc::NATServer> nat_server_;
rtc::NATSocketFactory nat_factory_;
- rtc::scoped_ptr<rtc::BasicPacketSocketFactory> nat_socket_factory_;
- rtc::scoped_ptr<cricket::TestStunServer> stun_server_;
+ std::unique_ptr<rtc::BasicPacketSocketFactory> nat_socket_factory_;
+ std::unique_ptr<cricket::TestStunServer> stun_server_;
cricket::TestRelayServer relay_server_;
cricket::TestTurnServer turn_server_;
rtc::FakeNetworkManager network_manager_;
- rtc::scoped_ptr<cricket::BasicPortAllocator> allocator_;
- rtc::scoped_ptr<cricket::PortAllocatorSession> session_;
+ std::unique_ptr<cricket::BasicPortAllocator> allocator_;
+ std::unique_ptr<cricket::PortAllocatorSession> session_;
std::vector<cricket::PortInterface*> ports_;
std::vector<cricket::Candidate> candidates_;
bool candidate_allocation_done_;
};
// Tests that we can init the port allocator and create a session.
-TEST_F(PortAllocatorTest, TestBasic) {
+TEST_F(BasicPortAllocatorTest, TestBasic) {
EXPECT_EQ(&network_manager_, allocator().network_manager());
EXPECT_EQ(kStunAddr, *allocator().stun_servers().begin());
ASSERT_EQ(1u, allocator().turn_servers().size());
@@ -397,17 +423,18 @@ TEST_F(PortAllocatorTest, TestBasic) {
// Empty relay credentials are used for GTURN.
EXPECT_TRUE(allocator().turn_servers()[0].credentials.username.empty());
EXPECT_TRUE(allocator().turn_servers()[0].credentials.password.empty());
- EXPECT_TRUE(HasRelayAddress(cricket::ProtocolAddress(
- kRelayUdpIntAddr, cricket::PROTO_UDP)));
- EXPECT_TRUE(HasRelayAddress(cricket::ProtocolAddress(
- kRelayTcpIntAddr, cricket::PROTO_TCP)));
- EXPECT_TRUE(HasRelayAddress(cricket::ProtocolAddress(
- kRelaySslTcpIntAddr, cricket::PROTO_SSLTCP)));
+ EXPECT_TRUE(HasRelayAddress(
+ cricket::ProtocolAddress(kRelayUdpIntAddr, cricket::PROTO_UDP)));
+ EXPECT_TRUE(HasRelayAddress(
+ cricket::ProtocolAddress(kRelayTcpIntAddr, cricket::PROTO_TCP)));
+ EXPECT_TRUE(HasRelayAddress(
+ cricket::ProtocolAddress(kRelaySslTcpIntAddr, cricket::PROTO_SSLTCP)));
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
+ EXPECT_FALSE(session_->CandidatesAllocationDone());
}
// Tests that our network filtering works properly.
-TEST_F(PortAllocatorTest, TestIgnoreOnlyLoopbackNetworkByDefault) {
+TEST_F(BasicPortAllocatorTest, TestIgnoreOnlyLoopbackNetworkByDefault) {
AddInterface(SocketAddress(IPAddress(0x12345600U), 0), "test_eth0",
rtc::ADAPTER_TYPE_ETHERNET);
AddInterface(SocketAddress(IPAddress(0x12345601U), 0), "test_wlan0",
@@ -430,7 +457,7 @@ TEST_F(PortAllocatorTest, TestIgnoreOnlyLoopbackNetworkByDefault) {
}
}
-TEST_F(PortAllocatorTest, TestIgnoreNetworksAccordingToIgnoreMask) {
+TEST_F(BasicPortAllocatorTest, TestIgnoreNetworksAccordingToIgnoreMask) {
AddInterface(SocketAddress(IPAddress(0x12345600U), 0), "test_eth0",
rtc::ADAPTER_TYPE_ETHERNET);
AddInterface(SocketAddress(IPAddress(0x12345601U), 0), "test_wlan0",
@@ -451,7 +478,7 @@ TEST_F(PortAllocatorTest, TestIgnoreNetworksAccordingToIgnoreMask) {
}
// Tests that we allocator session not trying to allocate ports for every 250ms.
-TEST_F(PortAllocatorTest, TestNoNetworkInterface) {
+TEST_F(BasicPortAllocatorTest, TestNoNetworkInterface) {
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
session_->StartGettingPorts();
// Waiting for one second to make sure BasicPortAllocatorSession has not
@@ -464,7 +491,7 @@ TEST_F(PortAllocatorTest, TestNoNetworkInterface) {
}
// Test that we could use loopback interface as host candidate.
-TEST_F(PortAllocatorTest, TestLoopbackNetworkInterface) {
+TEST_F(BasicPortAllocatorTest, TestLoopbackNetworkInterface) {
AddInterface(kLoopbackAddr, "test_loopback", rtc::ADAPTER_TYPE_LOOPBACK);
allocator_->SetNetworkIgnoreMask(0);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
@@ -477,34 +504,40 @@ TEST_F(PortAllocatorTest, TestLoopbackNetworkInterface) {
}
// Tests that we can get all the desired addresses successfully.
-TEST_F(PortAllocatorTest, TestGetAllPortsWithMinimumStepDelay) {
+TEST_F(BasicPortAllocatorTest, TestGetAllPortsWithMinimumStepDelay) {
AddInterface(kClientAddr);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
session_->StartGettingPorts();
ASSERT_EQ_WAIT(7U, candidates_.size(), kDefaultAllocationTimeout);
EXPECT_EQ(4U, ports_.size());
EXPECT_PRED5(CheckCandidate, candidates_[0],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
+ kClientAddr);
EXPECT_PRED5(CheckCandidate, candidates_[1],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "stun", "udp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "stun", "udp",
+ kClientAddr);
EXPECT_PRED5(CheckCandidate, candidates_[2],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp", kRelayUdpIntAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
+ kRelayUdpIntAddr);
EXPECT_PRED5(CheckCandidate, candidates_[3],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp", kRelayUdpExtAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
+ kRelayUdpExtAddr);
EXPECT_PRED5(CheckCandidate, candidates_[4],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "tcp", kRelayTcpIntAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "tcp",
+ kRelayTcpIntAddr);
EXPECT_PRED5(CheckCandidate, candidates_[5],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "tcp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "tcp",
+ kClientAddr);
EXPECT_PRED5(CheckCandidate, candidates_[6],
- cricket::ICE_CANDIDATE_COMPONENT_RTP,
- "relay", "ssltcp", kRelaySslTcpIntAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "ssltcp",
+ kRelaySslTcpIntAddr);
EXPECT_TRUE(candidate_allocation_done_);
}
// Test that when the same network interface is brought down and up, the
// port allocator session will restart a new allocation sequence if
// it is not stopped.
-TEST_F(PortAllocatorTest, TestSameNetworkDownAndUpWhenSessionNotStopped) {
+TEST_F(BasicPortAllocatorTest, TestSameNetworkDownAndUpWhenSessionNotStopped) {
std::string if_name("test_net0");
AddInterface(kClientAddr, if_name);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
@@ -532,7 +565,7 @@ TEST_F(PortAllocatorTest, TestSameNetworkDownAndUpWhenSessionNotStopped) {
// Test that when the same network interface is brought down and up, the
// port allocator session will not restart a new allocation sequence if
// it is stopped.
-TEST_F(PortAllocatorTest, TestSameNetworkDownAndUpWhenSessionStopped) {
+TEST_F(BasicPortAllocatorTest, TestSameNetworkDownAndUpWhenSessionStopped) {
std::string if_name("test_net0");
AddInterface(kClientAddr, if_name);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
@@ -557,7 +590,7 @@ TEST_F(PortAllocatorTest, TestSameNetworkDownAndUpWhenSessionStopped) {
}
// Verify candidates with default step delay of 1sec.
-TEST_F(PortAllocatorTest, TestGetAllPortsWithOneSecondStepDelay) {
+TEST_F(BasicPortAllocatorTest, TestGetAllPortsWithOneSecondStepDelay) {
AddInterface(kClientAddr);
allocator_->set_step_delay(cricket::kDefaultStepDelay);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
@@ -567,29 +600,33 @@ TEST_F(PortAllocatorTest, TestGetAllPortsWithOneSecondStepDelay) {
ASSERT_EQ_WAIT(4U, candidates_.size(), 2000);
EXPECT_EQ(3U, ports_.size());
EXPECT_PRED5(CheckCandidate, candidates_[2],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp", kRelayUdpIntAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
+ kRelayUdpIntAddr);
EXPECT_PRED5(CheckCandidate, candidates_[3],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp", kRelayUdpExtAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
+ kRelayUdpExtAddr);
ASSERT_EQ_WAIT(6U, candidates_.size(), 1500);
EXPECT_PRED5(CheckCandidate, candidates_[4],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "tcp", kRelayTcpIntAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "tcp",
+ kRelayTcpIntAddr);
EXPECT_PRED5(CheckCandidate, candidates_[5],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "tcp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "tcp",
+ kClientAddr);
EXPECT_EQ(4U, ports_.size());
ASSERT_EQ_WAIT(7U, candidates_.size(), 2000);
EXPECT_PRED5(CheckCandidate, candidates_[6],
- cricket::ICE_CANDIDATE_COMPONENT_RTP,
- "relay", "ssltcp", kRelaySslTcpIntAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "ssltcp",
+ kRelaySslTcpIntAddr);
EXPECT_EQ(4U, ports_.size());
EXPECT_TRUE(candidate_allocation_done_);
// If we Stop gathering now, we shouldn't get a second "done" callback.
session_->StopGettingPorts();
}
-TEST_F(PortAllocatorTest, TestSetupVideoRtpPortsWithNormalSendBuffers) {
+TEST_F(BasicPortAllocatorTest, TestSetupVideoRtpPortsWithNormalSendBuffers) {
AddInterface(kClientAddr);
- EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP,
- cricket::CN_VIDEO));
+ EXPECT_TRUE(
+ CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP, cricket::CN_VIDEO));
session_->StartGettingPorts();
ASSERT_EQ_WAIT(7U, candidates_.size(), kDefaultAllocationTimeout);
EXPECT_TRUE(candidate_allocation_done_);
@@ -601,7 +638,7 @@ TEST_F(PortAllocatorTest, TestSetupVideoRtpPortsWithNormalSendBuffers) {
}
// Tests that we can get callback after StopGetAllPorts.
-TEST_F(PortAllocatorTest, TestStopGetAllPorts) {
+TEST_F(BasicPortAllocatorTest, TestStopGetAllPorts) {
AddInterface(kClientAddr);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
session_->StartGettingPorts();
@@ -614,7 +651,7 @@ TEST_F(PortAllocatorTest, TestStopGetAllPorts) {
// Test that we restrict client ports appropriately when a port range is set.
// We check the candidates for udp/stun/tcp ports, and the from address
// for relay ports.
-TEST_F(PortAllocatorTest, TestGetAllPortsPortRange) {
+TEST_F(BasicPortAllocatorTest, TestGetAllPortsPortRange) {
AddInterface(kClientAddr);
// Check that an invalid port range fails.
EXPECT_FALSE(SetPortRange(kMaxPort, kMinPort));
@@ -631,15 +668,15 @@ TEST_F(PortAllocatorTest, TestGetAllPortsPortRange) {
// Check the port number for the STUN port object.
EXPECT_PRED3(CheckPort, candidates_[1].address(), kMinPort, kMaxPort);
// Check the port number used to connect to the relay server.
- EXPECT_PRED3(CheckPort, relay_server_.GetConnection(0).source(),
- kMinPort, kMaxPort);
+ EXPECT_PRED3(CheckPort, relay_server_.GetConnection(0).source(), kMinPort,
+ kMaxPort);
// Check the port number for the TCP port object.
EXPECT_PRED3(CheckPort, candidates_[5].address(), kMinPort, kMaxPort);
EXPECT_TRUE(candidate_allocation_done_);
}
// Test that we don't crash or malfunction if we have no network adapters.
-TEST_F(PortAllocatorTest, TestGetAllPortsNoAdapters) {
+TEST_F(BasicPortAllocatorTest, TestGetAllPortsNoAdapters) {
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
session_->StartGettingPorts();
rtc::Thread::Current()->ProcessMessages(100);
@@ -650,7 +687,7 @@ TEST_F(PortAllocatorTest, TestGetAllPortsNoAdapters) {
// Test that when enumeration is disabled, we should not have any ports when
// candidate_filter() is set to CF_RELAY and no relay is specified.
-TEST_F(PortAllocatorTest,
+TEST_F(BasicPortAllocatorTest,
TestDisableAdapterEnumerationWithoutNatRelayTransportOnly) {
ResetWithStunServerNoNat(kStunAddr);
allocator().set_candidate_filter(cricket::CF_RELAY);
@@ -662,7 +699,7 @@ TEST_F(PortAllocatorTest,
// Test that even with multiple interfaces, the result should still be a single
// default private, one STUN and one TURN candidate since we bind to any address
// (i.e. all 0s).
-TEST_F(PortAllocatorTest,
+TEST_F(BasicPortAllocatorTest,
TestDisableAdapterEnumerationBehindNatMultipleInterfaces) {
AddInterface(kPrivateAddr);
AddInterface(kPrivateAddr2);
@@ -685,7 +722,7 @@ TEST_F(PortAllocatorTest,
// Test that we should get a default private, STUN, TURN/UDP and TURN/TCP
// candidates when both TURN/UDP and TURN/TCP servers are specified.
-TEST_F(PortAllocatorTest, TestDisableAdapterEnumerationBehindNatWithTcp) {
+TEST_F(BasicPortAllocatorTest, TestDisableAdapterEnumerationBehindNatWithTcp) {
turn_server_.AddInternalSocket(kTurnTcpIntAddr, cricket::PROTO_TCP);
AddInterface(kPrivateAddr);
ResetWithStunServerAndNat(kStunAddr);
@@ -699,7 +736,8 @@ TEST_F(PortAllocatorTest, TestDisableAdapterEnumerationBehindNatWithTcp) {
// Test that when adapter enumeration is disabled, for endpoints without
// STUN/TURN specified, a default private candidate is still generated.
-TEST_F(PortAllocatorTest, TestDisableAdapterEnumerationWithoutNatOrServers) {
+TEST_F(BasicPortAllocatorTest,
+ TestDisableAdapterEnumerationWithoutNatOrServers) {
ResetWithNoServersOrNat();
// Expect to see 2 ports: STUN and TCP ports, one default private candidate.
CheckDisableAdapterEnumeration(2U, kPrivateAddr.ipaddr(), rtc::IPAddress(),
@@ -709,7 +747,7 @@ TEST_F(PortAllocatorTest, TestDisableAdapterEnumerationWithoutNatOrServers) {
// Test that when adapter enumeration is disabled, with
// PORTALLOCATOR_DISABLE_LOCALHOST_CANDIDATE specified, for endpoints not behind
// a NAT, there is no local candidate.
-TEST_F(PortAllocatorTest,
+TEST_F(BasicPortAllocatorTest,
TestDisableAdapterEnumerationWithoutNatLocalhostCandidateDisabled) {
ResetWithStunServerNoNat(kStunAddr);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
@@ -726,7 +764,7 @@ TEST_F(PortAllocatorTest,
// (kClientAddr) which was discovered when sending STUN requests, will become
// the srflx addresses.
TEST_F(
- PortAllocatorTest,
+ BasicPortAllocatorTest,
TestDisableAdapterEnumerationWithoutNatLocalhostCandidateDisabledWithDifferentDefaultRoute) {
ResetWithStunServerNoNat(kStunAddr);
AddInterfaceAsDefaultRoute(kClientAddr);
@@ -741,7 +779,7 @@ TEST_F(
// Test that when adapter enumeration is disabled, with
// PORTALLOCATOR_DISABLE_LOCALHOST_CANDIDATE specified, for endpoints behind a
// NAT, there is only one STUN candidate.
-TEST_F(PortAllocatorTest,
+TEST_F(BasicPortAllocatorTest,
TestDisableAdapterEnumerationWithNatLocalhostCandidateDisabled) {
ResetWithStunServerAndNat(kStunAddr);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
@@ -753,7 +791,7 @@ TEST_F(PortAllocatorTest,
// Test that we disable relay over UDP, and only TCP is used when connecting to
// the relay server.
-TEST_F(PortAllocatorTest, TestDisableUdpTurn) {
+TEST_F(BasicPortAllocatorTest, TestDisableUdpTurn) {
turn_server_.AddInternalSocket(kTurnTcpIntAddr, cricket::PROTO_TCP);
AddInterface(kClientAddr);
ResetWithStunServerAndNat(kStunAddr);
@@ -787,7 +825,7 @@ TEST_F(PortAllocatorTest, TestDisableUdpTurn) {
// Test that we can get OnCandidatesAllocationDone callback when all the ports
// are disabled.
-TEST_F(PortAllocatorTest, TestDisableAllPorts) {
+TEST_F(BasicPortAllocatorTest, TestDisableAllPorts) {
AddInterface(kClientAddr);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
session_->set_flags(cricket::PORTALLOCATOR_DISABLE_UDP |
@@ -801,7 +839,7 @@ TEST_F(PortAllocatorTest, TestDisableAllPorts) {
}
// Test that we don't crash or malfunction if we can't create UDP sockets.
-TEST_F(PortAllocatorTest, TestGetAllPortsNoUdpSockets) {
+TEST_F(BasicPortAllocatorTest, TestGetAllPortsNoUdpSockets) {
AddInterface(kClientAddr);
fss_->set_udp_sockets_enabled(false);
EXPECT_TRUE(CreateSession(1));
@@ -809,25 +847,29 @@ TEST_F(PortAllocatorTest, TestGetAllPortsNoUdpSockets) {
ASSERT_EQ_WAIT(5U, candidates_.size(), kDefaultAllocationTimeout);
EXPECT_EQ(2U, ports_.size());
EXPECT_PRED5(CheckCandidate, candidates_[0],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp", kRelayUdpIntAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
+ kRelayUdpIntAddr);
EXPECT_PRED5(CheckCandidate, candidates_[1],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp", kRelayUdpExtAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
+ kRelayUdpExtAddr);
EXPECT_PRED5(CheckCandidate, candidates_[2],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "tcp", kRelayTcpIntAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "tcp",
+ kRelayTcpIntAddr);
EXPECT_PRED5(CheckCandidate, candidates_[3],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "tcp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "tcp",
+ kClientAddr);
EXPECT_PRED5(CheckCandidate, candidates_[4],
- cricket::ICE_CANDIDATE_COMPONENT_RTP,
- "relay", "ssltcp", kRelaySslTcpIntAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "ssltcp",
+ kRelaySslTcpIntAddr);
EXPECT_TRUE(candidate_allocation_done_);
}
-#endif // if !defined(ADDRESS_SANITIZER)
+#endif // if !defined(ADDRESS_SANITIZER)
// Test that we don't crash or malfunction if we can't create UDP sockets or
// listen on TCP sockets. We still give out a local TCP address, since
// apparently this is needed for the remote side to accept our connection.
-TEST_F(PortAllocatorTest, TestGetAllPortsNoUdpSocketsNoTcpListen) {
+TEST_F(BasicPortAllocatorTest, TestGetAllPortsNoUdpSocketsNoTcpListen) {
AddInterface(kClientAddr);
fss_->set_udp_sockets_enabled(false);
fss_->set_tcp_listen_enabled(false);
@@ -835,35 +877,34 @@ TEST_F(PortAllocatorTest, TestGetAllPortsNoUdpSocketsNoTcpListen) {
session_->StartGettingPorts();
ASSERT_EQ_WAIT(5U, candidates_.size(), kDefaultAllocationTimeout);
EXPECT_EQ(2U, ports_.size());
- EXPECT_PRED5(CheckCandidate, candidates_[0],
- 1, "relay", "udp", kRelayUdpIntAddr);
- EXPECT_PRED5(CheckCandidate, candidates_[1],
- 1, "relay", "udp", kRelayUdpExtAddr);
- EXPECT_PRED5(CheckCandidate, candidates_[2],
- 1, "relay", "tcp", kRelayTcpIntAddr);
- EXPECT_PRED5(CheckCandidate, candidates_[3],
- 1, "local", "tcp", kClientAddr);
- EXPECT_PRED5(CheckCandidate, candidates_[4],
- 1, "relay", "ssltcp", kRelaySslTcpIntAddr);
+ EXPECT_PRED5(CheckCandidate, candidates_[0], 1, "relay", "udp",
+ kRelayUdpIntAddr);
+ EXPECT_PRED5(CheckCandidate, candidates_[1], 1, "relay", "udp",
+ kRelayUdpExtAddr);
+ EXPECT_PRED5(CheckCandidate, candidates_[2], 1, "relay", "tcp",
+ kRelayTcpIntAddr);
+ EXPECT_PRED5(CheckCandidate, candidates_[3], 1, "local", "tcp", kClientAddr);
+ EXPECT_PRED5(CheckCandidate, candidates_[4], 1, "relay", "ssltcp",
+ kRelaySslTcpIntAddr);
EXPECT_TRUE(candidate_allocation_done_);
}
// Test that we don't crash or malfunction if we can't create any sockets.
-// TODO: Find a way to exit early here.
-TEST_F(PortAllocatorTest, TestGetAllPortsNoSockets) {
+// TODO(deadbeef): Find a way to exit early here.
+TEST_F(BasicPortAllocatorTest, TestGetAllPortsNoSockets) {
AddInterface(kClientAddr);
fss_->set_tcp_sockets_enabled(false);
fss_->set_udp_sockets_enabled(false);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
session_->StartGettingPorts();
WAIT(candidates_.size() > 0, 2000);
- // TODO - Check candidate_allocation_done signal.
+ // TODO(deadbeef): Check candidate_allocation_done signal.
// In case of Relay, ports creation will succeed but sockets will fail.
// There is no error reporting from RelayEntry to handle this failure.
}
// Testing STUN timeout.
-TEST_F(PortAllocatorTest, TestGetAllPortsNoUdpAllowed) {
+TEST_F(BasicPortAllocatorTest, TestGetAllPortsNoUdpAllowed) {
fss_->AddRule(false, rtc::FP_UDP, rtc::FD_ANY, kClientAddr);
AddInterface(kClientAddr);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
@@ -871,27 +912,32 @@ TEST_F(PortAllocatorTest, TestGetAllPortsNoUdpAllowed) {
EXPECT_EQ_WAIT(2U, candidates_.size(), kDefaultAllocationTimeout);
EXPECT_EQ(2U, ports_.size());
EXPECT_PRED5(CheckCandidate, candidates_[0],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
+ kClientAddr);
EXPECT_PRED5(CheckCandidate, candidates_[1],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "tcp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "tcp",
+ kClientAddr);
// RelayPort connection timeout is 3sec. TCP connection with RelayServer
// will be tried after 3 seconds.
EXPECT_EQ_WAIT(6U, candidates_.size(), 4000);
EXPECT_EQ(3U, ports_.size());
EXPECT_PRED5(CheckCandidate, candidates_[2],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp", kRelayUdpIntAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
+ kRelayUdpIntAddr);
EXPECT_PRED5(CheckCandidate, candidates_[3],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "tcp", kRelayTcpIntAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "tcp",
+ kRelayTcpIntAddr);
EXPECT_PRED5(CheckCandidate, candidates_[4],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "ssltcp",
- kRelaySslTcpIntAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "ssltcp",
+ kRelaySslTcpIntAddr);
EXPECT_PRED5(CheckCandidate, candidates_[5],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp", kRelayUdpExtAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
+ kRelayUdpExtAddr);
// Stun Timeout is 9sec.
EXPECT_TRUE_WAIT(candidate_allocation_done_, 9000);
}
-TEST_F(PortAllocatorTest, TestCandidatePriorityOfMultipleInterfaces) {
+TEST_F(BasicPortAllocatorTest, TestCandidatePriorityOfMultipleInterfaces) {
AddInterface(kClientAddr);
AddInterface(kClientAddr2);
// Allocating only host UDP ports. This is done purely for testing
@@ -909,14 +955,14 @@ TEST_F(PortAllocatorTest, TestCandidatePriorityOfMultipleInterfaces) {
}
// Test to verify ICE restart process.
-TEST_F(PortAllocatorTest, TestGetAllPortsRestarts) {
+TEST_F(BasicPortAllocatorTest, TestGetAllPortsRestarts) {
AddInterface(kClientAddr);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
session_->StartGettingPorts();
EXPECT_EQ_WAIT(7U, candidates_.size(), kDefaultAllocationTimeout);
EXPECT_EQ(4U, ports_.size());
EXPECT_TRUE(candidate_allocation_done_);
- // TODO - Extend this to verify ICE restart.
+ // TODO(deadbeef): Extend this to verify ICE restart.
}
// Test ICE candidate filter mechanism with options Relay/Host/Reflexive.
@@ -924,7 +970,7 @@ TEST_F(PortAllocatorTest, TestGetAllPortsRestarts) {
// relay (i.e. IceTransportsType is relay), the raddr is an empty
// address with the correct family. This is to prevent any local
// reflective address leakage in the sdp line.
-TEST_F(PortAllocatorTest, TestCandidateFilterWithRelayOnly) {
+TEST_F(BasicPortAllocatorTest, TestCandidateFilterWithRelayOnly) {
AddInterface(kClientAddr);
// GTURN is not configured here.
ResetWithTurnServersNoNat(kTurnUdpIntAddr, rtc::SocketAddress());
@@ -932,11 +978,8 @@ TEST_F(PortAllocatorTest, TestCandidateFilterWithRelayOnly) {
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
session_->StartGettingPorts();
EXPECT_TRUE_WAIT(candidate_allocation_done_, kDefaultAllocationTimeout);
- EXPECT_PRED5(CheckCandidate,
- candidates_[0],
- cricket::ICE_CANDIDATE_COMPONENT_RTP,
- "relay",
- "udp",
+ EXPECT_PRED5(CheckCandidate, candidates_[0],
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0));
EXPECT_EQ(1U, candidates_.size());
@@ -949,22 +992,22 @@ TEST_F(PortAllocatorTest, TestCandidateFilterWithRelayOnly) {
}
}
-TEST_F(PortAllocatorTest, TestCandidateFilterWithHostOnly) {
+TEST_F(BasicPortAllocatorTest, TestCandidateFilterWithHostOnly) {
AddInterface(kClientAddr);
allocator().set_flags(cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET);
allocator().set_candidate_filter(cricket::CF_HOST);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
session_->StartGettingPorts();
EXPECT_TRUE_WAIT(candidate_allocation_done_, kDefaultAllocationTimeout);
- EXPECT_EQ(2U, candidates_.size()); // Host UDP/TCP candidates only.
- EXPECT_EQ(2U, ports_.size()); // UDP/TCP ports only.
+ EXPECT_EQ(2U, candidates_.size()); // Host UDP/TCP candidates only.
+ EXPECT_EQ(2U, ports_.size()); // UDP/TCP ports only.
for (size_t i = 0; i < candidates_.size(); ++i) {
EXPECT_EQ(std::string(cricket::LOCAL_PORT_TYPE), candidates_[i].type());
}
}
// Host is behind the NAT.
-TEST_F(PortAllocatorTest, TestCandidateFilterWithReflexiveOnly) {
+TEST_F(BasicPortAllocatorTest, TestCandidateFilterWithReflexiveOnly) {
AddInterface(kPrivateAddr);
ResetWithStunServerAndNat(kStunAddr);
@@ -975,8 +1018,8 @@ TEST_F(PortAllocatorTest, TestCandidateFilterWithReflexiveOnly) {
EXPECT_TRUE_WAIT(candidate_allocation_done_, kDefaultAllocationTimeout);
// Host is behind NAT, no private address will be exposed. Hence only UDP
// port with STUN candidate will be sent outside.
- EXPECT_EQ(1U, candidates_.size()); // Only STUN candidate.
- EXPECT_EQ(1U, ports_.size()); // Only UDP port will be in ready state.
+ EXPECT_EQ(1U, candidates_.size()); // Only STUN candidate.
+ EXPECT_EQ(1U, ports_.size()); // Only UDP port will be in ready state.
for (size_t i = 0; i < candidates_.size(); ++i) {
EXPECT_EQ(std::string(cricket::STUN_PORT_TYPE), candidates_[i].type());
EXPECT_EQ(
@@ -986,7 +1029,7 @@ TEST_F(PortAllocatorTest, TestCandidateFilterWithReflexiveOnly) {
}
// Host is not behind the NAT.
-TEST_F(PortAllocatorTest, TestCandidateFilterWithReflexiveOnlyAndNoNAT) {
+TEST_F(BasicPortAllocatorTest, TestCandidateFilterWithReflexiveOnlyAndNoNAT) {
AddInterface(kClientAddr);
allocator().set_flags(cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET);
allocator().set_candidate_filter(cricket::CF_REFLEXIVE);
@@ -994,7 +1037,7 @@ TEST_F(PortAllocatorTest, TestCandidateFilterWithReflexiveOnlyAndNoNAT) {
session_->StartGettingPorts();
EXPECT_TRUE_WAIT(candidate_allocation_done_, kDefaultAllocationTimeout);
// Host has a public address, both UDP and TCP candidates will be exposed.
- EXPECT_EQ(2U, candidates_.size()); // Local UDP + TCP candidate.
+ EXPECT_EQ(2U, candidates_.size()); // Local UDP + TCP candidate.
EXPECT_EQ(2U, ports_.size()); // UDP and TCP ports will be in ready state.
for (size_t i = 0; i < candidates_.size(); ++i) {
EXPECT_EQ(std::string(cricket::LOCAL_PORT_TYPE), candidates_[i].type());
@@ -1002,17 +1045,20 @@ TEST_F(PortAllocatorTest, TestCandidateFilterWithReflexiveOnlyAndNoNAT) {
}
// Test that we get the same ufrag and pwd for all candidates.
-TEST_F(PortAllocatorTest, TestEnableSharedUfrag) {
+TEST_F(BasicPortAllocatorTest, TestEnableSharedUfrag) {
AddInterface(kClientAddr);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
session_->StartGettingPorts();
ASSERT_EQ_WAIT(7U, candidates_.size(), kDefaultAllocationTimeout);
EXPECT_PRED5(CheckCandidate, candidates_[0],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
+ kClientAddr);
EXPECT_PRED5(CheckCandidate, candidates_[1],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "stun", "udp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "stun", "udp",
+ kClientAddr);
EXPECT_PRED5(CheckCandidate, candidates_[5],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "tcp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "tcp",
+ kClientAddr);
EXPECT_EQ(4U, ports_.size());
EXPECT_EQ(kIceUfrag0, candidates_[0].username());
EXPECT_EQ(kIceUfrag0, candidates_[1].username());
@@ -1026,7 +1072,7 @@ TEST_F(PortAllocatorTest, TestEnableSharedUfrag) {
// is allocated for udp and stun. Also verify there is only one candidate
// (local) if stun candidate is same as local candidate, which will be the case
// in a public network like the below test.
-TEST_F(PortAllocatorTest, TestSharedSocketWithoutNat) {
+TEST_F(BasicPortAllocatorTest, TestSharedSocketWithoutNat) {
AddInterface(kClientAddr);
allocator_->set_flags(allocator().flags() |
cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET);
@@ -1035,14 +1081,15 @@ TEST_F(PortAllocatorTest, TestSharedSocketWithoutNat) {
ASSERT_EQ_WAIT(6U, candidates_.size(), kDefaultAllocationTimeout);
EXPECT_EQ(3U, ports_.size());
EXPECT_PRED5(CheckCandidate, candidates_[0],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
+ kClientAddr);
EXPECT_TRUE_WAIT(candidate_allocation_done_, kDefaultAllocationTimeout);
}
// Test that when PORTALLOCATOR_ENABLE_SHARED_SOCKET is enabled only one port
// is allocated for udp and stun. In this test we should expect both stun and
// local candidates as client behind a nat.
-TEST_F(PortAllocatorTest, TestSharedSocketWithNat) {
+TEST_F(BasicPortAllocatorTest, TestSharedSocketWithNat) {
AddInterface(kClientAddr);
ResetWithStunServerAndNat(kStunAddr);
@@ -1053,16 +1100,17 @@ TEST_F(PortAllocatorTest, TestSharedSocketWithNat) {
ASSERT_EQ_WAIT(3U, candidates_.size(), kDefaultAllocationTimeout);
ASSERT_EQ(2U, ports_.size());
EXPECT_PRED5(CheckCandidate, candidates_[0],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
+ kClientAddr);
EXPECT_PRED5(CheckCandidate, candidates_[1],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "stun", "udp",
- rtc::SocketAddress(kNatUdpAddr.ipaddr(), 0));
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "stun", "udp",
+ rtc::SocketAddress(kNatUdpAddr.ipaddr(), 0));
EXPECT_TRUE_WAIT(candidate_allocation_done_, kDefaultAllocationTimeout);
EXPECT_EQ(3U, candidates_.size());
}
// Test TURN port in shared socket mode with UDP and TCP TURN server addresses.
-TEST_F(PortAllocatorTest, TestSharedSocketWithoutNatUsingTurn) {
+TEST_F(BasicPortAllocatorTest, TestSharedSocketWithoutNatUsingTurn) {
turn_server_.AddInternalSocket(kTurnTcpIntAddr, cricket::PROTO_TCP);
AddInterface(kClientAddr);
allocator_.reset(new cricket::BasicPortAllocator(&network_manager_));
@@ -1080,20 +1128,21 @@ TEST_F(PortAllocatorTest, TestSharedSocketWithoutNatUsingTurn) {
ASSERT_EQ_WAIT(3U, candidates_.size(), kDefaultAllocationTimeout);
ASSERT_EQ(3U, ports_.size());
EXPECT_PRED5(CheckCandidate, candidates_[0],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
+ kClientAddr);
EXPECT_PRED5(CheckCandidate, candidates_[1],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
- rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0));
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
+ rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0));
EXPECT_PRED5(CheckCandidate, candidates_[2],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
- rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0));
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
+ rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0));
EXPECT_TRUE_WAIT(candidate_allocation_done_, kDefaultAllocationTimeout);
EXPECT_EQ(3U, candidates_.size());
}
// Testing DNS resolve for the TURN server, this will test AllocationSequence
// handling the unresolved address signal from TurnPort.
-TEST_F(PortAllocatorTest, TestSharedSocketWithServerAddressResolve) {
+TEST_F(BasicPortAllocatorTest, TestSharedSocketWithServerAddressResolve) {
turn_server_.AddInternalSocket(rtc::SocketAddress("127.0.0.1", 3478),
cricket::PROTO_UDP);
AddInterface(kClientAddr);
@@ -1119,7 +1168,7 @@ TEST_F(PortAllocatorTest, TestSharedSocketWithServerAddressResolve) {
// Test that when PORTALLOCATOR_ENABLE_SHARED_SOCKET is enabled only one port
// is allocated for udp/stun/turn. In this test we should expect all local,
// stun and turn candidates.
-TEST_F(PortAllocatorTest, TestSharedSocketWithNatUsingTurn) {
+TEST_F(BasicPortAllocatorTest, TestSharedSocketWithNatUsingTurn) {
AddInterface(kClientAddr);
ResetWithStunServerAndNat(kStunAddr);
@@ -1135,13 +1184,14 @@ TEST_F(PortAllocatorTest, TestSharedSocketWithNatUsingTurn) {
ASSERT_EQ_WAIT(3U, candidates_.size(), kDefaultAllocationTimeout);
ASSERT_EQ(2U, ports_.size());
EXPECT_PRED5(CheckCandidate, candidates_[0],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
+ kClientAddr);
EXPECT_PRED5(CheckCandidate, candidates_[1],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "stun", "udp",
- rtc::SocketAddress(kNatUdpAddr.ipaddr(), 0));
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "stun", "udp",
+ rtc::SocketAddress(kNatUdpAddr.ipaddr(), 0));
EXPECT_PRED5(CheckCandidate, candidates_[2],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
- rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0));
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
+ rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0));
EXPECT_TRUE_WAIT(candidate_allocation_done_, kDefaultAllocationTimeout);
EXPECT_EQ(3U, candidates_.size());
// Local port will be created first and then TURN port.
@@ -1152,7 +1202,7 @@ TEST_F(PortAllocatorTest, TestSharedSocketWithNatUsingTurn) {
// Test that when PORTALLOCATOR_ENABLE_SHARED_SOCKET is enabled and the TURN
// server is also used as the STUN server, we should get 'local', 'stun', and
// 'relay' candidates.
-TEST_F(PortAllocatorTest, TestSharedSocketWithNatUsingTurnAsStun) {
+TEST_F(BasicPortAllocatorTest, TestSharedSocketWithNatUsingTurnAsStun) {
AddInterface(kClientAddr);
// Use an empty SocketAddress to add a NAT without STUN server.
ResetWithStunServerAndNat(SocketAddress());
@@ -1172,13 +1222,14 @@ TEST_F(PortAllocatorTest, TestSharedSocketWithNatUsingTurnAsStun) {
ASSERT_EQ_WAIT(3U, candidates_.size(), kDefaultAllocationTimeout);
EXPECT_PRED5(CheckCandidate, candidates_[0],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
+ kClientAddr);
EXPECT_PRED5(CheckCandidate, candidates_[1],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "stun", "udp",
- rtc::SocketAddress(kNatUdpAddr.ipaddr(), 0));
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "stun", "udp",
+ rtc::SocketAddress(kNatUdpAddr.ipaddr(), 0));
EXPECT_PRED5(CheckCandidate, candidates_[2],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
- rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0));
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "relay", "udp",
+ rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0));
EXPECT_EQ(candidates_[2].related_address(), candidates_[1].address());
EXPECT_TRUE_WAIT(candidate_allocation_done_, kDefaultAllocationTimeout);
@@ -1191,7 +1242,7 @@ TEST_F(PortAllocatorTest, TestSharedSocketWithNatUsingTurnAsStun) {
// Test that when only a TCP TURN server is available, we do NOT use it as
// a UDP STUN server, as this could leak our IP address. Thus we should only
// expect two ports, a UDPPort and TurnPort.
-TEST_F(PortAllocatorTest, TestSharedSocketWithNatUsingTurnTcpOnly) {
+TEST_F(BasicPortAllocatorTest, TestSharedSocketWithNatUsingTurnTcpOnly) {
turn_server_.AddInternalSocket(kTurnTcpIntAddr, cricket::PROTO_TCP);
AddInterface(kClientAddr);
ResetWithStunServerAndNat(rtc::SocketAddress());
@@ -1223,7 +1274,7 @@ TEST_F(PortAllocatorTest, TestSharedSocketWithNatUsingTurnTcpOnly) {
// 'relay' candidates.
// TODO(deadbeef): Remove this test when support for non-shared socket mode
// is removed.
-TEST_F(PortAllocatorTest, TestNonSharedSocketWithNatUsingTurnAsStun) {
+TEST_F(BasicPortAllocatorTest, TestNonSharedSocketWithNatUsingTurnAsStun) {
AddInterface(kClientAddr);
// Use an empty SocketAddress to add a NAT without STUN server.
ResetWithStunServerAndNat(SocketAddress());
@@ -1259,7 +1310,7 @@ TEST_F(PortAllocatorTest, TestNonSharedSocketWithNatUsingTurnAsStun) {
// Test that even when both a STUN and TURN server are configured, the TURN
// server is used as a STUN server and we get a 'stun' candidate.
-TEST_F(PortAllocatorTest, TestSharedSocketWithNatUsingTurnAndStun) {
+TEST_F(BasicPortAllocatorTest, TestSharedSocketWithNatUsingTurnAndStun) {
AddInterface(kClientAddr);
// Configure with STUN server but destroy it, so we can ensure that it's
// the TURN server actually being used as a STUN server.
@@ -1293,7 +1344,7 @@ TEST_F(PortAllocatorTest, TestSharedSocketWithNatUsingTurnAndStun) {
// This test verifies when PORTALLOCATOR_ENABLE_SHARED_SOCKET flag is enabled
// and fail to generate STUN candidate, local UDP candidate is generated
// properly.
-TEST_F(PortAllocatorTest, TestSharedSocketNoUdpAllowed) {
+TEST_F(BasicPortAllocatorTest, TestSharedSocketNoUdpAllowed) {
allocator().set_flags(allocator().flags() |
cricket::PORTALLOCATOR_DISABLE_RELAY |
cricket::PORTALLOCATOR_DISABLE_TCP |
@@ -1305,7 +1356,8 @@ TEST_F(PortAllocatorTest, TestSharedSocketNoUdpAllowed) {
ASSERT_EQ_WAIT(1U, ports_.size(), kDefaultAllocationTimeout);
EXPECT_EQ(1U, candidates_.size());
EXPECT_PRED5(CheckCandidate, candidates_[0],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp", kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
+ kClientAddr);
// STUN timeout is 9sec. We need to wait to get candidate done signal.
EXPECT_TRUE_WAIT(candidate_allocation_done_, 10000);
EXPECT_EQ(1U, candidates_.size());
@@ -1314,7 +1366,7 @@ TEST_F(PortAllocatorTest, TestSharedSocketNoUdpAllowed) {
// Test that when the NetworkManager doesn't have permission to enumerate
// adapters, the PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION is specified
// automatically.
-TEST_F(PortAllocatorTest, TestNetworkPermissionBlocked) {
+TEST_F(BasicPortAllocatorTest, TestNetworkPermissionBlocked) {
network_manager_.set_default_local_addresses(kPrivateAddr.ipaddr(),
rtc::IPAddress());
network_manager_.set_enumeration_permission(
@@ -1334,12 +1386,12 @@ TEST_F(PortAllocatorTest, TestNetworkPermissionBlocked) {
EXPECT_PRED5(CheckCandidate, candidates_[0],
cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
kPrivateAddr);
- EXPECT_TRUE((session_->flags() &
- cricket::PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION) != 0);
+ EXPECT_NE(0U, session_->flags() &
+ cricket::PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION);
}
// This test verifies allocator can use IPv6 addresses along with IPv4.
-TEST_F(PortAllocatorTest, TestEnableIPv6Addresses) {
+TEST_F(BasicPortAllocatorTest, TestEnableIPv6Addresses) {
allocator().set_flags(allocator().flags() |
cricket::PORTALLOCATOR_DISABLE_RELAY |
cricket::PORTALLOCATOR_ENABLE_IPV6 |
@@ -1353,21 +1405,21 @@ TEST_F(PortAllocatorTest, TestEnableIPv6Addresses) {
EXPECT_EQ(4U, candidates_.size());
EXPECT_TRUE_WAIT(candidate_allocation_done_, kDefaultAllocationTimeout);
EXPECT_PRED5(CheckCandidate, candidates_[0],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
- kClientIPv6Addr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
+ kClientIPv6Addr);
EXPECT_PRED5(CheckCandidate, candidates_[1],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
- kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "udp",
+ kClientAddr);
EXPECT_PRED5(CheckCandidate, candidates_[2],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "tcp",
- kClientIPv6Addr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "tcp",
+ kClientIPv6Addr);
EXPECT_PRED5(CheckCandidate, candidates_[3],
- cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "tcp",
- kClientAddr);
+ cricket::ICE_CANDIDATE_COMPONENT_RTP, "local", "tcp",
+ kClientAddr);
EXPECT_EQ(4U, candidates_.size());
}
-TEST_F(PortAllocatorTest, TestStopGettingPorts) {
+TEST_F(BasicPortAllocatorTest, TestStopGettingPorts) {
AddInterface(kClientAddr);
allocator_->set_step_delay(cricket::kDefaultStepDelay);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
@@ -1388,7 +1440,7 @@ TEST_F(PortAllocatorTest, TestStopGettingPorts) {
EXPECT_EQ(0U, ports_.size());
}
-TEST_F(PortAllocatorTest, TestClearGettingPorts) {
+TEST_F(BasicPortAllocatorTest, TestClearGettingPorts) {
AddInterface(kClientAddr);
allocator_->set_step_delay(cricket::kDefaultStepDelay);
EXPECT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP));
@@ -1408,3 +1460,41 @@ TEST_F(PortAllocatorTest, TestClearGettingPorts) {
ASSERT_EQ_WAIT(2U, candidates_.size(), 1000);
EXPECT_EQ(2U, ports_.size());
}
+
+// Test that the ports and candidates are updated with new ufrag/pwd/etc. when
+// a pooled session is taken out of the pool.
+TEST_F(BasicPortAllocatorTest, TestTransportInformationUpdated) {
+ AddInterface(kClientAddr);
+ int pool_size = 1;
+ allocator_->SetConfiguration(allocator_->stun_servers(),
+ allocator_->turn_servers(), pool_size);
+ const cricket::PortAllocatorSession* peeked_session =
+ allocator_->GetPooledSession();
+ ASSERT_NE(nullptr, peeked_session);
+ EXPECT_EQ_WAIT(true, peeked_session->CandidatesAllocationDone(),
+ kDefaultAllocationTimeout);
+ // Expect that when TakePooledSession is called,
+ // UpdateTransportInformationInternal will be called and the
+ // BasicPortAllocatorSession will update the ufrag/pwd of ports and
+ // candidates.
+ session_ =
+ allocator_->TakePooledSession(kContentName, 1, kIceUfrag0, kIcePwd0);
+ ASSERT_NE(nullptr, session_.get());
+ auto ready_ports = session_->ReadyPorts();
+ auto candidates = session_->ReadyCandidates();
+ EXPECT_FALSE(ready_ports.empty());
+ EXPECT_FALSE(candidates.empty());
+ for (const cricket::PortInterface* port_interface : ready_ports) {
+ const cricket::Port* port =
+ static_cast<const cricket::Port*>(port_interface);
+ EXPECT_EQ(kContentName, port->content_name());
+ EXPECT_EQ(1, port->component());
+ EXPECT_EQ(kIceUfrag0, port->username_fragment());
+ EXPECT_EQ(kIcePwd0, port->password());
+ }
+ for (const cricket::Candidate& candidate : candidates) {
+ EXPECT_EQ(1, candidate.component());
+ EXPECT_EQ(kIceUfrag0, candidate.username());
+ EXPECT_EQ(kIcePwd0, candidate.password());
+ }
+}
diff --git a/chromium/third_party/webrtc/p2p/p2p.gyp b/chromium/third_party/webrtc/p2p/p2p.gyp
index 5c9a575f8f2..7ec1814bc6b 100644
--- a/chromium/third_party/webrtc/p2p/p2p.gyp
+++ b/chromium/third_party/webrtc/p2p/p2p.gyp
@@ -16,9 +16,6 @@
'<(webrtc_root)/base/base.gyp:rtc_base',
'<(webrtc_root)/common.gyp:webrtc_common',
],
- 'cflags_cc!': [
- '-Wnon-virtual-dtor',
- ],
'sources': [
'base/asyncstuntcpsocket.cc',
'base/asyncstuntcpsocket.h',
@@ -84,9 +81,6 @@
'client/socketmonitor.h',
],
'direct_dependent_settings': {
- 'cflags_cc!': [
- '-Wnon-virtual-dtor',
- ],
'defines': [
'FEATURE_ENABLE_VOICEMAIL',
],
@@ -107,6 +101,8 @@
'quic/quicconnectionhelper.h',
'quic/quicsession.cc',
'quic/quicsession.h',
+ 'quic/quictransport.cc',
+ 'quic/quictransport.h',
'quic/quictransportchannel.cc',
'quic/quictransportchannel.h',
'quic/reliablequicstream.cc',
@@ -125,9 +121,6 @@
'<(webrtc_root)/base/base.gyp:rtc_base',
'<(webrtc_root)/common.gyp:webrtc_common',
],
- 'cflags_cc!': [
- '-Wnon-virtual-dtor',
- ],
'sources': [
'stunprober/stunprober.cc',
],
@@ -139,9 +132,6 @@
'libstunprober',
'rtc_p2p'
],
- 'cflags_cc!': [
- '-Wnon-virtual-dtor',
- ],
'sources': [
'stunprober/main.cc',
],
@@ -156,9 +146,11 @@
'direct_dependent_settings': {
'sources': [
'base/dtlstransportchannel_unittest.cc',
+ 'base/fakeportallocator.h',
'base/faketransportcontroller.h',
'base/p2ptransportchannel_unittest.cc',
'base/port_unittest.cc',
+ 'base/portallocator_unittest.cc',
'base/pseudotcp_unittest.cc',
'base/relayport_unittest.cc',
'base/relayserver_unittest.cc',
@@ -172,9 +164,9 @@
'base/transport_unittest.cc',
'base/transportcontroller_unittest.cc',
'base/transportdescriptionfactory_unittest.cc',
+ 'base/tcpport_unittest.cc',
'base/turnport_unittest.cc',
- 'client/fakeportallocator.h',
- 'client/portallocator_unittest.cc',
+ 'client/basicportallocator_unittest.cc',
'stunprober/stunprober_unittest.cc',
],
'conditions': [
@@ -182,6 +174,7 @@
'sources': [
'quic/quicconnectionhelper_unittest.cc',
'quic/quicsession_unittest.cc',
+ 'quic/quictransport_unittest.cc',
'quic/quictransportchannel_unittest.cc',
'quic/reliablequicstream_unittest.cc',
],
diff --git a/chromium/third_party/webrtc/p2p/quic/quicconnectionhelper_unittest.cc b/chromium/third_party/webrtc/p2p/quic/quicconnectionhelper_unittest.cc
index 1a7313c2457..5f16b52c62e 100644
--- a/chromium/third_party/webrtc/p2p/quic/quicconnectionhelper_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/quic/quicconnectionhelper_unittest.cc
@@ -8,11 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/p2p/quic/quicconnectionhelper.h"
#include "net/quic/quic_time.h"
#include "webrtc/base/gunit.h"
-#include "webrtc/base/scoped_ptr.h"
using cricket::QuicAlarm;
using cricket::QuicConnectionHelper;
@@ -52,10 +53,7 @@ class MockAlarmDelegate : public QuicAlarm::Delegate {
public:
MockAlarmDelegate() : fired_(false) {}
- QuicTime OnAlarm() override {
- fired_ = true;
- return QuicTime::Zero();
- }
+ void OnAlarm() override { fired_ = true; }
bool fired() const { return fired_; }
void Clear() { fired_ = false; }
@@ -89,7 +87,7 @@ class QuicAlarmTest : public ::testing::Test {
// Used for setting clock time relative to alarm.
MockClock clock_;
- rtc::scoped_ptr<QuicAlarm> alarm_;
+ std::unique_ptr<QuicAlarm> alarm_;
};
// Test that the alarm is fired.
diff --git a/chromium/third_party/webrtc/p2p/quic/quicsession.cc b/chromium/third_party/webrtc/p2p/quic/quicsession.cc
index 281af5e3a74..65ed1357545 100644
--- a/chromium/third_party/webrtc/p2p/quic/quicsession.cc
+++ b/chromium/third_party/webrtc/p2p/quic/quicsession.cc
@@ -20,7 +20,11 @@
namespace cricket {
-QuicSession::QuicSession(rtc::scoped_ptr<net::QuicConnection> connection,
+// Default priority for incoming QUIC streams.
+// TODO(mikescarlett): Determine if this value is correct.
+static const net::SpdyPriority kDefaultPriority = 3;
+
+QuicSession::QuicSession(std::unique_ptr<net::QuicConnection> connection,
const net::QuicConfig& config)
: net::QuicSession(connection.release(), config) {}
@@ -46,7 +50,7 @@ void QuicSession::SetCryptoStream(net::QuicCryptoStream* crypto_stream) {
bool QuicSession::ExportKeyingMaterial(base::StringPiece label,
base::StringPiece context,
size_t result_len,
- string* result) {
+ std::string* result) {
return crypto_stream_->ExportKeyingMaterial(label, context, result_len,
result);
}
@@ -62,9 +66,19 @@ void QuicSession::OnCryptoHandshakeEvent(CryptoHandshakeEvent event) {
}
}
+void QuicSession::CloseStream(net::QuicStreamId stream_id) {
+ if (IsClosedStream(stream_id)) {
+ // When CloseStream has been called recursively (via
+ // ReliableQuicStream::OnClose), the stream is already closed so return.
+ return;
+ }
+ write_blocked_streams()->UnregisterStream(stream_id);
+ net::QuicSession::CloseStream(stream_id);
+}
+
ReliableQuicStream* QuicSession::CreateIncomingDynamicStream(
net::QuicStreamId id) {
- ReliableQuicStream* stream = CreateDataStream(id);
+ ReliableQuicStream* stream = CreateDataStream(id, kDefaultPriority);
if (stream) {
SignalIncomingStream(stream);
}
@@ -73,32 +87,39 @@ ReliableQuicStream* QuicSession::CreateIncomingDynamicStream(
ReliableQuicStream* QuicSession::CreateOutgoingDynamicStream(
net::SpdyPriority priority) {
- ReliableQuicStream* stream = CreateDataStream(GetNextOutgoingStreamId());
- if (stream) {
- ActivateStream(stream);
- }
- return stream;
+ return CreateDataStream(GetNextOutgoingStreamId(), priority);
}
-ReliableQuicStream* QuicSession::CreateDataStream(net::QuicStreamId id) {
+ReliableQuicStream* QuicSession::CreateDataStream(net::QuicStreamId id,
+ net::SpdyPriority priority) {
if (crypto_stream_ == nullptr || !crypto_stream_->encryption_established()) {
// Encryption not active so no stream created
return nullptr;
}
- return new ReliableQuicStream(id, this);
+ ReliableQuicStream* stream = new ReliableQuicStream(id, this);
+ if (stream) {
+ // Make QuicSession take ownership of the stream.
+ ActivateStream(stream);
+ // Register the stream to the QuicWriteBlockedList. |priority| is clamped
+ // between 0 and 7, with 0 being the highest priority and 7 the lowest
+ // priority.
+ write_blocked_streams()->RegisterStream(stream->id(), priority);
+ }
+ return stream;
}
void QuicSession::OnConnectionClosed(net::QuicErrorCode error,
+ const std::string& error_details,
net::ConnectionCloseSource source) {
- net::QuicSession::OnConnectionClosed(error, source);
+ net::QuicSession::OnConnectionClosed(error, error_details, source);
SignalConnectionClosed(error,
source == net::ConnectionCloseSource::FROM_PEER);
}
bool QuicSession::OnReadPacket(const char* data, size_t data_len) {
- net::QuicEncryptedPacket packet(data, data_len);
- connection()->ProcessUdpPacket(connection()->self_address(),
- connection()->peer_address(), packet);
+ net::QuicReceivedPacket packet(data, data_len, clock_.Now());
+ ProcessUdpPacket(connection()->self_address(), connection()->peer_address(),
+ packet);
return true;
}
diff --git a/chromium/third_party/webrtc/p2p/quic/quicsession.h b/chromium/third_party/webrtc/p2p/quic/quicsession.h
index e0ea296b59c..f8b27e055e5 100644
--- a/chromium/third_party/webrtc/p2p/quic/quicsession.h
+++ b/chromium/third_party/webrtc/p2p/quic/quicsession.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_P2P_QUIC_QUICSESSION_H_
#define WEBRTC_P2P_QUIC_QUICSESSION_H_
+#include <memory>
#include <string>
#include "net/quic/quic_crypto_client_stream.h"
@@ -29,7 +30,7 @@ namespace cricket {
// reading/writing of data using QUIC packets.
class QuicSession : public net::QuicSession, public sigslot::has_slots<> {
public:
- QuicSession(rtc::scoped_ptr<net::QuicConnection> connection,
+ QuicSession(std::unique_ptr<net::QuicConnection> connection,
const net::QuicConfig& config);
~QuicSession() override;
@@ -48,16 +49,18 @@ class QuicSession : public net::QuicSession, public sigslot::has_slots<> {
// QuicSession optional overrides.
void OnCryptoHandshakeEvent(CryptoHandshakeEvent event) override;
+ void CloseStream(net::QuicStreamId stream_id) override;
// QuicConnectionVisitorInterface overrides.
void OnConnectionClosed(net::QuicErrorCode error,
+ const std::string& error_details,
net::ConnectionCloseSource source) override;
// Exports keying material for SRTP.
bool ExportKeyingMaterial(base::StringPiece label,
base::StringPiece context,
size_t result_len,
- string* result);
+ std::string* result);
// Decrypts an incoming QUIC packet to a data stream.
bool OnReadPacket(const char* data, size_t data_len);
@@ -79,10 +82,12 @@ class QuicSession : public net::QuicSession, public sigslot::has_slots<> {
ReliableQuicStream* CreateIncomingDynamicStream(
net::QuicStreamId id) override;
- virtual ReliableQuicStream* CreateDataStream(net::QuicStreamId id);
+ virtual ReliableQuicStream* CreateDataStream(net::QuicStreamId id,
+ net::SpdyPriority priority);
private:
- rtc::scoped_ptr<net::QuicCryptoStream> crypto_stream_;
+ std::unique_ptr<net::QuicCryptoStream> crypto_stream_;
+ net::QuicClock clock_; // For recording packet receipt time
RTC_DISALLOW_COPY_AND_ASSIGN(QuicSession);
};
diff --git a/chromium/third_party/webrtc/p2p/quic/quicsession_unittest.cc b/chromium/third_party/webrtc/p2p/quic/quicsession_unittest.cc
index 2e2d6ae0a6a..2f3aaae3322 100644
--- a/chromium/third_party/webrtc/p2p/quic/quicsession_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/quic/quicsession_unittest.cc
@@ -10,6 +10,7 @@
#include "webrtc/p2p/quic/quicsession.h"
+#include <memory>
#include <string>
#include <vector>
@@ -36,6 +37,7 @@ using net::ProofVerifyContext;
using net::ProofVerifyDetails;
using net::QuicByteCount;
using net::QuicClock;
+using net::QuicCompressedCertsCache;
using net::QuicConfig;
using net::QuicConnection;
using net::QuicCryptoClientConfig;
@@ -111,13 +113,16 @@ class FakeProofVerifier : public net::ProofVerifier {
// ProofVerifier override
net::QuicAsyncStatus VerifyProof(
const std::string& hostname,
+ const uint16_t port,
const std::string& server_config,
+ net::QuicVersion quic_version,
+ base::StringPiece chlo_hash,
const std::vector<std::string>& certs,
const std::string& cert_sct,
const std::string& signature,
- const net::ProofVerifyContext* verify_context,
+ const ProofVerifyContext* context,
std::string* error_details,
- scoped_ptr<net::ProofVerifyDetails>* verify_details,
+ std::unique_ptr<net::ProofVerifyDetails>* verify_details,
net::ProofVerifierCallback* callback) override {
return success_ ? net::QUIC_SUCCESS : net::QUIC_FAILURE;
}
@@ -179,9 +184,9 @@ class FakeQuicPacketWriter : public QuicPacketWriter {
// Wrapper for QuicSession and transport channel that stores incoming data.
class QuicSessionForTest : public QuicSession {
public:
- QuicSessionForTest(rtc::scoped_ptr<net::QuicConnection> connection,
+ QuicSessionForTest(std::unique_ptr<net::QuicConnection> connection,
const net::QuicConfig& config,
- rtc::scoped_ptr<FakeTransportChannel> channel)
+ std::unique_ptr<FakeTransportChannel> channel)
: QuicSession(std::move(connection), config),
channel_(std::move(channel)) {
channel_->SignalReadPacket.connect(
@@ -219,7 +224,7 @@ class QuicSessionForTest : public QuicSession {
private:
// Transports QUIC packets to/from peer.
- rtc::scoped_ptr<FakeTransportChannel> channel_;
+ std::unique_ptr<FakeTransportChannel> channel_;
// Stores data received by peer once it is sent from the other peer.
std::string last_received_data_;
// Handles incoming streams from sender.
@@ -230,13 +235,16 @@ class QuicSessionForTest : public QuicSession {
class QuicSessionTest : public ::testing::Test,
public QuicCryptoClientStream::ProofHandler {
public:
- QuicSessionTest() : quic_helper_(rtc::Thread::Current()) {}
+ QuicSessionTest()
+ : quic_helper_(rtc::Thread::Current()),
+ quic_compressed_certs_cache_(
+ QuicCompressedCertsCache::kQuicCompressedCertsCacheSize) {}
// Instantiates |client_peer_| and |server_peer_|.
void CreateClientAndServerSessions();
- rtc::scoped_ptr<QuicSessionForTest> CreateSession(
- rtc::scoped_ptr<FakeTransportChannel> channel,
+ std::unique_ptr<QuicSessionForTest> CreateSession(
+ std::unique_ptr<FakeTransportChannel> channel,
Perspective perspective);
QuicCryptoClientStream* CreateCryptoClientStream(QuicSessionForTest* session,
@@ -244,7 +252,7 @@ class QuicSessionTest : public ::testing::Test,
QuicCryptoServerStream* CreateCryptoServerStream(QuicSessionForTest* session,
bool handshake_success);
- rtc::scoped_ptr<QuicConnection> CreateConnection(
+ std::unique_ptr<QuicConnection> CreateConnection(
FakeTransportChannel* channel,
Perspective perspective);
@@ -267,17 +275,18 @@ class QuicSessionTest : public ::testing::Test,
QuicConnectionHelper quic_helper_;
QuicConfig config_;
QuicClock clock_;
+ QuicCompressedCertsCache quic_compressed_certs_cache_;
- rtc::scoped_ptr<QuicSessionForTest> client_peer_;
- rtc::scoped_ptr<QuicSessionForTest> server_peer_;
+ std::unique_ptr<QuicSessionForTest> client_peer_;
+ std::unique_ptr<QuicSessionForTest> server_peer_;
};
// Initializes "client peer" who begins crypto handshake and "server peer" who
// establishes encryption with client.
void QuicSessionTest::CreateClientAndServerSessions() {
- rtc::scoped_ptr<FakeTransportChannel> channel1(
+ std::unique_ptr<FakeTransportChannel> channel1(
new FakeTransportChannel("channel1", 0));
- rtc::scoped_ptr<FakeTransportChannel> channel2(
+ std::unique_ptr<FakeTransportChannel> channel2(
new FakeTransportChannel("channel2", 0));
// Prevent channel1->OnReadPacket and channel2->OnReadPacket from calling
@@ -294,12 +303,12 @@ void QuicSessionTest::CreateClientAndServerSessions() {
server_peer_ = CreateSession(std::move(channel2), Perspective::IS_SERVER);
}
-rtc::scoped_ptr<QuicSessionForTest> QuicSessionTest::CreateSession(
- rtc::scoped_ptr<FakeTransportChannel> channel,
+std::unique_ptr<QuicSessionForTest> QuicSessionTest::CreateSession(
+ std::unique_ptr<FakeTransportChannel> channel,
Perspective perspective) {
- rtc::scoped_ptr<QuicConnection> quic_connection =
+ std::unique_ptr<QuicConnection> quic_connection =
CreateConnection(channel.get(), perspective);
- return rtc::scoped_ptr<QuicSessionForTest>(new QuicSessionForTest(
+ return std::unique_ptr<QuicSessionForTest>(new QuicSessionForTest(
std::move(quic_connection), config_, std::move(channel)));
}
@@ -323,10 +332,13 @@ QuicCryptoServerStream* QuicSessionTest::CreateCryptoServerStream(
QuicServerConfigProtobuf* primary_config = server_config->GenerateConfig(
QuicRandom::GetInstance(), &clock_, options);
server_config->AddConfig(primary_config, clock_.WallNow());
- return new QuicCryptoServerStream(server_config, session);
+ bool use_stateless_rejects_if_peer_supported = false;
+ return new QuicCryptoServerStream(
+ server_config, &quic_compressed_certs_cache_,
+ use_stateless_rejects_if_peer_supported, session);
}
-rtc::scoped_ptr<QuicConnection> QuicSessionTest::CreateConnection(
+std::unique_ptr<QuicConnection> QuicSessionTest::CreateConnection(
FakeTransportChannel* channel,
Perspective perspective) {
FakeQuicPacketWriter* writer = new FakeQuicPacketWriter(channel);
@@ -334,7 +346,7 @@ rtc::scoped_ptr<QuicConnection> QuicSessionTest::CreateConnection(
IPAddress ip(0, 0, 0, 0);
bool owns_writer = true;
- return rtc::scoped_ptr<QuicConnection>(new QuicConnection(
+ return std::unique_ptr<QuicConnection>(new QuicConnection(
0, net::IPEndPoint(ip, 0), &quic_helper_, writer, owns_writer,
perspective, net::QuicSupportedVersions()));
}
@@ -357,8 +369,8 @@ void QuicSessionTest::TestStreamConnection(QuicSessionForTest* from_session,
ASSERT_TRUE(from_session->IsEncryptionEstablished());
ASSERT_TRUE(to_session->IsEncryptionEstablished());
- string from_key;
- string to_key;
+ std::string from_key;
+ std::string to_key;
bool from_success = from_session->ExportKeyingMaterial(
kExporterLabel, kExporterContext, kExporterContextLen, &from_key);
@@ -450,3 +462,17 @@ TEST_F(QuicSessionTest, CannotCreateDataStreamBeforeHandshake) {
EXPECT_EQ(nullptr, server_peer_->CreateOutgoingDynamicStream(5));
EXPECT_EQ(nullptr, client_peer_->CreateOutgoingDynamicStream(5));
}
+
+// Test that closing a QUIC stream causes the QuicSession to remove it.
+TEST_F(QuicSessionTest, CloseQuicStream) {
+ CreateClientAndServerSessions();
+ StartHandshake(true, true);
+ ASSERT_TRUE_WAIT(client_peer_->IsCryptoHandshakeConfirmed() &&
+ server_peer_->IsCryptoHandshakeConfirmed(),
+ kTimeoutMs);
+ ReliableQuicStream* stream = client_peer_->CreateOutgoingDynamicStream(5);
+ ASSERT_NE(nullptr, stream);
+ EXPECT_FALSE(client_peer_->IsClosedStream(stream->id()));
+ stream->Close();
+ EXPECT_TRUE(client_peer_->IsClosedStream(stream->id()));
+}
diff --git a/chromium/third_party/webrtc/p2p/quic/quictransport.cc b/chromium/third_party/webrtc/p2p/quic/quictransport.cc
new file mode 100644
index 00000000000..51f9a2b0a3f
--- /dev/null
+++ b/chromium/third_party/webrtc/p2p/quic/quictransport.cc
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/p2p/quic/quictransport.h"
+
+#include "webrtc/p2p/base/p2ptransportchannel.h"
+
+namespace cricket {
+
+QuicTransport::QuicTransport(
+ const std::string& name,
+ PortAllocator* allocator,
+ const rtc::scoped_refptr<rtc::RTCCertificate>& certificate)
+ : Transport(name, allocator), local_certificate_(certificate) {}
+
+QuicTransport::~QuicTransport() {
+ DestroyAllChannels();
+}
+
+void QuicTransport::SetLocalCertificate(
+ const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) {
+ local_certificate_ = certificate;
+}
+bool QuicTransport::GetLocalCertificate(
+ rtc::scoped_refptr<rtc::RTCCertificate>* certificate) {
+ if (!local_certificate_) {
+ return false;
+ }
+ *certificate = local_certificate_;
+ return true;
+}
+
+bool QuicTransport::ApplyLocalTransportDescription(
+ TransportChannelImpl* channel,
+ std::string* error_desc) {
+ rtc::SSLFingerprint* local_fp =
+ local_description()->identity_fingerprint.get();
+ if (!VerifyCertificateFingerprint(local_certificate_.get(), local_fp,
+ error_desc)) {
+ return false;
+ }
+ if (!channel->SetLocalCertificate(local_certificate_)) {
+ return BadTransportDescription("Failed to set local identity.", error_desc);
+ }
+ return Transport::ApplyLocalTransportDescription(channel, error_desc);
+}
+
+bool QuicTransport::NegotiateTransportDescription(ContentAction action,
+ std::string* error_desc) {
+ if (!local_description() || !remote_description()) {
+ const std::string msg =
+ "Local and Remote description must be set before "
+ "transport descriptions are negotiated";
+ return BadTransportDescription(msg, error_desc);
+ }
+ rtc::SSLFingerprint* local_fp =
+ local_description()->identity_fingerprint.get();
+ rtc::SSLFingerprint* remote_fp =
+ remote_description()->identity_fingerprint.get();
+ if (!local_fp || !remote_fp) {
+ return BadTransportDescription("Fingerprints must be supplied for QUIC.",
+ error_desc);
+ }
+ remote_fingerprint_.reset(new rtc::SSLFingerprint(*remote_fp));
+ if (!NegotiateRole(action, &local_role_, error_desc)) {
+ return false;
+ }
+ // Now run the negotiation for the Transport class.
+ return Transport::NegotiateTransportDescription(action, error_desc);
+}
+
+QuicTransportChannel* QuicTransport::CreateTransportChannel(int component) {
+ P2PTransportChannel* ice_channel =
+ new P2PTransportChannel(name(), component, port_allocator());
+ return new QuicTransportChannel(ice_channel);
+}
+
+void QuicTransport::DestroyTransportChannel(TransportChannelImpl* channel) {
+ delete channel;
+}
+
+bool QuicTransport::GetSslRole(rtc::SSLRole* ssl_role) const {
+ ASSERT(ssl_role != NULL);
+ *ssl_role = local_role_;
+ return true;
+}
+
+bool QuicTransport::ApplyNegotiatedTransportDescription(
+ TransportChannelImpl* channel,
+ std::string* error_desc) {
+ // Set ssl role and remote fingerprint. These are required for QUIC setup.
+ if (!channel->SetSslRole(local_role_)) {
+ return BadTransportDescription("Failed to set ssl role for the channel.",
+ error_desc);
+ }
+ // Apply remote fingerprint.
+ if (!channel->SetRemoteFingerprint(
+ remote_fingerprint_->algorithm,
+ reinterpret_cast<const uint8_t*>(remote_fingerprint_->digest.data()),
+ remote_fingerprint_->digest.size())) {
+ return BadTransportDescription("Failed to apply remote fingerprint.",
+ error_desc);
+ }
+ return Transport::ApplyNegotiatedTransportDescription(channel, error_desc);
+}
+
+} // namespace cricket
diff --git a/chromium/third_party/webrtc/p2p/quic/quictransport.h b/chromium/third_party/webrtc/p2p/quic/quictransport.h
new file mode 100644
index 00000000000..14bd13f3b2b
--- /dev/null
+++ b/chromium/third_party/webrtc/p2p/quic/quictransport.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_P2P_QUIC_QUICTRANSPORT_H_
+#define WEBRTC_P2P_QUIC_QUICTRANSPORT_H_
+
+#include <string>
+#include <map>
+#include <memory>
+
+#include "webrtc/p2p/base/transport.h"
+#include "webrtc/p2p/quic/quictransportchannel.h"
+
+namespace cricket {
+
+class P2PTransportChannel;
+class PortAllocator;
+
+// TODO(mikescarlett): Refactor to avoid code duplication with DtlsTransport.
+class QuicTransport : public Transport {
+ public:
+ QuicTransport(const std::string& name,
+ PortAllocator* allocator,
+ const rtc::scoped_refptr<rtc::RTCCertificate>& certificate);
+
+ ~QuicTransport() override;
+
+ // Transport overrides.
+ void SetLocalCertificate(
+ const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) override;
+ bool GetLocalCertificate(
+ rtc::scoped_refptr<rtc::RTCCertificate>* certificate) override;
+ bool SetSslMaxProtocolVersion(rtc::SSLProtocolVersion version) override {
+ return true; // Not needed by QUIC
+ }
+ bool GetSslRole(rtc::SSLRole* ssl_role) const override;
+
+ protected:
+ // Transport overrides.
+ QuicTransportChannel* CreateTransportChannel(int component) override;
+ void DestroyTransportChannel(TransportChannelImpl* channel) override;
+ bool ApplyLocalTransportDescription(TransportChannelImpl* channel,
+ std::string* error_desc) override;
+ bool NegotiateTransportDescription(ContentAction action,
+ std::string* error_desc) override;
+ bool ApplyNegotiatedTransportDescription(TransportChannelImpl* channel,
+ std::string* error_desc) override;
+
+ private:
+ rtc::scoped_refptr<rtc::RTCCertificate> local_certificate_;
+ rtc::SSLRole local_role_ = rtc::SSL_CLIENT;
+ std::unique_ptr<rtc::SSLFingerprint> remote_fingerprint_;
+};
+
+} // namespace cricket
+
+#endif // WEBRTC_P2P_QUIC_QUICTRANSPORT_H_
diff --git a/chromium/third_party/webrtc/p2p/quic/quictransport_unittest.cc b/chromium/third_party/webrtc/p2p/quic/quictransport_unittest.cc
new file mode 100644
index 00000000000..1fd48f7ec41
--- /dev/null
+++ b/chromium/third_party/webrtc/p2p/quic/quictransport_unittest.cc
@@ -0,0 +1,160 @@
+/*
+ * Copyright 2016 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/p2p/quic/quictransport.h"
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "webrtc/base/gunit.h"
+#include "webrtc/base/rtccertificate.h"
+#include "webrtc/base/sslidentity.h"
+
+using cricket::TransportChannelImpl;
+using cricket::QuicTransport;
+using cricket::Transport;
+using cricket::TransportDescription;
+
+static const char kIceUfrag1[] = "TESTICEUFRAG0001";
+static const char kIcePwd1[] = "TESTICEPWD00000000000001";
+
+static const char kIceUfrag2[] = "TESTICEUFRAG0002";
+static const char kIcePwd2[] = "TESTICEPWD00000000000002";
+
+static rtc::scoped_refptr<rtc::RTCCertificate> CreateCertificate(
+ std::string name) {
+ return rtc::RTCCertificate::Create(std::unique_ptr<rtc::SSLIdentity>(
+ rtc::SSLIdentity::Generate(name, rtc::KT_DEFAULT)));
+}
+
+static std::unique_ptr<rtc::SSLFingerprint> CreateFingerprint(
+ rtc::RTCCertificate* cert) {
+ std::string digest_algorithm;
+ cert->ssl_certificate().GetSignatureDigestAlgorithm(&digest_algorithm);
+ return std::unique_ptr<rtc::SSLFingerprint>(
+ rtc::SSLFingerprint::Create(digest_algorithm, cert->identity()));
+}
+
+class QuicTransportTest : public testing::Test {
+ public:
+ QuicTransportTest() : transport_("testing", nullptr, nullptr) {}
+
+ void SetTransportDescription(cricket::ConnectionRole local_role,
+ cricket::ConnectionRole remote_role,
+ cricket::ContentAction local_action,
+ cricket::ContentAction remote_action,
+ rtc::SSLRole expected_ssl_role) {
+ TransportChannelImpl* channel = transport_.CreateChannel(1);
+ ASSERT_NE(nullptr, channel);
+
+ rtc::scoped_refptr<rtc::RTCCertificate> local_certificate(
+ CreateCertificate("local"));
+ ASSERT_NE(nullptr, local_certificate);
+ transport_.SetLocalCertificate(local_certificate);
+
+ std::unique_ptr<rtc::SSLFingerprint> local_fingerprint =
+ CreateFingerprint(local_certificate.get());
+ ASSERT_NE(nullptr, local_fingerprint);
+ TransportDescription local_desc(std::vector<std::string>(), kIceUfrag1,
+ kIcePwd1, cricket::ICEMODE_FULL, local_role,
+ local_fingerprint.get());
+ ASSERT_TRUE(transport_.SetLocalTransportDescription(local_desc,
+ local_action, nullptr));
+ // The certificate is applied to QuicTransportChannel when the local
+ // description is set.
+ rtc::scoped_refptr<rtc::RTCCertificate> channel_local_certificate =
+ channel->GetLocalCertificate();
+ ASSERT_NE(nullptr, channel_local_certificate);
+ EXPECT_EQ(local_certificate, channel_local_certificate);
+ std::unique_ptr<rtc::SSLFingerprint> remote_fingerprint =
+ CreateFingerprint(CreateCertificate("remote").get());
+ // NegotiateTransportDescription was not called yet. The SSL role should
+ // not be set and neither should the remote fingerprint.
+ std::unique_ptr<rtc::SSLRole> role(new rtc::SSLRole());
+ EXPECT_FALSE(channel->GetSslRole(role.get()));
+ // Setting the remote description should set the SSL role.
+ ASSERT_NE(nullptr, remote_fingerprint);
+ TransportDescription remote_desc(std::vector<std::string>(), kIceUfrag2,
+ kIcePwd2, cricket::ICEMODE_FULL,
+ remote_role, remote_fingerprint.get());
+ ASSERT_TRUE(transport_.SetRemoteTransportDescription(
+ remote_desc, remote_action, nullptr));
+ ASSERT_TRUE(channel->GetSslRole(role.get()));
+ // SSL role should be client because the remote description is an ANSWER.
+ EXPECT_EQ(expected_ssl_role, *role);
+ }
+
+ protected:
+ QuicTransport transport_;
+};
+
+// Test setting the local certificate.
+TEST_F(QuicTransportTest, SetLocalCertificate) {
+ rtc::scoped_refptr<rtc::RTCCertificate> local_certificate(
+ CreateCertificate("local"));
+ ASSERT_NE(nullptr, local_certificate);
+ rtc::scoped_refptr<rtc::RTCCertificate> transport_local_certificate;
+ EXPECT_FALSE(transport_.GetLocalCertificate(&transport_local_certificate));
+ transport_.SetLocalCertificate(local_certificate);
+ ASSERT_TRUE(transport_.GetLocalCertificate(&transport_local_certificate));
+ ASSERT_NE(nullptr, transport_local_certificate);
+ EXPECT_EQ(local_certificate, transport_local_certificate);
+}
+
+// Test setting the ICE role.
+TEST_F(QuicTransportTest, SetIceRole) {
+ TransportChannelImpl* channel1 = transport_.CreateChannel(1);
+ ASSERT_NE(nullptr, channel1);
+ transport_.SetIceRole(cricket::ICEROLE_CONTROLLING);
+ EXPECT_EQ(cricket::ICEROLE_CONTROLLING, transport_.ice_role());
+ TransportChannelImpl* channel2 = transport_.CreateChannel(2);
+ ASSERT_NE(nullptr, channel2);
+ EXPECT_EQ(cricket::ICEROLE_CONTROLLING, channel1->GetIceRole());
+ EXPECT_EQ(cricket::ICEROLE_CONTROLLING, channel2->GetIceRole());
+}
+
+// Test setting the ICE tie breaker.
+TEST_F(QuicTransportTest, SetIceTiebreaker) {
+ transport_.SetIceTiebreaker(1u);
+ EXPECT_EQ(1u, transport_.IceTiebreaker());
+}
+
+// Test setting the local and remote descriptions for a SSL client.
+TEST_F(QuicTransportTest, SetLocalAndRemoteTransportDescriptionClient) {
+ SetTransportDescription(cricket::CONNECTIONROLE_ACTPASS,
+ cricket::CONNECTIONROLE_PASSIVE, cricket::CA_OFFER,
+ cricket::CA_ANSWER, rtc::SSL_CLIENT);
+}
+
+// Test setting the local and remote descriptions for a SSL server.
+TEST_F(QuicTransportTest, SetLocalAndRemoteTransportDescriptionServer) {
+ SetTransportDescription(cricket::CONNECTIONROLE_ACTPASS,
+ cricket::CONNECTIONROLE_ACTIVE, cricket::CA_OFFER,
+ cricket::CA_ANSWER, rtc::SSL_SERVER);
+}
+
+// Test creation and destruction of channels.
+TEST_F(QuicTransportTest, CreateAndDestroyChannels) {
+ TransportChannelImpl* channel1 = transport_.CreateChannel(1);
+ ASSERT_NE(nullptr, channel1);
+ EXPECT_TRUE(transport_.HasChannel(1));
+ EXPECT_EQ(channel1, transport_.GetChannel(1));
+ TransportChannelImpl* channel2 = transport_.CreateChannel(2);
+ ASSERT_NE(nullptr, channel2);
+ EXPECT_TRUE(transport_.HasChannel(2));
+ EXPECT_EQ(channel2, transport_.GetChannel(2));
+ transport_.DestroyChannel(1);
+ EXPECT_FALSE(transport_.HasChannel(1));
+ EXPECT_EQ(nullptr, transport_.GetChannel(1));
+ transport_.DestroyChannel(2);
+ EXPECT_FALSE(transport_.HasChannel(2));
+ EXPECT_EQ(nullptr, transport_.GetChannel(2));
+}
diff --git a/chromium/third_party/webrtc/p2p/quic/quictransportchannel.cc b/chromium/third_party/webrtc/p2p/quic/quictransportchannel.cc
index cc0576ddb23..968faee7bd1 100644
--- a/chromium/third_party/webrtc/p2p/quic/quictransportchannel.cc
+++ b/chromium/third_party/webrtc/p2p/quic/quictransportchannel.cc
@@ -104,13 +104,16 @@ class InsecureProofVerifier : public net::ProofVerifier {
// ProofVerifier override.
net::QuicAsyncStatus VerifyProof(
const std::string& hostname,
+ const uint16_t port,
const std::string& server_config,
+ net::QuicVersion quic_version,
+ base::StringPiece chlo_hash,
const std::vector<std::string>& certs,
const std::string& cert_sct,
const std::string& signature,
- const net::ProofVerifyContext* verify_context,
+ const net::ProofVerifyContext* context,
std::string* error_details,
- scoped_ptr<net::ProofVerifyDetails>* verify_details,
+ std::unique_ptr<net::ProofVerifyDetails>* verify_details,
net::ProofVerifierCallback* callback) override {
LOG(LS_INFO) << "VerifyProof() ignoring credentials and returning success";
return net::QUIC_SUCCESS;
@@ -271,7 +274,7 @@ int QuicTransportChannel::SendPacket(const char* data,
// |channel_| again.
void QuicTransportChannel::OnWritableState(TransportChannel* channel) {
ASSERT(rtc::Thread::Current() == worker_thread_);
- ASSERT(channel == channel_);
+ ASSERT(channel == channel_.get());
LOG_J(LS_VERBOSE, this)
<< "QuicTransportChannel: channel writable state changed to "
<< channel_->writable();
@@ -305,7 +308,7 @@ void QuicTransportChannel::OnWritableState(TransportChannel* channel) {
void QuicTransportChannel::OnReceivingState(TransportChannel* channel) {
ASSERT(rtc::Thread::Current() == worker_thread_);
- ASSERT(channel == channel_);
+ ASSERT(channel == channel_.get());
LOG_J(LS_VERBOSE, this)
<< "QuicTransportChannel: channel receiving state changed to "
<< channel_->receiving();
@@ -321,7 +324,7 @@ void QuicTransportChannel::OnReadPacket(TransportChannel* channel,
const rtc::PacketTime& packet_time,
int flags) {
ASSERT(rtc::Thread::Current() == worker_thread_);
- ASSERT(channel == channel_);
+ ASSERT(channel == channel_.get());
ASSERT(flags == 0);
switch (quic_state_) {
@@ -368,24 +371,24 @@ void QuicTransportChannel::OnReadyToSend(TransportChannel* channel) {
}
void QuicTransportChannel::OnGatheringState(TransportChannelImpl* channel) {
- ASSERT(channel == channel_);
+ ASSERT(channel == channel_.get());
SignalGatheringState(this);
}
void QuicTransportChannel::OnCandidateGathered(TransportChannelImpl* channel,
const Candidate& c) {
- ASSERT(channel == channel_);
+ ASSERT(channel == channel_.get());
SignalCandidateGathered(this, c);
}
void QuicTransportChannel::OnRoleConflict(TransportChannelImpl* channel) {
- ASSERT(channel == channel_);
+ ASSERT(channel == channel_.get());
SignalRoleConflict(this);
}
void QuicTransportChannel::OnRouteChange(TransportChannel* channel,
const Candidate& candidate) {
- ASSERT(channel == channel_);
+ ASSERT(channel == channel_.get());
SignalRouteChange(this, candidate);
}
@@ -393,13 +396,13 @@ void QuicTransportChannel::OnSelectedCandidatePairChanged(
TransportChannel* channel,
CandidatePairInterface* selected_candidate_pair,
int last_sent_packet_id) {
- ASSERT(channel == channel_);
+ ASSERT(channel == channel_.get());
SignalSelectedCandidatePairChanged(this, selected_candidate_pair,
last_sent_packet_id);
}
void QuicTransportChannel::OnConnectionRemoved(TransportChannelImpl* channel) {
- ASSERT(channel == channel_);
+ ASSERT(channel == channel_.get());
SignalConnectionRemoved(this);
}
@@ -435,7 +438,7 @@ bool QuicTransportChannel::CreateQuicSession() {
? net::Perspective::IS_CLIENT
: net::Perspective::IS_SERVER;
bool owns_writer = false;
- rtc::scoped_ptr<net::QuicConnection> connection(new net::QuicConnection(
+ std::unique_ptr<net::QuicConnection> connection(new net::QuicConnection(
kConnectionId, kConnectionIpEndpoint, &helper_, this, owns_writer,
perspective, net::QuicSupportedVersions()));
quic_.reset(new QuicSession(std::move(connection), config_));
@@ -443,6 +446,8 @@ bool QuicTransportChannel::CreateQuicSession() {
this, &QuicTransportChannel::OnHandshakeComplete);
quic_->SignalConnectionClosed.connect(
this, &QuicTransportChannel::OnConnectionClosed);
+ quic_->SignalIncomingStream.connect(this,
+ &QuicTransportChannel::OnIncomingStream);
return true;
}
@@ -483,8 +488,14 @@ bool QuicTransportChannel::StartQuicHandshake() {
net::QuicCryptoServerConfig::ConfigOptions options;
quic_crypto_server_config_->AddDefaultConfig(helper_.GetRandomGenerator(),
helper_.GetClock(), options);
+ quic_compressed_certs_cache_.reset(new net::QuicCompressedCertsCache(
+ net::QuicCompressedCertsCache::kQuicCompressedCertsCacheSize));
+ // TODO(mikescarlett): Add support for stateless rejects.
+ bool use_stateless_rejects_if_peer_supported = false;
net::QuicCryptoServerStream* crypto_stream =
new net::QuicCryptoServerStream(quic_crypto_server_config_.get(),
+ quic_compressed_certs_cache_.get(),
+ use_stateless_rejects_if_peer_supported,
quic_.get());
quic_->StartServerHandshake(crypto_stream);
LOG_J(LS_INFO, this) << "QuicTransportChannel: Started server handshake.";
@@ -541,6 +552,7 @@ void QuicTransportChannel::OnConnectionClosed(net::QuicErrorCode error,
// does not close due to failure.
set_quic_state(QUIC_TRANSPORT_CLOSED);
set_writable(false);
+ SignalClosed();
}
void QuicTransportChannel::OnProofValid(
@@ -569,4 +581,16 @@ void QuicTransportChannel::set_quic_state(QuicTransportState state) {
quic_state_ = state;
}
+ReliableQuicStream* QuicTransportChannel::CreateQuicStream() {
+ if (quic_) {
+ net::SpdyPriority priority = 0; // Priority of the QUIC stream
+ return quic_->CreateOutgoingDynamicStream(priority);
+ }
+ return nullptr;
+}
+
+void QuicTransportChannel::OnIncomingStream(ReliableQuicStream* stream) {
+ SignalIncomingStream(stream);
+}
+
} // namespace cricket
diff --git a/chromium/third_party/webrtc/p2p/quic/quictransportchannel.h b/chromium/third_party/webrtc/p2p/quic/quictransportchannel.h
index ab02c77b952..dec24d21543 100644
--- a/chromium/third_party/webrtc/p2p/quic/quictransportchannel.h
+++ b/chromium/third_party/webrtc/p2p/quic/quictransportchannel.h
@@ -11,13 +11,14 @@
#ifndef WEBRTC_P2P_QUIC_QUICTRANSPORTCHANNEL_H_
#define WEBRTC_P2P_QUIC_QUICTRANSPORTCHANNEL_H_
+#include <memory>
#include <string>
#include <vector>
#include "net/quic/quic_crypto_client_stream.h"
#include "net/quic/quic_packet_writer.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/optional.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/p2p/base/transportchannelimpl.h"
#include "webrtc/p2p/quic/quicconnectionhelper.h"
#include "webrtc/p2p/quic/quicsession.h"
@@ -48,7 +49,7 @@ enum QuicTransportState {
// TransportChannelImpl* channel_;
// }
//
-// - Data written to SendPacket() is passed directly to |channel_| if it is
+// - Data written to SendPacket() is passed directly to |channel_| if it is
// an SRTP packet with the PF_SRTP_BYPASS flag.
//
// - |quic_| passes outgoing packets to WritePacket(), which transfers them
@@ -61,8 +62,11 @@ enum QuicTransportState {
// - When the QUIC handshake is completed, quic_state() returns
// QUIC_TRANSPORT_CONNECTED and SRTP keying material can be exported.
//
-// TODO(mikescarlett): Implement secure QUIC handshake, 0-RTT handshakes, and
-// QUIC data streams.
+// - CreateQuicStream() creates an outgoing QUIC stream. Once the local peer
+// sends data from this stream, the remote peer emits SignalIncomingStream
+// with a QUIC stream of the same id to handle received data.
+//
+// TODO(mikescarlett): Implement secure QUIC handshake and 0-RTT handshakes.
class QuicTransportChannel : public TransportChannelImpl,
public net::QuicPacketWriter,
public net::QuicCryptoClientStream::ProofHandler {
@@ -113,8 +117,9 @@ class QuicTransportChannel : public TransportChannelImpl,
size_t result_len) override;
// TODO(mikescarlett): Remove this method once TransportChannel does not
// require defining it.
- bool GetRemoteSSLCertificate(rtc::SSLCertificate** cert) const override {
- return false;
+ std::unique_ptr<rtc::SSLCertificate> GetRemoteSSLCertificate()
+ const override {
+ return nullptr;
}
// TransportChannelImpl overrides that we forward to the wrapped transport.
@@ -206,6 +211,14 @@ class QuicTransportChannel : public TransportChannelImpl,
void OnCanWrite();
// Connectivity state of QuicTransportChannel.
QuicTransportState quic_state() const { return quic_state_; }
+ // Creates a new QUIC stream that can send data.
+ ReliableQuicStream* CreateQuicStream();
+
+ // Emitted when |quic_| creates a QUIC stream to receive data from the remote
+ // peer, when the stream did not exist previously.
+ sigslot::signal1<ReliableQuicStream*> SignalIncomingStream;
+ // Emitted when the QuicTransportChannel state becomes QUIC_TRANSPORT_CLOSED.
+ sigslot::signal0<> SignalClosed;
private:
// Fingerprint of remote peer.
@@ -241,6 +254,8 @@ class QuicTransportChannel : public TransportChannelImpl,
void OnHandshakeComplete();
// Called when |quic_| has closed the connection.
void OnConnectionClosed(net::QuicErrorCode error, bool from_peer);
+ // Called when |quic_| has created a new QUIC stream for incoming data.
+ void OnIncomingStream(ReliableQuicStream* stream);
// Called by OnReadPacket() when a QUIC packet is received.
bool HandleQuicPacket(const char* data, size_t size);
@@ -257,12 +272,12 @@ class QuicTransportChannel : public TransportChannelImpl,
rtc::Thread* worker_thread_;
// Underlying channel which is responsible for connecting with the remote peer
// and sending/receiving packets across the network.
- TransportChannelImpl* const channel_;
+ std::unique_ptr<TransportChannelImpl> channel_;
// Connectivity state of QuicTransportChannel.
QuicTransportState quic_state_ = QUIC_TRANSPORT_NEW;
// QUIC session which establishes the crypto handshake and converts data
// to/from QUIC packets.
- rtc::scoped_ptr<QuicSession> quic_;
+ std::unique_ptr<QuicSession> quic_;
// Non-crypto config for |quic_|.
net::QuicConfig config_;
// Helper for net::QuicConnection that provides timing and
@@ -273,9 +288,11 @@ class QuicTransportChannel : public TransportChannelImpl,
// the handshake. This must be set before we start QUIC.
rtc::Optional<rtc::SSLRole> ssl_role_;
// Config for QUIC crypto client stream, used when |ssl_role_| is SSL_CLIENT.
- rtc::scoped_ptr<net::QuicCryptoClientConfig> quic_crypto_client_config_;
+ std::unique_ptr<net::QuicCryptoClientConfig> quic_crypto_client_config_;
// Config for QUIC crypto server stream, used when |ssl_role_| is SSL_SERVER.
- rtc::scoped_ptr<net::QuicCryptoServerConfig> quic_crypto_server_config_;
+ std::unique_ptr<net::QuicCryptoServerConfig> quic_crypto_server_config_;
+ // Used by QUIC crypto server stream to track most recently compressed certs.
+ std::unique_ptr<net::QuicCompressedCertsCache> quic_compressed_certs_cache_;
// This peer's certificate.
rtc::scoped_refptr<rtc::RTCCertificate> local_certificate_;
// Fingerprint of the remote peer. This must be set before we start QUIC.
diff --git a/chromium/third_party/webrtc/p2p/quic/quictransportchannel_unittest.cc b/chromium/third_party/webrtc/p2p/quic/quictransportchannel_unittest.cc
index 7b5b51848aa..0e16390a89b 100644
--- a/chromium/third_party/webrtc/p2p/quic/quictransportchannel_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/quic/quictransportchannel_unittest.cc
@@ -10,19 +10,20 @@
#include "webrtc/p2p/quic/quictransportchannel.h"
+#include <memory>
#include <set>
#include <string>
#include <vector>
#include "webrtc/base/common.h"
#include "webrtc/base/gunit.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sslidentity.h"
#include "webrtc/p2p/base/faketransportcontroller.h"
using cricket::ConnectionRole;
using cricket::IceRole;
using cricket::QuicTransportChannel;
+using cricket::ReliableQuicStream;
using cricket::TransportChannel;
using cricket::TransportDescription;
@@ -93,13 +94,17 @@ class QuicTestPeer : public sigslot::has_slots<> {
explicit QuicTestPeer(const std::string& name)
: name_(name),
bytes_sent_(0),
- ice_channel_(name_, 0),
- quic_channel_(&ice_channel_) {
+ ice_channel_(new FailableTransportChannel(name_, 0)),
+ quic_channel_(ice_channel_),
+ incoming_stream_count_(0) {
quic_channel_.SignalReadPacket.connect(
this, &QuicTestPeer::OnTransportChannelReadPacket);
- ice_channel_.SetAsync(true);
+ quic_channel_.SignalIncomingStream.connect(this,
+ &QuicTestPeer::OnIncomingStream);
+ quic_channel_.SignalClosed.connect(this, &QuicTestPeer::OnClosed);
+ ice_channel_->SetAsync(true);
rtc::scoped_refptr<rtc::RTCCertificate> local_cert =
- rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::SSLIdentity>(
+ rtc::RTCCertificate::Create(std::unique_ptr<rtc::SSLIdentity>(
rtc::SSLIdentity::Generate(name_, rtc::KT_DEFAULT)));
quic_channel_.SetLocalCertificate(local_cert);
local_fingerprint_.reset(CreateFingerprint(local_cert.get()));
@@ -107,13 +112,13 @@ class QuicTestPeer : public sigslot::has_slots<> {
// Connects |ice_channel_| to that of the other peer.
void Connect(QuicTestPeer* other_peer) {
- ice_channel_.Connect();
- other_peer->ice_channel_.Connect();
- ice_channel_.SetDestination(&other_peer->ice_channel_);
+ ice_channel_->Connect();
+ other_peer->ice_channel_->Connect();
+ ice_channel_->SetDestination(other_peer->ice_channel_);
}
// Disconnects |ice_channel_|.
- void Disconnect() { ice_channel_.SetDestination(nullptr); }
+ void Disconnect() { ice_channel_->SetDestination(nullptr); }
// Generates ICE credentials and passes them to |quic_channel_|.
void SetIceParameters(IceRole local_ice_role,
@@ -144,7 +149,7 @@ class QuicTestPeer : public sigslot::has_slots<> {
if (!get_digest_algorithm || digest_algorithm.empty()) {
return nullptr;
}
- rtc::scoped_ptr<rtc::SSLFingerprint> fingerprint(
+ std::unique_ptr<rtc::SSLFingerprint> fingerprint(
rtc::SSLFingerprint::Create(digest_algorithm, cert->identity()));
if (digest_algorithm != rtc::DIGEST_SHA_256) {
return nullptr;
@@ -184,22 +189,28 @@ class QuicTestPeer : public sigslot::has_slots<> {
void ClearBytesReceived() { bytes_received_ = 0; }
- void SetWriteError(int error) { ice_channel_.SetError(error); }
+ void SetWriteError(int error) { ice_channel_->SetError(error); }
size_t bytes_received() const { return bytes_received_; }
size_t bytes_sent() const { return bytes_sent_; }
- FailableTransportChannel* ice_channel() { return &ice_channel_; }
+ FailableTransportChannel* ice_channel() { return ice_channel_; }
QuicTransportChannel* quic_channel() { return &quic_channel_; }
- rtc::scoped_ptr<rtc::SSLFingerprint>& local_fingerprint() {
+ std::unique_ptr<rtc::SSLFingerprint>& local_fingerprint() {
return local_fingerprint_;
}
+ ReliableQuicStream* incoming_quic_stream() { return incoming_quic_stream_; }
+
+ size_t incoming_stream_count() const { return incoming_stream_count_; }
+
+ bool signal_closed_emitted() const { return signal_closed_emitted_; }
+
private:
- // QUIC channel callback.
+ // QuicTransportChannel callbacks.
void OnTransportChannelReadPacket(TransportChannel* channel,
const char* data,
size_t size,
@@ -210,13 +221,21 @@ class QuicTestPeer : public sigslot::has_slots<> {
int expected_flags = IsRtpLeadByte(data[0]) ? cricket::PF_SRTP_BYPASS : 0;
ASSERT_EQ(expected_flags, flags);
}
+ void OnIncomingStream(ReliableQuicStream* stream) {
+ incoming_quic_stream_ = stream;
+ ++incoming_stream_count_;
+ }
+ void OnClosed() { signal_closed_emitted_ = true; }
std::string name_; // Channel name.
size_t bytes_sent_; // Bytes sent by QUIC channel.
size_t bytes_received_; // Bytes received by QUIC channel.
- FailableTransportChannel ice_channel_; // Simulates an ICE channel.
+ FailableTransportChannel* ice_channel_; // Simulates an ICE channel.
QuicTransportChannel quic_channel_; // QUIC channel to test.
- rtc::scoped_ptr<rtc::SSLFingerprint> local_fingerprint_;
+ std::unique_ptr<rtc::SSLFingerprint> local_fingerprint_;
+ ReliableQuicStream* incoming_quic_stream_ = nullptr;
+ size_t incoming_stream_count_;
+ bool signal_closed_emitted_ = false;
};
class QuicTransportChannelTest : public testing::Test {
@@ -244,9 +263,9 @@ class QuicTransportChannelTest : public testing::Test {
peer1_.quic_channel()->SetSslRole(peer1_ssl_role);
peer2_.quic_channel()->SetSslRole(peer2_ssl_role);
- rtc::scoped_ptr<rtc::SSLFingerprint>& peer1_fingerprint =
+ std::unique_ptr<rtc::SSLFingerprint>& peer1_fingerprint =
peer1_.local_fingerprint();
- rtc::scoped_ptr<rtc::SSLFingerprint>& peer2_fingerprint =
+ std::unique_ptr<rtc::SSLFingerprint>& peer2_fingerprint =
peer2_.local_fingerprint();
peer1_.quic_channel()->SetRemoteFingerprint(
@@ -450,7 +469,7 @@ TEST_F(QuicTransportChannelTest, QuicRoleReversalAfterQuic) {
// Set the SSL role, then test that GetSslRole returns the same value.
TEST_F(QuicTransportChannelTest, SetGetSslRole) {
ASSERT_TRUE(peer1_.quic_channel()->SetSslRole(rtc::SSL_SERVER));
- rtc::scoped_ptr<rtc::SSLRole> role(new rtc::SSLRole());
+ std::unique_ptr<rtc::SSLRole> role(new rtc::SSLRole());
ASSERT_TRUE(peer1_.quic_channel()->GetSslRole(role.get()));
EXPECT_EQ(rtc::SSL_SERVER, *role);
}
@@ -486,3 +505,52 @@ TEST_F(QuicTransportChannelTest, IceReceivingBeforeConnected) {
ASSERT_TRUE_WAIT(quic_connected(), kTimeoutMs);
EXPECT_TRUE(peer1_.quic_channel()->receiving());
}
+
+// Test that when peer 1 creates an outgoing stream, peer 2 creates an incoming
+// QUIC stream with the same ID and fires OnIncomingStream.
+TEST_F(QuicTransportChannelTest, CreateOutgoingAndIncomingQuicStream) {
+ Connect();
+ EXPECT_EQ(nullptr, peer1_.quic_channel()->CreateQuicStream());
+ ASSERT_TRUE_WAIT(quic_connected(), kTimeoutMs);
+ ReliableQuicStream* stream = peer1_.quic_channel()->CreateQuicStream();
+ ASSERT_NE(nullptr, stream);
+ stream->Write("Hi", 2);
+ EXPECT_TRUE_WAIT(peer2_.incoming_quic_stream() != nullptr, kTimeoutMs);
+ EXPECT_EQ(stream->id(), peer2_.incoming_quic_stream()->id());
+}
+
+// Test that if the QuicTransportChannel is unwritable, then all outgoing QUIC
+// streams can send data once the QuicTransprotChannel becomes writable again.
+TEST_F(QuicTransportChannelTest, OutgoingQuicStreamSendsDataAfterReconnect) {
+ Connect();
+ ASSERT_TRUE_WAIT(quic_connected(), kTimeoutMs);
+ ReliableQuicStream* stream1 = peer1_.quic_channel()->CreateQuicStream();
+ ASSERT_NE(nullptr, stream1);
+ ReliableQuicStream* stream2 = peer1_.quic_channel()->CreateQuicStream();
+ ASSERT_NE(nullptr, stream2);
+
+ peer1_.ice_channel()->SetWritable(false);
+ stream1->Write("First", 5);
+ EXPECT_EQ(5u, stream1->queued_data_bytes());
+ stream2->Write("Second", 6);
+ EXPECT_EQ(6u, stream2->queued_data_bytes());
+ EXPECT_EQ(0u, peer2_.incoming_stream_count());
+
+ peer1_.ice_channel()->SetWritable(true);
+ EXPECT_EQ_WAIT(0u, stream1->queued_data_bytes(), kTimeoutMs);
+ EXPECT_EQ_WAIT(0u, stream2->queued_data_bytes(), kTimeoutMs);
+ EXPECT_EQ_WAIT(2u, peer2_.incoming_stream_count(), kTimeoutMs);
+}
+
+// Test that SignalClosed is emitted when the QuicConnection closes.
+TEST_F(QuicTransportChannelTest, SignalClosedEmitted) {
+ Connect();
+ ASSERT_TRUE_WAIT(quic_connected(), kTimeoutMs);
+ ASSERT_FALSE(peer1_.signal_closed_emitted());
+ ReliableQuicStream* stream = peer1_.quic_channel()->CreateQuicStream();
+ ASSERT_NE(nullptr, stream);
+ stream->CloseConnectionWithDetails(net::QuicErrorCode::QUIC_NO_ERROR,
+ "Closing QUIC for testing");
+ EXPECT_TRUE(peer1_.signal_closed_emitted());
+ EXPECT_TRUE_WAIT(peer2_.signal_closed_emitted(), kTimeoutMs);
+}
diff --git a/chromium/third_party/webrtc/p2p/quic/reliablequicstream.cc b/chromium/third_party/webrtc/p2p/quic/reliablequicstream.cc
index ca2e3f0d3be..5b65f1e7c75 100644
--- a/chromium/third_party/webrtc/p2p/quic/reliablequicstream.cc
+++ b/chromium/third_party/webrtc/p2p/quic/reliablequicstream.cc
@@ -12,6 +12,7 @@
#include <string>
+#include "net/quic/quic_session.h"
#include "webrtc/base/checks.h"
namespace cricket {
@@ -38,14 +39,26 @@ void ReliableQuicStream::OnClose() {
SignalClosed(id(), connection_error());
}
-rtc::StreamResult ReliableQuicStream::Write(const char* data, size_t len) {
+rtc::StreamResult ReliableQuicStream::Write(const char* data,
+ size_t len,
+ bool fin) {
// Writes the data, or buffers it.
- WriteOrBufferData(std::string(data, len), false, nullptr);
+ WriteOrBufferData(base::StringPiece(data, len), fin, nullptr);
if (HasBufferedData()) {
return rtc::StreamResult(rtc::SR_BLOCK);
}
-
return rtc::StreamResult(rtc::SR_SUCCESS);
}
+void ReliableQuicStream::Close() {
+ net::ReliableQuicStream::session()->CloseStream(id());
+}
+
+void ReliableQuicStream::OnCanWrite() {
+ uint64_t prev_queued_bytes = queued_data_bytes();
+ net::ReliableQuicStream::OnCanWrite();
+ uint64_t queued_bytes_written = prev_queued_bytes - queued_data_bytes();
+ SignalQueuedBytesWritten(id(), queued_bytes_written);
+}
+
} // namespace cricket
diff --git a/chromium/third_party/webrtc/p2p/quic/reliablequicstream.h b/chromium/third_party/webrtc/p2p/quic/reliablequicstream.h
index 61d060f850b..9493a3fcc18 100644
--- a/chromium/third_party/webrtc/p2p/quic/reliablequicstream.h
+++ b/chromium/third_party/webrtc/p2p/quic/reliablequicstream.h
@@ -29,16 +29,23 @@ class ReliableQuicStream : public net::ReliableQuicStream,
// ReliableQuicStream overrides.
void OnDataAvailable() override;
void OnClose() override;
+ void OnCanWrite() override;
// Process decrypted data into encrypted QUIC packets, which get sent to the
// QuicPacketWriter. rtc::SR_BLOCK is returned if the operation blocks instead
// of writing, in which case the data is queued until OnCanWrite() is called.
- rtc::StreamResult Write(const char* data, size_t len);
+ // If |fin| == true, then this stream closes after sending data.
+ rtc::StreamResult Write(const char* data, size_t len, bool fin = false);
+ // Removes this stream from the QuicSession's stream map.
+ void Close();
// Called when decrypted data is ready to be read.
sigslot::signal3<net::QuicStreamId, const char*, size_t> SignalDataReceived;
- // Called when stream closed.
- sigslot::signal2<net::QuicStreamId, net::QuicErrorCode> SignalClosed;
+ // Called when the stream is closed.
+ sigslot::signal2<net::QuicStreamId, int> SignalClosed;
+ // Emits the number of queued bytes that were written by OnCanWrite(), after
+ // the stream was previously write blocked.
+ sigslot::signal2<net::QuicStreamId, uint64_t> SignalQueuedBytesWritten;
private:
RTC_DISALLOW_COPY_AND_ASSIGN(ReliableQuicStream);
diff --git a/chromium/third_party/webrtc/p2p/quic/reliablequicstream_unittest.cc b/chromium/third_party/webrtc/p2p/quic/reliablequicstream_unittest.cc
index aeb3e1af87c..cf9f5e92ddf 100644
--- a/chromium/third_party/webrtc/p2p/quic/reliablequicstream_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/quic/reliablequicstream_unittest.cc
@@ -10,6 +10,7 @@
#include "webrtc/p2p/quic/reliablequicstream.h"
+#include <memory>
#include <string>
#include "net/base/ip_address_number.h"
@@ -25,7 +26,6 @@
using cricket::QuicConnectionHelper;
using cricket::ReliableQuicStream;
-using net::FecProtection;
using net::IPAddress;
using net::IPEndPoint;
using net::PerPacketOptions;
@@ -65,7 +65,6 @@ class MockQuicSession : public QuicSession {
QuicIOVector iovector,
QuicStreamOffset offset,
bool fin,
- FecProtection fec_protection,
QuicAckListenerInterface* ack_notifier_delegate) override {
if (!writable_) {
return QuicConsumedData(0, false);
@@ -117,11 +116,11 @@ class DummyPacketWriter : public QuicPacketWriter {
DummyPacketWriter() {}
// QuicPacketWriter overrides.
- virtual net::WriteResult WritePacket(const char* buffer,
- size_t buf_len,
- const IPAddress& self_address,
- const IPEndPoint& peer_address,
- PerPacketOptions* options) {
+ net::WriteResult WritePacket(const char* buffer,
+ size_t buf_len,
+ const IPAddress& self_address,
+ const IPEndPoint& peer_address,
+ PerPacketOptions* options) override {
return net::WriteResult(net::WRITE_STATUS_ERROR, 0);
}
@@ -163,6 +162,8 @@ class ReliableQuicStreamTest : public ::testing::Test,
stream_->SignalDataReceived.connect(
this, &ReliableQuicStreamTest::OnDataReceived);
stream_->SignalClosed.connect(this, &ReliableQuicStreamTest::OnClosed);
+ stream_->SignalQueuedBytesWritten.connect(
+ this, &ReliableQuicStreamTest::OnQueuedBytesWritten);
session_->register_write_blocked_stream(stream_->id(), kDefaultPriority);
}
@@ -172,11 +173,15 @@ class ReliableQuicStreamTest : public ::testing::Test,
read_buffer_.append(data, length);
}
- void OnClosed(QuicStreamId id, QuicErrorCode err) { closed_ = true; }
+ void OnClosed(QuicStreamId id, int err) { closed_ = true; }
+
+ void OnQueuedBytesWritten(QuicStreamId id, uint64_t queued_bytes_written) {
+ queued_bytes_written_ = queued_bytes_written;
+ }
protected:
- rtc::scoped_ptr<ReliableQuicStream> stream_;
- rtc::scoped_ptr<MockQuicSession> session_;
+ std::unique_ptr<ReliableQuicStream> stream_;
+ std::unique_ptr<MockQuicSession> session_;
// Data written by the ReliableQuicStream.
std::string write_buffer_;
@@ -184,6 +189,8 @@ class ReliableQuicStreamTest : public ::testing::Test,
std::string read_buffer_;
// Whether the ReliableQuicStream is closed.
bool closed_ = false;
+ // Bytes written by OnCanWrite().
+ uint64_t queued_bytes_written_;
};
// Write an entire string.
@@ -213,6 +220,7 @@ TEST_F(ReliableQuicStreamTest, BufferData) {
session_->set_writable(true);
stream_->OnCanWrite();
+ EXPECT_EQ(7ul, queued_bytes_written_);
EXPECT_FALSE(stream_->HasBufferedData());
EXPECT_EQ("Foo bar", write_buffer_);
diff --git a/chromium/third_party/webrtc/p2p/stunprober/main.cc b/chromium/third_party/webrtc/p2p/stunprober/main.cc
index 9ef91e071b5..9d0fff87322 100644
--- a/chromium/third_party/webrtc/p2p/stunprober/main.cc
+++ b/chromium/third_party/webrtc/p2p/stunprober/main.cc
@@ -14,13 +14,14 @@
#include <iostream>
#include <map>
+#include <memory>
+
#include "webrtc/base/checks.h"
#include "webrtc/base/flags.h"
#include "webrtc/base/helpers.h"
#include "webrtc/base/nethelpers.h"
#include "webrtc/base/network.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/base/thread.h"
@@ -119,9 +120,9 @@ int main(int argc, char** argv) {
rtc::InitializeSSL();
rtc::InitRandom(rtc::Time32());
rtc::Thread* thread = rtc::ThreadManager::Instance()->WrapCurrentThread();
- rtc::scoped_ptr<rtc::BasicPacketSocketFactory> socket_factory(
+ std::unique_ptr<rtc::BasicPacketSocketFactory> socket_factory(
new rtc::BasicPacketSocketFactory());
- rtc::scoped_ptr<rtc::BasicNetworkManager> network_manager(
+ std::unique_ptr<rtc::BasicNetworkManager> network_manager(
new rtc::BasicNetworkManager());
rtc::NetworkManager::NetworkList networks;
network_manager->GetNetworks(&networks);
diff --git a/chromium/third_party/webrtc/p2p/stunprober/stunprober.cc b/chromium/third_party/webrtc/p2p/stunprober/stunprober.cc
index eb9a7a7d3b7..90dc60e1514 100644
--- a/chromium/third_party/webrtc/p2p/stunprober/stunprober.cc
+++ b/chromium/third_party/webrtc/p2p/stunprober/stunprober.cc
@@ -9,6 +9,7 @@
*/
#include <map>
+#include <memory>
#include <set>
#include <string>
@@ -16,6 +17,7 @@
#include "webrtc/base/asyncresolverinterface.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/helpers.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/timeutils.h"
@@ -89,11 +91,11 @@ class StunProber::Requester : public sigslot::has_slots<> {
StunProber* prober_;
// The socket for this session.
- rtc::scoped_ptr<rtc::AsyncPacketSocket> socket_;
+ std::unique_ptr<rtc::AsyncPacketSocket> socket_;
// Temporary SocketAddress and buffer for RecvFrom.
rtc::SocketAddress addr_;
- rtc::scoped_ptr<rtc::ByteBufferWriter> response_packet_;
+ std::unique_ptr<rtc::ByteBufferWriter> response_packet_;
std::vector<Request*> requests_;
std::vector<rtc::SocketAddress> server_ips_;
@@ -140,7 +142,7 @@ void StunProber::Requester::SendStunRequest() {
rtc::CreateRandomString(cricket::kStunTransactionIdLength));
message.SetType(cricket::STUN_BINDING_REQUEST);
- rtc::scoped_ptr<rtc::ByteBufferWriter> request_packet(
+ std::unique_ptr<rtc::ByteBufferWriter> request_packet(
new rtc::ByteBufferWriter(nullptr, kMaxUdpBufferSize));
if (!message.Write(request_packet.get())) {
prober_->ReportOnFinished(WRITE_FAILED);
@@ -161,7 +163,7 @@ void StunProber::Requester::SendStunRequest() {
return;
}
- request.sent_time_ms = rtc::Time64();
+ request.sent_time_ms = rtc::TimeMillis();
num_request_sent_++;
RTC_DCHECK(static_cast<size_t>(num_request_sent_) <= server_ips_.size());
@@ -169,7 +171,7 @@ void StunProber::Requester::SendStunRequest() {
void StunProber::Requester::Request::ProcessResponse(const char* buf,
size_t buf_len) {
- int64_t now = rtc::Time64();
+ int64_t now = rtc::TimeMillis();
rtc::ByteBufferReader message(buf, buf_len);
cricket::StunMessage stun_response;
if (!stun_response.Read(&message)) {
@@ -344,7 +346,7 @@ void StunProber::OnServerResolved(rtc::AsyncResolverInterface* resolver) {
// Prepare all the sockets beforehand. All of them will bind to "any" address.
while (sockets_.size() < total_socket_required()) {
- rtc::scoped_ptr<rtc::AsyncPacketSocket> socket(
+ std::unique_ptr<rtc::AsyncPacketSocket> socket(
socket_factory_->CreateUdpSocket(rtc::SocketAddress(INADDR_ANY, 0), 0,
0));
if (!socket) {
@@ -412,7 +414,7 @@ int StunProber::get_wake_up_interval_ms() {
void StunProber::MaybeScheduleStunRequests() {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
- int64_t now = rtc::Time64();
+ int64_t now = rtc::TimeMillis();
if (Done()) {
invoker_.AsyncInvokeDelayed<void>(
diff --git a/chromium/third_party/webrtc/p2p/stunprober/stunprober.h b/chromium/third_party/webrtc/p2p/stunprober/stunprober.h
index 44999a201ed..dbb67c61674 100644
--- a/chromium/third_party/webrtc/p2p/stunprober/stunprober.h
+++ b/chromium/third_party/webrtc/p2p/stunprober/stunprober.h
@@ -19,9 +19,9 @@
#include "webrtc/base/basictypes.h"
#include "webrtc/base/bytebuffer.h"
#include "webrtc/base/callback.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/ipaddress.h"
#include "webrtc/base/network.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/socketaddress.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/thread_checker.h"
diff --git a/chromium/third_party/webrtc/p2p/stunprober/stunprober_unittest.cc b/chromium/third_party/webrtc/p2p/stunprober/stunprober_unittest.cc
index cdcc14a36f2..f5ee4bffeff 100644
--- a/chromium/third_party/webrtc/p2p/stunprober/stunprober_unittest.cc
+++ b/chromium/third_party/webrtc/p2p/stunprober/stunprober_unittest.cc
@@ -8,13 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/base/asyncresolverinterface.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/physicalsocketserver.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/ssladapter.h"
#include "webrtc/base/virtualsocketserver.h"
#include "webrtc/p2p/base/basicpacketsocketfactory.h"
@@ -83,7 +84,7 @@ class StunProberTest : public testing::Test {
rtc::NetworkManager::NetworkList networks;
networks.push_back(&ipv4_network1);
- rtc::scoped_ptr<rtc::BasicPacketSocketFactory> socket_factory(
+ std::unique_ptr<rtc::BasicPacketSocketFactory> socket_factory(
new rtc::BasicPacketSocketFactory());
// Set up the expected results for verification.
@@ -119,14 +120,14 @@ class StunProberTest : public testing::Test {
}
rtc::Thread* main_;
- rtc::scoped_ptr<rtc::PhysicalSocketServer> pss_;
- rtc::scoped_ptr<rtc::VirtualSocketServer> ss_;
+ std::unique_ptr<rtc::PhysicalSocketServer> pss_;
+ std::unique_ptr<rtc::VirtualSocketServer> ss_;
rtc::SocketServerScope ss_scope_;
- rtc::scoped_ptr<StunProber> prober;
+ std::unique_ptr<StunProber> prober;
int result_ = 0;
bool stopped_ = false;
- rtc::scoped_ptr<cricket::TestStunServer> stun_server_1_;
- rtc::scoped_ptr<cricket::TestStunServer> stun_server_2_;
+ std::unique_ptr<cricket::TestStunServer> stun_server_1_;
+ std::unique_ptr<cricket::TestStunServer> stun_server_2_;
};
TEST_F(StunProberTest, NonSharedMode) {
diff --git a/chromium/third_party/webrtc/pc/audiomonitor.cc b/chromium/third_party/webrtc/pc/audiomonitor.cc
index 952ee9fbda8..085ab931913 100644
--- a/chromium/third_party/webrtc/pc/audiomonitor.cc
+++ b/chromium/third_party/webrtc/pc/audiomonitor.cc
@@ -101,4 +101,4 @@ rtc::Thread *AudioMonitor::monitor_thread() {
return monitoring_thread_;
}
-}
+} // namespace cricket
diff --git a/chromium/third_party/webrtc/pc/audiomonitor.h b/chromium/third_party/webrtc/pc/audiomonitor.h
index 741709b60eb..885c60f08b5 100644
--- a/chromium/third_party/webrtc/pc/audiomonitor.h
+++ b/chromium/third_party/webrtc/pc/audiomonitor.h
@@ -8,10 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_SESSION_MEDIA_AUDIOMONITOR_H_
-#define TALK_SESSION_MEDIA_AUDIOMONITOR_H_
+#ifndef WEBRTC_PC_AUDIOMONITOR_H_
+#define WEBRTC_PC_AUDIOMONITOR_H_
#include <vector>
+#include <utility>
+
#include "webrtc/base/sigslot.h"
#include "webrtc/base/thread.h"
#include "webrtc/p2p/base/port.h"
@@ -24,7 +26,7 @@ struct AudioInfo {
int input_level;
int output_level;
typedef std::vector<std::pair<uint32_t, int> > StreamList;
- StreamList active_streams; // ssrcs contributing to output_level
+ StreamList active_streams; // ssrcs contributing to output_level
};
class AudioMonitor : public rtc::MessageHandler,
@@ -53,6 +55,6 @@ class AudioMonitor : public rtc::MessageHandler,
bool monitoring_;
};
-}
+} // namespace cricket
-#endif // TALK_SESSION_MEDIA_AUDIOMONITOR_H_
+#endif // WEBRTC_PC_AUDIOMONITOR_H_
diff --git a/chromium/third_party/webrtc/pc/bundlefilter.h b/chromium/third_party/webrtc/pc/bundlefilter.h
index a69868171de..44a45da5280 100644
--- a/chromium/third_party/webrtc/pc/bundlefilter.h
+++ b/chromium/third_party/webrtc/pc/bundlefilter.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_SESSION_MEDIA_BUNDLEFILTER_H_
-#define TALK_SESSION_MEDIA_BUNDLEFILTER_H_
+#ifndef WEBRTC_PC_BUNDLEFILTER_H_
+#define WEBRTC_PC_BUNDLEFILTER_H_
#include <stdint.h>
@@ -51,4 +51,4 @@ class BundleFilter {
} // namespace cricket
-#endif // TALK_SESSION_MEDIA_BUNDLEFILTER_H_
+#endif // WEBRTC_PC_BUNDLEFILTER_H_
diff --git a/chromium/third_party/webrtc/pc/channel.cc b/chromium/third_party/webrtc/pc/channel.cc
index b76d7bdb1d2..97c1d669101 100644
--- a/chromium/third_party/webrtc/pc/channel.cc
+++ b/chromium/third_party/webrtc/pc/channel.cc
@@ -37,12 +37,18 @@ bool SetRawAudioSink_w(VoiceMediaChannel* channel,
channel->SetRawAudioSink(ssrc, std::move(*sink));
return true;
}
+
+struct SendPacketMessageData : public rtc::MessageData {
+ rtc::CopyOnWriteBuffer packet;
+ rtc::PacketOptions options;
+};
+
} // namespace
enum {
MSG_EARLYMEDIATIMEOUT = 1,
- MSG_RTPPACKET,
- MSG_RTCPPACKET,
+ MSG_SEND_RTP_PACKET,
+ MSG_SEND_RTCP_PACKET,
MSG_CHANNEL_ERROR,
MSG_READYTOSENDDATA,
MSG_DATARECEIVED,
@@ -61,11 +67,6 @@ static void SafeSetError(const std::string& message, std::string* error_desc) {
}
}
-struct PacketMessageData : public rtc::MessageData {
- rtc::CopyOnWriteBuffer packet;
- rtc::PacketOptions options;
-};
-
struct VoiceChannelErrorMessageData : public rtc::MessageData {
VoiceChannelErrorMessageData(uint32_t in_ssrc,
VoiceMediaChannel::Error in_error)
@@ -142,30 +143,38 @@ void RtpSendParametersFromMediaDescription(
send_params->max_bandwidth_bps = desc->bandwidth();
}
-BaseChannel::BaseChannel(rtc::Thread* thread,
+BaseChannel::BaseChannel(rtc::Thread* worker_thread,
+ rtc::Thread* network_thread,
MediaChannel* media_channel,
TransportController* transport_controller,
const std::string& content_name,
bool rtcp)
- : worker_thread_(thread),
- transport_controller_(transport_controller),
- media_channel_(media_channel),
+ : worker_thread_(worker_thread),
+ network_thread_(network_thread),
+
content_name_(content_name),
+
+ transport_controller_(transport_controller),
rtcp_transport_enabled_(rtcp),
transport_channel_(nullptr),
rtcp_transport_channel_(nullptr),
- enabled_(false),
- writable_(false),
rtp_ready_to_send_(false),
rtcp_ready_to_send_(false),
+ writable_(false),
was_ever_writable_(false),
- local_content_direction_(MD_INACTIVE),
- remote_content_direction_(MD_INACTIVE),
has_received_packet_(false),
dtls_keyed_(false),
secure_required_(false),
- rtp_abs_sendtime_extn_id_(-1) {
+ rtp_abs_sendtime_extn_id_(-1),
+
+ media_channel_(media_channel),
+ enabled_(false),
+ local_content_direction_(MD_INACTIVE),
+ remote_content_direction_(MD_INACTIVE) {
ASSERT(worker_thread_ == rtc::Thread::Current());
+ if (transport_controller) {
+ RTC_DCHECK_EQ(network_thread, transport_controller->network_thread());
+ }
LOG(LS_INFO) << "Created channel for " << content_name;
}
@@ -174,57 +183,100 @@ BaseChannel::~BaseChannel() {
ASSERT(worker_thread_ == rtc::Thread::Current());
Deinit();
StopConnectionMonitor();
- FlushRtcpMessages(); // Send any outstanding RTCP packets.
- worker_thread_->Clear(this); // eats any outstanding messages or packets
+ // Eats any outstanding messages or packets.
+ worker_thread_->Clear(&invoker_);
+ worker_thread_->Clear(this);
// We must destroy the media channel before the transport channel, otherwise
// the media channel may try to send on the dead transport channel. NULLing
// is not an effective strategy since the sends will come on another thread.
delete media_channel_;
- // Note that we don't just call set_transport_channel(nullptr) because that
+ // Note that we don't just call SetTransportChannel_n(nullptr) because that
// would call a pure virtual method which we can't do from a destructor.
+ network_thread_->Invoke<void>(
+ Bind(&BaseChannel::DestroyTransportChannels_n, this));
+ LOG(LS_INFO) << "Destroyed channel";
+}
+
+void BaseChannel::DisconnectTransportChannels_n() {
+ // Send any outstanding RTCP packets.
+ FlushRtcpMessages_n();
+
+ // Stop signals from transport channels, but keep them alive because
+ // media_channel may use them from a different thread.
if (transport_channel_) {
DisconnectFromTransportChannel(transport_channel_);
- transport_controller_->DestroyTransportChannel_w(
- transport_name_, cricket::ICE_CANDIDATE_COMPONENT_RTP);
}
if (rtcp_transport_channel_) {
DisconnectFromTransportChannel(rtcp_transport_channel_);
- transport_controller_->DestroyTransportChannel_w(
+ }
+
+ // Clear pending read packets/messages.
+ network_thread_->Clear(&invoker_);
+ network_thread_->Clear(this);
+}
+
+void BaseChannel::DestroyTransportChannels_n() {
+ if (transport_channel_) {
+ transport_controller_->DestroyTransportChannel_n(
+ transport_name_, cricket::ICE_CANDIDATE_COMPONENT_RTP);
+ }
+ if (rtcp_transport_channel_) {
+ transport_controller_->DestroyTransportChannel_n(
transport_name_, cricket::ICE_CANDIDATE_COMPONENT_RTCP);
}
- LOG(LS_INFO) << "Destroyed channel";
+ // Clear pending send packets/messages.
+ network_thread_->Clear(&invoker_);
+ network_thread_->Clear(this);
+}
+
+bool BaseChannel::Init_w(const std::string* bundle_transport_name) {
+ if (!network_thread_->Invoke<bool>(
+ Bind(&BaseChannel::InitNetwork_n, this, bundle_transport_name))) {
+ return false;
+ }
+
+ // Both RTP and RTCP channels are set, we can call SetInterface on
+ // media channel and it can set network options.
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ media_channel_->SetInterface(this);
+ return true;
}
-bool BaseChannel::Init() {
- if (!SetTransport(content_name())) {
+bool BaseChannel::InitNetwork_n(const std::string* bundle_transport_name) {
+ RTC_DCHECK(network_thread_->IsCurrent());
+ const std::string& transport_name =
+ (bundle_transport_name ? *bundle_transport_name : content_name());
+ if (!SetTransport_n(transport_name)) {
return false;
}
- if (!SetDtlsSrtpCryptoSuites(transport_channel(), false)) {
+ if (!SetDtlsSrtpCryptoSuites_n(transport_channel_, false)) {
return false;
}
if (rtcp_transport_enabled() &&
- !SetDtlsSrtpCryptoSuites(rtcp_transport_channel(), true)) {
+ !SetDtlsSrtpCryptoSuites_n(rtcp_transport_channel_, true)) {
return false;
}
-
- // Both RTP and RTCP channels are set, we can call SetInterface on
- // media channel and it can set network options.
- media_channel_->SetInterface(this);
return true;
}
void BaseChannel::Deinit() {
+ RTC_DCHECK(worker_thread_->IsCurrent());
media_channel_->SetInterface(NULL);
+ // Packets arrive on the network thread, processing packets calls virtual
+ // functions, so need to stop this process in Deinit that is called in
+ // derived classes destructor.
+ network_thread_->Invoke<void>(
+ Bind(&BaseChannel::DisconnectTransportChannels_n, this));
}
bool BaseChannel::SetTransport(const std::string& transport_name) {
- return worker_thread_->Invoke<bool>(
- Bind(&BaseChannel::SetTransport_w, this, transport_name));
+ return network_thread_->Invoke<bool>(
+ Bind(&BaseChannel::SetTransport_n, this, transport_name));
}
-bool BaseChannel::SetTransport_w(const std::string& transport_name) {
- ASSERT(worker_thread_ == rtc::Thread::Current());
+bool BaseChannel::SetTransport_n(const std::string& transport_name) {
+ RTC_DCHECK(network_thread_->IsCurrent());
if (transport_name == transport_name_) {
// Nothing to do if transport name isn't changing
@@ -234,7 +286,7 @@ bool BaseChannel::SetTransport_w(const std::string& transport_name) {
// When using DTLS-SRTP, we must reset the SrtpFilter every time the transport
// changes and wait until the DTLS handshake is complete to set the newly
// negotiated parameters.
- if (ShouldSetupDtlsSrtp()) {
+ if (ShouldSetupDtlsSrtp_n()) {
// Set |writable_| to false such that UpdateWritableState_w can set up
// DTLS-SRTP when the writable_ becomes true again.
writable_ = false;
@@ -245,19 +297,19 @@ bool BaseChannel::SetTransport_w(const std::string& transport_name) {
if (rtcp_transport_enabled()) {
LOG(LS_INFO) << "Create RTCP TransportChannel for " << content_name()
<< " on " << transport_name << " transport ";
- set_rtcp_transport_channel(
- transport_controller_->CreateTransportChannel_w(
+ SetRtcpTransportChannel_n(
+ transport_controller_->CreateTransportChannel_n(
transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTCP),
false /* update_writablity */);
- if (!rtcp_transport_channel()) {
+ if (!rtcp_transport_channel_) {
return false;
}
}
// We're not updating the writablity during the transition state.
- set_transport_channel(transport_controller_->CreateTransportChannel_w(
+ SetTransportChannel_n(transport_controller_->CreateTransportChannel_n(
transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP));
- if (!transport_channel()) {
+ if (!transport_channel_) {
return false;
}
@@ -266,14 +318,14 @@ bool BaseChannel::SetTransport_w(const std::string& transport_name) {
// We can only update the RTCP ready to send after set_transport_channel has
// handled channel writability.
SetReadyToSend(
- true, rtcp_transport_channel() && rtcp_transport_channel()->writable());
+ true, rtcp_transport_channel_ && rtcp_transport_channel_->writable());
}
transport_name_ = transport_name;
return true;
}
-void BaseChannel::set_transport_channel(TransportChannel* new_tc) {
- ASSERT(worker_thread_ == rtc::Thread::Current());
+void BaseChannel::SetTransportChannel_n(TransportChannel* new_tc) {
+ RTC_DCHECK(network_thread_->IsCurrent());
TransportChannel* old_tc = transport_channel_;
if (!old_tc && !new_tc) {
@@ -284,7 +336,7 @@ void BaseChannel::set_transport_channel(TransportChannel* new_tc) {
if (old_tc) {
DisconnectFromTransportChannel(old_tc);
- transport_controller_->DestroyTransportChannel_w(
+ transport_controller_->DestroyTransportChannel_n(
transport_name_, cricket::ICE_CANDIDATE_COMPONENT_RTP);
}
@@ -299,13 +351,13 @@ void BaseChannel::set_transport_channel(TransportChannel* new_tc) {
// Update aggregate writable/ready-to-send state between RTP and RTCP upon
// setting new channel
- UpdateWritableState_w();
+ UpdateWritableState_n();
SetReadyToSend(false, new_tc && new_tc->writable());
}
-void BaseChannel::set_rtcp_transport_channel(TransportChannel* new_tc,
- bool update_writablity) {
- ASSERT(worker_thread_ == rtc::Thread::Current());
+void BaseChannel::SetRtcpTransportChannel_n(TransportChannel* new_tc,
+ bool update_writablity) {
+ RTC_DCHECK(network_thread_->IsCurrent());
TransportChannel* old_tc = rtcp_transport_channel_;
if (!old_tc && !new_tc) {
@@ -316,14 +368,14 @@ void BaseChannel::set_rtcp_transport_channel(TransportChannel* new_tc,
if (old_tc) {
DisconnectFromTransportChannel(old_tc);
- transport_controller_->DestroyTransportChannel_w(
+ transport_controller_->DestroyTransportChannel_n(
transport_name_, cricket::ICE_CANDIDATE_COMPONENT_RTCP);
}
rtcp_transport_channel_ = new_tc;
if (new_tc) {
- RTC_CHECK(!(ShouldSetupDtlsSrtp() && srtp_filter_.IsActive()))
+ RTC_CHECK(!(ShouldSetupDtlsSrtp_n() && srtp_filter_.IsActive()))
<< "Setting RTCP for DTLS/SRTP after SrtpFilter is active "
<< "should never happen.";
ConnectToTransportChannel(new_tc);
@@ -335,13 +387,13 @@ void BaseChannel::set_rtcp_transport_channel(TransportChannel* new_tc,
if (update_writablity) {
// Update aggregate writable/ready-to-send state between RTP and RTCP upon
// setting new channel
- UpdateWritableState_w();
+ UpdateWritableState_n();
SetReadyToSend(true, new_tc && new_tc->writable());
}
}
void BaseChannel::ConnectToTransportChannel(TransportChannel* tc) {
- ASSERT(worker_thread_ == rtc::Thread::Current());
+ RTC_DCHECK(network_thread_->IsCurrent());
tc->SignalWritableState.connect(this, &BaseChannel::OnWritableState);
tc->SignalReadPacket.connect(this, &BaseChannel::OnChannelRead);
@@ -349,15 +401,18 @@ void BaseChannel::ConnectToTransportChannel(TransportChannel* tc) {
tc->SignalDtlsState.connect(this, &BaseChannel::OnDtlsState);
tc->SignalSelectedCandidatePairChanged.connect(
this, &BaseChannel::OnSelectedCandidatePairChanged);
+ tc->SignalSentPacket.connect(this, &BaseChannel::SignalSentPacket_n);
}
void BaseChannel::DisconnectFromTransportChannel(TransportChannel* tc) {
- ASSERT(worker_thread_ == rtc::Thread::Current());
+ RTC_DCHECK(network_thread_->IsCurrent());
tc->SignalWritableState.disconnect(this);
tc->SignalReadPacket.disconnect(this);
tc->SignalReadyToSend.disconnect(this);
tc->SignalDtlsState.disconnect(this);
+ tc->SignalSelectedCandidatePairChanged.disconnect(this);
+ tc->SignalSentPacket.disconnect(this);
}
bool BaseChannel::Enable(bool enable) {
@@ -405,8 +460,11 @@ void BaseChannel::StartConnectionMonitor(int cms) {
// We pass in the BaseChannel instead of the transport_channel_
// because if the transport_channel_ changes, the ConnectionMonitor
// would be pointing to the wrong TransportChannel.
- connection_monitor_.reset(new ConnectionMonitor(
- this, worker_thread(), rtc::Thread::Current()));
+ // We pass in the network thread because on that thread connection monitor
+ // will call BaseChannel::GetConnectionStats which must be called on the
+ // network thread.
+ connection_monitor_.reset(
+ new ConnectionMonitor(this, network_thread(), rtc::Thread::Current()));
connection_monitor_->SignalUpdate.connect(
this, &BaseChannel::OnConnectionMonitorUpdate);
connection_monitor_->Start(cms);
@@ -420,22 +478,27 @@ void BaseChannel::StopConnectionMonitor() {
}
bool BaseChannel::GetConnectionStats(ConnectionInfos* infos) {
- ASSERT(worker_thread_ == rtc::Thread::Current());
+ RTC_DCHECK(network_thread_->IsCurrent());
return transport_channel_->GetStats(infos);
}
-bool BaseChannel::IsReadyToReceive() const {
+bool BaseChannel::IsReadyToReceive_w() const {
// Receive data if we are enabled and have local content,
return enabled() && IsReceiveContentDirection(local_content_direction_);
}
-bool BaseChannel::IsReadyToSend() const {
+bool BaseChannel::IsReadyToSend_w() const {
// Send outgoing data if we are enabled, have local and remote content,
// and we have had some form of connectivity.
return enabled() && IsReceiveContentDirection(remote_content_direction_) &&
IsSendContentDirection(local_content_direction_) &&
- was_ever_writable() &&
- (srtp_filter_.IsActive() || !ShouldSetupDtlsSrtp());
+ network_thread_->Invoke<bool>(
+ Bind(&BaseChannel::IsTransportReadyToSend_n, this));
+}
+
+bool BaseChannel::IsTransportReadyToSend_n() const {
+ return was_ever_writable() &&
+ (srtp_filter_.IsActive() || !ShouldSetupDtlsSrtp_n());
}
bool BaseChannel::SendPacket(rtc::CopyOnWriteBuffer* packet,
@@ -450,7 +513,15 @@ bool BaseChannel::SendRtcp(rtc::CopyOnWriteBuffer* packet,
int BaseChannel::SetOption(SocketType type, rtc::Socket::Option opt,
int value) {
- TransportChannel* channel = NULL;
+ return network_thread_->Invoke<int>(
+ Bind(&BaseChannel::SetOption_n, this, type, opt, value));
+}
+
+int BaseChannel::SetOption_n(SocketType type,
+ rtc::Socket::Option opt,
+ int value) {
+ RTC_DCHECK(network_thread_->IsCurrent());
+ TransportChannel* channel = nullptr;
switch (type) {
case ST_RTP:
channel = transport_channel_;
@@ -467,8 +538,10 @@ int BaseChannel::SetOption(SocketType type, rtc::Socket::Option opt,
}
void BaseChannel::OnWritableState(TransportChannel* channel) {
- ASSERT(channel == transport_channel_ || channel == rtcp_transport_channel_);
- UpdateWritableState_w();
+ RTC_DCHECK(channel == transport_channel_ ||
+ channel == rtcp_transport_channel_);
+ RTC_DCHECK(network_thread_->IsCurrent());
+ UpdateWritableState_n();
}
void BaseChannel::OnChannelRead(TransportChannel* channel,
@@ -477,7 +550,7 @@ void BaseChannel::OnChannelRead(TransportChannel* channel,
int flags) {
TRACE_EVENT0("webrtc", "BaseChannel::OnChannelRead");
// OnChannelRead gets called from P2PSocket; now pass data to MediaEngine
- ASSERT(worker_thread_ == rtc::Thread::Current());
+ RTC_DCHECK(network_thread_->IsCurrent());
// When using RTCP multiplexing we might get RTCP packets on the RTP
// transport. We feed RTP traffic into the demuxer to determine if it is RTCP.
@@ -493,7 +566,7 @@ void BaseChannel::OnReadyToSend(TransportChannel* channel) {
void BaseChannel::OnDtlsState(TransportChannel* channel,
DtlsTransportState state) {
- if (!ShouldSetupDtlsSrtp()) {
+ if (!ShouldSetupDtlsSrtp_n()) {
return;
}
@@ -512,33 +585,36 @@ void BaseChannel::OnSelectedCandidatePairChanged(
CandidatePairInterface* selected_candidate_pair,
int last_sent_packet_id) {
ASSERT(channel == transport_channel_ || channel == rtcp_transport_channel_);
- NetworkRoute network_route;
+ RTC_DCHECK(network_thread_->IsCurrent());
+ std::string transport_name = channel->transport_name();
+ rtc::NetworkRoute network_route;
if (selected_candidate_pair) {
- network_route =
- NetworkRoute(selected_candidate_pair->local_candidate().network_id(),
- selected_candidate_pair->remote_candidate().network_id(),
- last_sent_packet_id);
+ network_route = rtc::NetworkRoute(
+ selected_candidate_pair->local_candidate().network_id(),
+ selected_candidate_pair->remote_candidate().network_id(),
+ last_sent_packet_id);
}
- media_channel()->OnNetworkRouteChanged(channel->transport_name(),
- network_route);
+ invoker_.AsyncInvoke<void>(
+ worker_thread_, Bind(&MediaChannel::OnNetworkRouteChanged, media_channel_,
+ transport_name, network_route));
}
void BaseChannel::SetReadyToSend(bool rtcp, bool ready) {
+ RTC_DCHECK(network_thread_->IsCurrent());
if (rtcp) {
rtcp_ready_to_send_ = ready;
} else {
rtp_ready_to_send_ = ready;
}
- if (rtp_ready_to_send_ &&
- // In the case of rtcp mux |rtcp_transport_channel_| will be null.
- (rtcp_ready_to_send_ || !rtcp_transport_channel_)) {
- // Notify the MediaChannel when both rtp and rtcp channel can send.
- media_channel_->OnReadyToSend(true);
- } else {
- // Notify the MediaChannel when either rtp or rtcp channel can't send.
- media_channel_->OnReadyToSend(false);
- }
+ bool ready_to_send =
+ (rtp_ready_to_send_ &&
+ // In the case of rtcp mux |rtcp_transport_channel_| will be null.
+ (rtcp_ready_to_send_ || !rtcp_transport_channel_));
+
+ invoker_.AsyncInvoke<void>(
+ worker_thread_,
+ Bind(&MediaChannel::OnReadyToSend, media_channel_, ready_to_send));
}
bool BaseChannel::PacketIsRtcp(const TransportChannel* channel,
@@ -550,22 +626,23 @@ bool BaseChannel::PacketIsRtcp(const TransportChannel* channel,
bool BaseChannel::SendPacket(bool rtcp,
rtc::CopyOnWriteBuffer* packet,
const rtc::PacketOptions& options) {
- // SendPacket gets called from MediaEngine, typically on an encoder thread.
- // If the thread is not our worker thread, we will post to our worker
- // so that the real work happens on our worker. This avoids us having to
+ // SendPacket gets called from MediaEngine, on a pacer or an encoder thread.
+ // If the thread is not our network thread, we will post to our network
+ // so that the real work happens on our network. This avoids us having to
// synchronize access to all the pieces of the send path, including
// SRTP and the inner workings of the transport channels.
// The only downside is that we can't return a proper failure code if
// needed. Since UDP is unreliable anyway, this should be a non-issue.
- if (rtc::Thread::Current() != worker_thread_) {
+ if (!network_thread_->IsCurrent()) {
// Avoid a copy by transferring the ownership of the packet data.
- int message_id = (!rtcp) ? MSG_RTPPACKET : MSG_RTCPPACKET;
- PacketMessageData* data = new PacketMessageData;
+ int message_id = rtcp ? MSG_SEND_RTCP_PACKET : MSG_SEND_RTP_PACKET;
+ SendPacketMessageData* data = new SendPacketMessageData;
data->packet = std::move(*packet);
data->options = options;
- worker_thread_->Post(this, message_id, data);
+ network_thread_->Post(this, message_id, data);
return true;
}
+ TRACE_EVENT0("webrtc", "BaseChannel::SendPacket");
// Now that we are on the correct thread, ensure we have a place to send this
// packet before doing anything. (We might get RTCP packets that we don't
@@ -589,6 +666,7 @@ bool BaseChannel::SendPacket(bool rtcp,
updated_options = options;
// Protect if needed.
if (srtp_filter_.IsActive()) {
+ TRACE_EVENT0("webrtc", "SRTP Encode");
bool res;
uint8_t* data = packet->data();
int len = static_cast<int>(packet->size());
@@ -656,9 +734,9 @@ bool BaseChannel::SendPacket(bool rtcp,
}
// Bon voyage.
- int ret =
- channel->SendPacket(packet->data<char>(), packet->size(), updated_options,
- (secure() && secure_dtls()) ? PF_SRTP_BYPASS : 0);
+ int flags = (secure() && secure_dtls()) ? PF_SRTP_BYPASS : PF_NORMAL;
+ int ret = channel->SendPacket(packet->data<char>(), packet->size(),
+ updated_options, flags);
if (ret != static_cast<int>(packet->size())) {
if (channel->GetError() == EWOULDBLOCK) {
LOG(LS_WARNING) << "Got EWOULDBLOCK from socket.";
@@ -687,6 +765,7 @@ bool BaseChannel::WantsPacket(bool rtcp, const rtc::CopyOnWriteBuffer* packet) {
void BaseChannel::HandlePacket(bool rtcp, rtc::CopyOnWriteBuffer* packet,
const rtc::PacketTime& packet_time) {
+ RTC_DCHECK(network_thread_->IsCurrent());
if (!WantsPacket(rtcp, packet)) {
return;
}
@@ -700,6 +779,7 @@ void BaseChannel::HandlePacket(bool rtcp, rtc::CopyOnWriteBuffer* packet,
// Unprotect the packet, if needed.
if (srtp_filter_.IsActive()) {
+ TRACE_EVENT0("webrtc", "SRTP Decode");
char* data = packet->data<char>();
int len = static_cast<int>(packet->size());
bool res;
@@ -743,11 +823,22 @@ void BaseChannel::HandlePacket(bool rtcp, rtc::CopyOnWriteBuffer* packet,
return;
}
- // Push it down to the media channel.
- if (!rtcp) {
- media_channel_->OnPacketReceived(packet, packet_time);
+ invoker_.AsyncInvoke<void>(
+ worker_thread_,
+ Bind(&BaseChannel::OnPacketReceived, this, rtcp, *packet, packet_time));
+}
+
+void BaseChannel::OnPacketReceived(bool rtcp,
+ const rtc::CopyOnWriteBuffer& packet,
+ const rtc::PacketTime& packet_time) {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ // Need to copy variable because OnRtcpReceived/OnPacketReceived
+ // requires non-const pointer to buffer. This doesn't memcpy the actual data.
+ rtc::CopyOnWriteBuffer data(packet);
+ if (rtcp) {
+ media_channel_->OnRtcpReceived(&data, packet_time);
} else {
- media_channel_->OnRtcpReceived(packet, packet_time);
+ media_channel_->OnPacketReceived(&data, packet_time);
}
}
@@ -786,7 +877,7 @@ void BaseChannel::EnableMedia_w() {
LOG(LS_INFO) << "Channel enabled";
enabled_ = true;
- ChangeState();
+ ChangeState_w();
}
void BaseChannel::DisableMedia_w() {
@@ -796,20 +887,20 @@ void BaseChannel::DisableMedia_w() {
LOG(LS_INFO) << "Channel disabled";
enabled_ = false;
- ChangeState();
+ ChangeState_w();
}
-void BaseChannel::UpdateWritableState_w() {
+void BaseChannel::UpdateWritableState_n() {
if (transport_channel_ && transport_channel_->writable() &&
(!rtcp_transport_channel_ || rtcp_transport_channel_->writable())) {
- ChannelWritable_w();
+ ChannelWritable_n();
} else {
- ChannelNotWritable_w();
+ ChannelNotWritable_n();
}
}
-void BaseChannel::ChannelWritable_w() {
- ASSERT(worker_thread_ == rtc::Thread::Current());
+void BaseChannel::ChannelWritable_n() {
+ RTC_DCHECK(network_thread_->IsCurrent());
if (writable_) {
return;
}
@@ -829,15 +920,16 @@ void BaseChannel::ChannelWritable_w() {
}
was_ever_writable_ = true;
- MaybeSetupDtlsSrtp_w();
+ MaybeSetupDtlsSrtp_n();
writable_ = true;
ChangeState();
}
-void BaseChannel::SignalDtlsSetupFailure_w(bool rtcp) {
- ASSERT(worker_thread() == rtc::Thread::Current());
- signaling_thread()->Invoke<void>(Bind(
- &BaseChannel::SignalDtlsSetupFailure_s, this, rtcp));
+void BaseChannel::SignalDtlsSetupFailure_n(bool rtcp) {
+ RTC_DCHECK(network_thread_->IsCurrent());
+ invoker_.AsyncInvoke<void>(
+ signaling_thread(),
+ Bind(&BaseChannel::SignalDtlsSetupFailure_s, this, rtcp));
}
void BaseChannel::SignalDtlsSetupFailure_s(bool rtcp) {
@@ -845,26 +937,27 @@ void BaseChannel::SignalDtlsSetupFailure_s(bool rtcp) {
SignalDtlsSetupFailure(this, rtcp);
}
-bool BaseChannel::SetDtlsSrtpCryptoSuites(TransportChannel* tc, bool rtcp) {
+bool BaseChannel::SetDtlsSrtpCryptoSuites_n(TransportChannel* tc, bool rtcp) {
std::vector<int> crypto_suites;
// We always use the default SRTP crypto suites for RTCP, but we may use
// different crypto suites for RTP depending on the media type.
if (!rtcp) {
- GetSrtpCryptoSuites(&crypto_suites);
+ GetSrtpCryptoSuites_n(&crypto_suites);
} else {
GetDefaultSrtpCryptoSuites(&crypto_suites);
}
return tc->SetSrtpCryptoSuites(crypto_suites);
}
-bool BaseChannel::ShouldSetupDtlsSrtp() const {
+bool BaseChannel::ShouldSetupDtlsSrtp_n() const {
// Since DTLS is applied to all channels, checking RTP should be enough.
return transport_channel_ && transport_channel_->IsDtlsActive();
}
// This function returns true if either DTLS-SRTP is not in use
// *or* DTLS-SRTP is successfully set up.
-bool BaseChannel::SetupDtlsSrtp(bool rtcp_channel) {
+bool BaseChannel::SetupDtlsSrtp_n(bool rtcp_channel) {
+ RTC_DCHECK(network_thread_->IsCurrent());
bool ret = false;
TransportChannel* channel =
@@ -950,30 +1043,30 @@ bool BaseChannel::SetupDtlsSrtp(bool rtcp_channel) {
return ret;
}
-void BaseChannel::MaybeSetupDtlsSrtp_w() {
+void BaseChannel::MaybeSetupDtlsSrtp_n() {
if (srtp_filter_.IsActive()) {
return;
}
- if (!ShouldSetupDtlsSrtp()) {
+ if (!ShouldSetupDtlsSrtp_n()) {
return;
}
- if (!SetupDtlsSrtp(false)) {
- SignalDtlsSetupFailure_w(false);
+ if (!SetupDtlsSrtp_n(false)) {
+ SignalDtlsSetupFailure_n(false);
return;
}
if (rtcp_transport_channel_) {
- if (!SetupDtlsSrtp(true)) {
- SignalDtlsSetupFailure_w(true);
+ if (!SetupDtlsSrtp_n(true)) {
+ SignalDtlsSetupFailure_n(true);
return;
}
}
}
-void BaseChannel::ChannelNotWritable_w() {
- ASSERT(worker_thread_ == rtc::Thread::Current());
+void BaseChannel::ChannelNotWritable_n() {
+ RTC_DCHECK(network_thread_->IsCurrent());
if (!writable_)
return;
@@ -982,7 +1075,7 @@ void BaseChannel::ChannelNotWritable_w() {
ChangeState();
}
-bool BaseChannel::SetRtpTransportParameters_w(
+bool BaseChannel::SetRtpTransportParameters(
const MediaContentDescription* content,
ContentAction action,
ContentSource src,
@@ -993,15 +1086,27 @@ bool BaseChannel::SetRtpTransportParameters_w(
}
// Cache secure_required_ for belt and suspenders check on SendPacket
+ return network_thread_->Invoke<bool>(
+ Bind(&BaseChannel::SetRtpTransportParameters_n, this, content, action,
+ src, error_desc));
+}
+
+bool BaseChannel::SetRtpTransportParameters_n(
+ const MediaContentDescription* content,
+ ContentAction action,
+ ContentSource src,
+ std::string* error_desc) {
+ RTC_DCHECK(network_thread_->IsCurrent());
+
if (src == CS_LOCAL) {
set_secure_required(content->crypto_required() != CT_NONE);
}
- if (!SetSrtp_w(content->cryptos(), action, src, error_desc)) {
+ if (!SetSrtp_n(content->cryptos(), action, src, error_desc)) {
return false;
}
- if (!SetRtcpMux_w(content->rtcp_mux(), action, src, error_desc)) {
+ if (!SetRtcpMux_n(content->rtcp_mux(), action, src, error_desc)) {
return false;
}
@@ -1010,19 +1115,18 @@ bool BaseChannel::SetRtpTransportParameters_w(
// |dtls| will be set to true if DTLS is active for transport channel and
// crypto is empty.
-bool BaseChannel::CheckSrtpConfig(const std::vector<CryptoParams>& cryptos,
- bool* dtls,
- std::string* error_desc) {
+bool BaseChannel::CheckSrtpConfig_n(const std::vector<CryptoParams>& cryptos,
+ bool* dtls,
+ std::string* error_desc) {
*dtls = transport_channel_->IsDtlsActive();
if (*dtls && !cryptos.empty()) {
- SafeSetError("Cryptos must be empty when DTLS is active.",
- error_desc);
+ SafeSetError("Cryptos must be empty when DTLS is active.", error_desc);
return false;
}
return true;
}
-bool BaseChannel::SetSrtp_w(const std::vector<CryptoParams>& cryptos,
+bool BaseChannel::SetSrtp_n(const std::vector<CryptoParams>& cryptos,
ContentAction action,
ContentSource src,
std::string* error_desc) {
@@ -1033,7 +1137,7 @@ bool BaseChannel::SetSrtp_w(const std::vector<CryptoParams>& cryptos,
}
bool ret = false;
bool dtls = false;
- ret = CheckSrtpConfig(cryptos, &dtls, error_desc);
+ ret = CheckSrtpConfig_n(cryptos, &dtls, error_desc);
if (!ret) {
return false;
}
@@ -1070,19 +1174,19 @@ bool BaseChannel::SetSrtp_w(const std::vector<CryptoParams>& cryptos,
}
void BaseChannel::ActivateRtcpMux() {
- worker_thread_->Invoke<void>(Bind(
- &BaseChannel::ActivateRtcpMux_w, this));
+ network_thread_->Invoke<void>(Bind(&BaseChannel::ActivateRtcpMux_n, this));
}
-void BaseChannel::ActivateRtcpMux_w() {
+void BaseChannel::ActivateRtcpMux_n() {
if (!rtcp_mux_filter_.IsActive()) {
rtcp_mux_filter_.SetActive();
- set_rtcp_transport_channel(nullptr, true);
+ SetRtcpTransportChannel_n(nullptr, true);
rtcp_transport_enabled_ = false;
}
}
-bool BaseChannel::SetRtcpMux_w(bool enable, ContentAction action,
+bool BaseChannel::SetRtcpMux_n(bool enable,
+ ContentAction action,
ContentSource src,
std::string* error_desc) {
bool ret = false;
@@ -1100,7 +1204,7 @@ bool BaseChannel::SetRtcpMux_w(bool enable, ContentAction action,
LOG(LS_INFO) << "Enabling rtcp-mux for " << content_name()
<< " by destroying RTCP transport channel for "
<< transport_name();
- set_rtcp_transport_channel(nullptr, true);
+ SetRtcpTransportChannel_n(nullptr, true);
rtcp_transport_enabled_ = false;
}
break;
@@ -1121,7 +1225,7 @@ bool BaseChannel::SetRtcpMux_w(bool enable, ContentAction action,
if (rtcp_mux_filter_.IsActive()) {
// If the RTP transport is already writable, then so are we.
if (transport_channel_->writable()) {
- ChannelWritable_w();
+ ChannelWritable_n();
}
}
@@ -1285,23 +1389,38 @@ bool BaseChannel::UpdateRemoteStreams_w(
return ret;
}
-void BaseChannel::MaybeCacheRtpAbsSendTimeHeaderExtension(
+void BaseChannel::MaybeCacheRtpAbsSendTimeHeaderExtension_w(
const std::vector<RtpHeaderExtension>& extensions) {
+// Absolute Send Time extension id is used only with external auth,
+// so do not bother searching for it and making asyncronious call to set
+// something that is not used.
+#if defined(ENABLE_EXTERNAL_AUTH)
const RtpHeaderExtension* send_time_extension =
FindHeaderExtension(extensions, kRtpAbsoluteSenderTimeHeaderExtension);
- rtp_abs_sendtime_extn_id_ =
+ int rtp_abs_sendtime_extn_id =
send_time_extension ? send_time_extension->id : -1;
+ invoker_.AsyncInvoke<void>(
+ network_thread_, Bind(&BaseChannel::CacheRtpAbsSendTimeHeaderExtension_n,
+ this, rtp_abs_sendtime_extn_id));
+#endif
+}
+
+void BaseChannel::CacheRtpAbsSendTimeHeaderExtension_n(
+ int rtp_abs_sendtime_extn_id) {
+ rtp_abs_sendtime_extn_id_ = rtp_abs_sendtime_extn_id;
}
void BaseChannel::OnMessage(rtc::Message *pmsg) {
TRACE_EVENT0("webrtc", "BaseChannel::OnMessage");
switch (pmsg->message_id) {
- case MSG_RTPPACKET:
- case MSG_RTCPPACKET: {
- PacketMessageData* data = static_cast<PacketMessageData*>(pmsg->pdata);
- SendPacket(pmsg->message_id == MSG_RTCPPACKET, &data->packet,
- data->options);
- delete data; // because it is Posted
+ case MSG_SEND_RTP_PACKET:
+ case MSG_SEND_RTCP_PACKET: {
+ RTC_DCHECK(network_thread_->IsCurrent());
+ SendPacketMessageData* data =
+ static_cast<SendPacketMessageData*>(pmsg->pdata);
+ bool rtcp = pmsg->message_id == MSG_SEND_RTCP_PACKET;
+ SendPacket(rtcp, &data->packet, data->options);
+ delete data;
break;
}
case MSG_FIRSTPACKETRECEIVED: {
@@ -1311,25 +1430,39 @@ void BaseChannel::OnMessage(rtc::Message *pmsg) {
}
}
-void BaseChannel::FlushRtcpMessages() {
+void BaseChannel::FlushRtcpMessages_n() {
// Flush all remaining RTCP messages. This should only be called in
// destructor.
- ASSERT(rtc::Thread::Current() == worker_thread_);
+ RTC_DCHECK(network_thread_->IsCurrent());
rtc::MessageList rtcp_messages;
- worker_thread_->Clear(this, MSG_RTCPPACKET, &rtcp_messages);
- for (rtc::MessageList::iterator it = rtcp_messages.begin();
- it != rtcp_messages.end(); ++it) {
- worker_thread_->Send(this, MSG_RTCPPACKET, it->pdata);
+ network_thread_->Clear(this, MSG_SEND_RTCP_PACKET, &rtcp_messages);
+ for (const auto& message : rtcp_messages) {
+ network_thread_->Send(this, MSG_SEND_RTCP_PACKET, message.pdata);
}
}
-VoiceChannel::VoiceChannel(rtc::Thread* thread,
+void BaseChannel::SignalSentPacket_n(TransportChannel* /* channel */,
+ const rtc::SentPacket& sent_packet) {
+ RTC_DCHECK(network_thread_->IsCurrent());
+ invoker_.AsyncInvoke<void>(
+ worker_thread_,
+ rtc::Bind(&BaseChannel::SignalSentPacket_w, this, sent_packet));
+}
+
+void BaseChannel::SignalSentPacket_w(const rtc::SentPacket& sent_packet) {
+ RTC_DCHECK(worker_thread_->IsCurrent());
+ SignalSentPacket(sent_packet);
+}
+
+VoiceChannel::VoiceChannel(rtc::Thread* worker_thread,
+ rtc::Thread* network_thread,
MediaEngineInterface* media_engine,
VoiceMediaChannel* media_channel,
TransportController* transport_controller,
const std::string& content_name,
bool rtcp)
- : BaseChannel(thread,
+ : BaseChannel(worker_thread,
+ network_thread,
media_channel,
transport_controller,
content_name,
@@ -1346,8 +1479,8 @@ VoiceChannel::~VoiceChannel() {
Deinit();
}
-bool VoiceChannel::Init() {
- if (!BaseChannel::Init()) {
+bool VoiceChannel::Init_w(const std::string* bundle_transport_name) {
+ if (!BaseChannel::Init_w(bundle_transport_name)) {
return false;
}
return true;
@@ -1403,28 +1536,49 @@ void VoiceChannel::SetRawAudioSink(
InvokeOnWorker(Bind(&SetRawAudioSink_w, media_channel(), ssrc, &sink));
}
-webrtc::RtpParameters VoiceChannel::GetRtpParameters(uint32_t ssrc) const {
+webrtc::RtpParameters VoiceChannel::GetRtpSendParameters(uint32_t ssrc) const {
return worker_thread()->Invoke<webrtc::RtpParameters>(
- Bind(&VoiceChannel::GetRtpParameters_w, this, ssrc));
+ Bind(&VoiceChannel::GetRtpSendParameters_w, this, ssrc));
}
-webrtc::RtpParameters VoiceChannel::GetRtpParameters_w(uint32_t ssrc) const {
- // Not yet implemented.
- // TODO(skvlad): Add support for limiting send bitrate for audio channels.
- return webrtc::RtpParameters();
+webrtc::RtpParameters VoiceChannel::GetRtpSendParameters_w(
+ uint32_t ssrc) const {
+ return media_channel()->GetRtpSendParameters(ssrc);
}
-bool VoiceChannel::SetRtpParameters(uint32_t ssrc,
- const webrtc::RtpParameters& parameters) {
+bool VoiceChannel::SetRtpSendParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
return InvokeOnWorker(
- Bind(&VoiceChannel::SetRtpParameters_w, this, ssrc, parameters));
+ Bind(&VoiceChannel::SetRtpSendParameters_w, this, ssrc, parameters));
}
-bool VoiceChannel::SetRtpParameters_w(uint32_t ssrc,
- webrtc::RtpParameters parameters) {
- // Not yet implemented.
- // TODO(skvlad): Add support for limiting send bitrate for audio channels.
- return false;
+bool VoiceChannel::SetRtpSendParameters_w(uint32_t ssrc,
+ webrtc::RtpParameters parameters) {
+ return media_channel()->SetRtpSendParameters(ssrc, parameters);
+}
+
+webrtc::RtpParameters VoiceChannel::GetRtpReceiveParameters(
+ uint32_t ssrc) const {
+ return worker_thread()->Invoke<webrtc::RtpParameters>(
+ Bind(&VoiceChannel::GetRtpReceiveParameters_w, this, ssrc));
+}
+
+webrtc::RtpParameters VoiceChannel::GetRtpReceiveParameters_w(
+ uint32_t ssrc) const {
+ return media_channel()->GetRtpReceiveParameters(ssrc);
+}
+
+bool VoiceChannel::SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
+ return InvokeOnWorker(
+ Bind(&VoiceChannel::SetRtpReceiveParameters_w, this, ssrc, parameters));
+}
+
+bool VoiceChannel::SetRtpReceiveParameters_w(uint32_t ssrc,
+ webrtc::RtpParameters parameters) {
+ return media_channel()->SetRtpReceiveParameters(ssrc, parameters);
}
bool VoiceChannel::GetStats(VoiceMediaInfo* stats) {
@@ -1491,15 +1645,21 @@ void VoiceChannel::OnChannelRead(TransportChannel* channel,
}
}
-void VoiceChannel::ChangeState() {
+void BaseChannel::ChangeState() {
+ RTC_DCHECK(network_thread_->IsCurrent());
+ invoker_.AsyncInvoke<void>(worker_thread_,
+ Bind(&BaseChannel::ChangeState_w, this));
+}
+
+void VoiceChannel::ChangeState_w() {
// Render incoming data if we're the active call, and we have the local
// content. We receive data on the default channel and multiplexed streams.
- bool recv = IsReadyToReceive();
+ bool recv = IsReadyToReceive_w();
media_channel()->SetPlayout(recv);
// Send outgoing data if we're the active call, we have the remote content,
// and we have had some form of connectivity.
- bool send = IsReadyToSend();
+ bool send = IsReadyToSend_w();
media_channel()->SetSend(send);
LOG(LS_INFO) << "Changing voice state, recv=" << recv << " send=" << send;
@@ -1525,7 +1685,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
return false;
}
- if (!SetRtpTransportParameters_w(content, action, CS_LOCAL, error_desc)) {
+ if (!SetRtpTransportParameters(content, action, CS_LOCAL, error_desc)) {
return false;
}
@@ -1551,7 +1711,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content,
}
set_local_content_direction(content->direction());
- ChangeState();
+ ChangeState_w();
return true;
}
@@ -1570,7 +1730,7 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content,
return false;
}
- if (!SetRtpTransportParameters_w(content, action, CS_REMOTE, error_desc)) {
+ if (!SetRtpTransportParameters(content, action, CS_REMOTE, error_desc)) {
return false;
}
@@ -1598,11 +1758,11 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content,
}
if (audio->rtp_header_extensions_set()) {
- MaybeCacheRtpAbsSendTimeHeaderExtension(audio->rtp_header_extensions());
+ MaybeCacheRtpAbsSendTimeHeaderExtension_w(audio->rtp_header_extensions());
}
set_remote_content_direction(content->direction());
- ChangeState();
+ ChangeState_w();
return true;
}
@@ -1656,23 +1816,26 @@ void VoiceChannel::OnAudioMonitorUpdate(AudioMonitor* monitor,
SignalAudioMonitor(this, info);
}
-void VoiceChannel::GetSrtpCryptoSuites(std::vector<int>* crypto_suites) const {
+void VoiceChannel::GetSrtpCryptoSuites_n(
+ std::vector<int>* crypto_suites) const {
GetSupportedAudioCryptoSuites(crypto_suites);
}
-VideoChannel::VideoChannel(rtc::Thread* thread,
+VideoChannel::VideoChannel(rtc::Thread* worker_thread,
+ rtc::Thread* network_thread,
VideoMediaChannel* media_channel,
TransportController* transport_controller,
const std::string& content_name,
bool rtcp)
- : BaseChannel(thread,
+ : BaseChannel(worker_thread,
+ network_thread,
media_channel,
transport_controller,
content_name,
rtcp) {}
-bool VideoChannel::Init() {
- if (!BaseChannel::Init()) {
+bool VideoChannel::Init_w(const std::string* bundle_transport_name) {
+ if (!BaseChannel::Init_w(bundle_transport_name)) {
return false;
}
return true;
@@ -1694,9 +1857,11 @@ bool VideoChannel::SetSink(uint32_t ssrc,
return true;
}
-bool VideoChannel::SetCapturer(uint32_t ssrc, VideoCapturer* capturer) {
- return InvokeOnWorker(Bind(&VideoMediaChannel::SetCapturer,
- media_channel(), ssrc, capturer));
+void VideoChannel::SetSource(
+ uint32_t ssrc,
+ rtc::VideoSourceInterface<cricket::VideoFrame>* source) {
+ worker_thread()->Invoke<void>(
+ Bind(&VideoMediaChannel::SetSource, media_channel(), ssrc, source));
}
bool VideoChannel::SetVideoSend(uint32_t ssrc,
@@ -1706,29 +1871,55 @@ bool VideoChannel::SetVideoSend(uint32_t ssrc,
ssrc, mute, options));
}
-webrtc::RtpParameters VideoChannel::GetRtpParameters(uint32_t ssrc) const {
+webrtc::RtpParameters VideoChannel::GetRtpSendParameters(uint32_t ssrc) const {
+ return worker_thread()->Invoke<webrtc::RtpParameters>(
+ Bind(&VideoChannel::GetRtpSendParameters_w, this, ssrc));
+}
+
+webrtc::RtpParameters VideoChannel::GetRtpSendParameters_w(
+ uint32_t ssrc) const {
+ return media_channel()->GetRtpSendParameters(ssrc);
+}
+
+bool VideoChannel::SetRtpSendParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
+ return InvokeOnWorker(
+ Bind(&VideoChannel::SetRtpSendParameters_w, this, ssrc, parameters));
+}
+
+bool VideoChannel::SetRtpSendParameters_w(uint32_t ssrc,
+ webrtc::RtpParameters parameters) {
+ return media_channel()->SetRtpSendParameters(ssrc, parameters);
+}
+
+webrtc::RtpParameters VideoChannel::GetRtpReceiveParameters(
+ uint32_t ssrc) const {
return worker_thread()->Invoke<webrtc::RtpParameters>(
- Bind(&VideoChannel::GetRtpParameters_w, this, ssrc));
+ Bind(&VideoChannel::GetRtpReceiveParameters_w, this, ssrc));
}
-webrtc::RtpParameters VideoChannel::GetRtpParameters_w(uint32_t ssrc) const {
- return media_channel()->GetRtpParameters(ssrc);
+webrtc::RtpParameters VideoChannel::GetRtpReceiveParameters_w(
+ uint32_t ssrc) const {
+ return media_channel()->GetRtpReceiveParameters(ssrc);
}
-bool VideoChannel::SetRtpParameters(uint32_t ssrc,
- const webrtc::RtpParameters& parameters) {
+bool VideoChannel::SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
return InvokeOnWorker(
- Bind(&VideoChannel::SetRtpParameters_w, this, ssrc, parameters));
+ Bind(&VideoChannel::SetRtpReceiveParameters_w, this, ssrc, parameters));
}
-bool VideoChannel::SetRtpParameters_w(uint32_t ssrc,
- webrtc::RtpParameters parameters) {
- return media_channel()->SetRtpParameters(ssrc, parameters);
+bool VideoChannel::SetRtpReceiveParameters_w(uint32_t ssrc,
+ webrtc::RtpParameters parameters) {
+ return media_channel()->SetRtpReceiveParameters(ssrc, parameters);
}
-void VideoChannel::ChangeState() {
+
+void VideoChannel::ChangeState_w() {
// Send outgoing data if we're the active call, we have the remote content,
// and we have had some form of connectivity.
- bool send = IsReadyToSend();
+ bool send = IsReadyToSend_w();
if (!media_channel()->SetSend(send)) {
LOG(LS_ERROR) << "Failed to SetSend on video channel";
// TODO(gangji): Report error back to server.
@@ -1777,7 +1968,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
return false;
}
- if (!SetRtpTransportParameters_w(content, action, CS_LOCAL, error_desc)) {
+ if (!SetRtpTransportParameters(content, action, CS_LOCAL, error_desc)) {
return false;
}
@@ -1803,7 +1994,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
}
set_local_content_direction(content->direction());
- ChangeState();
+ ChangeState_w();
return true;
}
@@ -1822,8 +2013,7 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
return false;
}
-
- if (!SetRtpTransportParameters_w(content, action, CS_REMOTE, error_desc)) {
+ if (!SetRtpTransportParameters(content, action, CS_REMOTE, error_desc)) {
return false;
}
@@ -1852,11 +2042,11 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
}
if (video->rtp_header_extensions_set()) {
- MaybeCacheRtpAbsSendTimeHeaderExtension(video->rtp_header_extensions());
+ MaybeCacheRtpAbsSendTimeHeaderExtension_w(video->rtp_header_extensions());
}
set_remote_content_direction(content->direction());
- ChangeState();
+ ChangeState_w();
return true;
}
@@ -1887,16 +2077,19 @@ void VideoChannel::OnMediaMonitorUpdate(
SignalMediaMonitor(this, info);
}
-void VideoChannel::GetSrtpCryptoSuites(std::vector<int>* crypto_suites) const {
+void VideoChannel::GetSrtpCryptoSuites_n(
+ std::vector<int>* crypto_suites) const {
GetSupportedVideoCryptoSuites(crypto_suites);
}
-DataChannel::DataChannel(rtc::Thread* thread,
+DataChannel::DataChannel(rtc::Thread* worker_thread,
+ rtc::Thread* network_thread,
DataMediaChannel* media_channel,
TransportController* transport_controller,
const std::string& content_name,
bool rtcp)
- : BaseChannel(thread,
+ : BaseChannel(worker_thread,
+ network_thread,
media_channel,
transport_controller,
content_name,
@@ -1913,8 +2106,8 @@ DataChannel::~DataChannel() {
Deinit();
}
-bool DataChannel::Init() {
- if (!BaseChannel::Init()) {
+bool DataChannel::Init_w(const std::string* bundle_transport_name) {
+ if (!BaseChannel::Init_w(bundle_transport_name)) {
return false;
}
media_channel()->SignalDataReceived.connect(
@@ -2000,7 +2193,7 @@ bool DataChannel::SetLocalContent_w(const MediaContentDescription* content,
}
if (data_channel_type_ == DCT_RTP) {
- if (!SetRtpTransportParameters_w(content, action, CS_LOCAL, error_desc)) {
+ if (!SetRtpTransportParameters(content, action, CS_LOCAL, error_desc)) {
return false;
}
}
@@ -2032,7 +2225,7 @@ bool DataChannel::SetLocalContent_w(const MediaContentDescription* content,
}
set_local_content_direction(content->direction());
- ChangeState();
+ ChangeState_w();
return true;
}
@@ -2062,7 +2255,7 @@ bool DataChannel::SetRemoteContent_w(const MediaContentDescription* content,
LOG(LS_INFO) << "Setting remote data description";
if (data_channel_type_ == DCT_RTP &&
- !SetRtpTransportParameters_w(content, action, CS_REMOTE, error_desc)) {
+ !SetRtpTransportParameters(content, action, CS_REMOTE, error_desc)) {
return false;
}
@@ -2087,21 +2280,21 @@ bool DataChannel::SetRemoteContent_w(const MediaContentDescription* content,
}
set_remote_content_direction(content->direction());
- ChangeState();
+ ChangeState_w();
return true;
}
-void DataChannel::ChangeState() {
+void DataChannel::ChangeState_w() {
// Render incoming data if we're the active call, and we have the local
// content. We receive data on the default channel and multiplexed streams.
- bool recv = IsReadyToReceive();
+ bool recv = IsReadyToReceive_w();
if (!media_channel()->SetReceive(recv)) {
LOG(LS_ERROR) << "Failed to SetReceive on data channel";
}
// Send outgoing data if we're the active call, we have the remote content,
// and we have had some form of connectivity.
- bool send = IsReadyToSend();
+ bool send = IsReadyToSend_w();
if (!media_channel()->SetSend(send)) {
LOG(LS_ERROR) << "Failed to SetSend on data channel";
}
@@ -2197,12 +2390,12 @@ void DataChannel::OnDataChannelReadyToSend(bool writable) {
new DataChannelReadyToSendMessageData(writable));
}
-void DataChannel::GetSrtpCryptoSuites(std::vector<int>* crypto_suites) const {
+void DataChannel::GetSrtpCryptoSuites_n(std::vector<int>* crypto_suites) const {
GetSupportedDataCryptoSuites(crypto_suites);
}
-bool DataChannel::ShouldSetupDtlsSrtp() const {
- return (data_channel_type_ == DCT_RTP) && BaseChannel::ShouldSetupDtlsSrtp();
+bool DataChannel::ShouldSetupDtlsSrtp_n() const {
+ return data_channel_type_ == DCT_RTP && BaseChannel::ShouldSetupDtlsSrtp_n();
}
void DataChannel::OnStreamClosedRemotely(uint32_t sid) {
diff --git a/chromium/third_party/webrtc/pc/channel.h b/chromium/third_party/webrtc/pc/channel.h
index e05b6e5f3f0..d9f5fd6d279 100644
--- a/chromium/third_party/webrtc/pc/channel.h
+++ b/chromium/third_party/webrtc/pc/channel.h
@@ -19,6 +19,7 @@
#include <vector>
#include "webrtc/audio_sink.h"
+#include "webrtc/base/asyncinvoker.h"
#include "webrtc/base/asyncudpsocket.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/network.h"
@@ -27,8 +28,8 @@
#include "webrtc/media/base/mediachannel.h"
#include "webrtc/media/base/mediaengine.h"
#include "webrtc/media/base/streamparams.h"
-#include "webrtc/media/base/videocapturer.h"
#include "webrtc/media/base/videosinkinterface.h"
+#include "webrtc/media/base/videosourceinterface.h"
#include "webrtc/p2p/base/transportcontroller.h"
#include "webrtc/p2p/client/socketmonitor.h"
#include "webrtc/pc/audiomonitor.h"
@@ -47,14 +48,17 @@ namespace cricket {
struct CryptoParams;
class MediaContentDescription;
-enum SinkType {
- SINK_PRE_CRYPTO, // Sink packets before encryption or after decryption.
- SINK_POST_CRYPTO // Sink packets after encryption or before decryption.
-};
-
// BaseChannel contains logic common to voice and video, including
-// enable, marshaling calls to a worker thread, and
+// enable, marshaling calls to a worker and network threads, and
// connection and media monitors.
+// BaseChannel assumes signaling and other threads are allowed to make
+// synchronous calls to the worker thread, the worker thread makes synchronous
+// calls only to the network thread, and the network thread can't be blocked by
+// other threads.
+// All methods with _n suffix must be called on network thread,
+// methods with _w suffix - on worker thread
+// and methods with _s suffix on signaling thread.
+// Network and worker threads may be the same thread.
//
// WARNING! SUBCLASSES MUST CALL Deinit() IN THEIR DESTRUCTORS!
// This is required to avoid a data race between the destructor modifying the
@@ -66,26 +70,22 @@ class BaseChannel
public MediaChannel::NetworkInterface,
public ConnectionStatsGetter {
public:
- BaseChannel(rtc::Thread* thread,
+ BaseChannel(rtc::Thread* worker_thread,
+ rtc::Thread* network_thread,
MediaChannel* channel,
TransportController* transport_controller,
const std::string& content_name,
bool rtcp);
virtual ~BaseChannel();
- bool Init();
- // Deinit may be called multiple times and is simply ignored if it's alreay
+ bool Init_w(const std::string* bundle_transport_name);
+ // Deinit may be called multiple times and is simply ignored if it's already
// done.
void Deinit();
rtc::Thread* worker_thread() const { return worker_thread_; }
+ rtc::Thread* network_thread() const { return network_thread_; }
const std::string& content_name() const { return content_name_; }
const std::string& transport_name() const { return transport_name_; }
- TransportChannel* transport_channel() const {
- return transport_channel_;
- }
- TransportChannel* rtcp_transport_channel() const {
- return rtcp_transport_channel_;
- }
bool enabled() const { return enabled_; }
// This function returns true if we are using SRTP.
@@ -143,18 +143,28 @@ class BaseChannel
}
sigslot::signal2<BaseChannel*, bool> SignalDtlsSetupFailure;
- void SignalDtlsSetupFailure_w(bool rtcp);
+ void SignalDtlsSetupFailure_n(bool rtcp);
void SignalDtlsSetupFailure_s(bool rtcp);
// Used for latency measurements.
sigslot::signal1<BaseChannel*> SignalFirstPacketReceived;
+ // Forward TransportChannel SignalSentPacket to worker thread.
+ sigslot::signal1<const rtc::SentPacket&> SignalSentPacket;
+
+ // Only public for unit tests. Otherwise, consider private.
+ TransportChannel* transport_channel() const { return transport_channel_; }
+ TransportChannel* rtcp_transport_channel() const {
+ return rtcp_transport_channel_;
+ }
+
// Made public for easier testing.
void SetReadyToSend(bool rtcp, bool ready);
// Only public for unit tests. Otherwise, consider protected.
int SetOption(SocketType type, rtc::Socket::Option o, int val)
override;
+ int SetOption_n(SocketType type, rtc::Socket::Option o, int val);
SrtpFilter* srtp_filter() { return &srtp_filter_; }
@@ -162,11 +172,11 @@ class BaseChannel
virtual MediaChannel* media_channel() const { return media_channel_; }
// Sets the |transport_channel_| (and |rtcp_transport_channel_|, if |rtcp_| is
// true). Gets the transport channels from |transport_controller_|.
- bool SetTransport_w(const std::string& transport_name);
+ bool SetTransport_n(const std::string& transport_name);
- void set_transport_channel(TransportChannel* transport);
- void set_rtcp_transport_channel(TransportChannel* transport,
- bool update_writablity);
+ void SetTransportChannel_n(TransportChannel* transport);
+ void SetRtcpTransportChannel_n(TransportChannel* transport,
+ bool update_writablity);
bool was_ever_writable() const { return was_ever_writable_; }
void set_local_content_direction(MediaContentDirection direction) {
@@ -178,8 +188,8 @@ class BaseChannel
void set_secure_required(bool secure_required) {
secure_required_ = secure_required;
}
- bool IsReadyToReceive() const;
- bool IsReadyToSend() const;
+ bool IsReadyToReceive_w() const;
+ bool IsReadyToSend_w() const;
rtc::Thread* signaling_thread() {
return transport_controller_->signaling_thread();
}
@@ -188,7 +198,7 @@ class BaseChannel
void ConnectToTransportChannel(TransportChannel* tc);
void DisconnectFromTransportChannel(TransportChannel* tc);
- void FlushRtcpMessages();
+ void FlushRtcpMessages_n();
// NetworkInterface implementation, called by MediaEngine
bool SendPacket(rtc::CopyOnWriteBuffer* packet,
@@ -217,28 +227,33 @@ class BaseChannel
bool SendPacket(bool rtcp,
rtc::CopyOnWriteBuffer* packet,
const rtc::PacketOptions& options);
+
virtual bool WantsPacket(bool rtcp, const rtc::CopyOnWriteBuffer* packet);
void HandlePacket(bool rtcp, rtc::CopyOnWriteBuffer* packet,
const rtc::PacketTime& packet_time);
+ void OnPacketReceived(bool rtcp,
+ const rtc::CopyOnWriteBuffer& packet,
+ const rtc::PacketTime& packet_time);
void EnableMedia_w();
void DisableMedia_w();
- void UpdateWritableState_w();
- void ChannelWritable_w();
- void ChannelNotWritable_w();
+ void UpdateWritableState_n();
+ void ChannelWritable_n();
+ void ChannelNotWritable_n();
bool AddRecvStream_w(const StreamParams& sp);
bool RemoveRecvStream_w(uint32_t ssrc);
bool AddSendStream_w(const StreamParams& sp);
bool RemoveSendStream_w(uint32_t ssrc);
- virtual bool ShouldSetupDtlsSrtp() const;
+ virtual bool ShouldSetupDtlsSrtp_n() const;
// Do the DTLS key expansion and impose it on the SRTP/SRTCP filters.
// |rtcp_channel| indicates whether to set up the RTP or RTCP filter.
- bool SetupDtlsSrtp(bool rtcp_channel);
- void MaybeSetupDtlsSrtp_w();
+ bool SetupDtlsSrtp_n(bool rtcp_channel);
+ void MaybeSetupDtlsSrtp_n();
// Set the DTLS-SRTP cipher policy on this channel as appropriate.
- bool SetDtlsSrtpCryptoSuites(TransportChannel* tc, bool rtcp);
+ bool SetDtlsSrtpCryptoSuites_n(TransportChannel* tc, bool rtcp);
- virtual void ChangeState() = 0;
+ void ChangeState();
+ virtual void ChangeState_w() = 0;
// Gets the content info appropriate to the channel (audio or video).
virtual const ContentInfo* GetFirstContent(
@@ -255,25 +270,29 @@ class BaseChannel
virtual bool SetRemoteContent_w(const MediaContentDescription* content,
ContentAction action,
std::string* error_desc) = 0;
- bool SetRtpTransportParameters_w(const MediaContentDescription* content,
+ bool SetRtpTransportParameters(const MediaContentDescription* content,
+ ContentAction action,
+ ContentSource src,
+ std::string* error_desc);
+ bool SetRtpTransportParameters_n(const MediaContentDescription* content,
ContentAction action,
ContentSource src,
std::string* error_desc);
// Helper method to get RTP Absoulute SendTime extension header id if
// present in remote supported extensions list.
- void MaybeCacheRtpAbsSendTimeHeaderExtension(
+ void MaybeCacheRtpAbsSendTimeHeaderExtension_w(
const std::vector<RtpHeaderExtension>& extensions);
- bool CheckSrtpConfig(const std::vector<CryptoParams>& cryptos,
- bool* dtls,
- std::string* error_desc);
- bool SetSrtp_w(const std::vector<CryptoParams>& params,
+ bool CheckSrtpConfig_n(const std::vector<CryptoParams>& cryptos,
+ bool* dtls,
+ std::string* error_desc);
+ bool SetSrtp_n(const std::vector<CryptoParams>& params,
ContentAction action,
ContentSource src,
std::string* error_desc);
- void ActivateRtcpMux_w();
- bool SetRtcpMux_w(bool enable,
+ void ActivateRtcpMux_n();
+ bool SetRtcpMux_n(bool enable,
ContentAction action,
ContentSource src,
std::string* error_desc);
@@ -283,7 +302,7 @@ class BaseChannel
// Handled in derived classes
// Get the SRTP crypto suites to use for RTP media
- virtual void GetSrtpCryptoSuites(std::vector<int>* crypto_suites) const = 0;
+ virtual void GetSrtpCryptoSuites_n(std::vector<int>* crypto_suites) const = 0;
virtual void OnConnectionMonitorUpdate(ConnectionMonitor* monitor,
const std::vector<ConnectionInfo>& infos) = 0;
@@ -294,13 +313,24 @@ class BaseChannel
}
private:
- rtc::Thread* worker_thread_;
- TransportController* transport_controller_;
- MediaChannel* media_channel_;
- std::vector<StreamParams> local_streams_;
- std::vector<StreamParams> remote_streams_;
+ bool InitNetwork_n(const std::string* bundle_transport_name);
+ void DisconnectTransportChannels_n();
+ void DestroyTransportChannels_n();
+ void SignalSentPacket_n(TransportChannel* channel,
+ const rtc::SentPacket& sent_packet);
+ void SignalSentPacket_w(const rtc::SentPacket& sent_packet);
+ bool IsTransportReadyToSend_n() const;
+ void CacheRtpAbsSendTimeHeaderExtension_n(int rtp_abs_sendtime_extn_id);
+
+ rtc::Thread* const worker_thread_;
+ rtc::Thread* const network_thread_;
+ rtc::AsyncInvoker invoker_;
const std::string content_name_;
+ std::unique_ptr<ConnectionMonitor> connection_monitor_;
+
+ // Transport related members that should be accessed from network thread.
+ TransportController* const transport_controller_;
std::string transport_name_;
bool rtcp_transport_enabled_;
TransportChannel* transport_channel_;
@@ -310,32 +340,40 @@ class BaseChannel
SrtpFilter srtp_filter_;
RtcpMuxFilter rtcp_mux_filter_;
BundleFilter bundle_filter_;
- std::unique_ptr<ConnectionMonitor> connection_monitor_;
- bool enabled_;
- bool writable_;
bool rtp_ready_to_send_;
bool rtcp_ready_to_send_;
+ bool writable_;
bool was_ever_writable_;
- MediaContentDirection local_content_direction_;
- MediaContentDirection remote_content_direction_;
bool has_received_packet_;
bool dtls_keyed_;
bool secure_required_;
int rtp_abs_sendtime_extn_id_;
+
+ // MediaChannel related members that should be access from worker thread.
+ MediaChannel* const media_channel_;
+ // Currently enabled_ flag accessed from signaling thread too, but it can
+ // be changed only when signaling thread does sunchronious call to worker
+ // thread, so it should be safe.
+ bool enabled_;
+ std::vector<StreamParams> local_streams_;
+ std::vector<StreamParams> remote_streams_;
+ MediaContentDirection local_content_direction_;
+ MediaContentDirection remote_content_direction_;
};
// VoiceChannel is a specialization that adds support for early media, DTMF,
// and input/output level monitoring.
class VoiceChannel : public BaseChannel {
public:
- VoiceChannel(rtc::Thread* thread,
+ VoiceChannel(rtc::Thread* worker_thread,
+ rtc::Thread* network_thread,
MediaEngineInterface* media_engine,
VoiceMediaChannel* channel,
TransportController* transport_controller,
const std::string& content_name,
bool rtcp);
~VoiceChannel();
- bool Init();
+ bool Init_w(const std::string* bundle_transport_name);
// Configure sending media on the stream with SSRC |ssrc|
// If there is only one sending stream SSRC 0 can be used.
@@ -345,7 +383,7 @@ class VoiceChannel : public BaseChannel {
AudioSource* source);
// downcasts a MediaChannel
- virtual VoiceMediaChannel* media_channel() const {
+ VoiceMediaChannel* media_channel() const override {
return static_cast<VoiceMediaChannel*>(BaseChannel::media_channel());
}
@@ -366,8 +404,12 @@ class VoiceChannel : public BaseChannel {
bool SetOutputVolume(uint32_t ssrc, double volume);
void SetRawAudioSink(uint32_t ssrc,
std::unique_ptr<webrtc::AudioSinkInterface> sink);
- webrtc::RtpParameters GetRtpParameters(uint32_t ssrc) const;
- bool SetRtpParameters(uint32_t ssrc, const webrtc::RtpParameters& parameters);
+ webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const;
+ bool SetRtpSendParameters(uint32_t ssrc,
+ const webrtc::RtpParameters& parameters);
+ webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const;
+ bool SetRtpReceiveParameters(uint32_t ssrc,
+ const webrtc::RtpParameters& parameters);
// Get statistics about the current media session.
bool GetStats(VoiceMediaInfo* stats);
@@ -388,34 +430,39 @@ class VoiceChannel : public BaseChannel {
int GetInputLevel_w();
int GetOutputLevel_w();
void GetActiveStreams_w(AudioInfo::StreamList* actives);
- webrtc::RtpParameters GetRtpParameters_w(uint32_t ssrc) const;
- bool SetRtpParameters_w(uint32_t ssrc, webrtc::RtpParameters parameters);
+ webrtc::RtpParameters GetRtpSendParameters_w(uint32_t ssrc) const;
+ bool SetRtpSendParameters_w(uint32_t ssrc, webrtc::RtpParameters parameters);
+ webrtc::RtpParameters GetRtpReceiveParameters_w(uint32_t ssrc) const;
+ bool SetRtpReceiveParameters_w(uint32_t ssrc,
+ webrtc::RtpParameters parameters);
private:
// overrides from BaseChannel
- virtual void OnChannelRead(TransportChannel* channel,
- const char* data, size_t len,
- const rtc::PacketTime& packet_time,
- int flags);
- virtual void ChangeState();
- virtual const ContentInfo* GetFirstContent(const SessionDescription* sdesc);
- virtual bool SetLocalContent_w(const MediaContentDescription* content,
- ContentAction action,
- std::string* error_desc);
- virtual bool SetRemoteContent_w(const MediaContentDescription* content,
- ContentAction action,
- std::string* error_desc);
+ void OnChannelRead(TransportChannel* channel,
+ const char* data,
+ size_t len,
+ const rtc::PacketTime& packet_time,
+ int flags) override;
+ void ChangeState_w() override;
+ const ContentInfo* GetFirstContent(const SessionDescription* sdesc) override;
+ bool SetLocalContent_w(const MediaContentDescription* content,
+ ContentAction action,
+ std::string* error_desc) override;
+ bool SetRemoteContent_w(const MediaContentDescription* content,
+ ContentAction action,
+ std::string* error_desc) override;
void HandleEarlyMediaTimeout();
bool InsertDtmf_w(uint32_t ssrc, int event, int duration);
bool SetOutputVolume_w(uint32_t ssrc, double volume);
bool GetStats_w(VoiceMediaInfo* stats);
- virtual void OnMessage(rtc::Message* pmsg);
- virtual void GetSrtpCryptoSuites(std::vector<int>* crypto_suites) const;
- virtual void OnConnectionMonitorUpdate(
- ConnectionMonitor* monitor, const std::vector<ConnectionInfo>& infos);
- virtual void OnMediaMonitorUpdate(
- VoiceMediaChannel* media_channel, const VoiceMediaInfo& info);
+ void OnMessage(rtc::Message* pmsg) override;
+ void GetSrtpCryptoSuites_n(std::vector<int>* crypto_suites) const override;
+ void OnConnectionMonitorUpdate(
+ ConnectionMonitor* monitor,
+ const std::vector<ConnectionInfo>& infos) override;
+ void OnMediaMonitorUpdate(VoiceMediaChannel* media_channel,
+ const VoiceMediaInfo& info);
void OnAudioMonitorUpdate(AudioMonitor* monitor, const AudioInfo& info);
static const int kEarlyMediaTimeout = 1000;
@@ -435,21 +482,25 @@ class VoiceChannel : public BaseChannel {
// VideoChannel is a specialization for video.
class VideoChannel : public BaseChannel {
public:
- VideoChannel(rtc::Thread* thread,
+ VideoChannel(rtc::Thread* worker_thread,
+ rtc::Thread* netwokr_thread,
VideoMediaChannel* channel,
TransportController* transport_controller,
const std::string& content_name,
bool rtcp);
~VideoChannel();
- bool Init();
+ bool Init_w(const std::string* bundle_transport_name);
// downcasts a MediaChannel
- virtual VideoMediaChannel* media_channel() const {
+ VideoMediaChannel* media_channel() const override {
return static_cast<VideoMediaChannel*>(BaseChannel::media_channel());
}
bool SetSink(uint32_t ssrc, rtc::VideoSinkInterface<VideoFrame>* sink);
- bool SetCapturer(uint32_t ssrc, VideoCapturer* capturer);
+ // Register a source. The |ssrc| must correspond to a registered
+ // send stream.
+ void SetSource(uint32_t ssrc,
+ rtc::VideoSourceInterface<cricket::VideoFrame>* source);
// Get statistics about the current media session.
bool GetStats(VideoMediaInfo* stats);
@@ -461,29 +512,37 @@ class VideoChannel : public BaseChannel {
sigslot::signal2<VideoChannel*, const VideoMediaInfo&> SignalMediaMonitor;
bool SetVideoSend(uint32_t ssrc, bool enable, const VideoOptions* options);
- webrtc::RtpParameters GetRtpParameters(uint32_t ssrc) const;
- bool SetRtpParameters(uint32_t ssrc, const webrtc::RtpParameters& parameters);
+ webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const;
+ bool SetRtpSendParameters(uint32_t ssrc,
+ const webrtc::RtpParameters& parameters);
+ webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const;
+ bool SetRtpReceiveParameters(uint32_t ssrc,
+ const webrtc::RtpParameters& parameters);
private:
// overrides from BaseChannel
- virtual void ChangeState();
- virtual const ContentInfo* GetFirstContent(const SessionDescription* sdesc);
- virtual bool SetLocalContent_w(const MediaContentDescription* content,
- ContentAction action,
- std::string* error_desc);
- virtual bool SetRemoteContent_w(const MediaContentDescription* content,
- ContentAction action,
- std::string* error_desc);
+ void ChangeState_w() override;
+ const ContentInfo* GetFirstContent(const SessionDescription* sdesc) override;
+ bool SetLocalContent_w(const MediaContentDescription* content,
+ ContentAction action,
+ std::string* error_desc) override;
+ bool SetRemoteContent_w(const MediaContentDescription* content,
+ ContentAction action,
+ std::string* error_desc) override;
bool GetStats_w(VideoMediaInfo* stats);
- webrtc::RtpParameters GetRtpParameters_w(uint32_t ssrc) const;
- bool SetRtpParameters_w(uint32_t ssrc, webrtc::RtpParameters parameters);
+ webrtc::RtpParameters GetRtpSendParameters_w(uint32_t ssrc) const;
+ bool SetRtpSendParameters_w(uint32_t ssrc, webrtc::RtpParameters parameters);
+ webrtc::RtpParameters GetRtpReceiveParameters_w(uint32_t ssrc) const;
+ bool SetRtpReceiveParameters_w(uint32_t ssrc,
+ webrtc::RtpParameters parameters);
- virtual void OnMessage(rtc::Message* pmsg);
- virtual void GetSrtpCryptoSuites(std::vector<int>* crypto_suites) const;
- virtual void OnConnectionMonitorUpdate(
- ConnectionMonitor* monitor, const std::vector<ConnectionInfo>& infos);
- virtual void OnMediaMonitorUpdate(
- VideoMediaChannel* media_channel, const VideoMediaInfo& info);
+ void OnMessage(rtc::Message* pmsg) override;
+ void GetSrtpCryptoSuites_n(std::vector<int>* crypto_suites) const override;
+ void OnConnectionMonitorUpdate(
+ ConnectionMonitor* monitor,
+ const std::vector<ConnectionInfo>& infos) override;
+ void OnMediaMonitorUpdate(VideoMediaChannel* media_channel,
+ const VideoMediaInfo& info);
std::unique_ptr<VideoMediaMonitor> media_monitor_;
@@ -498,13 +557,14 @@ class VideoChannel : public BaseChannel {
// DataChannel is a specialization for data.
class DataChannel : public BaseChannel {
public:
- DataChannel(rtc::Thread* thread,
+ DataChannel(rtc::Thread* worker_thread,
+ rtc::Thread* network_thread,
DataMediaChannel* media_channel,
TransportController* transport_controller,
const std::string& content_name,
bool rtcp);
~DataChannel();
- bool Init();
+ bool Init_w(const std::string* bundle_transport_name);
virtual bool SendData(const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
@@ -532,7 +592,7 @@ class DataChannel : public BaseChannel {
protected:
// downcasts a MediaChannel.
- virtual DataMediaChannel* media_channel() const {
+ DataMediaChannel* media_channel() const override {
return static_cast<DataMediaChannel*>(BaseChannel::media_channel());
}
@@ -569,7 +629,7 @@ class DataChannel : public BaseChannel {
typedef rtc::TypedMessageData<bool> DataChannelReadyToSendMessageData;
// overrides from BaseChannel
- virtual const ContentInfo* GetFirstContent(const SessionDescription* sdesc);
+ const ContentInfo* GetFirstContent(const SessionDescription* sdesc) override;
// If data_channel_type_ is DCT_NONE, set it. Otherwise, check that
// it's the same as what was set previously. Returns false if it's
// set to one type one type and changed to another type later.
@@ -579,22 +639,23 @@ class DataChannel : public BaseChannel {
// DataContentDescription.
bool SetDataChannelTypeFromContent(const DataContentDescription* content,
std::string* error_desc);
- virtual bool SetLocalContent_w(const MediaContentDescription* content,
- ContentAction action,
- std::string* error_desc);
- virtual bool SetRemoteContent_w(const MediaContentDescription* content,
- ContentAction action,
- std::string* error_desc);
- virtual void ChangeState();
- virtual bool WantsPacket(bool rtcp, const rtc::CopyOnWriteBuffer* packet);
+ bool SetLocalContent_w(const MediaContentDescription* content,
+ ContentAction action,
+ std::string* error_desc) override;
+ bool SetRemoteContent_w(const MediaContentDescription* content,
+ ContentAction action,
+ std::string* error_desc) override;
+ void ChangeState_w() override;
+ bool WantsPacket(bool rtcp, const rtc::CopyOnWriteBuffer* packet) override;
- virtual void OnMessage(rtc::Message* pmsg);
- virtual void GetSrtpCryptoSuites(std::vector<int>* crypto_suites) const;
- virtual void OnConnectionMonitorUpdate(
- ConnectionMonitor* monitor, const std::vector<ConnectionInfo>& infos);
- virtual void OnMediaMonitorUpdate(
- DataMediaChannel* media_channel, const DataMediaInfo& info);
- virtual bool ShouldSetupDtlsSrtp() const;
+ void OnMessage(rtc::Message* pmsg) override;
+ void GetSrtpCryptoSuites_n(std::vector<int>* crypto_suites) const override;
+ void OnConnectionMonitorUpdate(
+ ConnectionMonitor* monitor,
+ const std::vector<ConnectionInfo>& infos) override;
+ void OnMediaMonitorUpdate(DataMediaChannel* media_channel,
+ const DataMediaInfo& info);
+ bool ShouldSetupDtlsSrtp_n() const override;
void OnDataReceived(
const ReceiveDataParams& params, const char* data, size_t len);
void OnDataChannelError(uint32_t ssrc, DataMediaChannel::Error error);
diff --git a/chromium/third_party/webrtc/pc/channel_unittest.cc b/chromium/third_party/webrtc/pc/channel_unittest.cc
index a423842e00d..6c6a3c10e43 100644
--- a/chromium/third_party/webrtc/pc/channel_unittest.cc
+++ b/chromium/third_party/webrtc/pc/channel_unittest.cc
@@ -10,24 +10,13 @@
#include <memory>
-#include "webrtc/base/arraysize.h"
-#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/fileutils.h"
+#include "webrtc/base/array_view.h"
+#include "webrtc/base/buffer.h"
#include "webrtc/base/gunit.h"
-#include "webrtc/base/helpers.h"
#include "webrtc/base/logging.h"
-#include "webrtc/base/pathutils.h"
-#include "webrtc/base/signalthread.h"
-#include "webrtc/base/ssladapter.h"
-#include "webrtc/base/sslidentity.h"
-#include "webrtc/base/window.h"
#include "webrtc/media/base/fakemediaengine.h"
#include "webrtc/media/base/fakertp.h"
-#include "webrtc/media/base/fakescreencapturerfactory.h"
-#include "webrtc/media/base/fakevideocapturer.h"
#include "webrtc/media/base/mediachannel.h"
-#include "webrtc/media/base/rtpdump.h"
-#include "webrtc/media/base/screencastid.h"
#include "webrtc/media/base/testutils.h"
#include "webrtc/p2p/base/faketransportcontroller.h"
#include "webrtc/pc/channel.h"
@@ -46,19 +35,21 @@ using cricket::FakeVoiceMediaChannel;
using cricket::ScreencastId;
using cricket::StreamParams;
using cricket::TransportChannel;
-using rtc::WindowId;
-
-static const cricket::AudioCodec kPcmuCodec(0, "PCMU", 64000, 8000, 1, 0);
-static const cricket::AudioCodec kPcmaCodec(8, "PCMA", 64000, 8000, 1, 0);
-static const cricket::AudioCodec kIsacCodec(103, "ISAC", 40000, 16000, 1, 0);
-static const cricket::VideoCodec kH264Codec(97, "H264", 640, 400, 30, 0);
-static const cricket::VideoCodec kH264SvcCodec(99, "H264-SVC", 320, 200, 15, 0);
-static const cricket::DataCodec kGoogleDataCodec(101, "google-data", 0);
-static const uint32_t kSsrc1 = 0x1111;
-static const uint32_t kSsrc2 = 0x2222;
-static const uint32_t kSsrc3 = 0x3333;
-static const int kAudioPts[] = {0, 8};
-static const int kVideoPts[] = {97, 99};
+
+namespace {
+const cricket::AudioCodec kPcmuCodec(0, "PCMU", 64000, 8000, 1);
+const cricket::AudioCodec kPcmaCodec(8, "PCMA", 64000, 8000, 1);
+const cricket::AudioCodec kIsacCodec(103, "ISAC", 40000, 16000, 1);
+const cricket::VideoCodec kH264Codec(97, "H264", 640, 400, 30);
+const cricket::VideoCodec kH264SvcCodec(99, "H264-SVC", 320, 200, 15);
+const cricket::DataCodec kGoogleDataCodec(101, "google-data");
+const uint32_t kSsrc1 = 0x1111;
+const uint32_t kSsrc2 = 0x2222;
+const uint32_t kSsrc3 = 0x3333;
+const int kAudioPts[] = {0, 8};
+const int kVideoPts[] = {97, 99};
+enum class NetworkIsWorker { Yes, No };
+} // namespace
template <class ChannelT,
class MediaChannelT,
@@ -76,10 +67,6 @@ class Traits {
typedef OptionsT Options;
};
-// Controls how long we wait for a session to send messages that we
-// expect, in milliseconds. We put it high to avoid flaky tests.
-static const int kEventTimeout = 5000;
-
class VoiceTraits : public Traits<cricket::VoiceChannel,
cricket::FakeVoiceMediaChannel,
cricket::AudioContentDescription,
@@ -101,12 +88,7 @@ class DataTraits : public Traits<cricket::DataChannel,
cricket::DataMediaInfo,
cricket::DataOptions> {};
-rtc::StreamInterface* Open(const std::string& path) {
- return rtc::Filesystem::OpenFile(
- rtc::Pathname(path), "wb");
-}
-
-// Base class for Voice/VideoChannel tests
+// Base class for Voice/Video/DataChannel tests
template<class T>
class ChannelTest : public testing::Test, public sigslot::has_slots<> {
public:
@@ -114,40 +96,52 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
DTLS = 0x10 };
ChannelTest(bool verify_playout,
- const uint8_t* rtp_data,
- int rtp_len,
- const uint8_t* rtcp_data,
- int rtcp_len)
+ rtc::ArrayView<const uint8_t> rtp_data,
+ rtc::ArrayView<const uint8_t> rtcp_data,
+ NetworkIsWorker network_is_worker)
: verify_playout_(verify_playout),
- transport_controller1_(cricket::ICEROLE_CONTROLLING),
- transport_controller2_(cricket::ICEROLE_CONTROLLED),
media_channel1_(NULL),
media_channel2_(NULL),
- rtp_packet_(reinterpret_cast<const char*>(rtp_data), rtp_len),
- rtcp_packet_(reinterpret_cast<const char*>(rtcp_data), rtcp_len),
+ rtp_packet_(rtp_data.data(), rtp_data.size()),
+ rtcp_packet_(rtcp_data.data(), rtcp_data.size()),
media_info_callbacks1_(),
- media_info_callbacks2_() {}
+ media_info_callbacks2_() {
+ if (network_is_worker == NetworkIsWorker::Yes) {
+ network_thread_ = rtc::Thread::Current();
+ } else {
+ network_thread_keeper_ = rtc::Thread::Create();
+ network_thread_keeper_->SetName("Network", nullptr);
+ network_thread_keeper_->Start();
+ network_thread_ = network_thread_keeper_.get();
+ }
+ transport_controller1_.reset(new cricket::FakeTransportController(
+ network_thread_, cricket::ICEROLE_CONTROLLING));
+ transport_controller2_.reset(new cricket::FakeTransportController(
+ network_thread_, cricket::ICEROLE_CONTROLLED));
+ }
void CreateChannels(int flags1, int flags2) {
CreateChannels(new typename T::MediaChannel(NULL, typename T::Options()),
new typename T::MediaChannel(NULL, typename T::Options()),
- flags1, flags2, rtc::Thread::Current());
+ flags1, flags2);
}
- void CreateChannels(
- typename T::MediaChannel* ch1, typename T::MediaChannel* ch2,
- int flags1, int flags2, rtc::Thread* thread) {
+ void CreateChannels(typename T::MediaChannel* ch1,
+ typename T::MediaChannel* ch2,
+ int flags1,
+ int flags2) {
+ rtc::Thread* worker_thread = rtc::Thread::Current();
media_channel1_ = ch1;
media_channel2_ = ch2;
- channel1_.reset(CreateChannel(thread, &media_engine_, ch1,
- &transport_controller1_,
- (flags1 & RTCP) != 0));
- channel2_.reset(CreateChannel(thread, &media_engine_, ch2,
- &transport_controller2_,
- (flags2 & RTCP) != 0));
- channel1_->SignalMediaMonitor.connect(
- this, &ChannelTest<T>::OnMediaMonitor);
- channel2_->SignalMediaMonitor.connect(
- this, &ChannelTest<T>::OnMediaMonitor);
+ channel1_.reset(
+ CreateChannel(worker_thread, network_thread_, &media_engine_, ch1,
+ transport_controller1_.get(), (flags1 & RTCP) != 0));
+ channel2_.reset(
+ CreateChannel(worker_thread, network_thread_, &media_engine_, ch2,
+ transport_controller2_.get(), (flags2 & RTCP) != 0));
+ channel1_->SignalMediaMonitor.connect(this,
+ &ChannelTest<T>::OnMediaMonitor1);
+ channel2_->SignalMediaMonitor.connect(this,
+ &ChannelTest<T>::OnMediaMonitor2);
if ((flags1 & DTLS) && (flags2 & DTLS)) {
flags1 = (flags1 & ~SECURE);
flags2 = (flags2 & ~SECURE);
@@ -161,14 +155,14 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
if (flags1 & DTLS) {
// Confirmed to work with KT_RSA and KT_ECDSA.
- transport_controller1_.SetLocalCertificate(
- rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::SSLIdentity>(
+ transport_controller1_->SetLocalCertificate(
+ rtc::RTCCertificate::Create(std::unique_ptr<rtc::SSLIdentity>(
rtc::SSLIdentity::Generate("session1", rtc::KT_DEFAULT))));
}
if (flags2 & DTLS) {
// Confirmed to work with KT_RSA and KT_ECDSA.
- transport_controller2_.SetLocalCertificate(
- rtc::RTCCertificate::Create(rtc::scoped_ptr<rtc::SSLIdentity>(
+ transport_controller2_->SetLocalCertificate(
+ rtc::RTCCertificate::Create(std::unique_ptr<rtc::SSLIdentity>(
rtc::SSLIdentity::Generate("session2", rtc::KT_DEFAULT))));
}
@@ -187,14 +181,16 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
}
}
typename T::Channel* CreateChannel(
- rtc::Thread* thread,
+ rtc::Thread* worker_thread,
+ rtc::Thread* network_thread,
cricket::MediaEngineInterface* engine,
typename T::MediaChannel* ch,
cricket::TransportController* transport_controller,
bool rtcp) {
- typename T::Channel* channel = new typename T::Channel(
- thread, engine, ch, transport_controller, cricket::CN_AUDIO, rtcp);
- if (!channel->Init()) {
+ typename T::Channel* channel =
+ new typename T::Channel(worker_thread, network_thread, engine, ch,
+ transport_controller, cricket::CN_AUDIO, rtcp);
+ if (!channel->Init_w(nullptr)) {
delete channel;
channel = NULL;
}
@@ -209,7 +205,7 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
result = channel2_->SetRemoteContent(&remote_media_content1_,
CA_OFFER, NULL);
if (result) {
- transport_controller1_.Connect(&transport_controller2_);
+ transport_controller1_->Connect(transport_controller2_.get());
result = channel2_->SetLocalContent(&local_media_content2_,
CA_ANSWER, NULL);
@@ -242,7 +238,7 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
channel2_->Enable(true);
result = channel1_->SetRemoteContent(&remote_media_content2_,
CA_PRANSWER, NULL);
- transport_controller1_.Connect(&transport_controller2_);
+ transport_controller1_->Connect(transport_controller2_.get());
}
return result;
}
@@ -269,105 +265,92 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
return channel1_->RemoveRecvStream(id);
}
- // Calling "_w" method here is ok since we only use one thread for this test
cricket::FakeTransport* GetTransport1() {
- return transport_controller1_.GetTransport_w(channel1_->content_name());
+ std::string name = channel1_->content_name();
+ return network_thread_->Invoke<cricket::FakeTransport*>(
+ [this, name] { return transport_controller1_->GetTransport_n(name); });
}
cricket::FakeTransport* GetTransport2() {
- return transport_controller2_.GetTransport_w(channel2_->content_name());
+ std::string name = channel2_->content_name();
+ return network_thread_->Invoke<cricket::FakeTransport*>(
+ [this, name] { return transport_controller2_->GetTransport_n(name); });
}
- bool SendRtp1() {
- return media_channel1_->SendRtp(rtp_packet_.c_str(),
- static_cast<int>(rtp_packet_.size()),
- rtc::PacketOptions());
+ void SendRtp1() {
+ media_channel1_->SendRtp(rtp_packet_.data(), rtp_packet_.size(),
+ rtc::PacketOptions());
}
- bool SendRtp2() {
- return media_channel2_->SendRtp(rtp_packet_.c_str(),
- static_cast<int>(rtp_packet_.size()),
- rtc::PacketOptions());
+ void SendRtp2() {
+ media_channel2_->SendRtp(rtp_packet_.data(), rtp_packet_.size(),
+ rtc::PacketOptions());
}
- bool SendRtcp1() {
- return media_channel1_->SendRtcp(rtcp_packet_.c_str(),
- static_cast<int>(rtcp_packet_.size()));
+ void SendRtcp1() {
+ media_channel1_->SendRtcp(rtcp_packet_.data(), rtcp_packet_.size());
}
- bool SendRtcp2() {
- return media_channel2_->SendRtcp(rtcp_packet_.c_str(),
- static_cast<int>(rtcp_packet_.size()));
+ void SendRtcp2() {
+ media_channel2_->SendRtcp(rtcp_packet_.data(), rtcp_packet_.size());
}
// Methods to send custom data.
- bool SendCustomRtp1(uint32_t ssrc, int sequence_number, int pl_type = -1) {
- std::string data(CreateRtpData(ssrc, sequence_number, pl_type));
- return media_channel1_->SendRtp(data.c_str(), static_cast<int>(data.size()),
- rtc::PacketOptions());
- }
- bool SendCustomRtp2(uint32_t ssrc, int sequence_number, int pl_type = -1) {
- std::string data(CreateRtpData(ssrc, sequence_number, pl_type));
- return media_channel2_->SendRtp(data.c_str(), static_cast<int>(data.size()),
- rtc::PacketOptions());
- }
- bool SendCustomRtcp1(uint32_t ssrc) {
- std::string data(CreateRtcpData(ssrc));
- return media_channel1_->SendRtcp(data.c_str(),
- static_cast<int>(data.size()));
- }
- bool SendCustomRtcp2(uint32_t ssrc) {
- std::string data(CreateRtcpData(ssrc));
- return media_channel2_->SendRtcp(data.c_str(),
- static_cast<int>(data.size()));
+ void SendCustomRtp1(uint32_t ssrc, int sequence_number, int pl_type = -1) {
+ rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type);
+ media_channel1_->SendRtp(data.data(), data.size(), rtc::PacketOptions());
+ }
+ void SendCustomRtp2(uint32_t ssrc, int sequence_number, int pl_type = -1) {
+ rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type);
+ media_channel2_->SendRtp(data.data(), data.size(), rtc::PacketOptions());
+ }
+ void SendCustomRtcp1(uint32_t ssrc) {
+ rtc::Buffer data = CreateRtcpData(ssrc);
+ media_channel1_->SendRtcp(data.data(), data.size());
}
+ void SendCustomRtcp2(uint32_t ssrc) {
+ rtc::Buffer data = CreateRtcpData(ssrc);
+ media_channel2_->SendRtcp(data.data(), data.size());
+ }
+
bool CheckRtp1() {
- return media_channel1_->CheckRtp(rtp_packet_.c_str(),
- static_cast<int>(rtp_packet_.size()));
+ return media_channel1_->CheckRtp(rtp_packet_.data(), rtp_packet_.size());
}
bool CheckRtp2() {
- return media_channel2_->CheckRtp(rtp_packet_.c_str(),
- static_cast<int>(rtp_packet_.size()));
+ return media_channel2_->CheckRtp(rtp_packet_.data(), rtp_packet_.size());
}
bool CheckRtcp1() {
- return media_channel1_->CheckRtcp(rtcp_packet_.c_str(),
- static_cast<int>(rtcp_packet_.size()));
+ return media_channel1_->CheckRtcp(rtcp_packet_.data(), rtcp_packet_.size());
}
bool CheckRtcp2() {
- return media_channel2_->CheckRtcp(rtcp_packet_.c_str(),
- static_cast<int>(rtcp_packet_.size()));
+ return media_channel2_->CheckRtcp(rtcp_packet_.data(), rtcp_packet_.size());
}
// Methods to check custom data.
bool CheckCustomRtp1(uint32_t ssrc, int sequence_number, int pl_type = -1) {
- std::string data(CreateRtpData(ssrc, sequence_number, pl_type));
- return media_channel1_->CheckRtp(data.c_str(),
- static_cast<int>(data.size()));
+ rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type);
+ return media_channel1_->CheckRtp(data.data(), data.size());
}
bool CheckCustomRtp2(uint32_t ssrc, int sequence_number, int pl_type = -1) {
- std::string data(CreateRtpData(ssrc, sequence_number, pl_type));
- return media_channel2_->CheckRtp(data.c_str(),
- static_cast<int>(data.size()));
+ rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type);
+ return media_channel2_->CheckRtp(data.data(), data.size());
}
bool CheckCustomRtcp1(uint32_t ssrc) {
- std::string data(CreateRtcpData(ssrc));
- return media_channel1_->CheckRtcp(data.c_str(),
- static_cast<int>(data.size()));
+ rtc::Buffer data = CreateRtcpData(ssrc);
+ return media_channel1_->CheckRtcp(data.data(), data.size());
}
bool CheckCustomRtcp2(uint32_t ssrc) {
- std::string data(CreateRtcpData(ssrc));
- return media_channel2_->CheckRtcp(data.c_str(),
- static_cast<int>(data.size()));
+ rtc::Buffer data = CreateRtcpData(ssrc);
+ return media_channel2_->CheckRtcp(data.data(), data.size());
}
- std::string CreateRtpData(uint32_t ssrc, int sequence_number, int pl_type) {
- std::string data(rtp_packet_);
+ rtc::Buffer CreateRtpData(uint32_t ssrc, int sequence_number, int pl_type) {
+ rtc::Buffer data(rtp_packet_.data(), rtp_packet_.size());
// Set SSRC in the rtp packet copy.
- rtc::SetBE32(const_cast<char*>(data.c_str()) + 8, ssrc);
- rtc::SetBE16(const_cast<char*>(data.c_str()) + 2, sequence_number);
+ rtc::SetBE32(data.data() + 8, ssrc);
+ rtc::SetBE16(data.data() + 2, sequence_number);
if (pl_type >= 0) {
- rtc::Set8(const_cast<char*>(data.c_str()), 1,
- static_cast<uint8_t>(pl_type));
+ rtc::Set8(data.data(), 1, static_cast<uint8_t>(pl_type));
}
return data;
}
- std::string CreateRtcpData(uint32_t ssrc) {
- std::string data(rtcp_packet_);
+ rtc::Buffer CreateRtcpData(uint32_t ssrc) {
+ rtc::Buffer data(rtcp_packet_.data(), rtcp_packet_.size());
// Set SSRC in the rtcp packet copy.
- rtc::SetBE32(const_cast<char*>(data.c_str()) + 4, ssrc);
+ rtc::SetBE32(data.data() + 4, ssrc);
return data;
}
@@ -408,86 +391,40 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
return sdesc;
}
- class CallThread : public rtc::SignalThread {
- public:
- typedef bool (ChannelTest<T>::*Method)();
- CallThread(ChannelTest<T>* obj, Method method, bool* result = nullptr)
- : obj_(obj),
- method_(method),
- result_(false),
- result_ptr_(result) {
- if (result_ptr_)
- *result_ptr_ = false;
- }
-
- ~CallThread() {
- if (result_ptr_) {
- rtc::CritScope cs(&result_lock_);
- *result_ptr_ = result_;
- }
- }
-
- virtual void DoWork() {
- SetResult((*obj_.*method_)());
- }
-
- bool result() {
- rtc::CritScope cs(&result_lock_);
- return result_;
- }
-
- private:
- void SetResult(const bool& result) {
- rtc::CritScope cs(&result_lock_);
- result_ = result;
- }
-
- ChannelTest<T>* obj_;
- Method method_;
- rtc::CriticalSection result_lock_;
- bool result_ GUARDED_BY(result_lock_);
- bool* result_ptr_;
- };
-
// Will manage the lifetime of a CallThread, making sure it's
// destroyed before this object goes out of scope.
class ScopedCallThread {
public:
- using Method = typename CallThread::Method;
-
- ScopedCallThread(ChannelTest<T>* obj, Method method)
- : thread_(new CallThread(obj, method)) {
+ template <class FunctorT>
+ ScopedCallThread(const FunctorT& functor)
+ : thread_(rtc::Thread::Create()),
+ task_(new rtc::FunctorMessageHandler<void, FunctorT>(functor)) {
thread_->Start();
+ thread_->Post(task_.get());
}
- ~ScopedCallThread() {
- thread_->Destroy(true);
- }
+ ~ScopedCallThread() { thread_->Stop(); }
- bool result() const { return thread_->result(); }
+ rtc::Thread* thread() { return thread_.get(); }
private:
- CallThread* thread_;
+ std::unique_ptr<rtc::Thread> thread_;
+ std::unique_ptr<rtc::MessageHandler> task_;
};
- void CallOnThreadAndWaitForDone(typename CallThread::Method method,
- bool* result) {
- CallThread* thread = new CallThread(this, method, result);
- thread->Start();
- thread->Destroy(true);
- }
-
bool CodecMatches(const typename T::Codec& c1, const typename T::Codec& c2) {
return false; // overridden in specialized classes
}
- void OnMediaMonitor(typename T::Channel* channel,
- const typename T::MediaInfo& info) {
- if (channel == channel1_.get()) {
- media_info_callbacks1_++;
- } else if (channel == channel2_.get()) {
- media_info_callbacks2_++;
- }
+ void OnMediaMonitor1(typename T::Channel* channel,
+ const typename T::MediaInfo& info) {
+ RTC_DCHECK_EQ(channel, channel1_.get());
+ media_info_callbacks1_++;
+ }
+ void OnMediaMonitor2(typename T::Channel* channel,
+ const typename T::MediaInfo& info) {
+ RTC_DCHECK_EQ(channel, channel2_.get());
+ media_info_callbacks2_++;
}
cricket::CandidatePairInterface* last_selected_candidate_pair() {
@@ -792,7 +729,7 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_TRUE(channel2_->SetRemoteContent(&content1, CA_OFFER, NULL));
EXPECT_EQ(1u, media_channel2_->recv_streams().size());
- transport_controller1_.Connect(&transport_controller2_);
+ transport_controller1_->Connect(transport_controller2_.get());
// Channel 2 do not send anything.
typename T::Content content2;
@@ -803,7 +740,8 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_TRUE(channel2_->Enable(true));
EXPECT_EQ(0u, media_channel2_->send_streams().size());
- EXPECT_TRUE(SendCustomRtp1(kSsrc1, 0));
+ SendCustomRtp1(kSsrc1, 0);
+ WaitForThreads();
EXPECT_TRUE(CheckCustomRtp2(kSsrc1, 0));
// Let channel 2 update the content by sending |stream2| and enable SRTP.
@@ -829,7 +767,8 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_TRUE(channel1_->secure());
EXPECT_TRUE(channel2_->secure());
- EXPECT_TRUE(SendCustomRtp2(kSsrc2, 0));
+ SendCustomRtp2(kSsrc2, 0);
+ WaitForThreads();
EXPECT_TRUE(CheckCustomRtp1(kSsrc2, 0));
}
@@ -867,7 +806,7 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_FALSE(media_channel2_->playout());
}
EXPECT_FALSE(media_channel2_->sending());
- transport_controller1_.Connect(&transport_controller2_);
+ transport_controller1_->Connect(transport_controller2_.get());
if (verify_playout_) {
EXPECT_TRUE(media_channel1_->playout());
}
@@ -915,7 +854,7 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_TRUE(channel2_->SetRemoteContent(&content1, CA_OFFER, NULL));
EXPECT_TRUE(channel2_->SetLocalContent(&content2, CA_PRANSWER, NULL));
EXPECT_TRUE(channel1_->SetRemoteContent(&content2, CA_PRANSWER, NULL));
- transport_controller1_.Connect(&transport_controller2_);
+ transport_controller1_->Connect(transport_controller2_.get());
if (verify_playout_) {
EXPECT_TRUE(media_channel1_->playout());
@@ -958,39 +897,47 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
// Tests that when the transport channel signals a candidate pair change
// event, the media channel will receive a call on the network route change.
void TestNetworkRouteChanges() {
+ constexpr uint16_t kLocalNetId = 1;
+ constexpr uint16_t kRemoteNetId = 2;
+ constexpr int kLastPacketId = 100;
+
CreateChannels(0, 0);
cricket::TransportChannel* transport_channel1 =
channel1_->transport_channel();
- ASSERT_TRUE(transport_channel1 != nullptr);
+ ASSERT_TRUE(transport_channel1);
typename T::MediaChannel* media_channel1 =
static_cast<typename T::MediaChannel*>(channel1_->media_channel());
- ASSERT_TRUE(media_channel1 != nullptr);
-
- media_channel1_->set_num_network_route_changes(0);
- // The transport channel becomes disconnected.
- transport_channel1->SignalSelectedCandidatePairChanged(transport_channel1,
- nullptr, -1);
- EXPECT_EQ(1, media_channel1_->num_network_route_changes());
+ ASSERT_TRUE(media_channel1);
+
+ media_channel1->set_num_network_route_changes(0);
+ network_thread_->Invoke<void>([transport_channel1] {
+ // The transport channel becomes disconnected.
+ transport_channel1->SignalSelectedCandidatePairChanged(transport_channel1,
+ nullptr, -1);
+ });
+ WaitForThreads();
+ EXPECT_EQ(1, media_channel1->num_network_route_changes());
EXPECT_FALSE(media_channel1->last_network_route().connected);
-
- media_channel1_->set_num_network_route_changes(0);
- // The transport channel becomes connected.
- rtc::SocketAddress local_address("192.168.1.1", 1000 /* port number */);
- rtc::SocketAddress remote_address("192.168.1.2", 2000 /* port number */);
- uint16_t local_net_id = 1;
- uint16_t remote_net_id = 2;
- int last_packet_id = 100;
- rtc::scoped_ptr<cricket::CandidatePairInterface> candidate_pair(
- transport_controller1_.CreateFakeCandidatePair(
- local_address, local_net_id, remote_address, remote_net_id));
- transport_channel1->SignalSelectedCandidatePairChanged(
- transport_channel1, candidate_pair.get(), last_packet_id);
- EXPECT_EQ(1, media_channel1_->num_network_route_changes());
- cricket::NetworkRoute expected_network_route(local_net_id, remote_net_id,
- last_packet_id);
+ media_channel1->set_num_network_route_changes(0);
+
+ network_thread_->Invoke<void>([this, transport_channel1, media_channel1,
+ kLocalNetId, kRemoteNetId, kLastPacketId] {
+ // The transport channel becomes connected.
+ rtc::SocketAddress local_address("192.168.1.1", 1000 /* port number */);
+ rtc::SocketAddress remote_address("192.168.1.2", 2000 /* port number */);
+ std::unique_ptr<cricket::CandidatePairInterface> candidate_pair(
+ transport_controller1_->CreateFakeCandidatePair(
+ local_address, kLocalNetId, remote_address, kRemoteNetId));
+ transport_channel1->SignalSelectedCandidatePairChanged(
+ transport_channel1, candidate_pair.get(), kLastPacketId);
+ });
+ WaitForThreads();
+ EXPECT_EQ(1, media_channel1->num_network_route_changes());
+ rtc::NetworkRoute expected_network_route(kLocalNetId, kRemoteNetId,
+ kLastPacketId);
EXPECT_EQ(expected_network_route, media_channel1->last_network_route());
- EXPECT_EQ(last_packet_id,
+ EXPECT_EQ(kLastPacketId,
media_channel1->last_network_route().last_sent_packet_id);
}
@@ -1029,8 +976,7 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
}
};
CreateChannels(new LastWordMediaChannel(), new LastWordMediaChannel(),
- RTCP | RTCP_MUX, RTCP | RTCP_MUX,
- rtc::Thread::Current());
+ RTCP | RTCP_MUX, RTCP | RTCP_MUX);
EXPECT_TRUE(SendInitiate());
EXPECT_TRUE(SendAccept());
EXPECT_TRUE(SendTerminate());
@@ -1045,14 +991,28 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
ASSERT_TRUE(GetTransport2());
EXPECT_EQ(1U, GetTransport1()->channels().size());
EXPECT_EQ(1U, GetTransport2()->channels().size());
- EXPECT_TRUE(SendRtp1());
- EXPECT_TRUE(SendRtp2());
+ SendRtp1();
+ SendRtp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtp1());
EXPECT_TRUE(CheckRtp2());
EXPECT_TRUE(CheckNoRtp1());
EXPECT_TRUE(CheckNoRtp2());
}
+ void TestDeinit() {
+ CreateChannels(RTCP, RTCP);
+ EXPECT_TRUE(SendInitiate());
+ EXPECT_TRUE(SendAccept());
+ SendRtp1();
+ SendRtp2();
+ SendRtcp1();
+ SendRtcp2();
+ // Do not wait, destroy channels.
+ channel1_.reset(nullptr);
+ channel2_.reset(nullptr);
+ }
+
// Check that RTCP is not transmitted if both sides don't support RTCP.
void SendNoRtcpToNoRtcp() {
CreateChannels(0, 0);
@@ -1062,8 +1022,9 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
ASSERT_TRUE(GetTransport2());
EXPECT_EQ(1U, GetTransport1()->channels().size());
EXPECT_EQ(1U, GetTransport2()->channels().size());
- EXPECT_FALSE(SendRtcp1());
- EXPECT_FALSE(SendRtcp2());
+ SendRtcp1();
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckNoRtcp1());
EXPECT_TRUE(CheckNoRtcp2());
}
@@ -1077,8 +1038,9 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
ASSERT_TRUE(GetTransport2());
EXPECT_EQ(1U, GetTransport1()->channels().size());
EXPECT_EQ(2U, GetTransport2()->channels().size());
- EXPECT_FALSE(SendRtcp1());
- EXPECT_FALSE(SendRtcp2());
+ SendRtcp1();
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckNoRtcp1());
EXPECT_TRUE(CheckNoRtcp2());
}
@@ -1092,8 +1054,9 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
ASSERT_TRUE(GetTransport2());
EXPECT_EQ(2U, GetTransport1()->channels().size());
EXPECT_EQ(1U, GetTransport2()->channels().size());
- EXPECT_FALSE(SendRtcp1());
- EXPECT_FALSE(SendRtcp2());
+ SendRtcp1();
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckNoRtcp1());
EXPECT_TRUE(CheckNoRtcp2());
}
@@ -1107,8 +1070,9 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
ASSERT_TRUE(GetTransport2());
EXPECT_EQ(2U, GetTransport1()->channels().size());
EXPECT_EQ(2U, GetTransport2()->channels().size());
- EXPECT_TRUE(SendRtcp1());
- EXPECT_TRUE(SendRtcp2());
+ SendRtcp1();
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtcp1());
EXPECT_TRUE(CheckRtcp2());
EXPECT_TRUE(CheckNoRtcp1());
@@ -1124,8 +1088,9 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
ASSERT_TRUE(GetTransport2());
EXPECT_EQ(2U, GetTransport1()->channels().size());
EXPECT_EQ(2U, GetTransport2()->channels().size());
- EXPECT_TRUE(SendRtcp1());
- EXPECT_TRUE(SendRtcp2());
+ SendRtcp1();
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtcp1());
EXPECT_TRUE(CheckRtcp2());
EXPECT_TRUE(CheckNoRtcp1());
@@ -1142,10 +1107,11 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_EQ(1U, GetTransport2()->channels().size());
EXPECT_TRUE(SendAccept());
EXPECT_EQ(1U, GetTransport1()->channels().size());
- EXPECT_TRUE(SendRtp1());
- EXPECT_TRUE(SendRtp2());
- EXPECT_TRUE(SendRtcp1());
- EXPECT_TRUE(SendRtcp2());
+ SendRtp1();
+ SendRtp2();
+ SendRtcp1();
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtp1());
EXPECT_TRUE(CheckRtp2());
EXPECT_TRUE(CheckNoRtp1());
@@ -1167,10 +1133,11 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_EQ(1U, GetTransport1()->channels().size());
EXPECT_EQ(1U, GetTransport2()->channels().size());
EXPECT_TRUE(SendAccept());
- EXPECT_TRUE(SendRtp1());
- EXPECT_TRUE(SendRtp2());
- EXPECT_TRUE(SendRtcp1());
- EXPECT_TRUE(SendRtcp2());
+ SendRtp1();
+ SendRtp2();
+ SendRtcp1();
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtp1());
EXPECT_TRUE(CheckRtp2());
EXPECT_TRUE(CheckNoRtp1());
@@ -1193,10 +1160,11 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_EQ(1U, GetTransport2()->channels().size());
EXPECT_TRUE(SendAccept());
EXPECT_EQ(1U, GetTransport1()->channels().size());
- EXPECT_TRUE(SendRtp1());
- EXPECT_TRUE(SendRtp2());
- EXPECT_TRUE(SendRtcp1());
- EXPECT_TRUE(SendRtcp2());
+ SendRtp1();
+ SendRtp2();
+ SendRtcp1();
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtp1());
EXPECT_TRUE(CheckRtp2());
EXPECT_TRUE(CheckNoRtp1());
@@ -1220,10 +1188,11 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_EQ(1U, GetTransport2()->channels().size());
EXPECT_TRUE(SendAccept());
EXPECT_EQ(1U, GetTransport1()->channels().size());
- EXPECT_TRUE(SendRtp1());
- EXPECT_TRUE(SendRtp2());
- EXPECT_TRUE(SendRtcp1());
- EXPECT_TRUE(SendRtcp2());
+ SendRtp1();
+ SendRtp2();
+ SendRtcp1();
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtp1());
EXPECT_TRUE(CheckRtp2());
EXPECT_TRUE(CheckNoRtp1());
@@ -1258,21 +1227,24 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
// RTCP can be sent before the call is accepted, if the transport is ready.
// It should not be muxed though, as the remote side doesn't support mux.
- EXPECT_TRUE(SendRtcp1());
+ SendRtcp1();
+ WaitForThreads();
EXPECT_TRUE(CheckNoRtp2());
EXPECT_TRUE(CheckRtcp2());
// Send RTCP packet from callee and verify that it is received.
- EXPECT_TRUE(SendRtcp2());
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckNoRtp1());
EXPECT_TRUE(CheckRtcp1());
// Complete call setup and ensure everything is still OK.
EXPECT_TRUE(SendAccept());
EXPECT_EQ(2U, GetTransport1()->channels().size());
- EXPECT_TRUE(SendRtcp1());
+ SendRtcp1();
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtcp2());
- EXPECT_TRUE(SendRtcp2());
EXPECT_TRUE(CheckRtcp1());
}
@@ -1290,19 +1262,23 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
// RTCP can't be sent yet, since the RTCP transport isn't writable, and
// we haven't yet received the accept that says we should mux.
- EXPECT_FALSE(SendRtcp1());
+ SendRtcp1();
+ WaitForThreads();
+ EXPECT_TRUE(CheckNoRtcp2());
// Send muxed RTCP packet from callee and verify that it is received.
- EXPECT_TRUE(SendRtcp2());
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckNoRtp1());
EXPECT_TRUE(CheckRtcp1());
// Complete call setup and ensure everything is still OK.
EXPECT_TRUE(SendAccept());
EXPECT_EQ(1U, GetTransport1()->channels().size());
- EXPECT_TRUE(SendRtcp1());
+ SendRtcp1();
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtcp2());
- EXPECT_TRUE(SendRtcp2());
EXPECT_TRUE(CheckRtcp1());
}
@@ -1320,17 +1296,19 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_FALSE(channel1_->secure());
EXPECT_FALSE(channel2_->secure());
EXPECT_TRUE(SendInitiate());
- EXPECT_TRUE_WAIT(channel1_->writable(), kEventTimeout);
- EXPECT_TRUE_WAIT(channel2_->writable(), kEventTimeout);
+ WaitForThreads();
+ EXPECT_TRUE(channel1_->writable());
+ EXPECT_TRUE(channel2_->writable());
EXPECT_TRUE(SendAccept());
EXPECT_TRUE(channel1_->secure());
EXPECT_TRUE(channel2_->secure());
EXPECT_EQ(dtls1 && dtls2, channel1_->secure_dtls());
EXPECT_EQ(dtls1 && dtls2, channel2_->secure_dtls());
- EXPECT_TRUE(SendRtp1());
- EXPECT_TRUE(SendRtp2());
- EXPECT_TRUE(SendRtcp1());
- EXPECT_TRUE(SendRtcp2());
+ SendRtp1();
+ SendRtp2();
+ SendRtcp1();
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtp1());
EXPECT_TRUE(CheckRtp2());
EXPECT_TRUE(CheckNoRtp1());
@@ -1350,10 +1328,11 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_TRUE(SendAccept());
EXPECT_FALSE(channel1_->secure());
EXPECT_FALSE(channel2_->secure());
- EXPECT_TRUE(SendRtp1());
- EXPECT_TRUE(SendRtp2());
- EXPECT_TRUE(SendRtcp1());
- EXPECT_TRUE(SendRtcp2());
+ SendRtp1();
+ SendRtp2();
+ SendRtcp1();
+ SendRtcp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtp1());
EXPECT_TRUE(CheckRtp2());
EXPECT_TRUE(CheckNoRtp1());
@@ -1379,15 +1358,18 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
ASSERT_TRUE(GetTransport2());
EXPECT_EQ(2U, GetTransport1()->channels().size());
EXPECT_EQ(2U, GetTransport2()->channels().size());
- EXPECT_TRUE(SendCustomRtcp1(kSsrc1));
+ WaitForThreads(); // Wait for 'sending' flag go through network thread.
+ SendCustomRtcp1(kSsrc1);
+ SendCustomRtp1(kSsrc1, ++sequence_number1_1);
+ WaitForThreads();
EXPECT_TRUE(CheckCustomRtcp2(kSsrc1));
- EXPECT_TRUE(SendCustomRtp1(kSsrc1, ++sequence_number1_1));
EXPECT_TRUE(CheckCustomRtp2(kSsrc1, sequence_number1_1));
// Send packets from callee and verify that it is received.
- EXPECT_TRUE(SendCustomRtcp2(kSsrc2));
+ SendCustomRtcp2(kSsrc2);
+ SendCustomRtp2(kSsrc2, ++sequence_number2_2);
+ WaitForThreads();
EXPECT_TRUE(CheckCustomRtcp1(kSsrc2));
- EXPECT_TRUE(SendCustomRtp2(kSsrc2, ++sequence_number2_2));
EXPECT_TRUE(CheckCustomRtp1(kSsrc2, sequence_number2_2));
// Complete call setup and ensure everything is still OK.
@@ -1396,13 +1378,14 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_EQ(1U, GetTransport2()->channels().size());
EXPECT_TRUE(channel1_->secure());
EXPECT_TRUE(channel2_->secure());
- EXPECT_TRUE(SendCustomRtcp1(kSsrc1));
+ SendCustomRtcp1(kSsrc1);
+ SendCustomRtp1(kSsrc1, ++sequence_number1_1);
+ SendCustomRtcp2(kSsrc2);
+ SendCustomRtp2(kSsrc2, ++sequence_number2_2);
+ WaitForThreads();
EXPECT_TRUE(CheckCustomRtcp2(kSsrc1));
- EXPECT_TRUE(SendCustomRtp1(kSsrc1, ++sequence_number1_1));
EXPECT_TRUE(CheckCustomRtp2(kSsrc1, sequence_number1_1));
- EXPECT_TRUE(SendCustomRtcp2(kSsrc2));
EXPECT_TRUE(CheckCustomRtcp1(kSsrc2));
- EXPECT_TRUE(SendCustomRtp2(kSsrc2, ++sequence_number2_2));
EXPECT_TRUE(CheckCustomRtp1(kSsrc2, sequence_number2_2));
}
@@ -1411,20 +1394,20 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
CreateChannels(RTCP, RTCP);
EXPECT_TRUE(SendInitiate());
EXPECT_TRUE(SendAccept());
- ScopedCallThread send_rtp1(this, &ChannelTest<T>::SendRtp1);
- ScopedCallThread send_rtp2(this, &ChannelTest<T>::SendRtp2);
- ScopedCallThread send_rtcp1(this, &ChannelTest<T>::SendRtcp1);
- ScopedCallThread send_rtcp2(this, &ChannelTest<T>::SendRtcp2);
- EXPECT_TRUE_WAIT(CheckRtp1(), 1000);
- EXPECT_TRUE_WAIT(CheckRtp2(), 1000);
- EXPECT_TRUE_WAIT(send_rtp1.result(), 1000);
- EXPECT_TRUE_WAIT(send_rtp2.result(), 1000);
+ ScopedCallThread send_rtp1([this] { SendRtp1(); });
+ ScopedCallThread send_rtp2([this] { SendRtp2(); });
+ ScopedCallThread send_rtcp1([this] { SendRtcp1(); });
+ ScopedCallThread send_rtcp2([this] { SendRtcp2(); });
+ rtc::Thread* involved_threads[] = {send_rtp1.thread(), send_rtp2.thread(),
+ send_rtcp1.thread(),
+ send_rtcp2.thread()};
+ WaitForThreads(involved_threads);
+ EXPECT_TRUE(CheckRtp1());
+ EXPECT_TRUE(CheckRtp2());
EXPECT_TRUE(CheckNoRtp1());
EXPECT_TRUE(CheckNoRtp2());
- EXPECT_TRUE_WAIT(CheckRtcp1(), 1000);
- EXPECT_TRUE_WAIT(CheckRtcp2(), 1000);
- EXPECT_TRUE_WAIT(send_rtcp1.result(), 1000);
- EXPECT_TRUE_WAIT(send_rtcp2.result(), 1000);
+ EXPECT_TRUE(CheckRtcp1());
+ EXPECT_TRUE(CheckRtcp2());
EXPECT_TRUE(CheckNoRtcp1());
EXPECT_TRUE(CheckNoRtcp2());
}
@@ -1434,20 +1417,20 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
CreateChannels(RTCP | SECURE, RTCP | SECURE);
EXPECT_TRUE(SendInitiate());
EXPECT_TRUE(SendAccept());
- ScopedCallThread send_rtp1(this, &ChannelTest<T>::SendRtp1);
- ScopedCallThread send_rtp2(this, &ChannelTest<T>::SendRtp2);
- ScopedCallThread send_rtcp1(this, &ChannelTest<T>::SendRtcp1);
- ScopedCallThread send_rtcp2(this, &ChannelTest<T>::SendRtcp2);
- EXPECT_TRUE_WAIT(CheckRtp1(), 1000);
- EXPECT_TRUE_WAIT(CheckRtp2(), 1000);
- EXPECT_TRUE_WAIT(send_rtp1.result(), 1000);
- EXPECT_TRUE_WAIT(send_rtp2.result(), 1000);
+ ScopedCallThread send_rtp1([this] { SendRtp1(); });
+ ScopedCallThread send_rtp2([this] { SendRtp2(); });
+ ScopedCallThread send_rtcp1([this] { SendRtcp1(); });
+ ScopedCallThread send_rtcp2([this] { SendRtcp2(); });
+ rtc::Thread* involved_threads[] = {send_rtp1.thread(), send_rtp2.thread(),
+ send_rtcp1.thread(),
+ send_rtcp2.thread()};
+ WaitForThreads(involved_threads);
+ EXPECT_TRUE(CheckRtp1());
+ EXPECT_TRUE(CheckRtp2());
EXPECT_TRUE(CheckNoRtp1());
EXPECT_TRUE(CheckNoRtp2());
- EXPECT_TRUE_WAIT(CheckRtcp1(), 1000);
- EXPECT_TRUE_WAIT(CheckRtcp2(), 1000);
- EXPECT_TRUE_WAIT(send_rtcp1.result(), 1000);
- EXPECT_TRUE_WAIT(send_rtcp2.result(), 1000);
+ EXPECT_TRUE(CheckRtcp1());
+ EXPECT_TRUE(CheckRtcp2());
EXPECT_TRUE(CheckNoRtcp1());
EXPECT_TRUE(CheckNoRtcp2());
}
@@ -1462,45 +1445,54 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
ASSERT_TRUE(GetTransport2());
EXPECT_EQ(1U, GetTransport1()->channels().size());
EXPECT_EQ(1U, GetTransport2()->channels().size());
- EXPECT_TRUE(SendRtp1());
- EXPECT_TRUE(SendRtp2());
+ SendRtp1();
+ SendRtp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtp1());
EXPECT_TRUE(CheckRtp2());
EXPECT_TRUE(CheckNoRtp1());
EXPECT_TRUE(CheckNoRtp2());
// Lose writability, which should fail.
- GetTransport1()->SetWritable(false);
- EXPECT_FALSE(SendRtp1());
- EXPECT_TRUE(SendRtp2());
+ network_thread_->Invoke<void>(
+ [this] { GetTransport1()->SetWritable(false); });
+ SendRtp1();
+ SendRtp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtp1());
EXPECT_TRUE(CheckNoRtp2());
// Regain writability
- GetTransport1()->SetWritable(true);
+ network_thread_->Invoke<void>(
+ [this] { GetTransport1()->SetWritable(true); });
EXPECT_TRUE(media_channel1_->sending());
- EXPECT_TRUE(SendRtp1());
- EXPECT_TRUE(SendRtp2());
+ SendRtp1();
+ SendRtp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtp1());
EXPECT_TRUE(CheckRtp2());
EXPECT_TRUE(CheckNoRtp1());
EXPECT_TRUE(CheckNoRtp2());
// Lose writability completely
- GetTransport1()->SetDestination(NULL);
+ network_thread_->Invoke<void>(
+ [this] { GetTransport1()->SetDestination(NULL); });
EXPECT_TRUE(media_channel1_->sending());
// Should fail also.
- EXPECT_FALSE(SendRtp1());
- EXPECT_TRUE(SendRtp2());
+ SendRtp1();
+ SendRtp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtp1());
EXPECT_TRUE(CheckNoRtp2());
// Gain writability back
- GetTransport1()->SetDestination(GetTransport2());
+ network_thread_->Invoke<void>(
+ [this] { GetTransport1()->SetDestination(GetTransport2()); });
EXPECT_TRUE(media_channel1_->sending());
- EXPECT_TRUE(SendRtp1());
- EXPECT_TRUE(SendRtp2());
+ SendRtp1();
+ SendRtp2();
+ WaitForThreads();
EXPECT_TRUE(CheckRtp1());
EXPECT_TRUE(CheckRtp2());
EXPECT_TRUE(CheckNoRtp1());
@@ -1537,28 +1529,32 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_FALSE(channel2_->bundle_filter()->FindPayloadType(pl_type2));
// Both channels can receive pl_type1 only.
- EXPECT_TRUE(SendCustomRtp1(kSsrc1, ++sequence_number1_1, pl_type1));
+ SendCustomRtp1(kSsrc1, ++sequence_number1_1, pl_type1);
+ SendCustomRtp2(kSsrc2, ++sequence_number2_2, pl_type1);
+ WaitForThreads();
EXPECT_TRUE(CheckCustomRtp2(kSsrc1, sequence_number1_1, pl_type1));
- EXPECT_TRUE(SendCustomRtp2(kSsrc2, ++sequence_number2_2, pl_type1));
EXPECT_TRUE(CheckCustomRtp1(kSsrc2, sequence_number2_2, pl_type1));
EXPECT_TRUE(CheckNoRtp1());
EXPECT_TRUE(CheckNoRtp2());
// RTCP test
- EXPECT_TRUE(SendCustomRtp1(kSsrc1, ++sequence_number1_1, pl_type2));
+ SendCustomRtp1(kSsrc1, ++sequence_number1_1, pl_type2);
+ SendCustomRtp2(kSsrc2, ++sequence_number2_2, pl_type2);
+ WaitForThreads();
EXPECT_FALSE(CheckCustomRtp2(kSsrc1, sequence_number1_1, pl_type2));
- EXPECT_TRUE(SendCustomRtp2(kSsrc2, ++sequence_number2_2, pl_type2));
EXPECT_FALSE(CheckCustomRtp1(kSsrc2, sequence_number2_2, pl_type2));
- EXPECT_TRUE(SendCustomRtcp1(kSsrc1));
- EXPECT_TRUE(SendCustomRtcp2(kSsrc2));
+ SendCustomRtcp1(kSsrc1);
+ SendCustomRtcp2(kSsrc2);
+ WaitForThreads();
EXPECT_TRUE(CheckCustomRtcp1(kSsrc2));
EXPECT_TRUE(CheckNoRtcp1());
EXPECT_TRUE(CheckCustomRtcp2(kSsrc1));
EXPECT_TRUE(CheckNoRtcp2());
- EXPECT_TRUE(SendCustomRtcp1(kSsrc2));
- EXPECT_TRUE(SendCustomRtcp2(kSsrc1));
+ SendCustomRtcp1(kSsrc2);
+ SendCustomRtcp2(kSsrc1);
+ WaitForThreads();
// Bundle filter shouldn't filter out any RTCP.
EXPECT_TRUE(CheckCustomRtcp1(kSsrc1));
EXPECT_TRUE(CheckCustomRtcp2(kSsrc2));
@@ -1704,7 +1700,6 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
}
void TestFlushRtcp() {
- bool send_rtcp1;
CreateChannels(RTCP, RTCP);
EXPECT_TRUE(SendInitiate());
EXPECT_TRUE(SendAccept());
@@ -1714,14 +1709,16 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_EQ(2U, GetTransport2()->channels().size());
// Send RTCP1 from a different thread.
- CallOnThreadAndWaitForDone(&ChannelTest<T>::SendRtcp1, &send_rtcp1);
- EXPECT_TRUE(send_rtcp1);
+ ScopedCallThread send_rtcp([this] { SendRtcp1(); });
// The sending message is only posted. channel2_ should be empty.
EXPECT_TRUE(CheckNoRtcp2());
+ rtc::Thread* wait_for[] = {send_rtcp.thread()};
+ WaitForThreads(wait_for); // Ensure rtcp was posted
// When channel1_ is deleted, the RTCP packet should be sent out to
// channel2_.
channel1_.reset();
+ WaitForThreads();
EXPECT_TRUE(CheckRtcp2());
}
@@ -1744,18 +1741,13 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
// So we need to pass in pl_type so that the packet can pass through
// the bundle filter before it can be processed by the srtp filter.
// The packet is not a valid srtp packet because it is too short.
- unsigned const char kBadPacket[] = {0x84,
- static_cast<unsigned char>(pl_type),
- 0x00,
- 0x01,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x00,
- 0x01};
+ static unsigned const char kBadPacket[] = {
+ 0x84, static_cast<unsigned char>(pl_type),
+ 0x00, 0x01,
+ 0x00, 0x00,
+ 0x00, 0x00,
+ 0x00, 0x00,
+ 0x00, 0x01};
CreateChannels(RTCP | SECURE, RTCP | SECURE);
EXPECT_FALSE(channel1_->secure());
EXPECT_FALSE(channel2_->secure());
@@ -1768,37 +1760,42 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
&error_handler, &SrtpErrorHandler::OnSrtpError);
// Testing failures in sending packets.
- EXPECT_FALSE(media_channel2_->SendRtp(kBadPacket, sizeof(kBadPacket),
- rtc::PacketOptions()));
+ media_channel2_->SendRtp(kBadPacket, sizeof(kBadPacket),
+ rtc::PacketOptions());
+ WaitForThreads();
// The first failure will trigger an error.
- EXPECT_EQ_WAIT(cricket::SrtpFilter::ERROR_FAIL, error_handler.error_, 500);
+ EXPECT_EQ(cricket::SrtpFilter::ERROR_FAIL, error_handler.error_);
EXPECT_EQ(cricket::SrtpFilter::PROTECT, error_handler.mode_);
error_handler.error_ = cricket::SrtpFilter::ERROR_NONE;
error_handler.mode_ = cricket::SrtpFilter::UNPROTECT;
// The next 250 ms failures will not trigger an error.
- EXPECT_FALSE(media_channel2_->SendRtp(kBadPacket, sizeof(kBadPacket),
- rtc::PacketOptions()));
+ media_channel2_->SendRtp(kBadPacket, sizeof(kBadPacket),
+ rtc::PacketOptions());
// Wait for a while to ensure no message comes in.
+ WaitForThreads();
rtc::Thread::Current()->ProcessMessages(200);
EXPECT_EQ(cricket::SrtpFilter::ERROR_NONE, error_handler.error_);
EXPECT_EQ(cricket::SrtpFilter::UNPROTECT, error_handler.mode_);
// Wait for a little more - the error will be triggered again.
rtc::Thread::Current()->ProcessMessages(200);
- EXPECT_FALSE(media_channel2_->SendRtp(kBadPacket, sizeof(kBadPacket),
- rtc::PacketOptions()));
- EXPECT_EQ_WAIT(cricket::SrtpFilter::ERROR_FAIL, error_handler.error_, 500);
+ media_channel2_->SendRtp(kBadPacket, sizeof(kBadPacket),
+ rtc::PacketOptions());
+ WaitForThreads();
+ EXPECT_EQ(cricket::SrtpFilter::ERROR_FAIL, error_handler.error_);
EXPECT_EQ(cricket::SrtpFilter::PROTECT, error_handler.mode_);
// Testing failures in receiving packets.
error_handler.error_ = cricket::SrtpFilter::ERROR_NONE;
error_handler.mode_ = cricket::SrtpFilter::UNPROTECT;
- cricket::TransportChannel* transport_channel =
- channel2_->transport_channel();
- transport_channel->SignalReadPacket(
- transport_channel, reinterpret_cast<const char*>(kBadPacket),
- sizeof(kBadPacket), rtc::PacketTime(), 0);
- EXPECT_EQ_WAIT(cricket::SrtpFilter::ERROR_FAIL, error_handler.error_, 500);
+ network_thread_->Invoke<void>([this] {
+ cricket::TransportChannel* transport_channel =
+ channel2_->transport_channel();
+ transport_channel->SignalReadPacket(
+ transport_channel, reinterpret_cast<const char*>(kBadPacket),
+ sizeof(kBadPacket), rtc::PacketTime(), 0);
+ });
+ EXPECT_EQ(cricket::SrtpFilter::ERROR_FAIL, error_handler.error_);
EXPECT_EQ(cricket::SrtpFilter::UNPROTECT, error_handler.mode_);
}
@@ -1807,23 +1804,37 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
TransportChannel* rtp = channel1_->transport_channel();
TransportChannel* rtcp = channel1_->rtcp_transport_channel();
EXPECT_FALSE(media_channel1_->ready_to_send());
- rtp->SignalReadyToSend(rtp);
+
+ network_thread_->Invoke<void>([rtp] { rtp->SignalReadyToSend(rtp); });
+ WaitForThreads();
EXPECT_FALSE(media_channel1_->ready_to_send());
- rtcp->SignalReadyToSend(rtcp);
+
+ network_thread_->Invoke<void>([rtcp] { rtcp->SignalReadyToSend(rtcp); });
+ WaitForThreads();
// MediaChannel::OnReadyToSend only be called when both rtp and rtcp
// channel are ready to send.
EXPECT_TRUE(media_channel1_->ready_to_send());
// rtp channel becomes not ready to send will be propagated to mediachannel
- channel1_->SetReadyToSend(false, false);
+ network_thread_->Invoke<void>(
+ [this] { channel1_->SetReadyToSend(false, false); });
+ WaitForThreads();
EXPECT_FALSE(media_channel1_->ready_to_send());
- channel1_->SetReadyToSend(false, true);
+
+ network_thread_->Invoke<void>(
+ [this] { channel1_->SetReadyToSend(false, true); });
+ WaitForThreads();
EXPECT_TRUE(media_channel1_->ready_to_send());
// rtcp channel becomes not ready to send will be propagated to mediachannel
- channel1_->SetReadyToSend(true, false);
+ network_thread_->Invoke<void>(
+ [this] { channel1_->SetReadyToSend(true, false); });
+ WaitForThreads();
EXPECT_FALSE(media_channel1_->ready_to_send());
- channel1_->SetReadyToSend(true, true);
+
+ network_thread_->Invoke<void>(
+ [this] { channel1_->SetReadyToSend(true, true); });
+ WaitForThreads();
EXPECT_TRUE(media_channel1_->ready_to_send());
}
@@ -1840,9 +1851,13 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_FALSE(media_channel1_->ready_to_send());
// In the case of rtcp mux, the SignalReadyToSend() from rtp channel
// should trigger the MediaChannel's OnReadyToSend.
- rtp->SignalReadyToSend(rtp);
+ network_thread_->Invoke<void>([rtp] { rtp->SignalReadyToSend(rtp); });
+ WaitForThreads();
EXPECT_TRUE(media_channel1_->ready_to_send());
- channel1_->SetReadyToSend(false, false);
+
+ network_thread_->Invoke<void>(
+ [this] { channel1_->SetReadyToSend(false, false); });
+ WaitForThreads();
EXPECT_FALSE(media_channel1_->ready_to_send());
}
@@ -1872,7 +1887,7 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_TRUE(
channel1_->SetLocalContent(&local_media_content1_, CA_OFFER, NULL));
EXPECT_EQ(media_channel1_->max_bps(), -1);
- VerifyMaxBitrate(media_channel1_->GetRtpParameters(kSsrc1), -1);
+ VerifyMaxBitrate(media_channel1_->GetRtpSendParameters(kSsrc1), -1);
}
void CanChangeMaxBitrate() {
@@ -1880,25 +1895,48 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_TRUE(
channel1_->SetLocalContent(&local_media_content1_, CA_OFFER, NULL));
- EXPECT_TRUE(
- channel1_->SetRtpParameters(kSsrc1, BitrateLimitedParameters(1000)));
- VerifyMaxBitrate(channel1_->GetRtpParameters(kSsrc1), 1000);
- VerifyMaxBitrate(media_channel1_->GetRtpParameters(kSsrc1), 1000);
+ EXPECT_TRUE(channel1_->SetRtpSendParameters(
+ kSsrc1, BitrateLimitedParameters(1000)));
+ VerifyMaxBitrate(channel1_->GetRtpSendParameters(kSsrc1), 1000);
+ VerifyMaxBitrate(media_channel1_->GetRtpSendParameters(kSsrc1), 1000);
EXPECT_EQ(-1, media_channel1_->max_bps());
EXPECT_TRUE(
- channel1_->SetRtpParameters(kSsrc1, BitrateLimitedParameters(-1)));
- VerifyMaxBitrate(channel1_->GetRtpParameters(kSsrc1), -1);
- VerifyMaxBitrate(media_channel1_->GetRtpParameters(kSsrc1), -1);
+ channel1_->SetRtpSendParameters(kSsrc1, BitrateLimitedParameters(-1)));
+ VerifyMaxBitrate(channel1_->GetRtpSendParameters(kSsrc1), -1);
+ VerifyMaxBitrate(media_channel1_->GetRtpSendParameters(kSsrc1), -1);
EXPECT_EQ(-1, media_channel1_->max_bps());
}
protected:
+ void WaitForThreads() { WaitForThreads(rtc::ArrayView<rtc::Thread*>()); }
+ static void ProcessThreadQueue(rtc::Thread* thread) {
+ RTC_DCHECK(thread->IsCurrent());
+ while (!thread->empty()) {
+ thread->ProcessMessages(0);
+ }
+ }
+ void WaitForThreads(rtc::ArrayView<rtc::Thread*> threads) {
+ // |threads| and current thread post packets to network thread.
+ for (rtc::Thread* thread : threads) {
+ thread->Invoke<void>([thread] { ProcessThreadQueue(thread); });
+ }
+ ProcessThreadQueue(rtc::Thread::Current());
+ // Network thread move them around and post back to worker = current thread.
+ if (!network_thread_->IsCurrent()) {
+ network_thread_->Invoke<void>(
+ [this] { ProcessThreadQueue(network_thread_); });
+ }
+ // Worker thread = current Thread process received messages.
+ ProcessThreadQueue(rtc::Thread::Current());
+ }
// TODO(pbos): Remove playout from all media channels and let renderers mute
// themselves.
const bool verify_playout_;
- cricket::FakeTransportController transport_controller1_;
- cricket::FakeTransportController transport_controller2_;
+ std::unique_ptr<rtc::Thread> network_thread_keeper_;
+ rtc::Thread* network_thread_;
+ std::unique_ptr<cricket::FakeTransportController> transport_controller1_;
+ std::unique_ptr<cricket::FakeTransportController> transport_controller2_;
cricket::FakeMediaEngine media_engine_;
// The media channels are owned by the voice channel objects below.
typename T::MediaChannel* media_channel1_;
@@ -1910,8 +1948,8 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
typename T::Content remote_media_content1_;
typename T::Content remote_media_content2_;
// The RTP and RTCP packets to send in the tests.
- std::string rtp_packet_;
- std::string rtcp_packet_;
+ rtc::Buffer rtp_packet_;
+ rtc::Buffer rtcp_packet_;
int media_info_callbacks1_;
int media_info_callbacks2_;
cricket::CandidatePairInterface* last_selected_candidate_pair_;
@@ -1954,29 +1992,33 @@ void ChannelTest<VoiceTraits>::AddLegacyStreamInContent(
audio->AddLegacyStream(ssrc);
}
-class VoiceChannelTest
- : public ChannelTest<VoiceTraits> {
+class VoiceChannelSingleThreadTest : public ChannelTest<VoiceTraits> {
+ public:
+ typedef ChannelTest<VoiceTraits> Base;
+ VoiceChannelSingleThreadTest()
+ : Base(true, kPcmuFrame, kRtcpReport, NetworkIsWorker::Yes) {}
+};
+
+class VoiceChannelDoubleThreadTest : public ChannelTest<VoiceTraits> {
public:
typedef ChannelTest<VoiceTraits> Base;
- VoiceChannelTest()
- : Base(true,
- kPcmuFrame,
- sizeof(kPcmuFrame),
- kRtcpReport,
- sizeof(kRtcpReport)) {}
+ VoiceChannelDoubleThreadTest()
+ : Base(true, kPcmuFrame, kRtcpReport, NetworkIsWorker::No) {}
};
// override to add NULL parameter
template <>
cricket::VideoChannel* ChannelTest<VideoTraits>::CreateChannel(
- rtc::Thread* thread,
+ rtc::Thread* worker_thread,
+ rtc::Thread* network_thread,
cricket::MediaEngineInterface* engine,
cricket::FakeVideoMediaChannel* ch,
cricket::TransportController* transport_controller,
bool rtcp) {
- cricket::VideoChannel* channel = new cricket::VideoChannel(
- thread, ch, transport_controller, cricket::CN_VIDEO, rtcp);
- if (!channel->Init()) {
+ cricket::VideoChannel* channel =
+ new cricket::VideoChannel(worker_thread, network_thread, ch,
+ transport_controller, cricket::CN_VIDEO, rtcp);
+ if (!channel->Init_w(nullptr)) {
delete channel;
channel = NULL;
}
@@ -2026,67 +2068,72 @@ void ChannelTest<VideoTraits>::AddLegacyStreamInContent(
video->AddLegacyStream(ssrc);
}
-class VideoChannelTest
- : public ChannelTest<VideoTraits> {
+class VideoChannelSingleThreadTest : public ChannelTest<VideoTraits> {
public:
typedef ChannelTest<VideoTraits> Base;
- VideoChannelTest()
- : Base(false,
- kH264Packet,
- sizeof(kH264Packet),
- kRtcpReport,
- sizeof(kRtcpReport)) {}
+ VideoChannelSingleThreadTest()
+ : Base(false, kH264Packet, kRtcpReport, NetworkIsWorker::Yes) {}
};
+class VideoChannelDoubleThreadTest : public ChannelTest<VideoTraits> {
+ public:
+ typedef ChannelTest<VideoTraits> Base;
+ VideoChannelDoubleThreadTest()
+ : Base(false, kH264Packet, kRtcpReport, NetworkIsWorker::No) {}
+};
-// VoiceChannelTest
-TEST_F(VoiceChannelTest, TestInit) {
+// VoiceChannelSingleThreadTest
+TEST_F(VoiceChannelSingleThreadTest, TestInit) {
Base::TestInit();
EXPECT_FALSE(media_channel1_->IsStreamMuted(0));
EXPECT_TRUE(media_channel1_->dtmf_info_queue().empty());
}
-TEST_F(VoiceChannelTest, TestSetContents) {
+TEST_F(VoiceChannelSingleThreadTest, TestDeinit) {
+ Base::TestDeinit();
+}
+
+TEST_F(VoiceChannelSingleThreadTest, TestSetContents) {
Base::TestSetContents();
}
-TEST_F(VoiceChannelTest, TestSetContentsNullOffer) {
+TEST_F(VoiceChannelSingleThreadTest, TestSetContentsNullOffer) {
Base::TestSetContentsNullOffer();
}
-TEST_F(VoiceChannelTest, TestSetContentsRtcpMux) {
+TEST_F(VoiceChannelSingleThreadTest, TestSetContentsRtcpMux) {
Base::TestSetContentsRtcpMux();
}
-TEST_F(VoiceChannelTest, TestSetContentsRtcpMuxWithPrAnswer) {
+TEST_F(VoiceChannelSingleThreadTest, TestSetContentsRtcpMuxWithPrAnswer) {
Base::TestSetContentsRtcpMux();
}
-TEST_F(VoiceChannelTest, TestSetRemoteContentUpdate) {
+TEST_F(VoiceChannelSingleThreadTest, TestSetRemoteContentUpdate) {
Base::TestSetRemoteContentUpdate();
}
-TEST_F(VoiceChannelTest, TestStreams) {
+TEST_F(VoiceChannelSingleThreadTest, TestStreams) {
Base::TestStreams();
}
-TEST_F(VoiceChannelTest, TestUpdateStreamsInLocalContent) {
+TEST_F(VoiceChannelSingleThreadTest, TestUpdateStreamsInLocalContent) {
Base::TestUpdateStreamsInLocalContent();
}
-TEST_F(VoiceChannelTest, TestUpdateRemoteStreamsInContent) {
+TEST_F(VoiceChannelSingleThreadTest, TestUpdateRemoteStreamsInContent) {
Base::TestUpdateStreamsInRemoteContent();
}
-TEST_F(VoiceChannelTest, TestChangeStreamParamsInContent) {
+TEST_F(VoiceChannelSingleThreadTest, TestChangeStreamParamsInContent) {
Base::TestChangeStreamParamsInContent();
}
-TEST_F(VoiceChannelTest, TestPlayoutAndSendingStates) {
+TEST_F(VoiceChannelSingleThreadTest, TestPlayoutAndSendingStates) {
Base::TestPlayoutAndSendingStates();
}
-TEST_F(VoiceChannelTest, TestMuteStream) {
+TEST_F(VoiceChannelSingleThreadTest, TestMuteStream) {
CreateChannels(0, 0);
// Test that we can Mute the default channel even though the sending SSRC
// is unknown.
@@ -2108,123 +2155,123 @@ TEST_F(VoiceChannelTest, TestMuteStream) {
EXPECT_FALSE(media_channel1_->IsStreamMuted(kSsrc1));
}
-TEST_F(VoiceChannelTest, TestMediaContentDirection) {
+TEST_F(VoiceChannelSingleThreadTest, TestMediaContentDirection) {
Base::TestMediaContentDirection();
}
-TEST_F(VoiceChannelTest, TestNetworkRouteChanges) {
+TEST_F(VoiceChannelSingleThreadTest, TestNetworkRouteChanges) {
Base::TestNetworkRouteChanges();
}
-TEST_F(VoiceChannelTest, TestCallSetup) {
+TEST_F(VoiceChannelSingleThreadTest, TestCallSetup) {
Base::TestCallSetup();
}
-TEST_F(VoiceChannelTest, TestCallTeardownRtcpMux) {
+TEST_F(VoiceChannelSingleThreadTest, TestCallTeardownRtcpMux) {
Base::TestCallTeardownRtcpMux();
}
-TEST_F(VoiceChannelTest, SendRtpToRtp) {
+TEST_F(VoiceChannelSingleThreadTest, SendRtpToRtp) {
Base::SendRtpToRtp();
}
-TEST_F(VoiceChannelTest, SendNoRtcpToNoRtcp) {
+TEST_F(VoiceChannelSingleThreadTest, SendNoRtcpToNoRtcp) {
Base::SendNoRtcpToNoRtcp();
}
-TEST_F(VoiceChannelTest, SendNoRtcpToRtcp) {
+TEST_F(VoiceChannelSingleThreadTest, SendNoRtcpToRtcp) {
Base::SendNoRtcpToRtcp();
}
-TEST_F(VoiceChannelTest, SendRtcpToNoRtcp) {
+TEST_F(VoiceChannelSingleThreadTest, SendRtcpToNoRtcp) {
Base::SendRtcpToNoRtcp();
}
-TEST_F(VoiceChannelTest, SendRtcpToRtcp) {
+TEST_F(VoiceChannelSingleThreadTest, SendRtcpToRtcp) {
Base::SendRtcpToRtcp();
}
-TEST_F(VoiceChannelTest, SendRtcpMuxToRtcp) {
+TEST_F(VoiceChannelSingleThreadTest, SendRtcpMuxToRtcp) {
Base::SendRtcpMuxToRtcp();
}
-TEST_F(VoiceChannelTest, SendRtcpMuxToRtcpMux) {
+TEST_F(VoiceChannelSingleThreadTest, SendRtcpMuxToRtcpMux) {
Base::SendRtcpMuxToRtcpMux();
}
-TEST_F(VoiceChannelTest, SendRequireRtcpMuxToRtcpMux) {
+TEST_F(VoiceChannelSingleThreadTest, SendRequireRtcpMuxToRtcpMux) {
Base::SendRequireRtcpMuxToRtcpMux();
}
-TEST_F(VoiceChannelTest, SendRtcpMuxToRequireRtcpMux) {
+TEST_F(VoiceChannelSingleThreadTest, SendRtcpMuxToRequireRtcpMux) {
Base::SendRtcpMuxToRequireRtcpMux();
}
-TEST_F(VoiceChannelTest, SendRequireRtcpMuxToRequireRtcpMux) {
+TEST_F(VoiceChannelSingleThreadTest, SendRequireRtcpMuxToRequireRtcpMux) {
Base::SendRequireRtcpMuxToRequireRtcpMux();
}
-TEST_F(VoiceChannelTest, SendRequireRtcpMuxToNoRtcpMux) {
+TEST_F(VoiceChannelSingleThreadTest, SendRequireRtcpMuxToNoRtcpMux) {
Base::SendRequireRtcpMuxToNoRtcpMux();
}
-TEST_F(VoiceChannelTest, SendEarlyRtcpMuxToRtcp) {
+TEST_F(VoiceChannelSingleThreadTest, SendEarlyRtcpMuxToRtcp) {
Base::SendEarlyRtcpMuxToRtcp();
}
-TEST_F(VoiceChannelTest, SendEarlyRtcpMuxToRtcpMux) {
+TEST_F(VoiceChannelSingleThreadTest, SendEarlyRtcpMuxToRtcpMux) {
Base::SendEarlyRtcpMuxToRtcpMux();
}
-TEST_F(VoiceChannelTest, SendSrtpToSrtpRtcpMux) {
+TEST_F(VoiceChannelSingleThreadTest, SendSrtpToSrtpRtcpMux) {
Base::SendSrtpToSrtp(RTCP_MUX, RTCP_MUX);
}
-TEST_F(VoiceChannelTest, SendSrtpToRtp) {
+TEST_F(VoiceChannelSingleThreadTest, SendSrtpToRtp) {
Base::SendSrtpToSrtp();
}
-TEST_F(VoiceChannelTest, SendSrtcpMux) {
+TEST_F(VoiceChannelSingleThreadTest, SendSrtcpMux) {
Base::SendSrtpToSrtp(RTCP_MUX, RTCP_MUX);
}
-TEST_F(VoiceChannelTest, SendDtlsSrtpToSrtp) {
+TEST_F(VoiceChannelSingleThreadTest, SendDtlsSrtpToSrtp) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
Base::SendSrtpToSrtp(DTLS, 0);
}
-TEST_F(VoiceChannelTest, SendDtlsSrtpToDtlsSrtp) {
+TEST_F(VoiceChannelSingleThreadTest, SendDtlsSrtpToDtlsSrtp) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
Base::SendSrtpToSrtp(DTLS, DTLS);
}
-TEST_F(VoiceChannelTest, SendDtlsSrtpToDtlsSrtpRtcpMux) {
+TEST_F(VoiceChannelSingleThreadTest, SendDtlsSrtpToDtlsSrtpRtcpMux) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
Base::SendSrtpToSrtp(DTLS | RTCP_MUX, DTLS | RTCP_MUX);
}
-TEST_F(VoiceChannelTest, SendEarlyMediaUsingRtcpMuxSrtp) {
+TEST_F(VoiceChannelSingleThreadTest, SendEarlyMediaUsingRtcpMuxSrtp) {
Base::SendEarlyMediaUsingRtcpMuxSrtp();
}
-TEST_F(VoiceChannelTest, SendRtpToRtpOnThread) {
+TEST_F(VoiceChannelSingleThreadTest, SendRtpToRtpOnThread) {
Base::SendRtpToRtpOnThread();
}
-TEST_F(VoiceChannelTest, SendSrtpToSrtpOnThread) {
+TEST_F(VoiceChannelSingleThreadTest, SendSrtpToSrtpOnThread) {
Base::SendSrtpToSrtpOnThread();
}
-TEST_F(VoiceChannelTest, SendWithWritabilityLoss) {
+TEST_F(VoiceChannelSingleThreadTest, SendWithWritabilityLoss) {
Base::SendWithWritabilityLoss();
}
-TEST_F(VoiceChannelTest, TestMediaMonitor) {
+TEST_F(VoiceChannelSingleThreadTest, TestMediaMonitor) {
Base::TestMediaMonitor();
}
// Test that InsertDtmf properly forwards to the media channel.
-TEST_F(VoiceChannelTest, TestInsertDtmf) {
+TEST_F(VoiceChannelSingleThreadTest, TestInsertDtmf) {
CreateChannels(0, 0);
EXPECT_TRUE(SendInitiate());
EXPECT_TRUE(SendAccept());
@@ -2243,44 +2290,44 @@ TEST_F(VoiceChannelTest, TestInsertDtmf) {
3, 7, 120));
}
-TEST_F(VoiceChannelTest, TestSetContentFailure) {
+TEST_F(VoiceChannelSingleThreadTest, TestSetContentFailure) {
Base::TestSetContentFailure();
}
-TEST_F(VoiceChannelTest, TestSendTwoOffers) {
+TEST_F(VoiceChannelSingleThreadTest, TestSendTwoOffers) {
Base::TestSendTwoOffers();
}
-TEST_F(VoiceChannelTest, TestReceiveTwoOffers) {
+TEST_F(VoiceChannelSingleThreadTest, TestReceiveTwoOffers) {
Base::TestReceiveTwoOffers();
}
-TEST_F(VoiceChannelTest, TestSendPrAnswer) {
+TEST_F(VoiceChannelSingleThreadTest, TestSendPrAnswer) {
Base::TestSendPrAnswer();
}
-TEST_F(VoiceChannelTest, TestReceivePrAnswer) {
+TEST_F(VoiceChannelSingleThreadTest, TestReceivePrAnswer) {
Base::TestReceivePrAnswer();
}
-TEST_F(VoiceChannelTest, TestFlushRtcp) {
+TEST_F(VoiceChannelSingleThreadTest, TestFlushRtcp) {
Base::TestFlushRtcp();
}
-TEST_F(VoiceChannelTest, TestSrtpError) {
+TEST_F(VoiceChannelSingleThreadTest, TestSrtpError) {
Base::TestSrtpError(kAudioPts[0]);
}
-TEST_F(VoiceChannelTest, TestOnReadyToSend) {
+TEST_F(VoiceChannelSingleThreadTest, TestOnReadyToSend) {
Base::TestOnReadyToSend();
}
-TEST_F(VoiceChannelTest, TestOnReadyToSendWithRtcpMux) {
+TEST_F(VoiceChannelSingleThreadTest, TestOnReadyToSendWithRtcpMux) {
Base::TestOnReadyToSendWithRtcpMux();
}
// Test that we can scale the output volume properly for 1:1 calls.
-TEST_F(VoiceChannelTest, TestScaleVolume1to1Call) {
+TEST_F(VoiceChannelSingleThreadTest, TestScaleVolume1to1Call) {
CreateChannels(RTCP, RTCP);
EXPECT_TRUE(SendInitiate());
EXPECT_TRUE(SendAccept());
@@ -2304,7 +2351,7 @@ TEST_F(VoiceChannelTest, TestScaleVolume1to1Call) {
}
// Test that we can scale the output volume properly for multiway calls.
-TEST_F(VoiceChannelTest, TestScaleVolumeMultiwayCall) {
+TEST_F(VoiceChannelSingleThreadTest, TestScaleVolumeMultiwayCall) {
CreateChannels(RTCP, RTCP);
EXPECT_TRUE(SendInitiate());
EXPECT_TRUE(SendAccept());
@@ -2341,88 +2388,654 @@ TEST_F(VoiceChannelTest, TestScaleVolumeMultiwayCall) {
EXPECT_DOUBLE_EQ(0.0, volume);
}
-TEST_F(VoiceChannelTest, SendBundleToBundle) {
+TEST_F(VoiceChannelSingleThreadTest, SendBundleToBundle) {
Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), false, false);
}
-TEST_F(VoiceChannelTest, SendBundleToBundleSecure) {
+TEST_F(VoiceChannelSingleThreadTest, SendBundleToBundleSecure) {
Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), false, true);
}
-TEST_F(VoiceChannelTest, SendBundleToBundleWithRtcpMux) {
+TEST_F(VoiceChannelSingleThreadTest, SendBundleToBundleWithRtcpMux) {
Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), true, false);
}
-TEST_F(VoiceChannelTest, SendBundleToBundleWithRtcpMuxSecure) {
+TEST_F(VoiceChannelSingleThreadTest, SendBundleToBundleWithRtcpMuxSecure) {
Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), true, true);
}
-TEST_F(VoiceChannelTest, GetRtpParametersIsNotImplemented) {
- // These tests verify that the Get/SetRtpParameters methods for VoiceChannel
- // always fail as they are not implemented.
- // TODO(skvlad): Replace with full tests when support for bitrate limiting
- // for audio RtpSenders is added.
+TEST_F(VoiceChannelSingleThreadTest, DefaultMaxBitrateIsUnlimited) {
+ Base::DefaultMaxBitrateIsUnlimited();
+}
+
+TEST_F(VoiceChannelSingleThreadTest, CanChangeMaxBitrate) {
+ Base::CanChangeMaxBitrate();
+}
+
+// VoiceChannelDoubleThreadTest
+TEST_F(VoiceChannelDoubleThreadTest, TestInit) {
+ Base::TestInit();
+ EXPECT_FALSE(media_channel1_->IsStreamMuted(0));
+ EXPECT_TRUE(media_channel1_->dtmf_info_queue().empty());
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestDeinit) {
+ Base::TestDeinit();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestSetContents) {
+ Base::TestSetContents();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestSetContentsNullOffer) {
+ Base::TestSetContentsNullOffer();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestSetContentsRtcpMux) {
+ Base::TestSetContentsRtcpMux();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestSetContentsRtcpMuxWithPrAnswer) {
+ Base::TestSetContentsRtcpMux();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestSetRemoteContentUpdate) {
+ Base::TestSetRemoteContentUpdate();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestStreams) {
+ Base::TestStreams();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestUpdateStreamsInLocalContent) {
+ Base::TestUpdateStreamsInLocalContent();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestUpdateRemoteStreamsInContent) {
+ Base::TestUpdateStreamsInRemoteContent();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestChangeStreamParamsInContent) {
+ Base::TestChangeStreamParamsInContent();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestPlayoutAndSendingStates) {
+ Base::TestPlayoutAndSendingStates();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestMuteStream) {
CreateChannels(0, 0);
- EXPECT_TRUE(
- channel1_->SetLocalContent(&local_media_content1_, CA_OFFER, NULL));
- webrtc::RtpParameters voice_parameters = channel1_->GetRtpParameters(kSsrc1);
- EXPECT_EQ(0UL, voice_parameters.encodings.size());
+ // Test that we can Mute the default channel even though the sending SSRC
+ // is unknown.
+ EXPECT_FALSE(media_channel1_->IsStreamMuted(0));
+ EXPECT_TRUE(channel1_->SetAudioSend(0, false, nullptr, nullptr));
+ EXPECT_TRUE(media_channel1_->IsStreamMuted(0));
+ EXPECT_TRUE(channel1_->SetAudioSend(0, true, nullptr, nullptr));
+ EXPECT_FALSE(media_channel1_->IsStreamMuted(0));
+
+ // Test that we can not mute an unknown SSRC.
+ EXPECT_FALSE(channel1_->SetAudioSend(kSsrc1, false, nullptr, nullptr));
+
+ SendInitiate();
+ // After the local session description has been set, we can mute a stream
+ // with its SSRC.
+ EXPECT_TRUE(channel1_->SetAudioSend(kSsrc1, false, nullptr, nullptr));
+ EXPECT_TRUE(media_channel1_->IsStreamMuted(kSsrc1));
+ EXPECT_TRUE(channel1_->SetAudioSend(kSsrc1, true, nullptr, nullptr));
+ EXPECT_FALSE(media_channel1_->IsStreamMuted(kSsrc1));
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestMediaContentDirection) {
+ Base::TestMediaContentDirection();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestNetworkRouteChanges) {
+ Base::TestNetworkRouteChanges();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestCallSetup) {
+ Base::TestCallSetup();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestCallTeardownRtcpMux) {
+ Base::TestCallTeardownRtcpMux();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendRtpToRtp) {
+ Base::SendRtpToRtp();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendNoRtcpToNoRtcp) {
+ Base::SendNoRtcpToNoRtcp();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendNoRtcpToRtcp) {
+ Base::SendNoRtcpToRtcp();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendRtcpToNoRtcp) {
+ Base::SendRtcpToNoRtcp();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendRtcpToRtcp) {
+ Base::SendRtcpToRtcp();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendRtcpMuxToRtcp) {
+ Base::SendRtcpMuxToRtcp();
}
-TEST_F(VoiceChannelTest, SetRtpParametersIsNotImplemented) {
+TEST_F(VoiceChannelDoubleThreadTest, SendRtcpMuxToRtcpMux) {
+ Base::SendRtcpMuxToRtcpMux();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendRequireRtcpMuxToRtcpMux) {
+ Base::SendRequireRtcpMuxToRtcpMux();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendRtcpMuxToRequireRtcpMux) {
+ Base::SendRtcpMuxToRequireRtcpMux();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendRequireRtcpMuxToRequireRtcpMux) {
+ Base::SendRequireRtcpMuxToRequireRtcpMux();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendRequireRtcpMuxToNoRtcpMux) {
+ Base::SendRequireRtcpMuxToNoRtcpMux();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendEarlyRtcpMuxToRtcp) {
+ Base::SendEarlyRtcpMuxToRtcp();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendEarlyRtcpMuxToRtcpMux) {
+ Base::SendEarlyRtcpMuxToRtcpMux();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendSrtpToSrtpRtcpMux) {
+ Base::SendSrtpToSrtp(RTCP_MUX, RTCP_MUX);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendSrtpToRtp) {
+ Base::SendSrtpToSrtp();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendSrtcpMux) {
+ Base::SendSrtpToSrtp(RTCP_MUX, RTCP_MUX);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendDtlsSrtpToSrtp) {
+ MAYBE_SKIP_TEST(HaveDtlsSrtp);
+ Base::SendSrtpToSrtp(DTLS, 0);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendDtlsSrtpToDtlsSrtp) {
+ MAYBE_SKIP_TEST(HaveDtlsSrtp);
+ Base::SendSrtpToSrtp(DTLS, DTLS);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendDtlsSrtpToDtlsSrtpRtcpMux) {
+ MAYBE_SKIP_TEST(HaveDtlsSrtp);
+ Base::SendSrtpToSrtp(DTLS | RTCP_MUX, DTLS | RTCP_MUX);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendEarlyMediaUsingRtcpMuxSrtp) {
+ Base::SendEarlyMediaUsingRtcpMuxSrtp();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendRtpToRtpOnThread) {
+ Base::SendRtpToRtpOnThread();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendSrtpToSrtpOnThread) {
+ Base::SendSrtpToSrtpOnThread();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendWithWritabilityLoss) {
+ Base::SendWithWritabilityLoss();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestMediaMonitor) {
+ Base::TestMediaMonitor();
+}
+
+// Test that InsertDtmf properly forwards to the media channel.
+TEST_F(VoiceChannelDoubleThreadTest, TestInsertDtmf) {
CreateChannels(0, 0);
+ EXPECT_TRUE(SendInitiate());
+ EXPECT_TRUE(SendAccept());
+ EXPECT_EQ(0U, media_channel1_->dtmf_info_queue().size());
+
+ EXPECT_TRUE(channel1_->InsertDtmf(1, 3, 100));
+ EXPECT_TRUE(channel1_->InsertDtmf(2, 5, 110));
+ EXPECT_TRUE(channel1_->InsertDtmf(3, 7, 120));
+
+ ASSERT_EQ(3U, media_channel1_->dtmf_info_queue().size());
+ EXPECT_TRUE(
+ CompareDtmfInfo(media_channel1_->dtmf_info_queue()[0], 1, 3, 100));
EXPECT_TRUE(
- channel1_->SetLocalContent(&local_media_content1_, CA_OFFER, NULL));
- EXPECT_FALSE(
- channel1_->SetRtpParameters(kSsrc1, BitrateLimitedParameters(1000)));
+ CompareDtmfInfo(media_channel1_->dtmf_info_queue()[1], 2, 5, 110));
+ EXPECT_TRUE(
+ CompareDtmfInfo(media_channel1_->dtmf_info_queue()[2], 3, 7, 120));
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestSetContentFailure) {
+ Base::TestSetContentFailure();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestSendTwoOffers) {
+ Base::TestSendTwoOffers();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestReceiveTwoOffers) {
+ Base::TestReceiveTwoOffers();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestSendPrAnswer) {
+ Base::TestSendPrAnswer();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestReceivePrAnswer) {
+ Base::TestReceivePrAnswer();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestFlushRtcp) {
+ Base::TestFlushRtcp();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestSrtpError) {
+ Base::TestSrtpError(kAudioPts[0]);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestOnReadyToSend) {
+ Base::TestOnReadyToSend();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, TestOnReadyToSendWithRtcpMux) {
+ Base::TestOnReadyToSendWithRtcpMux();
+}
+
+// Test that we can scale the output volume properly for 1:1 calls.
+TEST_F(VoiceChannelDoubleThreadTest, TestScaleVolume1to1Call) {
+ CreateChannels(RTCP, RTCP);
+ EXPECT_TRUE(SendInitiate());
+ EXPECT_TRUE(SendAccept());
+ double volume;
+
+ // Default is (1.0).
+ EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
+ EXPECT_DOUBLE_EQ(1.0, volume);
+ // invalid ssrc.
+ EXPECT_FALSE(media_channel1_->GetOutputVolume(3, &volume));
+
+ // Set scale to (1.5).
+ EXPECT_TRUE(channel1_->SetOutputVolume(0, 1.5));
+ EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
+ EXPECT_DOUBLE_EQ(1.5, volume);
+
+ // Set scale to (0).
+ EXPECT_TRUE(channel1_->SetOutputVolume(0, 0.0));
+ EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
+ EXPECT_DOUBLE_EQ(0.0, volume);
+}
+
+// Test that we can scale the output volume properly for multiway calls.
+TEST_F(VoiceChannelDoubleThreadTest, TestScaleVolumeMultiwayCall) {
+ CreateChannels(RTCP, RTCP);
+ EXPECT_TRUE(SendInitiate());
+ EXPECT_TRUE(SendAccept());
+ EXPECT_TRUE(AddStream1(1));
+ EXPECT_TRUE(AddStream1(2));
+
+ double volume;
+ // Default is (1.0).
+ EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
+ EXPECT_DOUBLE_EQ(1.0, volume);
+ EXPECT_TRUE(media_channel1_->GetOutputVolume(1, &volume));
+ EXPECT_DOUBLE_EQ(1.0, volume);
+ EXPECT_TRUE(media_channel1_->GetOutputVolume(2, &volume));
+ EXPECT_DOUBLE_EQ(1.0, volume);
+ // invalid ssrc.
+ EXPECT_FALSE(media_channel1_->GetOutputVolume(3, &volume));
+
+ // Set scale to (1.5) for ssrc = 1.
+ EXPECT_TRUE(channel1_->SetOutputVolume(1, 1.5));
+ EXPECT_TRUE(media_channel1_->GetOutputVolume(1, &volume));
+ EXPECT_DOUBLE_EQ(1.5, volume);
+ EXPECT_TRUE(media_channel1_->GetOutputVolume(2, &volume));
+ EXPECT_DOUBLE_EQ(1.0, volume);
+ EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
+ EXPECT_DOUBLE_EQ(1.0, volume);
+
+ // Set scale to (0) for all ssrcs.
+ EXPECT_TRUE(channel1_->SetOutputVolume(0, 0.0));
+ EXPECT_TRUE(media_channel1_->GetOutputVolume(0, &volume));
+ EXPECT_DOUBLE_EQ(0.0, volume);
+ EXPECT_TRUE(media_channel1_->GetOutputVolume(1, &volume));
+ EXPECT_DOUBLE_EQ(0.0, volume);
+ EXPECT_TRUE(media_channel1_->GetOutputVolume(2, &volume));
+ EXPECT_DOUBLE_EQ(0.0, volume);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendBundleToBundle) {
+ Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), false, false);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendBundleToBundleSecure) {
+ Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), false, true);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendBundleToBundleWithRtcpMux) {
+ Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), true, false);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, SendBundleToBundleWithRtcpMuxSecure) {
+ Base::SendBundleToBundle(kAudioPts, arraysize(kAudioPts), true, true);
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, DefaultMaxBitrateIsUnlimited) {
+ Base::DefaultMaxBitrateIsUnlimited();
+}
+
+TEST_F(VoiceChannelDoubleThreadTest, CanChangeMaxBitrate) {
+ Base::CanChangeMaxBitrate();
+}
+
+// VideoChannelSingleThreadTest
+TEST_F(VideoChannelSingleThreadTest, TestInit) {
+ Base::TestInit();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestDeinit) {
+ Base::TestDeinit();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetContents) {
+ Base::TestSetContents();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetContentsNullOffer) {
+ Base::TestSetContentsNullOffer();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetContentsRtcpMux) {
+ Base::TestSetContentsRtcpMux();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetContentsRtcpMuxWithPrAnswer) {
+ Base::TestSetContentsRtcpMux();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetRemoteContentUpdate) {
+ Base::TestSetRemoteContentUpdate();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestStreams) {
+ Base::TestStreams();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestUpdateStreamsInLocalContent) {
+ Base::TestUpdateStreamsInLocalContent();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestUpdateRemoteStreamsInContent) {
+ Base::TestUpdateStreamsInRemoteContent();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestChangeStreamParamsInContent) {
+ Base::TestChangeStreamParamsInContent();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestPlayoutAndSendingStates) {
+ Base::TestPlayoutAndSendingStates();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestMuteStream) {
+ CreateChannels(0, 0);
+ // Test that we can Mute the default channel even though the sending SSRC
+ // is unknown.
+ EXPECT_FALSE(media_channel1_->IsStreamMuted(0));
+ EXPECT_TRUE(channel1_->SetVideoSend(0, false, nullptr));
+ EXPECT_TRUE(media_channel1_->IsStreamMuted(0));
+ EXPECT_TRUE(channel1_->SetVideoSend(0, true, nullptr));
+ EXPECT_FALSE(media_channel1_->IsStreamMuted(0));
+ // Test that we can not mute an unknown SSRC.
+ EXPECT_FALSE(channel1_->SetVideoSend(kSsrc1, false, nullptr));
+ SendInitiate();
+ // After the local session description has been set, we can mute a stream
+ // with its SSRC.
+ EXPECT_TRUE(channel1_->SetVideoSend(kSsrc1, false, nullptr));
+ EXPECT_TRUE(media_channel1_->IsStreamMuted(kSsrc1));
+ EXPECT_TRUE(channel1_->SetVideoSend(kSsrc1, true, nullptr));
+ EXPECT_FALSE(media_channel1_->IsStreamMuted(kSsrc1));
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestMediaContentDirection) {
+ Base::TestMediaContentDirection();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestNetworkRouteChanges) {
+ Base::TestNetworkRouteChanges();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestCallSetup) {
+ Base::TestCallSetup();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestCallTeardownRtcpMux) {
+ Base::TestCallTeardownRtcpMux();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendRtpToRtp) {
+ Base::SendRtpToRtp();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendNoRtcpToNoRtcp) {
+ Base::SendNoRtcpToNoRtcp();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendNoRtcpToRtcp) {
+ Base::SendNoRtcpToRtcp();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendRtcpToNoRtcp) {
+ Base::SendRtcpToNoRtcp();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendRtcpToRtcp) {
+ Base::SendRtcpToRtcp();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendRtcpMuxToRtcp) {
+ Base::SendRtcpMuxToRtcp();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendRtcpMuxToRtcpMux) {
+ Base::SendRtcpMuxToRtcpMux();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendRequireRtcpMuxToRtcpMux) {
+ Base::SendRequireRtcpMuxToRtcpMux();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendRtcpMuxToRequireRtcpMux) {
+ Base::SendRtcpMuxToRequireRtcpMux();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendRequireRtcpMuxToRequireRtcpMux) {
+ Base::SendRequireRtcpMuxToRequireRtcpMux();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendRequireRtcpMuxToNoRtcpMux) {
+ Base::SendRequireRtcpMuxToNoRtcpMux();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendEarlyRtcpMuxToRtcp) {
+ Base::SendEarlyRtcpMuxToRtcp();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendEarlyRtcpMuxToRtcpMux) {
+ Base::SendEarlyRtcpMuxToRtcpMux();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendSrtpToSrtp) {
+ Base::SendSrtpToSrtp();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendSrtpToRtp) {
+ Base::SendSrtpToSrtp();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendDtlsSrtpToSrtp) {
+ MAYBE_SKIP_TEST(HaveDtlsSrtp);
+ Base::SendSrtpToSrtp(DTLS, 0);
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendDtlsSrtpToDtlsSrtp) {
+ MAYBE_SKIP_TEST(HaveDtlsSrtp);
+ Base::SendSrtpToSrtp(DTLS, DTLS);
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendDtlsSrtpToDtlsSrtpRtcpMux) {
+ MAYBE_SKIP_TEST(HaveDtlsSrtp);
+ Base::SendSrtpToSrtp(DTLS | RTCP_MUX, DTLS | RTCP_MUX);
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendSrtcpMux) {
+ Base::SendSrtpToSrtp(RTCP_MUX, RTCP_MUX);
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendEarlyMediaUsingRtcpMuxSrtp) {
+ Base::SendEarlyMediaUsingRtcpMuxSrtp();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendRtpToRtpOnThread) {
+ Base::SendRtpToRtpOnThread();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendSrtpToSrtpOnThread) {
+ Base::SendSrtpToSrtpOnThread();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendWithWritabilityLoss) {
+ Base::SendWithWritabilityLoss();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestMediaMonitor) {
+ Base::TestMediaMonitor();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSetContentFailure) {
+ Base::TestSetContentFailure();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSendTwoOffers) {
+ Base::TestSendTwoOffers();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestReceiveTwoOffers) {
+ Base::TestReceiveTwoOffers();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSendPrAnswer) {
+ Base::TestSendPrAnswer();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestReceivePrAnswer) {
+ Base::TestReceivePrAnswer();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestFlushRtcp) {
+ Base::TestFlushRtcp();
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendBundleToBundle) {
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), false, false);
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendBundleToBundleSecure) {
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), false, true);
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendBundleToBundleWithRtcpMux) {
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), true, false);
+}
+
+TEST_F(VideoChannelSingleThreadTest, SendBundleToBundleWithRtcpMuxSecure) {
+ Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), true, true);
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestSrtpError) {
+ Base::TestSrtpError(kVideoPts[0]);
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestOnReadyToSend) {
+ Base::TestOnReadyToSend();
+}
+
+TEST_F(VideoChannelSingleThreadTest, TestOnReadyToSendWithRtcpMux) {
+ Base::TestOnReadyToSendWithRtcpMux();
+}
+
+TEST_F(VideoChannelSingleThreadTest, DefaultMaxBitrateIsUnlimited) {
+ Base::DefaultMaxBitrateIsUnlimited();
+}
+
+TEST_F(VideoChannelSingleThreadTest, CanChangeMaxBitrate) {
+ Base::CanChangeMaxBitrate();
}
-// VideoChannelTest
-TEST_F(VideoChannelTest, TestInit) {
+// VideoChannelDoubleThreadTest
+TEST_F(VideoChannelDoubleThreadTest, TestInit) {
Base::TestInit();
}
-TEST_F(VideoChannelTest, TestSetContents) {
+TEST_F(VideoChannelDoubleThreadTest, TestDeinit) {
+ Base::TestDeinit();
+}
+
+TEST_F(VideoChannelDoubleThreadTest, TestSetContents) {
Base::TestSetContents();
}
-TEST_F(VideoChannelTest, TestSetContentsNullOffer) {
+TEST_F(VideoChannelDoubleThreadTest, TestSetContentsNullOffer) {
Base::TestSetContentsNullOffer();
}
-TEST_F(VideoChannelTest, TestSetContentsRtcpMux) {
+TEST_F(VideoChannelDoubleThreadTest, TestSetContentsRtcpMux) {
Base::TestSetContentsRtcpMux();
}
-TEST_F(VideoChannelTest, TestSetContentsRtcpMuxWithPrAnswer) {
+TEST_F(VideoChannelDoubleThreadTest, TestSetContentsRtcpMuxWithPrAnswer) {
Base::TestSetContentsRtcpMux();
}
-TEST_F(VideoChannelTest, TestSetRemoteContentUpdate) {
+TEST_F(VideoChannelDoubleThreadTest, TestSetRemoteContentUpdate) {
Base::TestSetRemoteContentUpdate();
}
-TEST_F(VideoChannelTest, TestStreams) {
+TEST_F(VideoChannelDoubleThreadTest, TestStreams) {
Base::TestStreams();
}
-TEST_F(VideoChannelTest, TestUpdateStreamsInLocalContent) {
+TEST_F(VideoChannelDoubleThreadTest, TestUpdateStreamsInLocalContent) {
Base::TestUpdateStreamsInLocalContent();
}
-TEST_F(VideoChannelTest, TestUpdateRemoteStreamsInContent) {
+TEST_F(VideoChannelDoubleThreadTest, TestUpdateRemoteStreamsInContent) {
Base::TestUpdateStreamsInRemoteContent();
}
-TEST_F(VideoChannelTest, TestChangeStreamParamsInContent) {
+TEST_F(VideoChannelDoubleThreadTest, TestChangeStreamParamsInContent) {
Base::TestChangeStreamParamsInContent();
}
-TEST_F(VideoChannelTest, TestPlayoutAndSendingStates) {
+TEST_F(VideoChannelDoubleThreadTest, TestPlayoutAndSendingStates) {
Base::TestPlayoutAndSendingStates();
}
-TEST_F(VideoChannelTest, TestMuteStream) {
+TEST_F(VideoChannelDoubleThreadTest, TestMuteStream) {
CreateChannels(0, 0);
// Test that we can Mute the default channel even though the sending SSRC
// is unknown.
@@ -2442,214 +3055,217 @@ TEST_F(VideoChannelTest, TestMuteStream) {
EXPECT_FALSE(media_channel1_->IsStreamMuted(kSsrc1));
}
-TEST_F(VideoChannelTest, TestMediaContentDirection) {
+TEST_F(VideoChannelDoubleThreadTest, TestMediaContentDirection) {
Base::TestMediaContentDirection();
}
-TEST_F(VideoChannelTest, TestNetworkRouteChanges) {
+TEST_F(VideoChannelDoubleThreadTest, TestNetworkRouteChanges) {
Base::TestNetworkRouteChanges();
}
-TEST_F(VideoChannelTest, TestCallSetup) {
+TEST_F(VideoChannelDoubleThreadTest, TestCallSetup) {
Base::TestCallSetup();
}
-TEST_F(VideoChannelTest, TestCallTeardownRtcpMux) {
+TEST_F(VideoChannelDoubleThreadTest, TestCallTeardownRtcpMux) {
Base::TestCallTeardownRtcpMux();
}
-TEST_F(VideoChannelTest, SendRtpToRtp) {
+TEST_F(VideoChannelDoubleThreadTest, SendRtpToRtp) {
Base::SendRtpToRtp();
}
-TEST_F(VideoChannelTest, SendNoRtcpToNoRtcp) {
+TEST_F(VideoChannelDoubleThreadTest, SendNoRtcpToNoRtcp) {
Base::SendNoRtcpToNoRtcp();
}
-TEST_F(VideoChannelTest, SendNoRtcpToRtcp) {
+TEST_F(VideoChannelDoubleThreadTest, SendNoRtcpToRtcp) {
Base::SendNoRtcpToRtcp();
}
-TEST_F(VideoChannelTest, SendRtcpToNoRtcp) {
+TEST_F(VideoChannelDoubleThreadTest, SendRtcpToNoRtcp) {
Base::SendRtcpToNoRtcp();
}
-TEST_F(VideoChannelTest, SendRtcpToRtcp) {
+TEST_F(VideoChannelDoubleThreadTest, SendRtcpToRtcp) {
Base::SendRtcpToRtcp();
}
-TEST_F(VideoChannelTest, SendRtcpMuxToRtcp) {
+TEST_F(VideoChannelDoubleThreadTest, SendRtcpMuxToRtcp) {
Base::SendRtcpMuxToRtcp();
}
-TEST_F(VideoChannelTest, SendRtcpMuxToRtcpMux) {
+TEST_F(VideoChannelDoubleThreadTest, SendRtcpMuxToRtcpMux) {
Base::SendRtcpMuxToRtcpMux();
}
-TEST_F(VideoChannelTest, SendRequireRtcpMuxToRtcpMux) {
+TEST_F(VideoChannelDoubleThreadTest, SendRequireRtcpMuxToRtcpMux) {
Base::SendRequireRtcpMuxToRtcpMux();
}
-TEST_F(VideoChannelTest, SendRtcpMuxToRequireRtcpMux) {
+TEST_F(VideoChannelDoubleThreadTest, SendRtcpMuxToRequireRtcpMux) {
Base::SendRtcpMuxToRequireRtcpMux();
}
-TEST_F(VideoChannelTest, SendRequireRtcpMuxToRequireRtcpMux) {
+TEST_F(VideoChannelDoubleThreadTest, SendRequireRtcpMuxToRequireRtcpMux) {
Base::SendRequireRtcpMuxToRequireRtcpMux();
}
-TEST_F(VideoChannelTest, SendRequireRtcpMuxToNoRtcpMux) {
+TEST_F(VideoChannelDoubleThreadTest, SendRequireRtcpMuxToNoRtcpMux) {
Base::SendRequireRtcpMuxToNoRtcpMux();
}
-TEST_F(VideoChannelTest, SendEarlyRtcpMuxToRtcp) {
+TEST_F(VideoChannelDoubleThreadTest, SendEarlyRtcpMuxToRtcp) {
Base::SendEarlyRtcpMuxToRtcp();
}
-TEST_F(VideoChannelTest, SendEarlyRtcpMuxToRtcpMux) {
+TEST_F(VideoChannelDoubleThreadTest, SendEarlyRtcpMuxToRtcpMux) {
Base::SendEarlyRtcpMuxToRtcpMux();
}
-TEST_F(VideoChannelTest, SendSrtpToSrtp) {
+TEST_F(VideoChannelDoubleThreadTest, SendSrtpToSrtp) {
Base::SendSrtpToSrtp();
}
-TEST_F(VideoChannelTest, SendSrtpToRtp) {
+TEST_F(VideoChannelDoubleThreadTest, SendSrtpToRtp) {
Base::SendSrtpToSrtp();
}
-TEST_F(VideoChannelTest, SendDtlsSrtpToSrtp) {
+TEST_F(VideoChannelDoubleThreadTest, SendDtlsSrtpToSrtp) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
Base::SendSrtpToSrtp(DTLS, 0);
}
-TEST_F(VideoChannelTest, SendDtlsSrtpToDtlsSrtp) {
+TEST_F(VideoChannelDoubleThreadTest, SendDtlsSrtpToDtlsSrtp) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
Base::SendSrtpToSrtp(DTLS, DTLS);
}
-TEST_F(VideoChannelTest, SendDtlsSrtpToDtlsSrtpRtcpMux) {
+TEST_F(VideoChannelDoubleThreadTest, SendDtlsSrtpToDtlsSrtpRtcpMux) {
MAYBE_SKIP_TEST(HaveDtlsSrtp);
Base::SendSrtpToSrtp(DTLS | RTCP_MUX, DTLS | RTCP_MUX);
}
-TEST_F(VideoChannelTest, SendSrtcpMux) {
+TEST_F(VideoChannelDoubleThreadTest, SendSrtcpMux) {
Base::SendSrtpToSrtp(RTCP_MUX, RTCP_MUX);
}
-TEST_F(VideoChannelTest, SendEarlyMediaUsingRtcpMuxSrtp) {
+TEST_F(VideoChannelDoubleThreadTest, SendEarlyMediaUsingRtcpMuxSrtp) {
Base::SendEarlyMediaUsingRtcpMuxSrtp();
}
-TEST_F(VideoChannelTest, SendRtpToRtpOnThread) {
+TEST_F(VideoChannelDoubleThreadTest, SendRtpToRtpOnThread) {
Base::SendRtpToRtpOnThread();
}
-TEST_F(VideoChannelTest, SendSrtpToSrtpOnThread) {
+TEST_F(VideoChannelDoubleThreadTest, SendSrtpToSrtpOnThread) {
Base::SendSrtpToSrtpOnThread();
}
-TEST_F(VideoChannelTest, SendWithWritabilityLoss) {
+TEST_F(VideoChannelDoubleThreadTest, SendWithWritabilityLoss) {
Base::SendWithWritabilityLoss();
}
-TEST_F(VideoChannelTest, TestMediaMonitor) {
+TEST_F(VideoChannelDoubleThreadTest, TestMediaMonitor) {
Base::TestMediaMonitor();
}
-TEST_F(VideoChannelTest, TestSetContentFailure) {
+TEST_F(VideoChannelDoubleThreadTest, TestSetContentFailure) {
Base::TestSetContentFailure();
}
-TEST_F(VideoChannelTest, TestSendTwoOffers) {
+TEST_F(VideoChannelDoubleThreadTest, TestSendTwoOffers) {
Base::TestSendTwoOffers();
}
-TEST_F(VideoChannelTest, TestReceiveTwoOffers) {
+TEST_F(VideoChannelDoubleThreadTest, TestReceiveTwoOffers) {
Base::TestReceiveTwoOffers();
}
-TEST_F(VideoChannelTest, TestSendPrAnswer) {
+TEST_F(VideoChannelDoubleThreadTest, TestSendPrAnswer) {
Base::TestSendPrAnswer();
}
-TEST_F(VideoChannelTest, TestReceivePrAnswer) {
+TEST_F(VideoChannelDoubleThreadTest, TestReceivePrAnswer) {
Base::TestReceivePrAnswer();
}
-TEST_F(VideoChannelTest, TestFlushRtcp) {
+TEST_F(VideoChannelDoubleThreadTest, TestFlushRtcp) {
Base::TestFlushRtcp();
}
-TEST_F(VideoChannelTest, SendBundleToBundle) {
+TEST_F(VideoChannelDoubleThreadTest, SendBundleToBundle) {
Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), false, false);
}
-TEST_F(VideoChannelTest, SendBundleToBundleSecure) {
+TEST_F(VideoChannelDoubleThreadTest, SendBundleToBundleSecure) {
Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), false, true);
}
-TEST_F(VideoChannelTest, SendBundleToBundleWithRtcpMux) {
+TEST_F(VideoChannelDoubleThreadTest, SendBundleToBundleWithRtcpMux) {
Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), true, false);
}
-TEST_F(VideoChannelTest, SendBundleToBundleWithRtcpMuxSecure) {
+TEST_F(VideoChannelDoubleThreadTest, SendBundleToBundleWithRtcpMuxSecure) {
Base::SendBundleToBundle(kVideoPts, arraysize(kVideoPts), true, true);
}
-TEST_F(VideoChannelTest, TestSrtpError) {
+TEST_F(VideoChannelDoubleThreadTest, TestSrtpError) {
Base::TestSrtpError(kVideoPts[0]);
}
-TEST_F(VideoChannelTest, TestOnReadyToSend) {
+TEST_F(VideoChannelDoubleThreadTest, TestOnReadyToSend) {
Base::TestOnReadyToSend();
}
-TEST_F(VideoChannelTest, TestOnReadyToSendWithRtcpMux) {
+TEST_F(VideoChannelDoubleThreadTest, TestOnReadyToSendWithRtcpMux) {
Base::TestOnReadyToSendWithRtcpMux();
}
-TEST_F(VideoChannelTest, DefaultMaxBitrateIsUnlimited) {
+TEST_F(VideoChannelDoubleThreadTest, DefaultMaxBitrateIsUnlimited) {
Base::DefaultMaxBitrateIsUnlimited();
}
-TEST_F(VideoChannelTest, CanChangeMaxBitrate) {
+TEST_F(VideoChannelDoubleThreadTest, CanChangeMaxBitrate) {
Base::CanChangeMaxBitrate();
}
-// DataChannelTest
+// DataChannelSingleThreadTest
+class DataChannelSingleThreadTest : public ChannelTest<DataTraits> {
+ public:
+ typedef ChannelTest<DataTraits> Base;
+ DataChannelSingleThreadTest()
+ : Base(true, kDataPacket, kRtcpReport, NetworkIsWorker::Yes) {}
+};
-class DataChannelTest
- : public ChannelTest<DataTraits> {
+// DataChannelDoubleThreadTest
+class DataChannelDoubleThreadTest : public ChannelTest<DataTraits> {
public:
- typedef ChannelTest<DataTraits>
- Base;
- DataChannelTest()
- : Base(true,
- kDataPacket,
- sizeof(kDataPacket),
- kRtcpReport,
- sizeof(kRtcpReport)) {}
+ typedef ChannelTest<DataTraits> Base;
+ DataChannelDoubleThreadTest()
+ : Base(true, kDataPacket, kRtcpReport, NetworkIsWorker::No) {}
};
// Override to avoid engine channel parameter.
template <>
cricket::DataChannel* ChannelTest<DataTraits>::CreateChannel(
- rtc::Thread* thread,
+ rtc::Thread* worker_thread,
+ rtc::Thread* network_thread,
cricket::MediaEngineInterface* engine,
cricket::FakeDataMediaChannel* ch,
cricket::TransportController* transport_controller,
bool rtcp) {
- cricket::DataChannel* channel = new cricket::DataChannel(
- thread, ch, transport_controller, cricket::CN_DATA, rtcp);
- if (!channel->Init()) {
+ cricket::DataChannel* channel =
+ new cricket::DataChannel(worker_thread, network_thread, ch,
+ transport_controller, cricket::CN_DATA, rtcp);
+ if (!channel->Init_w(nullptr)) {
delete channel;
channel = NULL;
}
return channel;
}
-template<>
+template <>
void ChannelTest<DataTraits>::CreateContent(
int flags,
const cricket::AudioCodec& audio_codec,
@@ -2664,14 +3280,14 @@ void ChannelTest<DataTraits>::CreateContent(
}
}
-template<>
+template <>
void ChannelTest<DataTraits>::CopyContent(
const cricket::DataContentDescription& source,
cricket::DataContentDescription* data) {
*data = source;
}
-template<>
+template <>
bool ChannelTest<DataTraits>::CodecMatches(const cricket::DataCodec& c1,
const cricket::DataCodec& c2) {
return c1.name == c2.name;
@@ -2685,132 +3301,280 @@ void ChannelTest<DataTraits>::AddLegacyStreamInContent(
data->AddLegacyStream(ssrc);
}
-TEST_F(DataChannelTest, TestInit) {
+TEST_F(DataChannelSingleThreadTest, TestInit) {
+ Base::TestInit();
+ EXPECT_FALSE(media_channel1_->IsStreamMuted(0));
+}
+
+TEST_F(DataChannelSingleThreadTest, TestDeinit) {
+ Base::TestDeinit();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestSetContents) {
+ Base::TestSetContents();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestSetContentsNullOffer) {
+ Base::TestSetContentsNullOffer();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestSetContentsRtcpMux) {
+ Base::TestSetContentsRtcpMux();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestSetRemoteContentUpdate) {
+ Base::TestSetRemoteContentUpdate();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestStreams) {
+ Base::TestStreams();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestUpdateStreamsInLocalContent) {
+ Base::TestUpdateStreamsInLocalContent();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestUpdateRemoteStreamsInContent) {
+ Base::TestUpdateStreamsInRemoteContent();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestChangeStreamParamsInContent) {
+ Base::TestChangeStreamParamsInContent();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestPlayoutAndSendingStates) {
+ Base::TestPlayoutAndSendingStates();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestMediaContentDirection) {
+ Base::TestMediaContentDirection();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestCallSetup) {
+ Base::TestCallSetup();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestCallTeardownRtcpMux) {
+ Base::TestCallTeardownRtcpMux();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestOnReadyToSend) {
+ Base::TestOnReadyToSend();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestOnReadyToSendWithRtcpMux) {
+ Base::TestOnReadyToSendWithRtcpMux();
+}
+
+TEST_F(DataChannelSingleThreadTest, SendRtpToRtp) {
+ Base::SendRtpToRtp();
+}
+
+TEST_F(DataChannelSingleThreadTest, SendNoRtcpToNoRtcp) {
+ Base::SendNoRtcpToNoRtcp();
+}
+
+TEST_F(DataChannelSingleThreadTest, SendNoRtcpToRtcp) {
+ Base::SendNoRtcpToRtcp();
+}
+
+TEST_F(DataChannelSingleThreadTest, SendRtcpToNoRtcp) {
+ Base::SendRtcpToNoRtcp();
+}
+
+TEST_F(DataChannelSingleThreadTest, SendRtcpToRtcp) {
+ Base::SendRtcpToRtcp();
+}
+
+TEST_F(DataChannelSingleThreadTest, SendRtcpMuxToRtcp) {
+ Base::SendRtcpMuxToRtcp();
+}
+
+TEST_F(DataChannelSingleThreadTest, SendRtcpMuxToRtcpMux) {
+ Base::SendRtcpMuxToRtcpMux();
+}
+
+TEST_F(DataChannelSingleThreadTest, SendEarlyRtcpMuxToRtcp) {
+ Base::SendEarlyRtcpMuxToRtcp();
+}
+
+TEST_F(DataChannelSingleThreadTest, SendEarlyRtcpMuxToRtcpMux) {
+ Base::SendEarlyRtcpMuxToRtcpMux();
+}
+
+TEST_F(DataChannelSingleThreadTest, SendSrtpToSrtp) {
+ Base::SendSrtpToSrtp();
+}
+
+TEST_F(DataChannelSingleThreadTest, SendSrtpToRtp) {
+ Base::SendSrtpToSrtp();
+}
+
+TEST_F(DataChannelSingleThreadTest, SendSrtcpMux) {
+ Base::SendSrtpToSrtp(RTCP_MUX, RTCP_MUX);
+}
+
+TEST_F(DataChannelSingleThreadTest, SendRtpToRtpOnThread) {
+ Base::SendRtpToRtpOnThread();
+}
+
+TEST_F(DataChannelSingleThreadTest, SendSrtpToSrtpOnThread) {
+ Base::SendSrtpToSrtpOnThread();
+}
+
+TEST_F(DataChannelSingleThreadTest, SendWithWritabilityLoss) {
+ Base::SendWithWritabilityLoss();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestMediaMonitor) {
+ Base::TestMediaMonitor();
+}
+
+TEST_F(DataChannelSingleThreadTest, TestSendData) {
+ CreateChannels(0, 0);
+ EXPECT_TRUE(SendInitiate());
+ EXPECT_TRUE(SendAccept());
+
+ cricket::SendDataParams params;
+ params.ssrc = 42;
+ unsigned char data[] = {'f', 'o', 'o'};
+ rtc::CopyOnWriteBuffer payload(data, 3);
+ cricket::SendDataResult result;
+ ASSERT_TRUE(media_channel1_->SendData(params, payload, &result));
+ EXPECT_EQ(params.ssrc, media_channel1_->last_sent_data_params().ssrc);
+ EXPECT_EQ("foo", media_channel1_->last_sent_data());
+}
+
+TEST_F(DataChannelDoubleThreadTest, TestInit) {
Base::TestInit();
EXPECT_FALSE(media_channel1_->IsStreamMuted(0));
}
-TEST_F(DataChannelTest, TestSetContents) {
+TEST_F(DataChannelDoubleThreadTest, TestDeinit) {
+ Base::TestDeinit();
+}
+
+TEST_F(DataChannelDoubleThreadTest, TestSetContents) {
Base::TestSetContents();
}
-TEST_F(DataChannelTest, TestSetContentsNullOffer) {
+TEST_F(DataChannelDoubleThreadTest, TestSetContentsNullOffer) {
Base::TestSetContentsNullOffer();
}
-TEST_F(DataChannelTest, TestSetContentsRtcpMux) {
+TEST_F(DataChannelDoubleThreadTest, TestSetContentsRtcpMux) {
Base::TestSetContentsRtcpMux();
}
-TEST_F(DataChannelTest, TestSetRemoteContentUpdate) {
+TEST_F(DataChannelDoubleThreadTest, TestSetRemoteContentUpdate) {
Base::TestSetRemoteContentUpdate();
}
-TEST_F(DataChannelTest, TestStreams) {
+TEST_F(DataChannelDoubleThreadTest, TestStreams) {
Base::TestStreams();
}
-TEST_F(DataChannelTest, TestUpdateStreamsInLocalContent) {
+TEST_F(DataChannelDoubleThreadTest, TestUpdateStreamsInLocalContent) {
Base::TestUpdateStreamsInLocalContent();
}
-TEST_F(DataChannelTest, TestUpdateRemoteStreamsInContent) {
+TEST_F(DataChannelDoubleThreadTest, TestUpdateRemoteStreamsInContent) {
Base::TestUpdateStreamsInRemoteContent();
}
-TEST_F(DataChannelTest, TestChangeStreamParamsInContent) {
+TEST_F(DataChannelDoubleThreadTest, TestChangeStreamParamsInContent) {
Base::TestChangeStreamParamsInContent();
}
-TEST_F(DataChannelTest, TestPlayoutAndSendingStates) {
+TEST_F(DataChannelDoubleThreadTest, TestPlayoutAndSendingStates) {
Base::TestPlayoutAndSendingStates();
}
-TEST_F(DataChannelTest, TestMediaContentDirection) {
+TEST_F(DataChannelDoubleThreadTest, TestMediaContentDirection) {
Base::TestMediaContentDirection();
}
-TEST_F(DataChannelTest, TestCallSetup) {
+TEST_F(DataChannelDoubleThreadTest, TestCallSetup) {
Base::TestCallSetup();
}
-TEST_F(DataChannelTest, TestCallTeardownRtcpMux) {
+TEST_F(DataChannelDoubleThreadTest, TestCallTeardownRtcpMux) {
Base::TestCallTeardownRtcpMux();
}
-TEST_F(DataChannelTest, TestOnReadyToSend) {
+TEST_F(DataChannelDoubleThreadTest, TestOnReadyToSend) {
Base::TestOnReadyToSend();
}
-TEST_F(DataChannelTest, TestOnReadyToSendWithRtcpMux) {
+TEST_F(DataChannelDoubleThreadTest, TestOnReadyToSendWithRtcpMux) {
Base::TestOnReadyToSendWithRtcpMux();
}
-TEST_F(DataChannelTest, SendRtpToRtp) {
+TEST_F(DataChannelDoubleThreadTest, SendRtpToRtp) {
Base::SendRtpToRtp();
}
-TEST_F(DataChannelTest, SendNoRtcpToNoRtcp) {
+TEST_F(DataChannelDoubleThreadTest, SendNoRtcpToNoRtcp) {
Base::SendNoRtcpToNoRtcp();
}
-TEST_F(DataChannelTest, SendNoRtcpToRtcp) {
+TEST_F(DataChannelDoubleThreadTest, SendNoRtcpToRtcp) {
Base::SendNoRtcpToRtcp();
}
-TEST_F(DataChannelTest, SendRtcpToNoRtcp) {
+TEST_F(DataChannelDoubleThreadTest, SendRtcpToNoRtcp) {
Base::SendRtcpToNoRtcp();
}
-TEST_F(DataChannelTest, SendRtcpToRtcp) {
+TEST_F(DataChannelDoubleThreadTest, SendRtcpToRtcp) {
Base::SendRtcpToRtcp();
}
-TEST_F(DataChannelTest, SendRtcpMuxToRtcp) {
+TEST_F(DataChannelDoubleThreadTest, SendRtcpMuxToRtcp) {
Base::SendRtcpMuxToRtcp();
}
-TEST_F(DataChannelTest, SendRtcpMuxToRtcpMux) {
+TEST_F(DataChannelDoubleThreadTest, SendRtcpMuxToRtcpMux) {
Base::SendRtcpMuxToRtcpMux();
}
-TEST_F(DataChannelTest, SendEarlyRtcpMuxToRtcp) {
+TEST_F(DataChannelDoubleThreadTest, SendEarlyRtcpMuxToRtcp) {
Base::SendEarlyRtcpMuxToRtcp();
}
-TEST_F(DataChannelTest, SendEarlyRtcpMuxToRtcpMux) {
+TEST_F(DataChannelDoubleThreadTest, SendEarlyRtcpMuxToRtcpMux) {
Base::SendEarlyRtcpMuxToRtcpMux();
}
-TEST_F(DataChannelTest, SendSrtpToSrtp) {
+TEST_F(DataChannelDoubleThreadTest, SendSrtpToSrtp) {
Base::SendSrtpToSrtp();
}
-TEST_F(DataChannelTest, SendSrtpToRtp) {
+TEST_F(DataChannelDoubleThreadTest, SendSrtpToRtp) {
Base::SendSrtpToSrtp();
}
-TEST_F(DataChannelTest, SendSrtcpMux) {
+TEST_F(DataChannelDoubleThreadTest, SendSrtcpMux) {
Base::SendSrtpToSrtp(RTCP_MUX, RTCP_MUX);
}
-TEST_F(DataChannelTest, SendRtpToRtpOnThread) {
+TEST_F(DataChannelDoubleThreadTest, SendRtpToRtpOnThread) {
Base::SendRtpToRtpOnThread();
}
-TEST_F(DataChannelTest, SendSrtpToSrtpOnThread) {
+TEST_F(DataChannelDoubleThreadTest, SendSrtpToSrtpOnThread) {
Base::SendSrtpToSrtpOnThread();
}
-TEST_F(DataChannelTest, SendWithWritabilityLoss) {
+TEST_F(DataChannelDoubleThreadTest, SendWithWritabilityLoss) {
Base::SendWithWritabilityLoss();
}
-TEST_F(DataChannelTest, TestMediaMonitor) {
+TEST_F(DataChannelDoubleThreadTest, TestMediaMonitor) {
Base::TestMediaMonitor();
}
-TEST_F(DataChannelTest, TestSendData) {
+TEST_F(DataChannelDoubleThreadTest, TestSendData) {
CreateChannels(0, 0);
EXPECT_TRUE(SendInitiate());
EXPECT_TRUE(SendAccept());
diff --git a/chromium/third_party/webrtc/pc/channelmanager.cc b/chromium/third_party/webrtc/pc/channelmanager.cc
index f59a3df9c72..56dd135549b 100644
--- a/chromium/third_party/webrtc/pc/channelmanager.cc
+++ b/chromium/third_party/webrtc/pc/channelmanager.cc
@@ -44,25 +44,26 @@ static DataEngineInterface* ConstructDataEngine() {
ChannelManager::ChannelManager(MediaEngineInterface* me,
DataEngineInterface* dme,
- rtc::Thread* worker_thread) {
- Construct(me, dme, worker_thread);
+ rtc::Thread* thread) {
+ Construct(me, dme, thread, thread);
}
ChannelManager::ChannelManager(MediaEngineInterface* me,
- rtc::Thread* worker_thread) {
- Construct(me,
- ConstructDataEngine(),
- worker_thread);
+ rtc::Thread* worker_thread,
+ rtc::Thread* network_thread) {
+ Construct(me, ConstructDataEngine(), worker_thread, network_thread);
}
void ChannelManager::Construct(MediaEngineInterface* me,
DataEngineInterface* dme,
- rtc::Thread* worker_thread) {
+ rtc::Thread* worker_thread,
+ rtc::Thread* network_thread) {
media_engine_.reset(me);
data_media_engine_.reset(dme);
initialized_ = false;
main_thread_ = rtc::Thread::Current();
worker_thread_ = worker_thread;
+ network_thread_ = network_thread;
audio_output_volume_ = kNotSetOutputVolume;
capturing_ = false;
enable_rtx_ = false;
@@ -144,18 +145,16 @@ bool ChannelManager::Init() {
if (initialized_) {
return false;
}
- ASSERT(worker_thread_ != NULL);
- if (!worker_thread_) {
- return false;
- }
- if (worker_thread_ != rtc::Thread::Current()) {
- // Do not allow invoking calls to other threads on the worker thread.
- worker_thread_->Invoke<bool>(rtc::Bind(
- &rtc::Thread::SetAllowBlockingCalls, worker_thread_, false));
+ RTC_DCHECK(network_thread_);
+ RTC_DCHECK(worker_thread_);
+ if (!network_thread_->IsCurrent()) {
+ // Do not allow invoking calls to other threads on the network thread.
+ network_thread_->Invoke<bool>(
+ rtc::Bind(&rtc::Thread::SetAllowBlockingCalls, network_thread_, false));
}
- initialized_ = worker_thread_->Invoke<bool>(Bind(
- &ChannelManager::InitMediaEngine_w, this));
+ initialized_ = worker_thread_->Invoke<bool>(
+ Bind(&ChannelManager::InitMediaEngine_w, this));
ASSERT(initialized_);
if (!initialized_) {
return false;
@@ -206,17 +205,20 @@ VoiceChannel* ChannelManager::CreateVoiceChannel(
webrtc::MediaControllerInterface* media_controller,
TransportController* transport_controller,
const std::string& content_name,
+ const std::string* bundle_transport_name,
bool rtcp,
const AudioOptions& options) {
return worker_thread_->Invoke<VoiceChannel*>(
Bind(&ChannelManager::CreateVoiceChannel_w, this, media_controller,
- transport_controller, content_name, rtcp, options));
+ transport_controller, content_name, bundle_transport_name, rtcp,
+ options));
}
VoiceChannel* ChannelManager::CreateVoiceChannel_w(
webrtc::MediaControllerInterface* media_controller,
TransportController* transport_controller,
const std::string& content_name,
+ const std::string* bundle_transport_name,
bool rtcp,
const AudioOptions& options) {
ASSERT(initialized_);
@@ -228,9 +230,9 @@ VoiceChannel* ChannelManager::CreateVoiceChannel_w(
return nullptr;
VoiceChannel* voice_channel =
- new VoiceChannel(worker_thread_, media_engine_.get(), media_channel,
- transport_controller, content_name, rtcp);
- if (!voice_channel->Init()) {
+ new VoiceChannel(worker_thread_, network_thread_, media_engine_.get(),
+ media_channel, transport_controller, content_name, rtcp);
+ if (!voice_channel->Init_w(bundle_transport_name)) {
delete voice_channel;
return nullptr;
}
@@ -264,17 +266,20 @@ VideoChannel* ChannelManager::CreateVideoChannel(
webrtc::MediaControllerInterface* media_controller,
TransportController* transport_controller,
const std::string& content_name,
+ const std::string* bundle_transport_name,
bool rtcp,
const VideoOptions& options) {
return worker_thread_->Invoke<VideoChannel*>(
Bind(&ChannelManager::CreateVideoChannel_w, this, media_controller,
- transport_controller, content_name, rtcp, options));
+ transport_controller, content_name, bundle_transport_name, rtcp,
+ options));
}
VideoChannel* ChannelManager::CreateVideoChannel_w(
webrtc::MediaControllerInterface* media_controller,
TransportController* transport_controller,
const std::string& content_name,
+ const std::string* bundle_transport_name,
bool rtcp,
const VideoOptions& options) {
ASSERT(initialized_);
@@ -286,9 +291,10 @@ VideoChannel* ChannelManager::CreateVideoChannel_w(
return NULL;
}
- VideoChannel* video_channel = new VideoChannel(
- worker_thread_, media_channel, transport_controller, content_name, rtcp);
- if (!video_channel->Init()) {
+ VideoChannel* video_channel =
+ new VideoChannel(worker_thread_, network_thread_, media_channel,
+ transport_controller, content_name, rtcp);
+ if (!video_channel->Init_w(bundle_transport_name)) {
delete video_channel;
return NULL;
}
@@ -322,16 +328,18 @@ void ChannelManager::DestroyVideoChannel_w(VideoChannel* video_channel) {
DataChannel* ChannelManager::CreateDataChannel(
TransportController* transport_controller,
const std::string& content_name,
+ const std::string* bundle_transport_name,
bool rtcp,
DataChannelType channel_type) {
return worker_thread_->Invoke<DataChannel*>(
Bind(&ChannelManager::CreateDataChannel_w, this, transport_controller,
- content_name, rtcp, channel_type));
+ content_name, bundle_transport_name, rtcp, channel_type));
}
DataChannel* ChannelManager::CreateDataChannel_w(
TransportController* transport_controller,
const std::string& content_name,
+ const std::string* bundle_transport_name,
bool rtcp,
DataChannelType data_channel_type) {
// This is ok to alloc from a thread other than the worker thread.
@@ -344,9 +352,10 @@ DataChannel* ChannelManager::CreateDataChannel_w(
return NULL;
}
- DataChannel* data_channel = new DataChannel(
- worker_thread_, media_channel, transport_controller, content_name, rtcp);
- if (!data_channel->Init()) {
+ DataChannel* data_channel =
+ new DataChannel(worker_thread_, network_thread_, media_channel,
+ transport_controller, content_name, rtcp);
+ if (!data_channel->Init_w(bundle_transport_name)) {
LOG(LS_WARNING) << "Failed to init data channel.";
delete data_channel;
return NULL;
@@ -413,9 +422,11 @@ void ChannelManager::StopAecDump() {
Bind(&MediaEngineInterface::StopAecDump, media_engine_.get()));
}
-bool ChannelManager::StartRtcEventLog(rtc::PlatformFile file) {
+bool ChannelManager::StartRtcEventLog(rtc::PlatformFile file,
+ int64_t max_size_bytes) {
return worker_thread_->Invoke<bool>(
- Bind(&MediaEngineInterface::StartRtcEventLog, media_engine_.get(), file));
+ Bind(&MediaEngineInterface::StartRtcEventLog, media_engine_.get(), file,
+ max_size_bytes));
}
void ChannelManager::StopRtcEventLog() {
diff --git a/chromium/third_party/webrtc/pc/channelmanager.h b/chromium/third_party/webrtc/pc/channelmanager.h
index 72a2f056b5a..3b0c0e967ea 100644
--- a/chromium/third_party/webrtc/pc/channelmanager.h
+++ b/chromium/third_party/webrtc/pc/channelmanager.h
@@ -42,20 +42,31 @@ class ChannelManager {
// ownership of these objects.
ChannelManager(MediaEngineInterface* me,
DataEngineInterface* dme,
- rtc::Thread* worker);
+ rtc::Thread* worker_and_network);
// Same as above, but gives an easier default DataEngine.
ChannelManager(MediaEngineInterface* me,
- rtc::Thread* worker);
+ rtc::Thread* worker,
+ rtc::Thread* network);
~ChannelManager();
// Accessors for the worker thread, allowing it to be set after construction,
// but before Init. set_worker_thread will return false if called after Init.
rtc::Thread* worker_thread() const { return worker_thread_; }
bool set_worker_thread(rtc::Thread* thread) {
- if (initialized_) return false;
+ if (initialized_) {
+ return false;
+ }
worker_thread_ = thread;
return true;
}
+ rtc::Thread* network_thread() const { return network_thread_; }
+ bool set_network_thread(rtc::Thread* thread) {
+ if (initialized_) {
+ return false;
+ }
+ network_thread_ = thread;
+ return true;
+ }
MediaEngineInterface* media_engine() { return media_engine_.get(); }
@@ -80,6 +91,7 @@ class ChannelManager {
webrtc::MediaControllerInterface* media_controller,
TransportController* transport_controller,
const std::string& content_name,
+ const std::string* bundle_transport_name,
bool rtcp,
const AudioOptions& options);
// Destroys a voice channel created with the Create API.
@@ -90,12 +102,14 @@ class ChannelManager {
webrtc::MediaControllerInterface* media_controller,
TransportController* transport_controller,
const std::string& content_name,
+ const std::string* bundle_transport_name,
bool rtcp,
const VideoOptions& options);
// Destroys a video channel created with the Create API.
void DestroyVideoChannel(VideoChannel* video_channel);
DataChannel* CreateDataChannel(TransportController* transport_controller,
const std::string& content_name,
+ const std::string* bundle_transport_name,
bool rtcp,
DataChannelType data_channel_type);
// Destroys a data channel created with the Create API.
@@ -126,7 +140,7 @@ class ChannelManager {
void StopAecDump();
// Starts RtcEventLog using existing file.
- bool StartRtcEventLog(rtc::PlatformFile file);
+ bool StartRtcEventLog(rtc::PlatformFile file, int64_t max_size_bytes);
// Stops logging RtcEventLog.
void StopRtcEventLog();
@@ -138,7 +152,8 @@ class ChannelManager {
void Construct(MediaEngineInterface* me,
DataEngineInterface* dme,
- rtc::Thread* worker_thread);
+ rtc::Thread* worker_thread,
+ rtc::Thread* network_thread);
bool InitMediaEngine_w();
void DestructorDeletes_w();
void Terminate_w();
@@ -146,6 +161,7 @@ class ChannelManager {
webrtc::MediaControllerInterface* media_controller,
TransportController* transport_controller,
const std::string& content_name,
+ const std::string* bundle_transport_name,
bool rtcp,
const AudioOptions& options);
void DestroyVoiceChannel_w(VoiceChannel* voice_channel);
@@ -153,11 +169,13 @@ class ChannelManager {
webrtc::MediaControllerInterface* media_controller,
TransportController* transport_controller,
const std::string& content_name,
+ const std::string* bundle_transport_name,
bool rtcp,
const VideoOptions& options);
void DestroyVideoChannel_w(VideoChannel* video_channel);
DataChannel* CreateDataChannel_w(TransportController* transport_controller,
const std::string& content_name,
+ const std::string* bundle_transport_name,
bool rtcp,
DataChannelType data_channel_type);
void DestroyDataChannel_w(DataChannel* data_channel);
@@ -167,6 +185,7 @@ class ChannelManager {
bool initialized_;
rtc::Thread* main_thread_;
rtc::Thread* worker_thread_;
+ rtc::Thread* network_thread_;
VoiceChannels voice_channels_;
VideoChannels video_channels_;
diff --git a/chromium/third_party/webrtc/pc/channelmanager_unittest.cc b/chromium/third_party/webrtc/pc/channelmanager_unittest.cc
index 1eb09198d9f..e5e7b4f7de4 100644
--- a/chromium/third_party/webrtc/pc/channelmanager_unittest.cc
+++ b/chromium/third_party/webrtc/pc/channelmanager_unittest.cc
@@ -22,14 +22,12 @@
namespace cricket {
static const AudioCodec kAudioCodecs[] = {
- AudioCodec(97, "voice", 1, 2, 3, 0),
- AudioCodec(111, "OPUS", 48000, 32000, 2, 0),
+ AudioCodec(97, "voice", 1, 2, 3), AudioCodec(111, "OPUS", 48000, 32000, 2),
};
static const VideoCodec kVideoCodecs[] = {
- VideoCodec(99, "H264", 100, 200, 300, 0),
- VideoCodec(100, "VP8", 100, 200, 300, 0),
- VideoCodec(96, "rtx", 100, 200, 300, 0),
+ VideoCodec(99, "H264", 100, 200, 300),
+ VideoCodec(100, "VP8", 100, 200, 300), VideoCodec(96, "rtx", 100, 200, 300),
};
class ChannelManagerTest : public testing::Test {
@@ -58,6 +56,7 @@ class ChannelManagerTest : public testing::Test {
fme_ = NULL;
}
+ rtc::Thread network_;
rtc::Thread worker_;
cricket::FakeMediaEngine* fme_;
cricket::FakeDataEngine* fdme_;
@@ -79,14 +78,18 @@ TEST_F(ChannelManagerTest, StartupShutdown) {
// Test that we startup/shutdown properly with a worker thread.
TEST_F(ChannelManagerTest, StartupShutdownOnThread) {
+ network_.Start();
worker_.Start();
EXPECT_FALSE(cm_->initialized());
EXPECT_EQ(rtc::Thread::Current(), cm_->worker_thread());
+ EXPECT_TRUE(cm_->set_network_thread(&network_));
+ EXPECT_EQ(&network_, cm_->network_thread());
EXPECT_TRUE(cm_->set_worker_thread(&worker_));
EXPECT_EQ(&worker_, cm_->worker_thread());
EXPECT_TRUE(cm_->Init());
EXPECT_TRUE(cm_->initialized());
- // Setting the worker thread while initialized should fail.
+ // Setting the network or worker thread while initialized should fail.
+ EXPECT_FALSE(cm_->set_network_thread(rtc::Thread::Current()));
EXPECT_FALSE(cm_->set_worker_thread(rtc::Thread::Current()));
cm_->Terminate();
EXPECT_FALSE(cm_->initialized());
@@ -95,16 +98,17 @@ TEST_F(ChannelManagerTest, StartupShutdownOnThread) {
// Test that we can create and destroy a voice and video channel.
TEST_F(ChannelManagerTest, CreateDestroyChannels) {
EXPECT_TRUE(cm_->Init());
- cricket::VoiceChannel* voice_channel =
- cm_->CreateVoiceChannel(&fake_mc_, transport_controller_,
- cricket::CN_AUDIO, false, AudioOptions());
+ cricket::VoiceChannel* voice_channel = cm_->CreateVoiceChannel(
+ &fake_mc_, transport_controller_, cricket::CN_AUDIO, nullptr, false,
+ AudioOptions());
EXPECT_TRUE(voice_channel != nullptr);
- cricket::VideoChannel* video_channel =
- cm_->CreateVideoChannel(&fake_mc_, transport_controller_,
- cricket::CN_VIDEO, false, VideoOptions());
+ cricket::VideoChannel* video_channel = cm_->CreateVideoChannel(
+ &fake_mc_, transport_controller_, cricket::CN_VIDEO, nullptr, false,
+ VideoOptions());
EXPECT_TRUE(video_channel != nullptr);
- cricket::DataChannel* data_channel = cm_->CreateDataChannel(
- transport_controller_, cricket::CN_DATA, false, cricket::DCT_RTP);
+ cricket::DataChannel* data_channel =
+ cm_->CreateDataChannel(transport_controller_, cricket::CN_DATA, nullptr,
+ false, cricket::DCT_RTP);
EXPECT_TRUE(data_channel != nullptr);
cm_->DestroyVideoChannel(video_channel);
cm_->DestroyVoiceChannel(voice_channel);
@@ -114,22 +118,25 @@ TEST_F(ChannelManagerTest, CreateDestroyChannels) {
// Test that we can create and destroy a voice and video channel with a worker.
TEST_F(ChannelManagerTest, CreateDestroyChannelsOnThread) {
+ network_.Start();
worker_.Start();
EXPECT_TRUE(cm_->set_worker_thread(&worker_));
+ EXPECT_TRUE(cm_->set_network_thread(&network_));
EXPECT_TRUE(cm_->Init());
delete transport_controller_;
transport_controller_ =
- new cricket::FakeTransportController(&worker_, ICEROLE_CONTROLLING);
- cricket::VoiceChannel* voice_channel =
- cm_->CreateVoiceChannel(&fake_mc_, transport_controller_,
- cricket::CN_AUDIO, false, AudioOptions());
+ new cricket::FakeTransportController(&network_, ICEROLE_CONTROLLING);
+ cricket::VoiceChannel* voice_channel = cm_->CreateVoiceChannel(
+ &fake_mc_, transport_controller_, cricket::CN_AUDIO, nullptr, false,
+ AudioOptions());
EXPECT_TRUE(voice_channel != nullptr);
- cricket::VideoChannel* video_channel =
- cm_->CreateVideoChannel(&fake_mc_, transport_controller_,
- cricket::CN_VIDEO, false, VideoOptions());
+ cricket::VideoChannel* video_channel = cm_->CreateVideoChannel(
+ &fake_mc_, transport_controller_, cricket::CN_VIDEO, nullptr, false,
+ VideoOptions());
EXPECT_TRUE(video_channel != nullptr);
- cricket::DataChannel* data_channel = cm_->CreateDataChannel(
- transport_controller_, cricket::CN_DATA, false, cricket::DCT_RTP);
+ cricket::DataChannel* data_channel =
+ cm_->CreateDataChannel(transport_controller_, cricket::CN_DATA, nullptr,
+ false, cricket::DCT_RTP);
EXPECT_TRUE(data_channel != nullptr);
cm_->DestroyVideoChannel(video_channel);
cm_->DestroyVoiceChannel(voice_channel);
@@ -144,19 +151,20 @@ TEST_F(ChannelManagerTest, NoTransportChannelTest) {
transport_controller_->set_fail_channel_creation(true);
// The test is useless unless the session does not fail creating
// cricket::TransportChannel.
- ASSERT_TRUE(transport_controller_->CreateTransportChannel_w(
+ ASSERT_TRUE(transport_controller_->CreateTransportChannel_n(
"audio", cricket::ICE_CANDIDATE_COMPONENT_RTP) == nullptr);
- cricket::VoiceChannel* voice_channel =
- cm_->CreateVoiceChannel(&fake_mc_, transport_controller_,
- cricket::CN_AUDIO, false, AudioOptions());
+ cricket::VoiceChannel* voice_channel = cm_->CreateVoiceChannel(
+ &fake_mc_, transport_controller_, cricket::CN_AUDIO, nullptr, false,
+ AudioOptions());
EXPECT_TRUE(voice_channel == nullptr);
- cricket::VideoChannel* video_channel =
- cm_->CreateVideoChannel(&fake_mc_, transport_controller_,
- cricket::CN_VIDEO, false, VideoOptions());
+ cricket::VideoChannel* video_channel = cm_->CreateVideoChannel(
+ &fake_mc_, transport_controller_, cricket::CN_VIDEO, nullptr, false,
+ VideoOptions());
EXPECT_TRUE(video_channel == nullptr);
- cricket::DataChannel* data_channel = cm_->CreateDataChannel(
- transport_controller_, cricket::CN_DATA, false, cricket::DCT_RTP);
+ cricket::DataChannel* data_channel =
+ cm_->CreateDataChannel(transport_controller_, cricket::CN_DATA, nullptr,
+ false, cricket::DCT_RTP);
EXPECT_TRUE(data_channel == nullptr);
cm_->Terminate();
}
@@ -198,7 +206,7 @@ TEST_F(ChannelManagerTest, GetSetOutputVolume) {
TEST_F(ChannelManagerTest, SetVideoRtxEnabled) {
std::vector<VideoCodec> codecs;
- const VideoCodec rtx_codec(96, "rtx", 0, 0, 0, 0);
+ const VideoCodec rtx_codec(96, "rtx", 0, 0, 0);
// By default RTX is disabled.
cm_->GetSupportedVideoCodecs(&codecs);
diff --git a/chromium/third_party/webrtc/pc/currentspeakermonitor.cc b/chromium/third_party/webrtc/pc/currentspeakermonitor.cc
index bbb33e3143c..ce0d579486a 100644
--- a/chromium/third_party/webrtc/pc/currentspeakermonitor.cc
+++ b/chromium/third_party/webrtc/pc/currentspeakermonitor.cc
@@ -10,6 +10,8 @@
#include "webrtc/pc/currentspeakermonitor.h"
+#include <vector>
+
#include "webrtc/base/logging.h"
#include "webrtc/media/base/streamparams.h"
#include "webrtc/pc/audiomonitor.h"
@@ -61,7 +63,7 @@ void CurrentSpeakerMonitor::Stop() {
}
void CurrentSpeakerMonitor::set_min_time_between_switches(
- uint32_t min_time_between_switches) {
+ int min_time_between_switches) {
min_time_between_switches_ = min_time_between_switches;
}
@@ -163,7 +165,7 @@ void CurrentSpeakerMonitor::OnAudioMonitor(
// We avoid over-switching by disabling switching for a period of time after
// a switch is done.
- uint32_t now = rtc::Time();
+ int64_t now = rtc::TimeMillis();
if (earliest_permitted_switch_time_ <= now &&
current_speaker_ssrc_ != loudest_speaker_ssrc) {
current_speaker_ssrc_ = loudest_speaker_ssrc;
diff --git a/chromium/third_party/webrtc/pc/currentspeakermonitor.h b/chromium/third_party/webrtc/pc/currentspeakermonitor.h
index 5ca68ae9416..730ded075b3 100644
--- a/chromium/third_party/webrtc/pc/currentspeakermonitor.h
+++ b/chromium/third_party/webrtc/pc/currentspeakermonitor.h
@@ -11,8 +11,8 @@
// CurrentSpeakerMonitor monitors the audio levels for a session and determines
// which participant is currently speaking.
-#ifndef TALK_SESSION_MEDIA_CURRENTSPEAKERMONITOR_H_
-#define TALK_SESSION_MEDIA_CURRENTSPEAKERMONITOR_H_
+#ifndef WEBRTC_PC_CURRENTSPEAKERMONITOR_H_
+#define WEBRTC_PC_CURRENTSPEAKERMONITOR_H_
#include <map>
@@ -45,7 +45,7 @@ class AudioSourceContext {
// It's recommended that the audio monitor be started with a 100 ms period.
class CurrentSpeakerMonitor : public sigslot::has_slots<> {
public:
- CurrentSpeakerMonitor(AudioSourceContext* audio_source_context);
+ explicit CurrentSpeakerMonitor(AudioSourceContext* audio_source_context);
~CurrentSpeakerMonitor();
void Start();
@@ -54,7 +54,7 @@ class CurrentSpeakerMonitor : public sigslot::has_slots<> {
// Used by tests. Note that the actual minimum time between switches
// enforced by the monitor will be the given value plus or minus the
// resolution of the system clock.
- void set_min_time_between_switches(uint32_t min_time_between_switches);
+ void set_min_time_between_switches(int min_time_between_switches);
// This is fired when the current speaker changes, and provides his audio
// SSRC. This only fires after the audio monitor on the underlying
@@ -86,10 +86,10 @@ class CurrentSpeakerMonitor : public sigslot::has_slots<> {
uint32_t current_speaker_ssrc_;
// To prevent overswitching, switching is disabled for some time after a
// switch is made. This gives us the earliest time a switch is permitted.
- uint32_t earliest_permitted_switch_time_;
- uint32_t min_time_between_switches_;
+ int64_t earliest_permitted_switch_time_;
+ int min_time_between_switches_;
};
-}
+} // namespace cricket
-#endif // TALK_SESSION_MEDIA_CURRENTSPEAKERMONITOR_H_
+#endif // WEBRTC_PC_CURRENTSPEAKERMONITOR_H_
diff --git a/chromium/third_party/webrtc/pc/externalhmac.h b/chromium/third_party/webrtc/pc/externalhmac.h
index 74652b0894f..edca74d4d47 100644
--- a/chromium/third_party/webrtc/pc/externalhmac.h
+++ b/chromium/third_party/webrtc/pc/externalhmac.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_SESSION_MEDIA_EXTERNAL_HMAC_H_
-#define TALK_SESSION_MEDIA_EXTERNAL_HMAC_H_
+#ifndef WEBRTC_PC_EXTERNALHMAC_H_
+#define WEBRTC_PC_EXTERNALHMAC_H_
// External libsrtp HMAC auth module which implements methods defined in
// auth_type_t.
@@ -72,4 +72,4 @@ err_status_t external_hmac_compute(ExternalHmacContext* state,
err_status_t external_crypto_init();
#endif // defined(HAVE_SRTP) && defined(ENABLE_EXTERNAL_AUTH)
-#endif // TALK_SESSION_MEDIA_EXTERNAL_HMAC_H_
+#endif // WEBRTC_PC_EXTERNALHMAC_H_
diff --git a/chromium/third_party/webrtc/pc/mediamonitor.cc b/chromium/third_party/webrtc/pc/mediamonitor.cc
index c9ba2d39640..066094d159f 100644
--- a/chromium/third_party/webrtc/pc/mediamonitor.cc
+++ b/chromium/third_party/webrtc/pc/mediamonitor.cc
@@ -88,4 +88,4 @@ void MediaMonitor::PollMediaChannel() {
worker_thread_->PostDelayed(rate_, this, MSG_MONITOR_POLL);
}
-}
+} // namespace cricket
diff --git a/chromium/third_party/webrtc/pc/mediamonitor.h b/chromium/third_party/webrtc/pc/mediamonitor.h
index c2846bed83d..d294cf13e71 100644
--- a/chromium/third_party/webrtc/pc/mediamonitor.h
+++ b/chromium/third_party/webrtc/pc/mediamonitor.h
@@ -10,8 +10,8 @@
// Class to collect statistics from a media channel
-#ifndef TALK_SESSION_MEDIA_MEDIAMONITOR_H_
-#define TALK_SESSION_MEDIA_MEDIAMONITOR_H_
+#ifndef WEBRTC_PC_MEDIAMONITOR_H_
+#define WEBRTC_PC_MEDIAMONITOR_H_
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/sigslot.h"
@@ -79,4 +79,4 @@ typedef MediaMonitorT<DataMediaChannel, DataMediaInfo> DataMediaMonitor;
} // namespace cricket
-#endif // TALK_SESSION_MEDIA_MEDIAMONITOR_H_
+#endif // WEBRTC_PC_MEDIAMONITOR_H_
diff --git a/chromium/third_party/webrtc/pc/mediasession.cc b/chromium/third_party/webrtc/pc/mediasession.cc
index a9d1b95c22a..0fa20d8bd5d 100644
--- a/chromium/third_party/webrtc/pc/mediasession.cc
+++ b/chromium/third_party/webrtc/pc/mediasession.cc
@@ -10,11 +10,12 @@
#include "webrtc/pc/mediasession.h"
-#include <algorithm> // For std::find_if.
+#include <algorithm> // For std::find_if, std::sort.
#include <functional>
#include <map>
#include <memory>
#include <set>
+#include <unordered_map>
#include <utility>
#include "webrtc/base/helpers.h"
@@ -45,7 +46,7 @@ void GetSupportedCryptoSuiteNames(void (*func)(std::vector<int>*),
}
#endif
}
-}
+} // namespace
namespace cricket {
@@ -210,55 +211,6 @@ static bool SelectCrypto(const MediaContentDescription* offer,
return false;
}
-static const StreamParams* FindFirstStreamParamsByCname(
- const StreamParamsVec& params_vec,
- const std::string& cname) {
- for (StreamParamsVec::const_iterator it = params_vec.begin();
- it != params_vec.end(); ++it) {
- if (cname == it->cname)
- return &*it;
- }
- return NULL;
-}
-
-// Generates a new CNAME or the CNAME of an already existing StreamParams
-// if a StreamParams exist for another Stream in streams with sync_label
-// sync_label.
-static bool GenerateCname(const StreamParamsVec& params_vec,
- const MediaSessionOptions::Streams& streams,
- const std::string& synch_label,
- std::string* cname) {
- ASSERT(cname != NULL);
- if (!cname)
- return false;
-
- // Check if a CNAME exist for any of the other synched streams.
- for (MediaSessionOptions::Streams::const_iterator stream_it = streams.begin();
- stream_it != streams.end() ; ++stream_it) {
- if (synch_label != stream_it->sync_label)
- continue;
-
- // groupid is empty for StreamParams generated using
- // MediaSessionDescriptionFactory.
- const StreamParams* param = GetStreamByIds(params_vec, "", stream_it->id);
- if (param) {
- *cname = param->cname;
- return true;
- }
- }
- // No other stream seems to exist that we should sync with.
- // Generate a random string for the RTCP CNAME, as stated in RFC 6222.
- // This string is only used for synchronization, and therefore is opaque.
- do {
- if (!rtc::CreateRandomString(16, cname)) {
- ASSERT(false);
- return false;
- }
- } while (FindFirstStreamParamsByCname(params_vec, *cname));
-
- return true;
-}
-
// Generate random SSRC values that are not already present in |params_vec|.
// The generated values are added to |ssrcs|.
// |num_ssrcs| is the number of the SSRC will be generated.
@@ -443,15 +395,15 @@ static bool IsSctp(const MediaContentDescription* desc) {
// media_type to content_description.
// |current_params| - All currently known StreamParams of any media type.
template <class C>
-static bool AddStreamParams(
- MediaType media_type,
- const MediaSessionOptions::Streams& streams,
- StreamParamsVec* current_streams,
- MediaContentDescriptionImpl<C>* content_description,
- const bool add_legacy_stream) {
+static bool AddStreamParams(MediaType media_type,
+ const MediaSessionOptions& options,
+ StreamParamsVec* current_streams,
+ MediaContentDescriptionImpl<C>* content_description,
+ const bool add_legacy_stream) {
const bool include_rtx_streams =
ContainsRtxCodec(content_description->codecs());
+ const MediaSessionOptions::Streams& streams = options.streams;
if (streams.empty() && add_legacy_stream) {
// TODO(perkj): Remove this legacy stream when all apps use StreamParams.
std::vector<uint32_t> ssrcs;
@@ -482,13 +434,6 @@ static bool AddStreamParams(
// MediaSessionDescriptionFactory.
if (!param) {
// This is a new stream.
- // Get a CNAME. Either new or same as one of the other synched streams.
- std::string cname;
- if (!GenerateCname(*current_streams, streams, stream_it->sync_label,
- &cname)) {
- return false;
- }
-
std::vector<uint32_t> ssrcs;
if (IsSctp(content_description)) {
GenerateSctpSids(*current_streams, &ssrcs);
@@ -516,7 +461,7 @@ static bool AddStreamParams(
}
content_description->set_multistream(true);
}
- stream_param.cname = cname;
+ stream_param.cname = options.rtcp_cname;
stream_param.sync_label = stream_it->sync_label;
content_description->AddStream(stream_param);
@@ -749,7 +694,6 @@ static bool CreateMediaContentOffer(
StreamParamsVec* current_streams,
MediaContentDescriptionImpl<C>* offer) {
offer->AddCodecs(codecs);
- offer->SortCodecs();
if (secure_policy == SEC_REQUIRED) {
offer->set_crypto_required(CT_SDES);
@@ -761,9 +705,8 @@ static bool CreateMediaContentOffer(
offer->set_multistream(options.is_muc);
offer->set_rtp_header_extensions(rtp_extensions);
- if (!AddStreamParams(
- offer->type(), options.streams, current_streams,
- offer, add_legacy_stream)) {
+ if (!AddStreamParams(offer->type(), options, current_streams, offer,
+ add_legacy_stream)) {
return false;
}
@@ -810,6 +753,8 @@ static void NegotiateCodecs(const std::vector<C>& local_codecs,
std::vector<C>* negotiated_codecs) {
for (const C& ours : local_codecs) {
C theirs;
+ // Note that we intentionally only find one matching codec for each of our
+ // local codecs, in case the remote offer contains duplicate codecs.
if (FindMatchingCodec(local_codecs, offered_codecs, ours, &theirs)) {
C negotiated = ours;
negotiated.IntersectFeedbackParams(theirs);
@@ -822,14 +767,23 @@ static void NegotiateCodecs(const std::vector<C>& local_codecs,
offered_apt_value);
}
negotiated.id = theirs.id;
- // RFC3264: Although the answerer MAY list the formats in their desired
- // order of preference, it is RECOMMENDED that unless there is a
- // specific reason, the answerer list formats in the same relative order
- // they were present in the offer.
- negotiated.preference = theirs.preference;
negotiated_codecs->push_back(negotiated);
}
}
+ // RFC3264: Although the answerer MAY list the formats in their desired
+ // order of preference, it is RECOMMENDED that unless there is a
+ // specific reason, the answerer list formats in the same relative order
+ // they were present in the offer.
+ std::unordered_map<int, int> payload_type_preferences;
+ int preference = static_cast<int>(offered_codecs.size() + 1);
+ for (const C& codec : offered_codecs) {
+ payload_type_preferences[codec.id] = preference--;
+ }
+ std::sort(negotiated_codecs->begin(), negotiated_codecs->end(),
+ [&payload_type_preferences](const C& a, const C& b) {
+ return payload_type_preferences[a.id] >
+ payload_type_preferences[b.id];
+ });
}
// Finds a codec in |codecs2| that matches |codec_to_match|, which is
@@ -1042,7 +996,6 @@ static bool CreateMediaContentAnswer(
std::vector<C> negotiated_codecs;
NegotiateCodecs(local_codecs, offer->codecs(), &negotiated_codecs);
answer->AddCodecs(negotiated_codecs);
- answer->SortCodecs();
answer->set_protocol(offer->protocol());
RtpHeaderExtensions negotiated_rtp_extensions;
NegotiateRtpHeaderExtensions(local_rtp_extenstions,
@@ -1070,9 +1023,8 @@ static bool CreateMediaContentAnswer(
return false;
}
- if (!AddStreamParams(
- answer->type(), options.streams, current_streams,
- answer, add_legacy_stream)) {
+ if (!AddStreamParams(answer->type(), options, current_streams, answer,
+ add_legacy_stream)) {
return false; // Something went seriously wrong.
}
@@ -1105,21 +1057,55 @@ static bool CreateMediaContentAnswer(
return true;
}
+static bool IsDtlsRtp(const std::string& protocol) {
+ // Most-likely values first.
+ return protocol == "UDP/TLS/RTP/SAVPF" || protocol == "TCP/TLS/RTP/SAVPF" ||
+ protocol == "UDP/TLS/RTP/SAVP" || protocol == "TCP/TLS/RTP/SAVP";
+}
+
+static bool IsPlainRtp(const std::string& protocol) {
+ // Most-likely values first.
+ return protocol == "RTP/SAVPF" || protocol == "RTP/AVPF" ||
+ protocol == "RTP/SAVP" || protocol == "RTP/AVP";
+}
+
+static bool IsDtlsSctp(const std::string& protocol) {
+ return protocol == "DTLS/SCTP";
+}
+
+static bool IsPlainSctp(const std::string& protocol) {
+ return protocol == "SCTP";
+}
+
static bool IsMediaProtocolSupported(MediaType type,
const std::string& protocol,
bool secure_transport) {
- // Data channels can have a protocol of SCTP or SCTP/DTLS.
- if (type == MEDIA_TYPE_DATA &&
- ((protocol == kMediaProtocolSctp && !secure_transport)||
- (protocol == kMediaProtocolDtlsSctp && secure_transport))) {
+ // Since not all applications serialize and deserialize the media protocol,
+ // we will have to accept |protocol| to be empty.
+ if (protocol.empty()) {
return true;
}
- // Since not all applications serialize and deserialize the media protocol,
- // we will have to accept |protocol| to be empty.
- return protocol == kMediaProtocolAvpf || protocol.empty() ||
- protocol == kMediaProtocolSavpf ||
- (protocol == kMediaProtocolDtlsSavpf && secure_transport);
+ if (type == MEDIA_TYPE_DATA) {
+ // Check for SCTP, but also for RTP for RTP-based data channels.
+ // TODO(pthatcher): Remove RTP once RTP-based data channels are gone.
+ if (secure_transport) {
+ // Most likely scenarios first.
+ return IsDtlsSctp(protocol) || IsDtlsRtp(protocol) ||
+ IsPlainRtp(protocol);
+ } else {
+ return IsPlainSctp(protocol) || IsPlainRtp(protocol);
+ }
+ }
+
+ // Allow for non-DTLS RTP protocol even when using DTLS because that's what
+ // JSEP specifies.
+ if (secure_transport) {
+ // Most likely scenarios first.
+ return IsDtlsRtp(protocol) || IsPlainRtp(protocol);
+ } else {
+ return IsPlainRtp(protocol);
+ }
}
static void SetMediaProtocol(bool secure_transport,
@@ -1365,8 +1351,8 @@ SessionDescription* MediaSessionDescriptionFactory::CreateAnswer(
const SessionDescription* offer, const MediaSessionOptions& options,
const SessionDescription* current_description) const {
// The answer contains the intersection of the codecs in the offer with the
- // codecs we support, ordered by our local preference. As indicated by
- // XEP-0167, we retain the same payload ids from the offer in the answer.
+ // codecs we support. As indicated by XEP-0167, we retain the same payload ids
+ // from the offer in the answer.
std::unique_ptr<SessionDescription> answer(new SessionDescription());
StreamParamsVec current_streams;
@@ -1933,10 +1919,9 @@ bool IsDataContent(const ContentInfo* content) {
const ContentInfo* GetFirstMediaContent(const ContentInfos& contents,
MediaType media_type) {
- for (ContentInfos::const_iterator content = contents.begin();
- content != contents.end(); content++) {
- if (IsMediaContentOfType(&*content, media_type)) {
- return &*content;
+ for (const ContentInfo& content : contents) {
+ if (IsMediaContentOfType(&content, media_type)) {
+ return &content;
}
}
return nullptr;
@@ -2000,4 +1985,77 @@ const DataContentDescription* GetFirstDataContentDescription(
GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_DATA));
}
+//
+// Non-const versions of the above functions.
+//
+
+ContentInfo* GetFirstMediaContent(ContentInfos& contents,
+ MediaType media_type) {
+ for (ContentInfo& content : contents) {
+ if (IsMediaContentOfType(&content, media_type)) {
+ return &content;
+ }
+ }
+ return nullptr;
+}
+
+ContentInfo* GetFirstAudioContent(ContentInfos& contents) {
+ return GetFirstMediaContent(contents, MEDIA_TYPE_AUDIO);
+}
+
+ContentInfo* GetFirstVideoContent(ContentInfos& contents) {
+ return GetFirstMediaContent(contents, MEDIA_TYPE_VIDEO);
+}
+
+ContentInfo* GetFirstDataContent(ContentInfos& contents) {
+ return GetFirstMediaContent(contents, MEDIA_TYPE_DATA);
+}
+
+static ContentInfo* GetFirstMediaContent(SessionDescription* sdesc,
+ MediaType media_type) {
+ if (sdesc == nullptr) {
+ return nullptr;
+ }
+
+ return GetFirstMediaContent(sdesc->contents(), media_type);
+}
+
+ContentInfo* GetFirstAudioContent(SessionDescription* sdesc) {
+ return GetFirstMediaContent(sdesc, MEDIA_TYPE_AUDIO);
+}
+
+ContentInfo* GetFirstVideoContent(SessionDescription* sdesc) {
+ return GetFirstMediaContent(sdesc, MEDIA_TYPE_VIDEO);
+}
+
+ContentInfo* GetFirstDataContent(SessionDescription* sdesc) {
+ return GetFirstMediaContent(sdesc, MEDIA_TYPE_DATA);
+}
+
+MediaContentDescription* GetFirstMediaContentDescription(
+ SessionDescription* sdesc,
+ MediaType media_type) {
+ ContentInfo* content = GetFirstMediaContent(sdesc, media_type);
+ ContentDescription* description = content ? content->description : NULL;
+ return static_cast<MediaContentDescription*>(description);
+}
+
+AudioContentDescription* GetFirstAudioContentDescription(
+ SessionDescription* sdesc) {
+ return static_cast<AudioContentDescription*>(
+ GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_AUDIO));
+}
+
+VideoContentDescription* GetFirstVideoContentDescription(
+ SessionDescription* sdesc) {
+ return static_cast<VideoContentDescription*>(
+ GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_VIDEO));
+}
+
+DataContentDescription* GetFirstDataContentDescription(
+ SessionDescription* sdesc) {
+ return static_cast<DataContentDescription*>(
+ GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_DATA));
+}
+
} // namespace cricket
diff --git a/chromium/third_party/webrtc/pc/mediasession.h b/chromium/third_party/webrtc/pc/mediasession.h
index ae221554e20..22291c42a64 100644
--- a/chromium/third_party/webrtc/pc/mediasession.h
+++ b/chromium/third_party/webrtc/pc/mediasession.h
@@ -10,8 +10,8 @@
// Types and classes used in media session descriptions.
-#ifndef TALK_SESSION_MEDIA_MEDIASESSION_H_
-#define TALK_SESSION_MEDIA_MEDIASESSION_H_
+#ifndef WEBRTC_PC_MEDIASESSION_H_
+#define WEBRTC_PC_MEDIASESSION_H_
#include <algorithm>
#include <map>
@@ -76,18 +76,21 @@ extern const char kMediaProtocolTcpDtlsSctp[];
const int kAutoBandwidth = -1;
const int kBufferedModeDisabled = 0;
+// Default RTCP CNAME for unit tests.
+const char kDefaultRtcpCname[] = "DefaultRtcpCname";
+
struct MediaSessionOptions {
- MediaSessionOptions() :
- recv_audio(true),
- recv_video(false),
- data_channel_type(DCT_NONE),
- is_muc(false),
- vad_enabled(true), // When disabled, removes all CN codecs from SDP.
- rtcp_mux_enabled(true),
- bundle_enabled(false),
- video_bandwidth(kAutoBandwidth),
- data_bandwidth(kDataMaxBandwidth) {
- }
+ MediaSessionOptions()
+ : recv_audio(true),
+ recv_video(false),
+ data_channel_type(DCT_NONE),
+ is_muc(false),
+ vad_enabled(true), // When disabled, removes all CN codecs from SDP.
+ rtcp_mux_enabled(true),
+ bundle_enabled(false),
+ video_bandwidth(kAutoBandwidth),
+ data_bandwidth(kDataMaxBandwidth),
+ rtcp_cname(kDefaultRtcpCname) {}
bool has_audio() const {
return recv_audio || HasSendMediaStream(MEDIA_TYPE_AUDIO);
@@ -133,6 +136,7 @@ struct MediaSessionOptions {
int data_bandwidth;
// content name ("mid") => options.
std::map<std::string, TransportOptions> transport_options;
+ std::string rtcp_cname;
struct Stream {
Stream(MediaType type,
@@ -293,10 +297,9 @@ class MediaContentDescription : public ContentDescription {
template <class C>
class MediaContentDescriptionImpl : public MediaContentDescription {
public:
- struct PreferenceSort {
- bool operator()(C a, C b) { return a.preference > b.preference; }
- };
+ typedef C CodecType;
+ // Codecs should be in preference order (most preferred codec first).
const std::vector<C>& codecs() const { return codecs_; }
void set_codecs(const std::vector<C>& codecs) { codecs_ = codecs; }
virtual bool has_codecs() const { return !codecs_.empty(); }
@@ -330,9 +333,6 @@ class MediaContentDescriptionImpl : public MediaContentDescription {
AddCodec(*codec);
}
}
- void SortCodecs() {
- std::sort(codecs_.begin(), codecs_.end(), PreferenceSort());
- }
private:
std::vector<C> codecs_;
@@ -532,6 +532,21 @@ const VideoContentDescription* GetFirstVideoContentDescription(
const SessionDescription* sdesc);
const DataContentDescription* GetFirstDataContentDescription(
const SessionDescription* sdesc);
+// Non-const versions of the above functions.
+// Useful when modifying an existing description.
+ContentInfo* GetFirstMediaContent(ContentInfos& contents, MediaType media_type);
+ContentInfo* GetFirstAudioContent(ContentInfos& contents);
+ContentInfo* GetFirstVideoContent(ContentInfos& contents);
+ContentInfo* GetFirstDataContent(ContentInfos& contents);
+ContentInfo* GetFirstAudioContent(SessionDescription* sdesc);
+ContentInfo* GetFirstVideoContent(SessionDescription* sdesc);
+ContentInfo* GetFirstDataContent(SessionDescription* sdesc);
+AudioContentDescription* GetFirstAudioContentDescription(
+ SessionDescription* sdesc);
+VideoContentDescription* GetFirstVideoContentDescription(
+ SessionDescription* sdesc);
+DataContentDescription* GetFirstDataContentDescription(
+ SessionDescription* sdesc);
void GetSupportedAudioCryptoSuites(std::vector<int>* crypto_suites);
void GetSupportedVideoCryptoSuites(std::vector<int>* crypto_suites);
@@ -548,4 +563,4 @@ void GetDefaultSrtpCryptoSuiteNames(
} // namespace cricket
-#endif // TALK_SESSION_MEDIA_MEDIASESSION_H_
+#endif // WEBRTC_PC_MEDIASESSION_H_
diff --git a/chromium/third_party/webrtc/pc/mediasession_unittest.cc b/chromium/third_party/webrtc/pc/mediasession_unittest.cc
index 6ea7aeb8ab9..b14bd801cbe 100644
--- a/chromium/third_party/webrtc/pc/mediasession_unittest.cc
+++ b/chromium/third_party/webrtc/pc/mediasession_unittest.cc
@@ -75,53 +75,43 @@ using rtc::CS_AES_CM_128_HMAC_SHA1_32;
using rtc::CS_AES_CM_128_HMAC_SHA1_80;
static const AudioCodec kAudioCodecs1[] = {
- AudioCodec(103, "ISAC", 16000, -1, 1, 6),
- AudioCodec(102, "iLBC", 8000, 13300, 1, 5),
- AudioCodec(0, "PCMU", 8000, 64000, 1, 4),
- AudioCodec(8, "PCMA", 8000, 64000, 1, 3),
- AudioCodec(117, "red", 8000, 0, 1, 2),
- AudioCodec(107, "CN", 48000, 0, 1, 1)
-};
+ AudioCodec(103, "ISAC", 16000, -1, 1),
+ AudioCodec(102, "iLBC", 8000, 13300, 1),
+ AudioCodec(0, "PCMU", 8000, 64000, 1),
+ AudioCodec(8, "PCMA", 8000, 64000, 1),
+ AudioCodec(117, "red", 8000, 0, 1),
+ AudioCodec(107, "CN", 48000, 0, 1)};
static const AudioCodec kAudioCodecs2[] = {
- AudioCodec(126, "speex", 16000, 22000, 1, 3),
- AudioCodec(0, "PCMU", 8000, 64000, 1, 2),
- AudioCodec(127, "iLBC", 8000, 13300, 1, 1),
+ AudioCodec(126, "speex", 16000, 22000, 1),
+ AudioCodec(0, "PCMU", 8000, 64000, 1),
+ AudioCodec(127, "iLBC", 8000, 13300, 1),
};
static const AudioCodec kAudioCodecsAnswer[] = {
- AudioCodec(102, "iLBC", 8000, 13300, 1, 5),
- AudioCodec(0, "PCMU", 8000, 64000, 1, 4),
+ AudioCodec(102, "iLBC", 8000, 13300, 1),
+ AudioCodec(0, "PCMU", 8000, 64000, 1),
};
static const VideoCodec kVideoCodecs1[] = {
- VideoCodec(96, "H264-SVC", 320, 200, 30, 2),
- VideoCodec(97, "H264", 320, 200, 30, 1)
-};
+ VideoCodec(96, "H264-SVC", 320, 200, 30),
+ VideoCodec(97, "H264", 320, 200, 30)};
static const VideoCodec kVideoCodecs2[] = {
- VideoCodec(126, "H264", 320, 200, 30, 2),
- VideoCodec(127, "H263", 320, 200, 30, 1)
-};
+ VideoCodec(126, "H264", 320, 200, 30),
+ VideoCodec(127, "H263", 320, 200, 30)};
static const VideoCodec kVideoCodecsAnswer[] = {
- VideoCodec(97, "H264", 320, 200, 30, 1)
-};
+ VideoCodec(97, "H264", 320, 200, 30)};
-static const DataCodec kDataCodecs1[] = {
- DataCodec(98, "binary-data", 2),
- DataCodec(99, "utf8-text", 1)
-};
+static const DataCodec kDataCodecs1[] = {DataCodec(98, "binary-data"),
+ DataCodec(99, "utf8-text")};
-static const DataCodec kDataCodecs2[] = {
- DataCodec(126, "binary-data", 2),
- DataCodec(127, "utf8-text", 1)
-};
+static const DataCodec kDataCodecs2[] = {DataCodec(126, "binary-data"),
+ DataCodec(127, "utf8-text")};
-static const DataCodec kDataCodecsAnswer[] = {
- DataCodec(98, "binary-data", 2),
- DataCodec(99, "utf8-text", 1)
-};
+static const DataCodec kDataCodecsAnswer[] = {DataCodec(98, "binary-data"),
+ DataCodec(99, "utf8-text")};
static const RtpHeaderExtension kAudioRtpExtension1[] = {
RtpHeaderExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 8),
@@ -180,6 +170,12 @@ static const char kDataTrack1[] = "data_1";
static const char kDataTrack2[] = "data_2";
static const char kDataTrack3[] = "data_3";
+static const char* kMediaProtocols[] = {"RTP/AVP", "RTP/SAVP", "RTP/AVPF",
+ "RTP/SAVPF"};
+static const char* kMediaProtocolsDtls[] = {
+ "TCP/TLS/RTP/SAVPF", "TCP/TLS/RTP/SAVP", "UDP/TLS/RTP/SAVPF",
+ "UDP/TLS/RTP/SAVP"};
+
static bool IsMediaContentOfType(const ContentInfo* content,
MediaType media_type) {
const MediaContentDescription* mdesc =
@@ -222,9 +218,9 @@ class MediaSessionDescriptionFactoryTest : public testing::Test {
f2_.set_video_codecs(MAKE_VECTOR(kVideoCodecs2));
f2_.set_data_codecs(MAKE_VECTOR(kDataCodecs2));
tdf1_.set_certificate(rtc::RTCCertificate::Create(
- rtc::scoped_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("id1"))));
+ std::unique_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("id1"))));
tdf2_.set_certificate(rtc::RTCCertificate::Create(
- rtc::scoped_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("id2"))));
+ std::unique_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("id2"))));
}
// Create a video StreamParamsVec object with:
@@ -420,12 +416,12 @@ class MediaSessionDescriptionFactoryTest : public testing::Test {
opts.recv_video = true;
std::unique_ptr<SessionDescription> offer(f1_.CreateOffer(opts, NULL));
ASSERT_TRUE(offer.get() != NULL);
- ContentInfo* ac_offer= offer->GetContentByName("audio");
+ ContentInfo* ac_offer = offer->GetContentByName("audio");
ASSERT_TRUE(ac_offer != NULL);
AudioContentDescription* acd_offer =
static_cast<AudioContentDescription*>(ac_offer->description);
acd_offer->set_direction(direction_in_offer);
- ContentInfo* vc_offer= offer->GetContentByName("video");
+ ContentInfo* vc_offer = offer->GetContentByName("video");
ASSERT_TRUE(vc_offer != NULL);
VideoContentDescription* vcd_offer =
static_cast<VideoContentDescription*>(vc_offer->description);
@@ -899,7 +895,7 @@ TEST_F(MediaSessionDescriptionFactoryTest,
f1_.set_secure(SEC_ENABLED);
f2_.set_secure(SEC_ENABLED);
std::unique_ptr<SessionDescription> offer(f1_.CreateOffer(opts, NULL));
- ContentInfo* dc_offer= offer->GetContentByName("data");
+ ContentInfo* dc_offer = offer->GetContentByName("data");
ASSERT_TRUE(dc_offer != NULL);
DataContentDescription* dcd_offer =
static_cast<DataContentDescription*>(dc_offer->description);
@@ -1316,7 +1312,8 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoOffer) {
ASSERT_EQ(2U, updated_video_streams.size());
EXPECT_EQ(video_streams[0], updated_video_streams[0]);
EXPECT_EQ(kVideoTrack2, updated_video_streams[1].id);
- EXPECT_NE(updated_video_streams[1].cname, updated_video_streams[0].cname);
+ // All the media streams in one PeerConnection share one RTCP CNAME.
+ EXPECT_EQ(updated_video_streams[1].cname, updated_video_streams[0].cname);
const StreamParamsVec& updated_data_streams = updated_dcd->streams();
ASSERT_EQ(2U, updated_data_streams.size());
@@ -1325,6 +1322,10 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoOffer) {
ASSERT_EQ(1U, updated_data_streams[1].ssrcs.size());
EXPECT_NE(0U, updated_data_streams[1].ssrcs[0]);
EXPECT_EQ(updated_data_streams[0].cname, updated_data_streams[1].cname);
+ // The stream correctly got the CNAME from the MediaSessionOptions.
+ // The Expected RTCP CNAME is the default one as we are using the default
+ // MediaSessionOptions.
+ EXPECT_EQ(updated_data_streams[0].cname, cricket::kDefaultRtcpCname);
}
// Create an offer with simulcast video stream.
@@ -1435,7 +1436,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoAnswer) {
EXPECT_TRUE(dcd->rtcp_mux()); // rtcp-mux defaults on
// Update the answer. Add a new video track that is not synched to the
- // other traacks and remove 1 audio track.
+ // other tracks and remove 1 audio track.
opts.AddSendStream(MEDIA_TYPE_VIDEO, kVideoTrack2, kMediaStream2);
opts.RemoveSendStream(MEDIA_TYPE_AUDIO, kAudioTrack2);
opts.RemoveSendStream(MEDIA_TYPE_DATA, kDataTrack2);
@@ -1478,7 +1479,8 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoAnswer) {
ASSERT_EQ(2U, updated_video_streams.size());
EXPECT_EQ(video_streams[0], updated_video_streams[0]);
EXPECT_EQ(kVideoTrack2, updated_video_streams[1].id);
- EXPECT_NE(updated_video_streams[1].cname, updated_video_streams[0].cname);
+ // All media streams in one PeerConnection share one CNAME.
+ EXPECT_EQ(updated_video_streams[1].cname, updated_video_streams[0].cname);
const StreamParamsVec& updated_data_streams = updated_dcd->streams();
ASSERT_EQ(1U, updated_data_streams.size());
@@ -1570,11 +1572,9 @@ TEST_F(MediaSessionDescriptionFactoryTest,
EXPECT_EQ(expected_codecs, vcd->codecs());
- // Now, make sure we get same result, except for the preference order,
- // if |f2_| creates an updated offer even though the default payload types
- // are different from |f1_|.
- expected_codecs[0].preference = f1_codecs[1].preference;
-
+ // Now, make sure we get same result (except for the order) if |f2_| creates
+ // an updated offer even though the default payload types between |f1_| and
+ // |f2_| are different.
std::unique_ptr<SessionDescription> updated_offer(
f2_.CreateOffer(opts, answer.get()));
ASSERT_TRUE(updated_offer);
@@ -1694,7 +1694,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, RtxWithoutApt) {
opts.recv_audio = false;
std::vector<VideoCodec> f1_codecs = MAKE_VECTOR(kVideoCodecs1);
// This creates RTX without associated payload type parameter.
- AddRtxCodec(VideoCodec(126, cricket::kRtxCodecName, 0, 0, 0, 0), &f1_codecs);
+ AddRtxCodec(VideoCodec(126, cricket::kRtxCodecName, 0, 0, 0), &f1_codecs);
f1_.set_video_codecs(f1_codecs);
std::vector<VideoCodec> f2_codecs = MAKE_VECTOR(kVideoCodecs2);
@@ -1842,7 +1842,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, SimSsrcsGenerateMultipleRtxSsrcs) {
// Use a single real codec, and then add RTX for it.
std::vector<VideoCodec> f1_codecs;
- f1_codecs.push_back(VideoCodec(97, "H264", 320, 200, 30, 1));
+ f1_codecs.push_back(VideoCodec(97, "H264", 320, 200, 30));
AddRtxCodec(VideoCodec::CreateRtxCodec(125, 97), &f1_codecs);
f1_.set_video_codecs(f1_codecs);
@@ -2391,3 +2391,62 @@ TEST_F(MediaSessionDescriptionFactoryTest,
EXPECT_EQ("video_modified", video_content->name);
EXPECT_EQ("data_modified", data_content->name);
}
+
+class MediaProtocolTest : public ::testing::TestWithParam<const char*> {
+ public:
+ MediaProtocolTest() : f1_(&tdf1_), f2_(&tdf2_) {
+ f1_.set_audio_codecs(MAKE_VECTOR(kAudioCodecs1));
+ f1_.set_video_codecs(MAKE_VECTOR(kVideoCodecs1));
+ f1_.set_data_codecs(MAKE_VECTOR(kDataCodecs1));
+ f2_.set_audio_codecs(MAKE_VECTOR(kAudioCodecs2));
+ f2_.set_video_codecs(MAKE_VECTOR(kVideoCodecs2));
+ f2_.set_data_codecs(MAKE_VECTOR(kDataCodecs2));
+ f1_.set_secure(SEC_ENABLED);
+ f2_.set_secure(SEC_ENABLED);
+ tdf1_.set_certificate(rtc::RTCCertificate::Create(
+ std::unique_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("id1"))));
+ tdf2_.set_certificate(rtc::RTCCertificate::Create(
+ std::unique_ptr<rtc::SSLIdentity>(new rtc::FakeSSLIdentity("id2"))));
+ tdf1_.set_secure(SEC_ENABLED);
+ tdf2_.set_secure(SEC_ENABLED);
+ }
+
+ protected:
+ MediaSessionDescriptionFactory f1_;
+ MediaSessionDescriptionFactory f2_;
+ TransportDescriptionFactory tdf1_;
+ TransportDescriptionFactory tdf2_;
+};
+
+TEST_P(MediaProtocolTest, TestAudioVideoAcceptance) {
+ MediaSessionOptions opts;
+ opts.recv_video = true;
+ std::unique_ptr<SessionDescription> offer(f1_.CreateOffer(opts, nullptr));
+ ASSERT_TRUE(offer.get() != nullptr);
+ // Set the protocol for all the contents.
+ for (auto content : offer.get()->contents()) {
+ static_cast<MediaContentDescription*>(content.description)
+ ->set_protocol(GetParam());
+ }
+ std::unique_ptr<SessionDescription> answer(
+ f2_.CreateAnswer(offer.get(), opts, nullptr));
+ const ContentInfo* ac = answer->GetContentByName("audio");
+ const ContentInfo* vc = answer->GetContentByName("video");
+ ASSERT_TRUE(ac != nullptr);
+ ASSERT_TRUE(vc != nullptr);
+ EXPECT_FALSE(ac->rejected); // the offer is accepted
+ EXPECT_FALSE(vc->rejected);
+ const AudioContentDescription* acd =
+ static_cast<const AudioContentDescription*>(ac->description);
+ const VideoContentDescription* vcd =
+ static_cast<const VideoContentDescription*>(vc->description);
+ EXPECT_EQ(GetParam(), acd->protocol());
+ EXPECT_EQ(GetParam(), vcd->protocol());
+}
+
+INSTANTIATE_TEST_CASE_P(MediaProtocolPatternTest,
+ MediaProtocolTest,
+ ::testing::ValuesIn(kMediaProtocols));
+INSTANTIATE_TEST_CASE_P(MediaProtocolDtlsPatternTest,
+ MediaProtocolTest,
+ ::testing::ValuesIn(kMediaProtocolsDtls));
diff --git a/chromium/third_party/webrtc/pc/mediasink.h b/chromium/third_party/webrtc/pc/mediasink.h
index e97a390aedd..01bd3fa1739 100644
--- a/chromium/third_party/webrtc/pc/mediasink.h
+++ b/chromium/third_party/webrtc/pc/mediasink.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_SESSION_MEDIA_MEDIASINK_H_
-#define TALK_SESSION_MEDIA_MEDIASINK_H_
+#ifndef WEBRTC_PC_MEDIASINK_H_
+#define WEBRTC_PC_MEDIASINK_H_
namespace cricket {
@@ -28,4 +28,4 @@ class MediaSinkInterface {
} // namespace cricket
-#endif // TALK_SESSION_MEDIA_MEDIASINK_H_
+#endif // WEBRTC_PC_MEDIASINK_H_
diff --git a/chromium/third_party/webrtc/pc/pc.gyp b/chromium/third_party/webrtc/pc/pc.gyp
index b07f7b72638..25ebc5de78c 100755
--- a/chromium/third_party/webrtc/pc/pc.gyp
+++ b/chromium/third_party/webrtc/pc/pc.gyp
@@ -33,11 +33,6 @@
'defines': [
'<@(rtc_pc_defines)',
],
- # TODO(kjellander): Make the code compile without disabling these flags.
- # See https://bugs.chromium.org/p/webrtc/issues/detail?id=3307
- 'cflags_cc!': [
- '-Wnon-virtual-dtor',
- ],
'include_dirs': [
'<(DEPTH)/testing/gtest/include',
],
@@ -97,11 +92,6 @@
'rtcpmuxfilter_unittest.cc',
'srtpfilter_unittest.cc',
],
- # TODO(kjellander): Make the code compile without disabling these flags.
- # See https://bugs.chromium.org/p/webrtc/issues/detail?id=3307
- 'cflags_cc!': [
- '-Wnon-virtual-dtor',
- ],
'conditions': [
['clang==0', {
'cflags': [
diff --git a/chromium/third_party/webrtc/pc/rtcpmuxfilter.h b/chromium/third_party/webrtc/pc/rtcpmuxfilter.h
index 272c804bec1..94dc41d980e 100644
--- a/chromium/third_party/webrtc/pc/rtcpmuxfilter.h
+++ b/chromium/third_party/webrtc/pc/rtcpmuxfilter.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_SESSION_MEDIA_RTCPMUXFILTER_H_
-#define TALK_SESSION_MEDIA_RTCPMUXFILTER_H_
+#ifndef WEBRTC_PC_RTCPMUXFILTER_H_
+#define WEBRTC_PC_RTCPMUXFILTER_H_
#include "webrtc/base/basictypes.h"
#include "webrtc/p2p/base/sessiondescription.h"
@@ -69,4 +69,4 @@ class RtcpMuxFilter {
} // namespace cricket
-#endif // TALK_SESSION_MEDIA_RTCPMUXFILTER_H_
+#endif // WEBRTC_PC_RTCPMUXFILTER_H_
diff --git a/chromium/third_party/webrtc/pc/srtpfilter.cc b/chromium/third_party/webrtc/pc/srtpfilter.cc
index e8ea2890eda..e4796fd6720 100644
--- a/chromium/third_party/webrtc/pc/srtpfilter.cc
+++ b/chromium/third_party/webrtc/pc/srtpfilter.cc
@@ -259,7 +259,7 @@ bool SrtpFilter::GetRtpAuthParams(uint8_t** key, int* key_len, int* tag_len) {
return send_session_->GetRtpAuthParams(key, key_len, tag_len);
}
-void SrtpFilter::set_signal_silent_time(uint32_t signal_silent_time_in_ms) {
+void SrtpFilter::set_signal_silent_time(int signal_silent_time_in_ms) {
signal_silent_time_in_ms_ = signal_silent_time_in_ms;
if (IsActive()) {
ASSERT(send_session_ != NULL);
@@ -641,7 +641,7 @@ bool SrtpSession::GetSendStreamPacketIndex(void* p,
return true;
}
-void SrtpSession::set_signal_silent_time(uint32_t signal_silent_time_in_ms) {
+void SrtpSession::set_signal_silent_time(int signal_silent_time_in_ms) {
srtp_stat_->set_signal_silent_time(signal_silent_time_in_ms);
}
@@ -891,10 +891,10 @@ void SrtpStat::HandleSrtpResult(const SrtpStat::FailureKey& key) {
if (key.error != SrtpFilter::ERROR_NONE) {
// For errors, signal first time and wait for 1 sec.
FailureStat* stat = &(failures_[key]);
- uint32_t current_time = rtc::Time();
+ int64_t current_time = rtc::TimeMillis();
if (stat->last_signal_time == 0 ||
rtc::TimeDiff(current_time, stat->last_signal_time) >
- static_cast<int>(signal_silent_time_)) {
+ signal_silent_time_) {
SignalSrtpError(key.ssrc, key.mode, key.error);
stat->last_signal_time = current_time;
}
diff --git a/chromium/third_party/webrtc/pc/srtpfilter.h b/chromium/third_party/webrtc/pc/srtpfilter.h
index f4c1d33dbfa..b54eb8bc866 100644
--- a/chromium/third_party/webrtc/pc/srtpfilter.h
+++ b/chromium/third_party/webrtc/pc/srtpfilter.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef TALK_SESSION_MEDIA_SRTPFILTER_H_
-#define TALK_SESSION_MEDIA_SRTPFILTER_H_
+#ifndef WEBRTC_PC_SRTPFILTER_H_
+#define WEBRTC_PC_SRTPFILTER_H_
#include <list>
#include <map>
@@ -18,6 +18,7 @@
#include <vector>
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/sigslotrepeater.h"
#include "webrtc/base/sslstreamadapter.h"
@@ -119,7 +120,7 @@ class SrtpFilter {
bool GetRtpAuthParams(uint8_t** key, int* key_len, int* tag_len);
// Update the silent threshold (in ms) for signaling errors.
- void set_signal_silent_time(uint32_t signal_silent_time_in_ms);
+ void set_signal_silent_time(int signal_silent_time_in_ms);
bool ResetParams();
@@ -165,7 +166,7 @@ class SrtpFilter {
ST_RECEIVEDPRANSWER
};
State state_;
- uint32_t signal_silent_time_in_ms_;
+ int signal_silent_time_in_ms_;
std::vector<CryptoParams> offer_params_;
std::unique_ptr<SrtpSession> send_session_;
std::unique_ptr<SrtpSession> recv_session_;
@@ -207,7 +208,7 @@ class SrtpSession {
bool GetRtpAuthParams(uint8_t** key, int* key_len, int* tag_len);
// Update the silent threshold (in ms) for signaling errors.
- void set_signal_silent_time(uint32_t signal_silent_time_in_ms);
+ void set_signal_silent_time(int signal_silent_time_in_ms);
// Calls srtp_shutdown if it's initialized.
static void Terminate();
@@ -251,9 +252,9 @@ class SrtpStat {
void AddUnprotectRtcpResult(int result);
// Get silent time (in ms) for SRTP statistics handler.
- uint32_t signal_silent_time() const { return signal_silent_time_; }
+ int signal_silent_time() const { return signal_silent_time_; }
// Set silent time (in ms) for SRTP statistics handler.
- void set_signal_silent_time(uint32_t signal_silent_time) {
+ void set_signal_silent_time(int signal_silent_time) {
signal_silent_time_ = signal_silent_time;
}
@@ -295,7 +296,7 @@ class SrtpStat {
void Reset() {
last_signal_time = 0;
}
- uint32_t last_signal_time;
+ int64_t last_signal_time;
};
// Inspect SRTP result and signal error if needed.
@@ -303,11 +304,11 @@ class SrtpStat {
std::map<FailureKey, FailureStat> failures_;
// Threshold in ms to silent the signaling errors.
- uint32_t signal_silent_time_;
+ int signal_silent_time_;
RTC_DISALLOW_COPY_AND_ASSIGN(SrtpStat);
};
} // namespace cricket
-#endif // TALK_SESSION_MEDIA_SRTPFILTER_H_
+#endif // WEBRTC_PC_SRTPFILTER_H_
diff --git a/chromium/third_party/webrtc/pc/srtpfilter_unittest.cc b/chromium/third_party/webrtc/pc/srtpfilter_unittest.cc
index 2eee35c2023..cc5b3e5fb3c 100644
--- a/chromium/third_party/webrtc/pc/srtpfilter_unittest.cc
+++ b/chromium/third_party/webrtc/pc/srtpfilter_unittest.cc
@@ -9,6 +9,7 @@
*/
#include "webrtc/base/byteorder.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/gunit.h"
#include "webrtc/base/thread.h"
#include "webrtc/media/base/cryptoparams.h"
diff --git a/chromium/third_party/webrtc/pc/voicechannel.h b/chromium/third_party/webrtc/pc/voicechannel.h
index 9b6f16eaba3..78524ab555c 100644
--- a/chromium/third_party/webrtc/pc/voicechannel.h
+++ b/chromium/third_party/webrtc/pc/voicechannel.h
@@ -8,9 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef _VOICECHANNEL_H_
-#define _VOICECHANNEL_H_
+#ifndef WEBRTC_PC_VOICECHANNEL_H_
+#define WEBRTC_PC_VOICECHANNEL_H_
#include "webrtc/pc/channel.h"
-#endif // _VOICECHANNEL_H_
+#endif // WEBRTC_PC_VOICECHANNEL_H_
diff --git a/chromium/third_party/webrtc/pc/yuvscaler_unittest.cc b/chromium/third_party/webrtc/pc/yuvscaler_unittest.cc
index 9e0f8ed2c0c..a33d49523d7 100644
--- a/chromium/third_party/webrtc/pc/yuvscaler_unittest.cc
+++ b/chromium/third_party/webrtc/pc/yuvscaler_unittest.cc
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
#include <sstream>
#include "libyuv/cpu_id.h"
diff --git a/chromium/third_party/webrtc/rtc_unittests_apk.isolate b/chromium/third_party/webrtc/rtc_unittests_apk.isolate
new file mode 100644
index 00000000000..14fd20a81d2
--- /dev/null
+++ b/chromium/third_party/webrtc/rtc_unittests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../build/android/android.isolate',
+ 'rtc_unittests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_rtc_unittests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../build/config/',
+ '../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/rtc_unittests_apk/',
+ '<(PRODUCT_DIR)/bin/run_rtc_unittests',
+ 'rtc_unittests.isolate',
+ ]
+ },
+}
diff --git a/chromium/third_party/webrtc/sdk/BUILD.gn b/chromium/third_party/webrtc/sdk/BUILD.gn
new file mode 100644
index 00000000000..91a4c8828ed
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/BUILD.gn
@@ -0,0 +1,155 @@
+# Copyright 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../build/webrtc.gni")
+import("//build_overrides/webrtc.gni")
+
+if (is_ios) {
+ source_set("rtc_sdk_common_objc") {
+ deps = [
+ "../base:rtc_base",
+ ]
+ cflags = [ "-fobjc-arc" ]
+ configs += [ "..:common_config" ]
+ public_configs = [ "..:common_inherited_config" ]
+ include_dirs = [
+ "objc/Framework/Classes",
+ "objc/Framework/Headers",
+ ]
+ sources = [
+ "objc/Framework/Classes/NSString+StdString.h",
+ "objc/Framework/Classes/NSString+StdString.mm",
+ "objc/Framework/Classes/RTCCameraPreviewView.m",
+ "objc/Framework/Classes/RTCDispatcher.m",
+ "objc/Framework/Classes/RTCFieldTrials.mm",
+ "objc/Framework/Classes/RTCLogging.mm",
+ "objc/Framework/Classes/RTCSSLAdapter.mm",
+ "objc/Framework/Classes/RTCTracing.mm",
+ "objc/Framework/Classes/RTCUIApplication.h",
+ "objc/Framework/Classes/RTCUIApplication.mm",
+ "objc/Framework/Headers/WebRTC/RTCCameraPreviewView.h",
+ "objc/Framework/Headers/WebRTC/RTCDispatcher.h",
+ "objc/Framework/Headers/WebRTC/RTCFieldTrials.h",
+ "objc/Framework/Headers/WebRTC/RTCLogging.h",
+ "objc/Framework/Headers/WebRTC/RTCMacros.h",
+ "objc/Framework/Headers/WebRTC/RTCSSLAdapter.h",
+ "objc/Framework/Headers/WebRTC/RTCTracing.h",
+ ]
+ if (!build_with_chromium) {
+ sources += [
+ "objc/Framework/Classes/RTCFileLogger.mm",
+ "objc/Framework/Headers/WebRTC/RTCFileLogger.h",
+ ]
+ }
+ libs = [ "AVFoundation.framework" ]
+ }
+
+ source_set("rtc_sdk_peerconnection_objc") {
+ deps = [
+ ":rtc_sdk_common_objc",
+ #"../../talk/libjingle:libjingle_peerconnection",
+ ]
+ cflags = [
+ "-fobjc-arc",
+ "-Wobjc-missing-property-synthesis",
+ ]
+ include_dirs = [
+ "objc/Framework/Classes",
+ "objc/Framework/Headers",
+ ]
+ sources = [
+ # Add these when there's a BUILD.gn for peer connection APIs
+ #"objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h",
+ #"objc/Framework/Classes/RTCAVFoundationVideoSource.mm",
+ #"objc/Framework/Classes/RTCAudioTrack+Private.h",
+ #"objc/Framework/Classes/RTCAudioTrack.mm",
+ #"objc/Framework/Classes/RTCConfiguration+Private.h",
+ #"objc/Framework/Classes/RTCConfiguration.mm",
+ #"objc/Framework/Classes/RTCDataChannel+Private.h",
+ #"objc/Framework/Classes/RTCDataChannel.mm",
+ #"objc/Framework/Classes/RTCDataChannelConfiguration+Private.h",
+ #"objc/Framework/Classes/RTCDataChannelConfiguration.mm",
+ #"objc/Framework/Classes/RTCIceCandidate+Private.h",
+ #"objc/Framework/Classes/RTCIceCandidate.mm",
+ #"objc/Framework/Classes/RTCIceServer+Private.h",
+ #"objc/Framework/Classes/RTCIceServer.mm",
+ #"objc/Framework/Classes/RTCMediaConstraints+Private.h",
+ #"objc/Framework/Classes/RTCMediaConstraints.mm",
+ #"objc/Framework/Classes/RTCMediaStream+Private.h",
+ #"objc/Framework/Classes/RTCMediaStream.mm",
+ #"objc/Framework/Classes/RTCMediaStreamTrack+Private.h",
+ #"objc/Framework/Classes/RTCMediaStreamTrack.mm",
+ #"objc/Framework/Classes/RTCOpenGLVideoRenderer.mm",
+ #"objc/Framework/Classes/RTCPeerConnection+Private.h",
+ #"objc/Framework/Classes/RTCPeerConnection.mm",
+ #"objc/Framework/Classes/RTCPeerConnectionFactory+Private.h",
+ #"objc/Framework/Classes/RTCPeerConnectionFactory.mm",
+ #"objc/Framework/Classes/RTCRtpEncodingParameters+Private.h",
+ #"objc/Framework/Classes/RTCRtpEncodingParameters.mm",
+ #"objc/Framework/Classes/RTCRtpParameters+Private.h",
+ #"objc/Framework/Classes/RTCRtpParameters.mm",
+ #"objc/Framework/Classes/RTCRtpReceiver+Private.h",
+ #"objc/Framework/Classes/RTCRtpReceiver.mm",
+ #"objc/Framework/Classes/RTCRtpSender+Private.h",
+ #"objc/Framework/Classes/RTCRtpSender.mm",
+ #"objc/Framework/Classes/RTCSessionDescription+Private.h",
+ #"objc/Framework/Classes/RTCSessionDescription.mm",
+ #"objc/Framework/Classes/RTCStatsReport+Private.h",
+ #"objc/Framework/Classes/RTCStatsReport.mm",
+ #"objc/Framework/Classes/RTCVideoFrame+Private.h",
+ #"objc/Framework/Classes/RTCVideoFrame.mm",
+ #"objc/Framework/Classes/RTCVideoRendererAdapter+Private.h",
+ #"objc/Framework/Classes/RTCVideoRendererAdapter.h",
+ #"objc/Framework/Classes/RTCVideoRendererAdapter.mm",
+ #"objc/Framework/Classes/RTCVideoSource+Private.h",
+ #"objc/Framework/Classes/RTCVideoSource.mm",
+ #"objc/Framework/Classes/RTCVideoTrack+Private.h",
+ #"objc/Framework/Classes/RTCVideoTrack.mm",
+ #"objc/Framework/Classes/avfoundationvideocapturer.h",
+ #"objc/Framework/Classes/avfoundationvideocapturer.mm",
+ #"objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h",
+ #"objc/Framework/Headers/WebRTC/RTCAudioTrack.h",
+ #"objc/Framework/Headers/WebRTC/RTCConfiguration.h",
+ #"objc/Framework/Headers/WebRTC/RTCDataChannel.h",
+ #"objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h",
+ #"objc/Framework/Headers/WebRTC/RTCIceCandidate.h",
+ #"objc/Framework/Headers/WebRTC/RTCIceServer.h",
+ #"objc/Framework/Headers/WebRTC/RTCMediaConstraints.h",
+ #"objc/Framework/Headers/WebRTC/RTCMediaStream.h",
+ #"objc/Framework/Headers/WebRTC/RTCMediaStreamTrack.h",
+ #"objc/Framework/Headers/WebRTC/RTCOpenGLVideoRenderer.h",
+ #"objc/Framework/Headers/WebRTC/RTCPeerConnection.h",
+ #"objc/Framework/Headers/WebRTC/RTCPeerConnectionFactory.h",
+ #"objc/Framework/Headers/WebRTC/RTCRtpCodecParameters.h",
+ #"objc/Framework/Headers/WebRTC/RTCRtpEncodingParameters.h",
+ #"objc/Framework/Headers/WebRTC/RTCRtpParameters.h",
+ #"objc/Framework/Headers/WebRTC/RTCRtpReceiver.h",
+ #"objc/Framework/Headers/WebRTC/RTCRtpSender.h",
+ #"objc/Framework/Headers/WebRTC/RTCSessionDescription.h",
+ #"objc/Framework/Headers/WebRTC/RTCStatsReport.h",
+ #"objc/Framework/Headers/WebRTC/RTCVideoFrame.h",
+ #"objc/Framework/Headers/WebRTC/RTCVideoRenderer.h",
+ #"objc/Framework/Headers/WebRTC/RTCVideoSource.h",
+ #"objc/Framework/Headers/WebRTC/RTCVideoTrack.h",
+ ]
+
+ if (is_ios) {
+ sources += [
+ "objc/Framework/Classes/RTCEAGLVideoView.m",
+ "objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h",
+ ]
+ }
+
+ if (is_mac) {
+ sources += [
+ "objc/Framework/Classes/RTCNSGLVideoView.m",
+ "objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h",
+ ]
+ }
+ }
+}
diff --git a/chromium/third_party/webrtc/modules/video_render/DEPS b/chromium/third_party/webrtc/sdk/DEPS
index 58ae9fe714f..1bb7a2fe8c8 100644
--- a/chromium/third_party/webrtc/modules/video_render/DEPS
+++ b/chromium/third_party/webrtc/sdk/DEPS
@@ -1,5 +1,6 @@
include_rules = [
- "+webrtc/base",
- "+webrtc/common_video",
+ "+WebRTC",
+ "+webrtc/api",
+ "+webrtc/media",
"+webrtc/system_wrappers",
]
diff --git a/chromium/third_party/webrtc/api/objc/OWNERS b/chromium/third_party/webrtc/sdk/OWNERS
index cd06158b7fc..cd06158b7fc 100644
--- a/chromium/third_party/webrtc/api/objc/OWNERS
+++ b/chromium/third_party/webrtc/sdk/OWNERS
diff --git a/chromium/third_party/webrtc/base/objc/NSString+StdString.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/NSString+StdString.h
index 8bf6cc94be0..8bf6cc94be0 100644
--- a/chromium/third_party/webrtc/base/objc/NSString+StdString.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/NSString+StdString.h
diff --git a/chromium/third_party/webrtc/base/objc/NSString+StdString.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/NSString+StdString.mm
index 3210ff0b65d..3210ff0b65d 100644
--- a/chromium/third_party/webrtc/base/objc/NSString+StdString.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/NSString+StdString.mm
diff --git a/chromium/third_party/webrtc/api/objc/RTCAVFoundationVideoSource+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h
index 067e5067c05..7a4de08240b 100644
--- a/chromium/third_party/webrtc/api/objc/RTCAVFoundationVideoSource+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h
@@ -8,9 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCAVFoundationVideoSource.h"
+#import "WebRTC/RTCAVFoundationVideoSource.h"
-#include "webrtc/api/objc/avfoundationvideocapturer.h"
+#include "avfoundationvideocapturer.h"
NS_ASSUME_NONNULL_BEGIN
diff --git a/chromium/third_party/webrtc/api/objc/RTCAVFoundationVideoSource.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoSource.mm
index 1005c7dfa12..528e8cb6244 100644
--- a/chromium/third_party/webrtc/api/objc/RTCAVFoundationVideoSource.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCAVFoundationVideoSource.mm
@@ -10,20 +10,25 @@
#import "RTCAVFoundationVideoSource+Private.h"
-#import "webrtc/api/objc/RTCMediaConstraints+Private.h"
-#import "webrtc/api/objc/RTCPeerConnectionFactory+Private.h"
-#import "webrtc/api/objc/RTCVideoSource+Private.h"
+#import "RTCMediaConstraints+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCVideoSource+Private.h"
-@implementation RTCAVFoundationVideoSource
+@implementation RTCAVFoundationVideoSource {
+ webrtc::AVFoundationVideoCapturer *_capturer;
+}
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
constraints:(RTCMediaConstraints *)constraints {
NSParameterAssert(factory);
- rtc::scoped_ptr<webrtc::AVFoundationVideoCapturer> capturer;
- capturer.reset(new webrtc::AVFoundationVideoCapturer());
+ // We pass ownership of the capturer to the source, but since we own
+ // the source, it should be ok to keep a raw pointer to the
+ // capturer.
+ _capturer = new webrtc::AVFoundationVideoCapturer();
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
factory.nativeFactory->CreateVideoSource(
- capturer.release(), constraints.nativeConstraints.get());
+ _capturer, constraints.nativeConstraints.get());
+
return [super initWithNativeVideoSource:source];
}
@@ -44,12 +49,7 @@
}
- (webrtc::AVFoundationVideoCapturer *)capturer {
- cricket::VideoCapturer *capturer = self.nativeVideoSource->GetVideoCapturer();
- // This should be safe because no one should have changed the underlying video
- // source.
- webrtc::AVFoundationVideoCapturer *foundationCapturer =
- static_cast<webrtc::AVFoundationVideoCapturer *>(capturer);
- return foundationCapturer;
+ return _capturer;
}
@end
diff --git a/chromium/third_party/webrtc/api/objc/RTCAudioTrack+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCAudioTrack+Private.h
index ea86b1feaa7..cb5f1865aee 100644
--- a/chromium/third_party/webrtc/api/objc/RTCAudioTrack+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCAudioTrack+Private.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCAudioTrack.h"
+#import "WebRTC/RTCAudioTrack.h"
#include "webrtc/api/mediastreaminterface.h"
diff --git a/chromium/third_party/webrtc/api/objc/RTCAudioTrack.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCAudioTrack.mm
index 158d1b362b8..42542b8b8f0 100644
--- a/chromium/third_party/webrtc/api/objc/RTCAudioTrack.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCAudioTrack.mm
@@ -8,12 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCAudioTrack.h"
+#import "RTCAudioTrack+Private.h"
-#import "webrtc/api/objc/RTCAudioTrack+Private.h"
-#import "webrtc/api/objc/RTCMediaStreamTrack+Private.h"
-#import "webrtc/api/objc/RTCPeerConnectionFactory+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
@implementation RTCAudioTrack
diff --git a/chromium/third_party/webrtc/base/objc/RTCCameraPreviewView.m b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCCameraPreviewView.m
index 5a57483676f..659973ff180 100644
--- a/chromium/third_party/webrtc/base/objc/RTCCameraPreviewView.m
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCCameraPreviewView.m
@@ -8,15 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#if !defined(__has_feature) || !__has_feature(objc_arc)
-#error "This file requires ARC support."
-#endif
-
-#import "webrtc/base/objc/RTCCameraPreviewView.h"
+#import "WebRTC/RTCCameraPreviewView.h"
#import <AVFoundation/AVFoundation.h>
-#import "webrtc/base/objc/RTCDispatcher.h"
+#import "RTCDispatcher+Private.h"
@implementation RTCCameraPreviewView
diff --git a/chromium/third_party/webrtc/api/objc/RTCConfiguration+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCConfiguration+Private.h
index 001dac60166..7f90b35246b 100644
--- a/chromium/third_party/webrtc/api/objc/RTCConfiguration+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCConfiguration+Private.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCConfiguration.h"
+#import "WebRTC/RTCConfiguration.h"
#include "webrtc/api/peerconnectioninterface.h"
@@ -16,13 +16,6 @@ NS_ASSUME_NONNULL_BEGIN
@interface RTCConfiguration ()
-/**
- * RTCConfiguration struct representation of this RTCConfiguration. This is
- * needed to pass to the underlying C++ APIs.
- */
-@property(nonatomic, readonly)
- webrtc::PeerConnectionInterface::RTCConfiguration nativeConfiguration;
-
+ (webrtc::PeerConnectionInterface::IceTransportsType)
nativeTransportsTypeForTransportPolicy:(RTCIceTransportPolicy)policy;
@@ -55,6 +48,13 @@ NS_ASSUME_NONNULL_BEGIN
+ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy;
+/**
+ * RTCConfiguration struct representation of this RTCConfiguration. This is
+ * needed to pass to the underlying C++ APIs.
+ */
+- (webrtc::PeerConnectionInterface::RTCConfiguration *)
+ createNativeConfiguration;
+
@end
NS_ASSUME_NONNULL_END
diff --git a/chromium/third_party/webrtc/api/objc/RTCConfiguration.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCConfiguration.mm
index 2d44d015675..0a63f69fd98 100644
--- a/chromium/third_party/webrtc/api/objc/RTCConfiguration.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCConfiguration.mm
@@ -8,13 +8,15 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCConfiguration.h"
+#import "RTCConfiguration+Private.h"
-#include "webrtc/base/sslidentity.h"
+#include <memory>
+
+#import "RTCIceServer+Private.h"
+#import "WebRTC/RTCLogging.h"
-#import "webrtc/api/objc/RTCConfiguration+Private.h"
-#import "webrtc/api/objc/RTCIceServer+Private.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#include "webrtc/base/rtccertificategenerator.h"
+#include "webrtc/base/sslidentity.h"
@implementation RTCConfiguration
@@ -23,6 +25,7 @@
@synthesize bundlePolicy = _bundlePolicy;
@synthesize rtcpMuxPolicy = _rtcpMuxPolicy;
@synthesize tcpCandidatePolicy = _tcpCandidatePolicy;
+@synthesize continualGatheringPolicy = _continualGatheringPolicy;
@synthesize audioJitterBufferMaxPackets = _audioJitterBufferMaxPackets;
@synthesize iceConnectionReceivingTimeout = _iceConnectionReceivingTimeout;
@synthesize iceBackupCandidatePairPingInterval =
@@ -42,6 +45,10 @@
[[self class] rtcpMuxPolicyForNativePolicy:config.rtcp_mux_policy];
_tcpCandidatePolicy = [[self class] tcpCandidatePolicyForNativePolicy:
config.tcp_candidate_policy];
+ webrtc::PeerConnectionInterface::ContinualGatheringPolicy nativePolicy =
+ config.continual_gathering_policy;
+ _continualGatheringPolicy =
+ [[self class] continualGatheringPolicyForNativePolicy:nativePolicy];
_audioJitterBufferMaxPackets = config.audio_jitter_buffer_max_packets;
_iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout;
_iceBackupCandidatePairPingInterval =
@@ -53,12 +60,14 @@
- (NSString *)description {
return [NSString stringWithFormat:
- @"RTCConfiguration: {\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n}\n",
+ @"RTCConfiguration: {\n%@\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n}\n",
_iceServers,
[[self class] stringForTransportPolicy:_iceTransportPolicy],
[[self class] stringForBundlePolicy:_bundlePolicy],
[[self class] stringForRtcpMuxPolicy:_rtcpMuxPolicy],
[[self class] stringForTcpCandidatePolicy:_tcpCandidatePolicy],
+ [[self class]
+ stringForContinualGatheringPolicy:_continualGatheringPolicy],
_audioJitterBufferMaxPackets,
_iceConnectionReceivingTimeout,
_iceBackupCandidatePairPingInterval];
@@ -66,37 +75,44 @@
#pragma mark - Private
-- (webrtc::PeerConnectionInterface::RTCConfiguration)nativeConfiguration {
- webrtc::PeerConnectionInterface::RTCConfiguration nativeConfig;
+- (webrtc::PeerConnectionInterface::RTCConfiguration *)
+ createNativeConfiguration {
+ std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration>
+ nativeConfig(new webrtc::PeerConnectionInterface::RTCConfiguration());
for (RTCIceServer *iceServer in _iceServers) {
- nativeConfig.servers.push_back(iceServer.nativeServer);
+ nativeConfig->servers.push_back(iceServer.nativeServer);
}
- nativeConfig.type =
+ nativeConfig->type =
[[self class] nativeTransportsTypeForTransportPolicy:_iceTransportPolicy];
- nativeConfig.bundle_policy =
+ nativeConfig->bundle_policy =
[[self class] nativeBundlePolicyForPolicy:_bundlePolicy];
- nativeConfig.rtcp_mux_policy =
+ nativeConfig->rtcp_mux_policy =
[[self class] nativeRtcpMuxPolicyForPolicy:_rtcpMuxPolicy];
- nativeConfig.tcp_candidate_policy =
+ nativeConfig->tcp_candidate_policy =
[[self class] nativeTcpCandidatePolicyForPolicy:_tcpCandidatePolicy];
- nativeConfig.audio_jitter_buffer_max_packets = _audioJitterBufferMaxPackets;
- nativeConfig.ice_connection_receiving_timeout =
+ nativeConfig->continual_gathering_policy = [[self class]
+ nativeContinualGatheringPolicyForPolicy:_continualGatheringPolicy];
+ nativeConfig->audio_jitter_buffer_max_packets = _audioJitterBufferMaxPackets;
+ nativeConfig->ice_connection_receiving_timeout =
_iceConnectionReceivingTimeout;
- nativeConfig.ice_backup_candidate_pair_ping_interval =
+ nativeConfig->ice_backup_candidate_pair_ping_interval =
_iceBackupCandidatePairPingInterval;
- if (_keyType == RTCEncryptionKeyTypeECDSA) {
- rtc::scoped_ptr<rtc::SSLIdentity> identity(
- rtc::SSLIdentity::Generate(webrtc::kIdentityName, rtc::KT_ECDSA));
- if (identity) {
- nativeConfig.certificates.push_back(
- rtc::RTCCertificate::Create(std::move(identity)));
- } else {
- RTCLogWarning(@"Failed to generate ECDSA identity. RSA will be used.");
+ rtc::KeyType keyType =
+ [[self class] nativeEncryptionKeyTypeForKeyType:_keyType];
+ // Generate non-default certificate.
+ if (keyType != rtc::KT_DEFAULT) {
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ rtc::RTCCertificateGenerator::GenerateCertificate(
+ rtc::KeyParams(keyType), rtc::Optional<uint64_t>());
+ if (!certificate) {
+ RTCLogError(@"Failed to generate certificate.");
+ return nullptr;
}
+ nativeConfig->certificates.push_back(certificate);
}
- return nativeConfig;
+ return nativeConfig.release();
}
+ (webrtc::PeerConnectionInterface::IceTransportsType)
@@ -214,6 +230,16 @@
}
}
++ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:
+ (RTCEncryptionKeyType)keyType {
+ switch (keyType) {
+ case RTCEncryptionKeyTypeRSA:
+ return rtc::KT_RSA;
+ case RTCEncryptionKeyTypeECDSA:
+ return rtc::KT_ECDSA;
+ }
+}
+
+ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
(webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy {
switch (nativePolicy) {
@@ -233,4 +259,35 @@
}
}
++ (webrtc::PeerConnectionInterface::ContinualGatheringPolicy)
+ nativeContinualGatheringPolicyForPolicy:
+ (RTCContinualGatheringPolicy)policy {
+ switch (policy) {
+ case RTCContinualGatheringPolicyGatherOnce:
+ return webrtc::PeerConnectionInterface::GATHER_ONCE;
+ case RTCContinualGatheringPolicyGatherContinually:
+ return webrtc::PeerConnectionInterface::GATHER_CONTINUALLY;
+ }
+}
+
++ (RTCContinualGatheringPolicy)continualGatheringPolicyForNativePolicy:
+ (webrtc::PeerConnectionInterface::ContinualGatheringPolicy)nativePolicy {
+ switch (nativePolicy) {
+ case webrtc::PeerConnectionInterface::GATHER_ONCE:
+ return RTCContinualGatheringPolicyGatherOnce;
+ case webrtc::PeerConnectionInterface::GATHER_CONTINUALLY:
+ return RTCContinualGatheringPolicyGatherContinually;
+ }
+}
+
++ (NSString *)stringForContinualGatheringPolicy:
+ (RTCContinualGatheringPolicy)policy {
+ switch (policy) {
+ case RTCContinualGatheringPolicyGatherOnce:
+ return @"GATHER_ONCE";
+ case RTCContinualGatheringPolicyGatherContinually:
+ return @"GATHER_CONTINUALLY";
+ }
+}
+
@end
diff --git a/chromium/third_party/webrtc/api/objc/RTCDataChannel+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDataChannel+Private.h
index 179192c83f4..82e132fa2e0 100644
--- a/chromium/third_party/webrtc/api/objc/RTCDataChannel+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDataChannel+Private.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCDataChannel.h"
+#import "WebRTC/RTCDataChannel.h"
#include "webrtc/api/datachannelinterface.h"
#include "webrtc/base/scoped_ref_ptr.h"
diff --git a/chromium/third_party/webrtc/api/objc/RTCDataChannel.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDataChannel.mm
index a2c2e182ed1..706e43e2ed5 100644
--- a/chromium/third_party/webrtc/api/objc/RTCDataChannel.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDataChannel.mm
@@ -8,12 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCDataChannel.h"
+#import "RTCDataChannel+Private.h"
-#import "webrtc/api/objc/RTCDataChannel+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
namespace webrtc {
@@ -47,7 +46,7 @@ class DataChannelDelegateAdapter : public DataChannelObserver {
@implementation RTCDataBuffer {
- rtc::scoped_ptr<webrtc::DataBuffer> _dataBuffer;
+ std::unique_ptr<webrtc::DataBuffer> _dataBuffer;
}
- (instancetype)initWithData:(NSData *)data isBinary:(BOOL)isBinary {
@@ -87,7 +86,7 @@ class DataChannelDelegateAdapter : public DataChannelObserver {
@implementation RTCDataChannel {
rtc::scoped_refptr<webrtc::DataChannelInterface> _nativeDataChannel;
- rtc::scoped_ptr<webrtc::DataChannelDelegateAdapter> _observer;
+ std::unique_ptr<webrtc::DataChannelDelegateAdapter> _observer;
BOOL _isObserverRegistered;
}
@@ -96,7 +95,7 @@ class DataChannelDelegateAdapter : public DataChannelObserver {
- (void)dealloc {
// Handles unregistering the observer properly. We need to do this because
// there may still be other references to the underlying data channel.
- self.delegate = nil;
+ _nativeDataChannel->UnregisterObserver();
}
- (NSString *)label {
@@ -148,21 +147,6 @@ class DataChannelDelegateAdapter : public DataChannelObserver {
return _nativeDataChannel->buffered_amount();
}
-- (void)setDelegate:(id<RTCDataChannelDelegate>)delegate {
- if (_delegate == delegate) {
- return;
- }
- if (_isObserverRegistered) {
- _nativeDataChannel->UnregisterObserver();
- _isObserverRegistered = NO;
- }
- _delegate = delegate;
- if (_delegate) {
- _nativeDataChannel->RegisterObserver(_observer.get());
- _isObserverRegistered = YES;
- }
-}
-
- (void)close {
_nativeDataChannel->Close();
}
@@ -187,6 +171,7 @@ class DataChannelDelegateAdapter : public DataChannelObserver {
if (self = [super init]) {
_nativeDataChannel = nativeDataChannel;
_observer.reset(new webrtc::DataChannelDelegateAdapter(self));
+ _nativeDataChannel->RegisterObserver(_observer.get());
}
return self;
}
diff --git a/chromium/third_party/webrtc/api/objc/RTCDataChannelConfiguration+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDataChannelConfiguration+Private.h
index 13478e78e8d..e9ea74705d4 100644
--- a/chromium/third_party/webrtc/api/objc/RTCDataChannelConfiguration+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDataChannelConfiguration+Private.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCDataChannelConfiguration.h"
+#import "WebRTC/RTCDataChannelConfiguration.h"
#include "webrtc/api/datachannelinterface.h"
diff --git a/chromium/third_party/webrtc/api/objc/RTCDataChannelConfiguration.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDataChannelConfiguration.mm
index 4a1dcb061c8..89c56de7863 100644
--- a/chromium/third_party/webrtc/api/objc/RTCDataChannelConfiguration.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDataChannelConfiguration.mm
@@ -8,10 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCDataChannelConfiguration.h"
+#import "RTCDataChannelConfiguration+Private.h"
-#import "webrtc/api/objc/RTCDataChannelConfiguration+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
@implementation RTCDataChannelConfiguration
diff --git a/chromium/third_party/webrtc/base/objc/RTCDispatcher+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDispatcher+Private.h
index 2ef6cf1ef35..3c114e553df 100644
--- a/chromium/third_party/webrtc/base/objc/RTCDispatcher+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDispatcher+Private.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "webrtc/base/objc/RTCDispatcher.h"
+#import "WebRTC/RTCDispatcher.h"
@interface RTCDispatcher ()
diff --git a/chromium/third_party/webrtc/base/objc/RTCDispatcher.m b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDispatcher.m
index dc2a990dfc3..94176ac6d81 100644
--- a/chromium/third_party/webrtc/base/objc/RTCDispatcher.m
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCDispatcher.m
@@ -8,8 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCDispatcher.h"
-
#import "RTCDispatcher+Private.h"
static dispatch_queue_t kAudioSessionQueue = nil;
diff --git a/chromium/third_party/webrtc/api/objc/RTCEAGLVideoView.m b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m
index 58fd108c4c1..d215265d3b0 100644
--- a/chromium/third_party/webrtc/api/objc/RTCEAGLVideoView.m
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "webrtc/api/objc/RTCEAGLVideoView.h"
+#import "WebRTC/RTCEAGLVideoView.h"
#import <GLKit/GLKit.h>
-#import "webrtc/api/objc/RTCOpenGLVideoRenderer.h"
-#import "webrtc/api/objc/RTCVideoFrame.h"
+#import "RTCOpenGLVideoRenderer.h"
+#import "WebRTC//RTCVideoFrame.h"
// RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
// refreshes, which should be 30fps. We wrap the display link in order to avoid
diff --git a/chromium/third_party/webrtc/base/objc/RTCFieldTrials.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCFieldTrials.mm
index 3072f394ee1..38c293f0ecd 100644
--- a/chromium/third_party/webrtc/base/objc/RTCFieldTrials.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCFieldTrials.mm
@@ -8,12 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "webrtc/base/objc/RTCFieldTrials.h"
+#import "WebRTC/RTCFieldTrials.h"
#include <memory>
-#include "webrtc/system_wrappers/include/field_trial_default.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
+
+#include "webrtc/system_wrappers/include/field_trial_default.h"
static NSString * const kRTCEnableSendSideBweString =
@"WebRTC-SendSideBwe/Enabled/";
diff --git a/chromium/third_party/webrtc/base/objc/RTCFileLogger.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCFileLogger.mm
index bf61794bd14..c1fbd747c3c 100644
--- a/chromium/third_party/webrtc/base/objc/RTCFileLogger.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCFileLogger.mm
@@ -8,13 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCFileLogger.h"
+#import "WebRTC/RTCFileLogger.h"
+
+#include <memory>
#include "webrtc/base/checks.h"
#include "webrtc/base/filerotatingstream.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/logsinks.h"
-#include "webrtc/base/scoped_ptr.h"
NSString *const kDefaultLogDirName = @"webrtc_logs";
NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB.
@@ -24,7 +25,7 @@ const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";
BOOL _hasStarted;
NSString *_dirPath;
NSUInteger _maxFileSize;
- rtc::scoped_ptr<rtc::FileRotatingLogSink> _logSink;
+ std::unique_ptr<rtc::FileRotatingLogSink> _logSink;
}
@synthesize severity = _severity;
@@ -129,7 +130,7 @@ const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";
return nil;
}
NSMutableData* logData = [NSMutableData data];
- rtc::scoped_ptr<rtc::FileRotatingStream> stream;
+ std::unique_ptr<rtc::FileRotatingStream> stream;
switch(_rotationType) {
case RTCFileLoggerTypeApp:
stream.reset(
@@ -150,7 +151,7 @@ const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";
size_t read = 0;
// Allocate memory using malloc so we can pass it direcly to NSData without
// copying.
- rtc::scoped_ptr<uint8_t[]> buffer(static_cast<uint8_t*>(malloc(bufferSize)));
+ std::unique_ptr<uint8_t[]> buffer(static_cast<uint8_t*>(malloc(bufferSize)));
stream->ReadAll(buffer.get(), bufferSize, &read, nullptr);
logData = [[NSMutableData alloc] initWithBytesNoCopy:buffer.release()
length:read];
diff --git a/chromium/third_party/webrtc/api/objc/RTCIceCandidate+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCIceCandidate+Private.h
index f98e04ba1c2..b00c8da3a3e 100644
--- a/chromium/third_party/webrtc/api/objc/RTCIceCandidate+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCIceCandidate+Private.h
@@ -8,10 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCIceCandidate.h"
+#import "WebRTC/RTCIceCandidate.h"
+
+#include <memory>
#include "webrtc/api/jsep.h"
-#include "webrtc/base/scoped_ptr.h"
NS_ASSUME_NONNULL_BEGIN
@@ -22,7 +23,7 @@ NS_ASSUME_NONNULL_BEGIN
* object. This is needed to pass to the underlying C++ APIs.
*/
@property(nonatomic, readonly)
- rtc::scoped_ptr<webrtc::IceCandidateInterface> nativeCandidate;
+ std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate;
/**
* Initialize an RTCIceCandidate from a native IceCandidateInterface. No
diff --git a/chromium/third_party/webrtc/api/objc/RTCIceCandidate.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCIceCandidate.mm
index ecc128f8b29..193403d11d8 100644
--- a/chromium/third_party/webrtc/api/objc/RTCIceCandidate.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCIceCandidate.mm
@@ -8,11 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCIceCandidate.h"
+#import "RTCIceCandidate+Private.h"
-#import "webrtc/api/objc/RTCIceCandidate+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#include <memory>
+
+#import "NSString+StdString.h"
+#import "WebRTC/RTCLogging.h"
@implementation RTCIceCandidate
@@ -52,7 +53,7 @@
sdpMid:[NSString stringForStdString:candidate->sdp_mid()]];
}
-- (rtc::scoped_ptr<webrtc::IceCandidateInterface>)nativeCandidate {
+- (std::unique_ptr<webrtc::IceCandidateInterface>)nativeCandidate {
webrtc::SdpParseError error;
webrtc::IceCandidateInterface *candidate = webrtc::CreateIceCandidate(
@@ -64,7 +65,7 @@
error.line.c_str());
}
- return rtc::scoped_ptr<webrtc::IceCandidateInterface>(candidate);
+ return std::unique_ptr<webrtc::IceCandidateInterface>(candidate);
}
@end
diff --git a/chromium/third_party/webrtc/api/objc/RTCIceServer+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCIceServer+Private.h
index 8098bee8f8b..7efeda412fc 100644
--- a/chromium/third_party/webrtc/api/objc/RTCIceServer+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCIceServer+Private.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCIceServer.h"
+#import "WebRTC/RTCIceServer.h"
#include "webrtc/api/peerconnectioninterface.h"
diff --git a/chromium/third_party/webrtc/api/objc/RTCIceServer.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCIceServer.mm
index 95c380e0713..41084b93571 100644
--- a/chromium/third_party/webrtc/api/objc/RTCIceServer.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCIceServer.mm
@@ -8,10 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCIceServer.h"
+#import "RTCIceServer+Private.h"
-#import "webrtc/api/objc/RTCIceServer+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
@implementation RTCIceServer
diff --git a/chromium/third_party/webrtc/base/objc/RTCLogging.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCLogging.mm
index 598d52de664..ef62d1f6697 100644
--- a/chromium/third_party/webrtc/base/objc/RTCLogging.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCLogging.mm
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCLogging.h"
+#import "WebRTC/RTCLogging.h"
#include "webrtc/base/logging.h"
diff --git a/chromium/third_party/webrtc/api/objc/RTCMediaConstraints+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaConstraints+Private.h
index fa582ecae59..606a132da64 100644
--- a/chromium/third_party/webrtc/api/objc/RTCMediaConstraints+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaConstraints+Private.h
@@ -8,10 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCMediaConstraints.h"
+#import "WebRTC/RTCMediaConstraints.h"
+
+#include <memory>
#include "webrtc/api/mediaconstraintsinterface.h"
-#include "webrtc/base/scoped_ptr.h"
namespace webrtc {
@@ -41,7 +42,7 @@ NS_ASSUME_NONNULL_BEGIN
* A MediaConstraints representation of this RTCMediaConstraints object. This is
* needed to pass to the underlying C++ APIs.
*/
-- (rtc::scoped_ptr<webrtc::MediaConstraints>)nativeConstraints;
+- (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints;
/** Return a native Constraints object representing these constraints */
+ (webrtc::MediaConstraintsInterface::Constraints)
diff --git a/chromium/third_party/webrtc/api/objc/RTCMediaConstraints.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaConstraints.mm
index bf50668e1d2..11be2ec026c 100644
--- a/chromium/third_party/webrtc/api/objc/RTCMediaConstraints.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaConstraints.mm
@@ -8,10 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCMediaConstraints.h"
+#import "RTCMediaConstraints+Private.h"
-#import "webrtc/api/objc/RTCMediaConstraints+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
+
+#include <memory>
namespace webrtc {
@@ -63,7 +64,7 @@ MediaConstraints::GetOptional() const {
#pragma mark - Private
-- (rtc::scoped_ptr<webrtc::MediaConstraints>)nativeConstraints {
+- (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints {
webrtc::MediaConstraintsInterface::Constraints mandatory =
[[self class] nativeConstraintsForConstraints:_mandatory];
webrtc::MediaConstraintsInterface::Constraints optional =
@@ -71,7 +72,7 @@ MediaConstraints::GetOptional() const {
webrtc::MediaConstraints *nativeConstraints =
new webrtc::MediaConstraints(mandatory, optional);
- return rtc::scoped_ptr<webrtc::MediaConstraints>(nativeConstraints);
+ return std::unique_ptr<webrtc::MediaConstraints>(nativeConstraints);
}
+ (webrtc::MediaConstraintsInterface::Constraints)
diff --git a/chromium/third_party/webrtc/api/objc/RTCMediaStream+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaStream+Private.h
index b03b091c11f..7f8707228d3 100644
--- a/chromium/third_party/webrtc/api/objc/RTCMediaStream+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaStream+Private.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCMediaStream.h"
+#import "WebRTC/RTCMediaStream.h"
#include "webrtc/api/mediastreaminterface.h"
diff --git a/chromium/third_party/webrtc/api/objc/RTCMediaStream.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaStream.mm
index 666022301a1..0737dee8a93 100644
--- a/chromium/third_party/webrtc/api/objc/RTCMediaStream.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaStream.mm
@@ -8,16 +8,15 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCMediaStream.h"
+#import "RTCMediaStream+Private.h"
#include <vector>
-#import "webrtc/api/objc/RTCAudioTrack+Private.h"
-#import "webrtc/api/objc/RTCMediaStream+Private.h"
-#import "webrtc/api/objc/RTCMediaStreamTrack+Private.h"
-#import "webrtc/api/objc/RTCPeerConnectionFactory+Private.h"
-#import "webrtc/api/objc/RTCVideoTrack+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
+#import "RTCAudioTrack+Private.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCVideoTrack+Private.h"
@implementation RTCMediaStream {
NSMutableArray *_audioTracks;
diff --git a/chromium/third_party/webrtc/api/objc/RTCMediaStreamTrack+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaStreamTrack+Private.h
index 155e31228b3..d5261266b89 100644
--- a/chromium/third_party/webrtc/api/objc/RTCMediaStreamTrack+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaStreamTrack+Private.h
@@ -8,10 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCMediaStreamTrack.h"
+#import "WebRTC/RTCMediaStreamTrack.h"
#include "webrtc/api/mediastreaminterface.h"
-#include "webrtc/base/scoped_ptr.h"
typedef NS_ENUM(NSInteger, RTCMediaStreamTrackType) {
RTCMediaStreamTrackTypeAudio,
@@ -37,6 +36,11 @@ NS_ASSUME_NONNULL_BEGIN
type:(RTCMediaStreamTrackType)type
NS_DESIGNATED_INITIALIZER;
+- (instancetype)initWithNativeTrack:
+ (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack;
+
+- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track;
+
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
(RTCMediaStreamTrackState)state;
diff --git a/chromium/third_party/webrtc/api/objc/RTCMediaStreamTrack.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaStreamTrack.mm
index 25979b38d9a..208550f6c86 100644
--- a/chromium/third_party/webrtc/api/objc/RTCMediaStreamTrack.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCMediaStreamTrack.mm
@@ -8,10 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCMediaStreamTrack.h"
+#import "RTCMediaStreamTrack+Private.h"
-#import "webrtc/api/objc/RTCMediaStreamTrack+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
+
+NSString * const kRTCMediaStreamTrackKindAudio =
+ @(webrtc::MediaStreamTrackInterface::kAudioKind);
+NSString * const kRTCMediaStreamTrackKindVideo =
+ @(webrtc::MediaStreamTrackInterface::kVideoKind);
@implementation RTCMediaStreamTrack {
rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> _nativeTrack;
@@ -47,6 +51,20 @@
readyState];
}
+- (BOOL)isEqual:(id)object {
+ if (self == object) {
+ return YES;
+ }
+ if (![object isMemberOfClass:[self class]]) {
+ return NO;
+ }
+ return [self isEqualToTrack:(RTCMediaStreamTrack *)object];
+}
+
+- (NSUInteger)hash {
+ return (NSUInteger)_nativeTrack.get();
+}
+
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
@@ -64,6 +82,29 @@
return self;
}
+- (instancetype)initWithNativeTrack:
+ (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
+ NSParameterAssert(nativeTrack);
+ if (nativeTrack->kind() ==
+ std::string(webrtc::MediaStreamTrackInterface::kAudioKind)) {
+ return [self initWithNativeTrack:nativeTrack
+ type:RTCMediaStreamTrackTypeAudio];
+ }
+ if (nativeTrack->kind() ==
+ std::string(webrtc::MediaStreamTrackInterface::kVideoKind)) {
+ return [self initWithNativeTrack:nativeTrack
+ type:RTCMediaStreamTrackTypeVideo];
+ }
+ return nil;
+}
+
+- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track {
+ if (!track) {
+ return NO;
+ }
+ return _nativeTrack == track.nativeTrack;
+}
+
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
(RTCMediaStreamTrackState)state {
switch (state) {
diff --git a/chromium/third_party/webrtc/api/objc/RTCNSGLVideoView.m b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCNSGLVideoView.m
index 063e6f1330e..415efe82282 100644
--- a/chromium/third_party/webrtc/api/objc/RTCNSGLVideoView.m
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCNSGLVideoView.m
@@ -8,12 +8,15 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCNSGLVideoView.h"
+#if !TARGET_OS_IPHONE
+
+#import "WebRTC/RTCNSGLVideoView.h"
#import <CoreVideo/CVDisplayLink.h>
#import <OpenGL/gl3.h>
-#import "RTCVideoFrame.h"
+
#import "RTCOpenGLVideoRenderer.h"
+#import "WebRTC/RTCVideoFrame.h"
@interface RTCNSGLVideoView ()
// |videoFrame| is set when we receive a frame from a worker thread and is read
@@ -139,3 +142,5 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
}
@end
+
+#endif // !TARGET_OS_IPHONE
diff --git a/chromium/third_party/webrtc/api/objc/RTCOpenGLVideoRenderer.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.h
index 729839c6a30..7041861014a 100644
--- a/chromium/third_party/webrtc/api/objc/RTCOpenGLVideoRenderer.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.h
@@ -15,6 +15,8 @@
#import <AppKit/NSOpenGL.h>
#endif
+#import "WebRTC/RTCMacros.h"
+
NS_ASSUME_NONNULL_BEGIN
@class RTCVideoFrame;
@@ -23,6 +25,7 @@ NS_ASSUME_NONNULL_BEGIN
// the currently bound framebuffer. Supports OpenGL 3.2 and OpenGLES 2.0. OpenGL
// framebuffer creation and management should be handled elsewhere using the
// same context used to initialize this class.
+RTC_EXPORT
@interface RTCOpenGLVideoRenderer : NSObject
// The last successfully drawn frame. Used to avoid drawing frames unnecessarily
diff --git a/chromium/third_party/webrtc/api/objc/RTCOpenGLVideoRenderer.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm
index b4a7c7b13a0..7d7b416b888 100644
--- a/chromium/third_party/webrtc/api/objc/RTCOpenGLVideoRenderer.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm
@@ -10,17 +10,16 @@
#import "RTCOpenGLVideoRenderer.h"
-#include <string.h>
-
-#include "webrtc/base/scoped_ptr.h"
-
#if TARGET_OS_IPHONE
#import <OpenGLES/ES3/gl.h>
#else
#import <OpenGL/gl3.h>
#endif
+#include <string.h>
+#include <memory>
+
+#import "WebRTC/RTCVideoFrame.h"
-#import "RTCVideoFrame.h"
// TODO(tkchin): check and log openGL errors. Methods here return BOOLs in
// anticipation of that happening in the future.
@@ -163,7 +162,7 @@ static const GLsizei kNumTextures = 3 * kNumTextureSets;
GLint _vSampler;
// Used to create a non-padded plane for GPU upload when we receive padded
// frames.
- rtc::scoped_ptr<uint8_t[]> _planeBuffer;
+ std::unique_ptr<uint8_t[]> _planeBuffer;
}
@synthesize lastDrawnFrame = _lastDrawnFrame;
diff --git a/chromium/third_party/webrtc/api/objc/RTCPeerConnection+DataChannel.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+DataChannel.mm
index cf646adda67..b3825d4e4eb 100644
--- a/chromium/third_party/webrtc/api/objc/RTCPeerConnection+DataChannel.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+DataChannel.mm
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "webrtc/api/objc/RTCPeerConnection+Private.h"
+#import "RTCPeerConnection+Private.h"
-#import "webrtc/api/objc/RTCDataChannel+Private.h"
-#import "webrtc/api/objc/RTCDataChannelConfiguration+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
+#import "RTCDataChannel+Private.h"
+#import "RTCDataChannelConfiguration+Private.h"
@implementation RTCPeerConnection (DataChannel)
diff --git a/chromium/third_party/webrtc/api/objc/RTCPeerConnection+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+Private.h
index 031631a37a9..cbae3607cf6 100644
--- a/chromium/third_party/webrtc/api/objc/RTCPeerConnection+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+Private.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "webrtc/api/objc/RTCPeerConnection.h"
+#import "WebRTC/RTCPeerConnection.h"
#include "webrtc/api/peerconnectioninterface.h"
diff --git a/chromium/third_party/webrtc/api/objc/RTCPeerConnection+Stats.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+Stats.mm
index 5032c84e54b..ccbd58f103d 100644
--- a/chromium/third_party/webrtc/api/objc/RTCPeerConnection+Stats.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection+Stats.mm
@@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "webrtc/api/objc/RTCPeerConnection+Private.h"
+#import "RTCPeerConnection+Private.h"
-#include "webrtc/base/checks.h"
+#import "NSString+StdString.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCStatsReport+Private.h"
-#import "webrtc/api/objc/RTCMediaStreamTrack+Private.h"
-#import "webrtc/api/objc/RTCStatsReport+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#include "webrtc/base/checks.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/api/objc/RTCPeerConnection.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection.mm
index 50d05f1bc69..9a488fdf494 100644
--- a/chromium/third_party/webrtc/api/objc/RTCPeerConnection.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnection.mm
@@ -8,22 +8,25 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "webrtc/api/objc/RTCPeerConnection.h"
+#import "RTCPeerConnection+Private.h"
+
+#import "NSString+StdString.h"
+#import "RTCConfiguration+Private.h"
+#import "RTCDataChannel+Private.h"
+#import "RTCIceCandidate+Private.h"
+#import "RTCMediaConstraints+Private.h"
+#import "RTCMediaStream+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCRtpReceiver+Private.h"
+#import "RTCRtpSender+Private.h"
+#import "RTCSessionDescription+Private.h"
+#import "RTCStatsReport+Private.h"
+#import "WebRTC/RTCLogging.h"
+
+#include <memory>
#include "webrtc/base/checks.h"
-#import "webrtc/api/objc/RTCPeerConnection+Private.h"
-#import "webrtc/api/objc/RTCConfiguration+Private.h"
-#import "webrtc/api/objc/RTCDataChannel+Private.h"
-#import "webrtc/api/objc/RTCIceCandidate+Private.h"
-#import "webrtc/api/objc/RTCMediaConstraints+Private.h"
-#import "webrtc/api/objc/RTCMediaStream+Private.h"
-#import "webrtc/api/objc/RTCPeerConnectionFactory+Private.h"
-#import "webrtc/api/objc/RTCSessionDescription+Private.h"
-#import "webrtc/api/objc/RTCStatsReport+Private.h"
-#import "webrtc/base/objc/RTCLogging.h"
-#import "webrtc/base/objc/NSString+StdString.h"
-
NSString * const kRTCPeerConnectionErrorDomain =
@"org.webrtc.RTCPeerConnection";
int const kRTCPeerConnnectionSessionDescriptionError = -1;
@@ -45,8 +48,8 @@ class CreateSessionDescriptionObserverAdapter
void OnSuccess(SessionDescriptionInterface *desc) override {
RTC_DCHECK(completion_handler_);
- rtc::scoped_ptr<webrtc::SessionDescriptionInterface> description =
- rtc::scoped_ptr<webrtc::SessionDescriptionInterface>(desc);
+ std::unique_ptr<webrtc::SessionDescriptionInterface> description =
+ std::unique_ptr<webrtc::SessionDescriptionInterface>(desc);
RTCSessionDescription* session =
[[RTCSessionDescription alloc] initWithNativeDescription:
description.get()];
@@ -184,7 +187,7 @@ void PeerConnectionDelegateAdapter::OnIceCandidate(
@implementation RTCPeerConnection {
NSMutableArray *_localStreams;
- rtc::scoped_ptr<webrtc::PeerConnectionDelegateAdapter> _observer;
+ std::unique_ptr<webrtc::PeerConnectionDelegateAdapter> _observer;
rtc::scoped_refptr<webrtc::PeerConnectionInterface> _peerConnection;
}
@@ -195,14 +198,17 @@ void PeerConnectionDelegateAdapter::OnIceCandidate(
constraints:(RTCMediaConstraints *)constraints
delegate:(id<RTCPeerConnectionDelegate>)delegate {
NSParameterAssert(factory);
+ std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
+ [configuration createNativeConfiguration]);
+ if (!config) {
+ return nil;
+ }
if (self = [super init]) {
_observer.reset(new webrtc::PeerConnectionDelegateAdapter(self));
- webrtc::PeerConnectionInterface::RTCConfiguration config =
- configuration.nativeConfiguration;
- rtc::scoped_ptr<webrtc::MediaConstraints> nativeConstraints =
+ std::unique_ptr<webrtc::MediaConstraints> nativeConstraints =
constraints.nativeConstraints;
_peerConnection =
- factory.nativeFactory->CreatePeerConnection(config,
+ factory.nativeFactory->CreatePeerConnection(*config,
nativeConstraints.get(),
nullptr,
nullptr,
@@ -249,7 +255,12 @@ void PeerConnectionDelegateAdapter::OnIceCandidate(
}
- (BOOL)setConfiguration:(RTCConfiguration *)configuration {
- return _peerConnection->SetConfiguration(configuration.nativeConfiguration);
+ std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
+ [configuration createNativeConfiguration]);
+ if (!config) {
+ return NO;
+ }
+ return _peerConnection->SetConfiguration(*config);
}
- (void)close {
@@ -257,7 +268,7 @@ void PeerConnectionDelegateAdapter::OnIceCandidate(
}
- (void)addIceCandidate:(RTCIceCandidate *)candidate {
- rtc::scoped_ptr<const webrtc::IceCandidateInterface> iceCandidate(
+ std::unique_ptr<const webrtc::IceCandidateInterface> iceCandidate(
candidate.nativeCandidate);
_peerConnection->AddIceCandidate(iceCandidate.get());
}
@@ -311,6 +322,41 @@ void PeerConnectionDelegateAdapter::OnIceCandidate(
_peerConnection->SetRemoteDescription(observer, sdp.nativeDescription);
}
+- (RTCRtpSender *)senderWithKind:(NSString *)kind
+ streamId:(NSString *)streamId {
+ std::string nativeKind = [NSString stdStringForString:kind];
+ std::string nativeStreamId = [NSString stdStringForString:streamId];
+ rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeSender(
+ _peerConnection->CreateSender(nativeKind, nativeStreamId));
+ return nativeSender ?
+ [[RTCRtpSender alloc] initWithNativeRtpSender:nativeSender]
+ : nil;
+}
+
+- (NSArray<RTCRtpSender *> *)senders {
+ std::vector<rtc::scoped_refptr<webrtc::RtpSenderInterface>> nativeSenders(
+ _peerConnection->GetSenders());
+ NSMutableArray *senders = [[NSMutableArray alloc] init];
+ for (const auto &nativeSender : nativeSenders) {
+ RTCRtpSender *sender =
+ [[RTCRtpSender alloc] initWithNativeRtpSender:nativeSender];
+ [senders addObject:sender];
+ }
+ return senders;
+}
+
+- (NSArray<RTCRtpReceiver *> *)receivers {
+ std::vector<rtc::scoped_refptr<webrtc::RtpReceiverInterface>> nativeReceivers(
+ _peerConnection->GetReceivers());
+ NSMutableArray *receivers = [[NSMutableArray alloc] init];
+ for (const auto &nativeReceiver : nativeReceivers) {
+ RTCRtpReceiver *receiver =
+ [[RTCRtpReceiver alloc] initWithNativeRtpReceiver:nativeReceiver];
+ [receivers addObject:receiver];
+ }
+ return receivers;
+}
+
#pragma mark - Private
+ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
diff --git a/chromium/third_party/webrtc/api/objc/RTCPeerConnectionFactory+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory+Private.h
index 55a473b4cd1..a7e453c917f 100644
--- a/chromium/third_party/webrtc/api/objc/RTCPeerConnectionFactory+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory+Private.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCPeerConnectionFactory.h"
+#import "WebRTC/RTCPeerConnectionFactory.h"
#include "webrtc/api/peerconnectionfactory.h"
#include "webrtc/base/scoped_ref_ptr.h"
diff --git a/chromium/third_party/webrtc/api/objc/RTCPeerConnectionFactory.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory.mm
index e2d60212310..2398ce5ddc4 100644
--- a/chromium/third_party/webrtc/api/objc/RTCPeerConnectionFactory.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory.mm
@@ -8,43 +8,50 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCPeerConnectionFactory.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "NSString+StdString.h"
#if defined(WEBRTC_IOS)
-#import "webrtc/api/objc/RTCAVFoundationVideoSource+Private.h"
+#import "RTCAVFoundationVideoSource+Private.h"
#endif
-#import "webrtc/api/objc/RTCAudioTrack+Private.h"
-#import "webrtc/api/objc/RTCMediaStream+Private.h"
-#import "webrtc/api/objc/RTCPeerConnection+Private.h"
-#import "webrtc/api/objc/RTCPeerConnectionFactory+Private.h"
-#import "webrtc/api/objc/RTCVideoSource+Private.h"
-#import "webrtc/api/objc/RTCVideoTrack+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "RTCAudioTrack+Private.h"
+#import "RTCMediaStream+Private.h"
+#import "RTCPeerConnection+Private.h"
+#import "RTCVideoSource+Private.h"
+#import "RTCVideoTrack+Private.h"
+
+#include <memory>
@implementation RTCPeerConnectionFactory {
- rtc::scoped_ptr<rtc::Thread> _signalingThread;
- rtc::scoped_ptr<rtc::Thread> _workerThread;
+ std::unique_ptr<rtc::Thread> _networkThread;
+ std::unique_ptr<rtc::Thread> _workerThread;
+ std::unique_ptr<rtc::Thread> _signalingThread;
}
@synthesize nativeFactory = _nativeFactory;
- (instancetype)init {
if ((self = [super init])) {
- _signalingThread.reset(new rtc::Thread());
- BOOL result = _signalingThread->Start();
- NSAssert(result, @"Failed to start signaling thread.");
- _workerThread.reset(new rtc::Thread());
+ _networkThread = rtc::Thread::CreateWithSocketServer();
+ BOOL result = _networkThread->Start();
+ NSAssert(result, @"Failed to start network thread.");
+
+ _workerThread = rtc::Thread::Create();
result = _workerThread->Start();
NSAssert(result, @"Failed to start worker thread.");
+ _signalingThread = rtc::Thread::Create();
+ result = _signalingThread->Start();
+ NSAssert(result, @"Failed to start signaling thread.");
+
_nativeFactory = webrtc::CreatePeerConnectionFactory(
- _workerThread.get(), _signalingThread.get(), nullptr, nullptr, nullptr);
+ _networkThread.get(), _workerThread.get(), _signalingThread.get(),
+ nullptr, nullptr, nullptr);
NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
}
return self;
}
-
- (RTCAVFoundationVideoSource *)avFoundationVideoSourceWithConstraints:
(nullable RTCMediaConstraints *)constraints {
#if defined(WEBRTC_IOS)
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpCodecParameters+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpCodecParameters+Private.h
new file mode 100644
index 00000000000..fe33e9e9633
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpCodecParameters+Private.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCRtpCodecParameters.h"
+
+#include "webrtc/api/rtpparameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCRtpCodecParameters ()
+
+/** Returns the equivalent native RtpCodecParameters structure. */
+@property(nonatomic, readonly) webrtc::RtpCodecParameters nativeParameters;
+
+/** Initialize the object with a native RtpCodecParameters structure. */
+- (instancetype)initWithNativeParameters:
+ (const webrtc::RtpCodecParameters &)nativeParameters;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpCodecParameters.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpCodecParameters.mm
new file mode 100644
index 00000000000..77047694723
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpCodecParameters.mm
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpCodecParameters+Private.h"
+
+#import "NSString+StdString.h"
+
+#include "webrtc/media/base/mediaconstants.h"
+
+const NSString * const kRTCRtxCodecMimeType = @(cricket::kRtxCodecName);
+const NSString * const kRTCRedCodecMimeType = @(cricket::kRedCodecName);
+const NSString * const kRTCUlpfecCodecMimeType = @(cricket::kUlpfecCodecName);
+const NSString * const kRTCOpusCodecMimeType = @(cricket::kOpusCodecName);
+const NSString * const kRTCIsacCodecMimeType = @(cricket::kIsacCodecName);
+const NSString * const kRTCL16CodecMimeType = @(cricket::kL16CodecName);
+const NSString * const kRTCG722CodecMimeType = @(cricket::kG722CodecName);
+const NSString * const kRTCIlbcCodecMimeType = @(cricket::kIlbcCodecName);
+const NSString * const kRTCPcmuCodecMimeType = @(cricket::kPcmuCodecName);
+const NSString * const kRTCPcmaCodecMimeType = @(cricket::kPcmaCodecName);
+const NSString * const kRTCDtmfCodecMimeType = @(cricket::kDtmfCodecName);
+const NSString * const kRTCComfortNoiseCodecMimeType =
+ @(cricket::kComfortNoiseCodecName);
+const NSString * const kVp8CodecMimeType = @(cricket::kVp8CodecName);
+const NSString * const kVp9CodecMimeType = @(cricket::kVp9CodecName);
+const NSString * const kH264CodecMimeType = @(cricket::kH264CodecName);
+
+@implementation RTCRtpCodecParameters
+
+@synthesize payloadType = _payloadType;
+@synthesize mimeType = _mimeType;
+@synthesize clockRate = _clockRate;
+@synthesize channels = _channels;
+
+- (instancetype)init {
+ return [super init];
+}
+
+- (instancetype)initWithNativeParameters:
+ (const webrtc::RtpCodecParameters &)nativeParameters {
+ if (self = [self init]) {
+ _payloadType = nativeParameters.payload_type;
+ _mimeType = [NSString stringForStdString:nativeParameters.mime_type];
+ _clockRate = nativeParameters.clock_rate;
+ _channels = nativeParameters.channels;
+ }
+ return self;
+}
+
+- (webrtc::RtpCodecParameters)nativeParameters {
+ webrtc::RtpCodecParameters parameters;
+ parameters.payload_type = _payloadType;
+ parameters.mime_type = [NSString stdStringForString:_mimeType];
+ parameters.clock_rate = _clockRate;
+ parameters.channels = _channels;
+ return parameters;
+}
+
+@end
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpEncodingParameters+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpEncodingParameters+Private.h
new file mode 100644
index 00000000000..1d752684785
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpEncodingParameters+Private.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCRtpEncodingParameters.h"
+
+#include "webrtc/api/rtpparameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCRtpEncodingParameters ()
+
+/** Returns the equivalent native RtpEncodingParameters structure. */
+@property(nonatomic, readonly) webrtc::RtpEncodingParameters nativeParameters;
+
+/** Initialize the object with a native RtpEncodingParameters structure. */
+- (instancetype)initWithNativeParameters:
+ (const webrtc::RtpEncodingParameters &)nativeParameters;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpEncodingParameters.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpEncodingParameters.mm
new file mode 100644
index 00000000000..af07a0485d6
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpEncodingParameters.mm
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpEncodingParameters+Private.h"
+
+@implementation RTCRtpEncodingParameters
+
+@synthesize isActive = _isActive;
+@synthesize maxBitrateBps = _maxBitrateBps;
+
+static const int kBitrateUnlimited = -1;
+
+- (instancetype)init {
+ return [super init];
+}
+
+- (instancetype)initWithNativeParameters:
+ (const webrtc::RtpEncodingParameters &)nativeParameters {
+ if (self = [self init]) {
+ _isActive = nativeParameters.active;
+ // TODO(skvlad): Replace with rtc::Optional once the C++ code is updated.
+ if (nativeParameters.max_bitrate_bps != kBitrateUnlimited) {
+ _maxBitrateBps =
+ [NSNumber numberWithInt:nativeParameters.max_bitrate_bps];
+ }
+ }
+ return self;
+}
+
+- (webrtc::RtpEncodingParameters)nativeParameters {
+ webrtc::RtpEncodingParameters parameters;
+ parameters.active = _isActive;
+ if (_maxBitrateBps != nil) {
+ parameters.max_bitrate_bps = _maxBitrateBps.intValue;
+ }
+ return parameters;
+}
+
+@end
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpParameters+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpParameters+Private.h
new file mode 100644
index 00000000000..0cb1ffed814
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpParameters+Private.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCRtpParameters.h"
+
+#include "webrtc/api/rtpparameters.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCRtpParameters ()
+
+/** Returns the equivalent native RtpParameters structure. */
+@property(nonatomic, readonly) webrtc::RtpParameters nativeParameters;
+
+/** Initialize the object with a native RtpParameters structure. */
+- (instancetype)initWithNativeParameters:
+ (const webrtc::RtpParameters &)nativeParameters;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpParameters.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpParameters.mm
new file mode 100644
index 00000000000..5e791066df1
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpParameters.mm
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpParameters+Private.h"
+
+#import "RTCRtpCodecParameters+Private.h"
+#import "RTCRtpEncodingParameters+Private.h"
+
+@implementation RTCRtpParameters
+
+@synthesize encodings = _encodings;
+@synthesize codecs = _codecs;
+
+- (instancetype)init {
+ return [super init];
+}
+
+- (instancetype)initWithNativeParameters:
+ (const webrtc::RtpParameters &)nativeParameters {
+ if (self = [self init]) {
+ NSMutableArray *encodings = [[NSMutableArray alloc] init];
+ for (const auto &encoding : nativeParameters.encodings) {
+ [encodings addObject:[[RTCRtpEncodingParameters alloc]
+ initWithNativeParameters:encoding]];
+ }
+ _encodings = encodings;
+
+ NSMutableArray *codecs = [[NSMutableArray alloc] init];
+ for (const auto &codec : nativeParameters.codecs) {
+ [codecs addObject:[[RTCRtpCodecParameters alloc]
+ initWithNativeParameters:codec]];
+ }
+ _codecs = codecs;
+ }
+ return self;
+}
+
+- (webrtc::RtpParameters)nativeParameters {
+ webrtc::RtpParameters parameters;
+ for (RTCRtpEncodingParameters *encoding in _encodings) {
+ parameters.encodings.push_back(encoding.nativeParameters);
+ }
+ for (RTCRtpCodecParameters *codec in _codecs) {
+ parameters.codecs.push_back(codec.nativeParameters);
+ }
+ return parameters;
+}
+
+@end
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpReceiver+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpReceiver+Private.h
new file mode 100644
index 00000000000..14b68fa0113
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpReceiver+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCRtpReceiver.h"
+
+#include "webrtc/api/rtpreceiverinterface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCRtpReceiver ()
+
+@property(nonatomic, readonly)
+ rtc::scoped_refptr<webrtc::RtpReceiverInterface> nativeRtpReceiver;
+
+/** Initialize an RTCRtpReceiver with a native RtpReceiverInterface. */
+- (instancetype)initWithNativeRtpReceiver:
+ (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpReceiver.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpReceiver.mm
new file mode 100644
index 00000000000..46c0e6544a4
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpReceiver.mm
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpReceiver+Private.h"
+
+#import "NSString+StdString.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCRtpParameters+Private.h"
+#import "WebRTC/RTCLogging.h"
+
+#include "webrtc/api/mediastreaminterface.h"
+
+@implementation RTCRtpReceiver {
+ rtc::scoped_refptr<webrtc::RtpReceiverInterface> _nativeRtpReceiver;
+}
+
+- (NSString *)receiverId {
+ return [NSString stringForStdString:_nativeRtpReceiver->id()];
+}
+
+- (RTCRtpParameters *)parameters {
+ return [[RTCRtpParameters alloc]
+ initWithNativeParameters:_nativeRtpReceiver->GetParameters()];
+}
+
+- (void)setParameters:(RTCRtpParameters *)parameters {
+ if (!_nativeRtpReceiver->SetParameters(parameters.nativeParameters)) {
+ RTCLogError(@"RTCRtpReceiver(%p): Failed to set parameters: %@", self,
+ parameters);
+ }
+}
+
+- (RTCMediaStreamTrack *)track {
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
+ _nativeRtpReceiver->track());
+ if (nativeTrack) {
+ return [[RTCMediaStreamTrack alloc] initWithNativeTrack:nativeTrack];
+ }
+ return nil;
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTCRtpReceiver {\n receiverId: %@\n}",
+ self.receiverId];
+}
+
+- (BOOL)isEqual:(id)object {
+ if (self == object) {
+ return YES;
+ }
+ if (object == nil) {
+ return NO;
+ }
+ if (![object isMemberOfClass:[self class]]) {
+ return NO;
+ }
+ RTCRtpReceiver *receiver = (RTCRtpReceiver *)object;
+ return _nativeRtpReceiver == receiver.nativeRtpReceiver;
+}
+
+- (NSUInteger)hash {
+ return (NSUInteger)_nativeRtpReceiver.get();
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
+ return _nativeRtpReceiver;
+}
+
+- (instancetype)initWithNativeRtpReceiver:
+ (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
+ if (self = [super init]) {
+ _nativeRtpReceiver = nativeRtpReceiver;
+ RTCLogInfo(
+ @"RTCRtpReceiver(%p): created receiver: %@", self, self.description);
+ }
+ return self;
+}
+
+@end
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpSender+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpSender+Private.h
new file mode 100644
index 00000000000..e372c523325
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpSender+Private.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "WebRTC/RTCRtpSender.h"
+
+#include "webrtc/api/rtpsenderinterface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTCRtpSender ()
+
+@property(nonatomic, readonly)
+ rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeRtpSender;
+
+/** Initialize an RTCRtpSender with a native RtpSenderInterface. */
+- (instancetype)initWithNativeRtpSender:
+ (rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender
+ NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpSender.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpSender.mm
new file mode 100644
index 00000000000..9ecf0ae5ae9
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCRtpSender.mm
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCRtpSender+Private.h"
+
+#import "NSString+StdString.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCRtpParameters+Private.h"
+#import "WebRTC/RTCLogging.h"
+
+#include "webrtc/api/mediastreaminterface.h"
+
+@implementation RTCRtpSender {
+ rtc::scoped_refptr<webrtc::RtpSenderInterface> _nativeRtpSender;
+}
+
+- (NSString *)senderId {
+ return [NSString stringForStdString:_nativeRtpSender->id()];
+}
+
+- (RTCRtpParameters *)parameters {
+ return [[RTCRtpParameters alloc]
+ initWithNativeParameters:_nativeRtpSender->GetParameters()];
+}
+
+- (void)setParameters:(RTCRtpParameters *)parameters {
+ if (!_nativeRtpSender->SetParameters(parameters.nativeParameters)) {
+ RTCLogError(@"RTCRtpSender(%p): Failed to set parameters: %@", self,
+ parameters);
+ }
+}
+
+- (RTCMediaStreamTrack *)track {
+ rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
+ _nativeRtpSender->track());
+ if (nativeTrack) {
+ return [[RTCMediaStreamTrack alloc] initWithNativeTrack:nativeTrack];
+ }
+ return nil;
+}
+
+- (void)setTrack:(RTCMediaStreamTrack *)track {
+ if (!_nativeRtpSender->SetTrack(track.nativeTrack)) {
+ RTCLogError(@"RTCRtpSender(%p): Failed to set track %@", self, track);
+ }
+}
+
+- (NSString *)description {
+ return [NSString stringWithFormat:@"RTCRtpSender {\n senderId: %@\n}",
+ self.senderId];
+}
+
+- (BOOL)isEqual:(id)object {
+ if (self == object) {
+ return YES;
+ }
+ if (object == nil) {
+ return NO;
+ }
+ if (![object isMemberOfClass:[self class]]) {
+ return NO;
+ }
+ RTCRtpSender *sender = (RTCRtpSender *)object;
+ return _nativeRtpSender == sender.nativeRtpSender;
+}
+
+- (NSUInteger)hash {
+ return (NSUInteger)_nativeRtpSender.get();
+}
+
+#pragma mark - Private
+
+- (rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
+ return _nativeRtpSender;
+}
+
+- (instancetype)initWithNativeRtpSender:
+ (rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
+ NSParameterAssert(nativeRtpSender);
+ if (self = [super init]) {
+ _nativeRtpSender = nativeRtpSender;
+ RTCLogInfo(@"RTCRtpSender(%p): created sender: %@", self, self.description);
+ }
+ return self;
+}
+
+@end
diff --git a/chromium/third_party/webrtc/base/objc/RTCSSLAdapter.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCSSLAdapter.mm
index c881a4487ed..a0da105bc35 100644
--- a/chromium/third_party/webrtc/base/objc/RTCSSLAdapter.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCSSLAdapter.mm
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCSSLAdapter.h"
+#import "WebRTC/RTCSSLAdapter.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/ssladapter.h"
diff --git a/chromium/third_party/webrtc/api/objc/RTCSessionDescription+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCSessionDescription+Private.h
index 9de8f0e9430..04b6fbe8642 100644
--- a/chromium/third_party/webrtc/api/objc/RTCSessionDescription+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCSessionDescription+Private.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCSessionDescription.h"
+#import "WebRTC/RTCSessionDescription.h"
#include "webrtc/api/jsep.h"
diff --git a/chromium/third_party/webrtc/api/objc/RTCSessionDescription.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCSessionDescription.mm
index 94c1a3f2d98..417ff7dfca2 100644
--- a/chromium/third_party/webrtc/api/objc/RTCSessionDescription.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCSessionDescription.mm
@@ -8,13 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCSessionDescription.h"
+#import "RTCSessionDescription+Private.h"
-#include "webrtc/base/checks.h"
+#import "NSString+StdString.h"
+#import "WebRTC/RTCLogging.h"
-#import "webrtc/api/objc/RTCSessionDescription+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#include "webrtc/base/checks.h"
@implementation RTCSessionDescription
diff --git a/chromium/third_party/webrtc/api/objc/RTCStatsReport+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCStatsReport+Private.h
index 5ce5801bee2..e1af6f75ea6 100644
--- a/chromium/third_party/webrtc/api/objc/RTCStatsReport+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCStatsReport+Private.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCStatsReport.h"
+#import "WebRTC/RTCStatsReport.h"
#include "webrtc/api/statstypes.h"
diff --git a/chromium/third_party/webrtc/api/objc/RTCStatsReport.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCStatsReport.mm
index 99cdd287b8a..1bc3eb9f0ee 100644
--- a/chromium/third_party/webrtc/api/objc/RTCStatsReport.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCStatsReport.mm
@@ -8,13 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCStatsReport.h"
+#import "RTCStatsReport+Private.h"
-#include "webrtc/base/checks.h"
+#import "NSString+StdString.h"
+#import "WebRTC/RTCLogging.h"
-#import "webrtc/api/objc/RTCStatsReport+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#include "webrtc/base/checks.h"
@implementation RTCStatsReport
diff --git a/chromium/third_party/webrtc/base/objc/RTCTracing.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCTracing.mm
index a51c7033402..37755a6b9ab 100644
--- a/chromium/third_party/webrtc/base/objc/RTCTracing.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCTracing.mm
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "webrtc/base/objc/RTCTracing.h"
+#import "WebRTC/RTCTracing.h"
#include "webrtc/base/event_tracer.h"
diff --git a/chromium/third_party/webrtc/base/objc/RTCUIApplication.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCUIApplication.h
index c06404f777b..fb11edef44e 100644
--- a/chromium/third_party/webrtc/base/objc/RTCUIApplication.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCUIApplication.h
@@ -11,11 +11,11 @@
#ifndef WEBRTC_BASE_OBJC_RTC_UI_APPLICATION_H_
#define WEBRTC_BASE_OBJC_RTC_UI_APPLICATION_H_
-#include "webrtc/base/objc/RTCMacros.h"
+#include "WebRTC/RTCMacros.h"
#if defined(WEBRTC_IOS)
/** Convenience function to get UIApplicationState from C++. */
-RTC_EXPORT bool RTCIsUIApplicationActive();
+RTC_EXTERN bool RTCIsUIApplicationActive();
#endif // WEBRTC_IOS
#endif // WEBRTC_BASE_OBJC_RTC_UI_APPLICATION_H_
diff --git a/chromium/third_party/webrtc/base/objc/RTCUIApplication.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCUIApplication.mm
index 85376b4e0e7..7e8aea62955 100644
--- a/chromium/third_party/webrtc/base/objc/RTCUIApplication.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCUIApplication.mm
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/base/objc/RTCUIApplication.h"
+#include "RTCUIApplication.h"
#if defined(WEBRTC_IOS)
diff --git a/chromium/third_party/webrtc/api/objc/RTCVideoFrame+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame+Private.h
index 52f532ce885..a4807127ca7 100644
--- a/chromium/third_party/webrtc/api/objc/RTCVideoFrame+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame+Private.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCVideoFrame.h"
+#import "WebRTC/RTCVideoFrame.h"
#include "webrtc/media/base/videoframe.h"
diff --git a/chromium/third_party/webrtc/api/objc/RTCVideoFrame.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame.mm
index 95f4ac82bfe..1fb26954a39 100644
--- a/chromium/third_party/webrtc/api/objc/RTCVideoFrame.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame.mm
@@ -8,14 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCVideoFrame.h"
+#import "RTCVideoFrame+Private.h"
-#include "webrtc/base/scoped_ptr.h"
-
-#import "webrtc/api/objc/RTCVideoFrame+Private.h"
+#include <memory>
@implementation RTCVideoFrame {
- rtc::scoped_ptr<cricket::VideoFrame> _videoFrame;
+ std::unique_ptr<cricket::VideoFrame> _videoFrame;
rtc::scoped_refptr<webrtc::VideoFrameBuffer> _i420Buffer;
}
@@ -92,10 +90,10 @@
if (!_i420Buffer) {
if (_videoFrame->GetNativeHandle()) {
// Convert to I420.
- _i420Buffer = _videoFrame->GetVideoFrameBuffer()->NativeToI420Buffer();
+ _i420Buffer = _videoFrame->video_frame_buffer()->NativeToI420Buffer();
} else {
// Should already be I420.
- _i420Buffer = _videoFrame->GetVideoFrameBuffer();
+ _i420Buffer = _videoFrame->video_frame_buffer();
}
}
}
diff --git a/chromium/third_party/webrtc/api/objc/RTCVideoRendererAdapter+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter+Private.h
index f0e0c6c0add..b413f7e3f6b 100644
--- a/chromium/third_party/webrtc/api/objc/RTCVideoRendererAdapter+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter+Private.h
@@ -10,9 +10,9 @@
#import "RTCVideoRendererAdapter.h"
-#include "webrtc/api/mediastreaminterface.h"
+#import "WebRTC/RTCVideoRenderer.h"
-#import "RTCVideoRenderer.h"
+#include "webrtc/api/mediastreaminterface.h"
NS_ASSUME_NONNULL_BEGIN
diff --git a/chromium/third_party/webrtc/api/objc/RTCVideoRendererAdapter.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.h
index b0b6f044884..b0b6f044884 100644
--- a/chromium/third_party/webrtc/api/objc/RTCVideoRendererAdapter.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.h
diff --git a/chromium/third_party/webrtc/api/objc/RTCVideoRendererAdapter.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.mm
index 81e7888f3bc..4976ba9f1f1 100644
--- a/chromium/third_party/webrtc/api/objc/RTCVideoRendererAdapter.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.mm
@@ -8,10 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCVideoRendererAdapter.h"
+#import "RTCVideoRendererAdapter+Private.h"
-#import "webrtc/api/objc/RTCVideoFrame+Private.h"
-#import "webrtc/api/objc/RTCVideoRendererAdapter+Private.h"
+#import "RTCVideoFrame+Private.h"
+
+#include <memory>
+
+#include "webrtc/media/engine/webrtcvideoframe.h"
namespace webrtc {
@@ -24,15 +27,31 @@ class VideoRendererAdapter
}
void OnFrame(const cricket::VideoFrame& nativeVideoFrame) override {
- const cricket::VideoFrame *frame =
- nativeVideoFrame.GetCopyWithRotationApplied();
- CGSize current_size = CGSizeMake(frame->width(), frame->height());
+ RTCVideoFrame *videoFrame = nil;
+ // Rotation of native handles is unsupported right now. Convert to CPU
+ // I420 buffer for rotation before calling the rotation method otherwise
+ // it will hit a DCHECK.
+ if (nativeVideoFrame.rotation() != webrtc::kVideoRotation_0 &&
+ nativeVideoFrame.GetNativeHandle()) {
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420Buffer =
+ nativeVideoFrame.video_frame_buffer()->NativeToI420Buffer();
+ std::unique_ptr<cricket::VideoFrame> cpuFrame(
+ new cricket::WebRtcVideoFrame(i420Buffer,
+ nativeVideoFrame.rotation(),
+ nativeVideoFrame.timestamp_us()));
+ const cricket::VideoFrame *rotatedFrame =
+ cpuFrame->GetCopyWithRotationApplied();
+ videoFrame = [[RTCVideoFrame alloc] initWithNativeFrame:rotatedFrame];
+ } else {
+ const cricket::VideoFrame *rotatedFrame =
+ nativeVideoFrame.GetCopyWithRotationApplied();
+ videoFrame = [[RTCVideoFrame alloc] initWithNativeFrame:rotatedFrame];
+ }
+ CGSize current_size = CGSizeMake(videoFrame.width, videoFrame.height);
if (!CGSizeEqualToSize(size_, current_size)) {
size_ = current_size;
[adapter_.videoRenderer setSize:size_];
}
- RTCVideoFrame *videoFrame =
- [[RTCVideoFrame alloc] initWithNativeFrame:frame];
[adapter_.videoRenderer renderFrame:videoFrame];
}
@@ -43,7 +62,7 @@ class VideoRendererAdapter
}
@implementation RTCVideoRendererAdapter {
- rtc::scoped_ptr<webrtc::VideoRendererAdapter> _adapter;
+ std::unique_ptr<webrtc::VideoRendererAdapter> _adapter;
}
@synthesize videoRenderer = _videoRenderer;
diff --git a/chromium/third_party/webrtc/api/objc/RTCVideoSource+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoSource+Private.h
index 7746ba518ff..757c1746ef7 100644
--- a/chromium/third_party/webrtc/api/objc/RTCVideoSource+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoSource+Private.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCVideoSource.h"
+#import "WebRTC/RTCVideoSource.h"
#include "webrtc/api/mediastreaminterface.h"
diff --git a/chromium/third_party/webrtc/api/objc/RTCVideoSource.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoSource.mm
index 8e9c39d3ddc..eddf5e0c684 100644
--- a/chromium/third_party/webrtc/api/objc/RTCVideoSource.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoSource.mm
@@ -8,9 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCVideoSource.h"
-
-#import "webrtc/api/objc/RTCVideoSource+Private.h"
+#import "RTCVideoSource+Private.h"
@implementation RTCVideoSource {
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
diff --git a/chromium/third_party/webrtc/api/objc/RTCVideoTrack+Private.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoTrack+Private.h
index be041246c3a..5199be3b26c 100644
--- a/chromium/third_party/webrtc/api/objc/RTCVideoTrack+Private.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoTrack+Private.h
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCVideoTrack.h"
+#import "WebRTC/RTCVideoTrack.h"
#include "webrtc/api/mediastreaminterface.h"
diff --git a/chromium/third_party/webrtc/api/objc/RTCVideoTrack.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoTrack.mm
index a32ea7bc7c1..6691375fbff 100644
--- a/chromium/third_party/webrtc/api/objc/RTCVideoTrack.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/RTCVideoTrack.mm
@@ -8,14 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCVideoTrack.h"
+#import "RTCVideoTrack+Private.h"
-#import "webrtc/api/objc/RTCMediaStreamTrack+Private.h"
-#import "webrtc/api/objc/RTCPeerConnectionFactory+Private.h"
-#import "webrtc/api/objc/RTCVideoRendererAdapter+Private.h"
-#import "webrtc/api/objc/RTCVideoSource+Private.h"
-#import "webrtc/api/objc/RTCVideoTrack+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
+#import "RTCMediaStreamTrack+Private.h"
+#import "RTCPeerConnectionFactory+Private.h"
+#import "RTCVideoRendererAdapter+Private.h"
+#import "RTCVideoSource+Private.h"
@implementation RTCVideoTrack {
NSMutableArray *_adapters;
diff --git a/chromium/third_party/webrtc/api/objc/avfoundationvideocapturer.h b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.h
index 73cecd9d6c2..c523b527bbb 100644
--- a/chromium/third_party/webrtc/api/objc/avfoundationvideocapturer.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.h
@@ -11,12 +11,11 @@
#ifndef WEBRTC_API_OBJC_AVFOUNDATION_VIDEO_CAPTURER_H_
#define WEBRTC_API_OBJC_AVFOUNDATION_VIDEO_CAPTURER_H_
-#include "webrtc/base/scoped_ptr.h"
+#import <AVFoundation/AVFoundation.h>
+
#include "webrtc/media/base/videocapturer.h"
#include "webrtc/video_frame.h"
-#import <AVFoundation/AVFoundation.h>
-
@class RTCAVFoundationVideoCapturerInternal;
namespace rtc {
diff --git a/chromium/third_party/webrtc/api/objc/avfoundationvideocapturer.mm b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
index 85b96586502..cb945f41cae 100644
--- a/chromium/third_party/webrtc/api/objc/avfoundationvideocapturer.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
@@ -8,18 +8,18 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/api/objc/avfoundationvideocapturer.h"
-
-#include "webrtc/base/bind.h"
-#include "webrtc/base/checks.h"
-#include "webrtc/base/thread.h"
+#include "avfoundationvideocapturer.h"
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
-#import "webrtc/base/objc/RTCDispatcher+Private.h"
-#import "webrtc/base/objc/RTCLogging.h"
+#import "RTCDispatcher+Private.h"
+#import "WebRTC/RTCLogging.h"
+
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/thread.h"
// TODO(tkchin): support other formats.
static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
diff --git a/chromium/third_party/webrtc/api/objc/RTCAVFoundationVideoSource.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h
index 6d224d3f8e5..2a732b9686b 100644
--- a/chromium/third_party/webrtc/api/objc/RTCAVFoundationVideoSource.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h
@@ -8,7 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCVideoSource.h"
+#import <WebRTC/RTCMacros.h>
+#import <WebRTC/RTCVideoSource.h>
@class AVCaptureSession;
@class RTCMediaConstraints;
@@ -23,6 +24,7 @@ NS_ASSUME_NONNULL_BEGIN
* guarantee its lifetime. Instead, we expose its properties through the ref
* counted video source interface.
*/
+RTC_EXPORT
@interface RTCAVFoundationVideoSource : RTCVideoSource
- (instancetype)init NS_UNAVAILABLE;
diff --git a/chromium/third_party/webrtc/api/objc/RTCAudioTrack.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCAudioTrack.h
index c33a7b42bf6..a9f5fe22777 100644
--- a/chromium/third_party/webrtc/api/objc/RTCAudioTrack.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCAudioTrack.h
@@ -8,10 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCMediaStreamTrack.h"
+#import <WebRTC/RTCMacros.h>
+#import <WebRTC/RTCMediaStreamTrack.h>
NS_ASSUME_NONNULL_BEGIN
+RTC_EXPORT
@interface RTCAudioTrack : RTCMediaStreamTrack
- (instancetype)init NS_UNAVAILABLE;
diff --git a/chromium/third_party/webrtc/base/objc/RTCCameraPreviewView.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCCameraPreviewView.h
index 03e94c29aeb..9018aec3d18 100644
--- a/chromium/third_party/webrtc/base/objc/RTCCameraPreviewView.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCCameraPreviewView.h
@@ -11,12 +11,15 @@
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
+#import <WebRTC/RTCMacros.h>
+
@class AVCaptureSession;
@class RTCAVFoundationVideoSource;
/** RTCCameraPreviewView is a view that renders local video from an
* AVCaptureSession.
*/
+RTC_EXPORT
@interface RTCCameraPreviewView : UIView
/** The capture session being rendered in the view. Capture session
diff --git a/chromium/third_party/webrtc/api/objc/RTCConfiguration.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCConfiguration.h
index 144c8d3bd5e..74e7801ac05 100644
--- a/chromium/third_party/webrtc/api/objc/RTCConfiguration.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCConfiguration.h
@@ -10,6 +10,8 @@
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
@class RTCIceServer;
/**
@@ -42,6 +44,12 @@ typedef NS_ENUM(NSInteger, RTCTcpCandidatePolicy) {
RTCTcpCandidatePolicyDisabled
};
+/** Represents the continual gathering policy. */
+typedef NS_ENUM(NSInteger, RTCContinualGatheringPolicy) {
+ RTCContinualGatheringPolicyGatherOnce,
+ RTCContinualGatheringPolicyGatherContinually
+};
+
/** Represents the encryption key type. */
typedef NS_ENUM(NSInteger, RTCEncryptionKeyType) {
RTCEncryptionKeyTypeRSA,
@@ -50,6 +58,7 @@ typedef NS_ENUM(NSInteger, RTCEncryptionKeyType) {
NS_ASSUME_NONNULL_BEGIN
+RTC_EXPORT
@interface RTCConfiguration : NSObject
/** An array of Ice Servers available to be used by ICE. */
@@ -65,6 +74,8 @@ NS_ASSUME_NONNULL_BEGIN
/** The rtcp-mux policy to use when gathering ICE candidates. */
@property(nonatomic, assign) RTCRtcpMuxPolicy rtcpMuxPolicy;
@property(nonatomic, assign) RTCTcpCandidatePolicy tcpCandidatePolicy;
+@property(nonatomic, assign)
+ RTCContinualGatheringPolicy continualGatheringPolicy;
@property(nonatomic, assign) int audioJitterBufferMaxPackets;
@property(nonatomic, assign) int iceConnectionReceivingTimeout;
@property(nonatomic, assign) int iceBackupCandidatePairPingInterval;
diff --git a/chromium/third_party/webrtc/api/objc/RTCDataChannel.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCDataChannel.h
index 59ab2b8b26f..893bd0a3365 100644
--- a/chromium/third_party/webrtc/api/objc/RTCDataChannel.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCDataChannel.h
@@ -11,8 +11,11 @@
#import <AvailabilityMacros.h>
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
NS_ASSUME_NONNULL_BEGIN
+RTC_EXPORT
@interface RTCDataBuffer : NSObject
/** NSData representation of the underlying buffer. */
@@ -33,6 +36,7 @@ NS_ASSUME_NONNULL_BEGIN
@class RTCDataChannel;
+RTC_EXPORT
@protocol RTCDataChannelDelegate <NSObject>
/** The data channel state changed. */
@@ -58,7 +62,7 @@ typedef NS_ENUM(NSInteger, RTCDataChannelState) {
RTCDataChannelStateClosed,
};
-
+RTC_EXPORT
@interface RTCDataChannel : NSObject
/**
diff --git a/chromium/third_party/webrtc/api/objc/RTCDataChannelConfiguration.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h
index fbe342d2c86..65f59318447 100644
--- a/chromium/third_party/webrtc/api/objc/RTCDataChannelConfiguration.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h
@@ -11,8 +11,11 @@
#import <AvailabilityMacros.h>
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
NS_ASSUME_NONNULL_BEGIN
+RTC_EXPORT
@interface RTCDataChannelConfiguration : NSObject
/** Set to YES if ordered delivery is required. */
diff --git a/chromium/third_party/webrtc/base/objc/RTCDispatcher.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCDispatcher.h
index a9b92a661fd..328beaf8728 100644
--- a/chromium/third_party/webrtc/base/objc/RTCDispatcher.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCDispatcher.h
@@ -10,6 +10,8 @@
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) {
// Main dispatcher queue.
RTCDispatcherTypeMain,
@@ -23,6 +25,7 @@ typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) {
/** Dispatcher that asynchronously dispatches blocks to a specific
* shared dispatch queue.
*/
+RTC_EXPORT
@interface RTCDispatcher : NSObject
- (instancetype)init NS_UNAVAILABLE;
diff --git a/chromium/third_party/webrtc/api/objc/RTCEAGLVideoView.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h
index 1a57df76bbe..5f8d73bc8d6 100644
--- a/chromium/third_party/webrtc/api/objc/RTCEAGLVideoView.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h
@@ -11,11 +11,13 @@
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
-#import "RTCVideoRenderer.h"
+#import <WebRTC/RTCMacros.h>
+#import <WebRTC/RTCVideoRenderer.h>
NS_ASSUME_NONNULL_BEGIN
@class RTCEAGLVideoView;
+RTC_EXPORT
@protocol RTCEAGLVideoViewDelegate
- (void)videoView:(RTCEAGLVideoView *)videoView didChangeVideoSize:(CGSize)size;
@@ -26,6 +28,7 @@ NS_ASSUME_NONNULL_BEGIN
* RTCEAGLVideoView is an RTCVideoRenderer which renders video frames in its
* bounds using OpenGLES 2.0.
*/
+RTC_EXPORT
@interface RTCEAGLVideoView : UIView <RTCVideoRenderer>
@property(nonatomic, weak) id<RTCEAGLVideoViewDelegate> delegate;
diff --git a/chromium/third_party/webrtc/base/objc/RTCFieldTrials.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCFieldTrials.h
index 837a8f7a609..7e7c1c4480a 100644
--- a/chromium/third_party/webrtc/base/objc/RTCFieldTrials.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCFieldTrials.h
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/base/objc/RTCMacros.h"
-
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
typedef NS_OPTIONS(NSUInteger, RTCFieldTrialOptions) {
RTCFieldTrialOptionsNone = 0,
RTCFieldTrialOptionsSendSideBwe = 1 << 0,
@@ -20,4 +20,4 @@ typedef NS_OPTIONS(NSUInteger, RTCFieldTrialOptions) {
/** Must be called before any other call into WebRTC. See:
* webrtc/system_wrappers/include/field_trial_default.h
*/
-RTC_EXPORT void RTCInitFieldTrials(RTCFieldTrialOptions options);
+RTC_EXTERN void RTCInitFieldTrials(RTCFieldTrialOptions options);
diff --git a/chromium/third_party/webrtc/base/objc/RTCFileLogger.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCFileLogger.h
index 281be4e31ff..5656b7bf43f 100644
--- a/chromium/third_party/webrtc/base/objc/RTCFileLogger.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCFileLogger.h
@@ -10,6 +10,8 @@
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
typedef NS_ENUM(NSUInteger, RTCFileLoggerSeverity) {
RTCFileLoggerSeverityVerbose,
RTCFileLoggerSeverityInfo,
@@ -31,6 +33,7 @@ NS_ASSUME_NONNULL_BEGIN
// are preserved while the middle section is overwritten instead.
// For kRTCFileLoggerTypeApp, the oldest log is overwritten.
// This class is not threadsafe.
+RTC_EXPORT
@interface RTCFileLogger : NSObject
// The severity level to capture. The default is kRTCFileLoggerSeverityInfo.
diff --git a/chromium/third_party/webrtc/api/objc/RTCIceCandidate.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCIceCandidate.h
index e521ae04f6d..9f472b8df05 100644
--- a/chromium/third_party/webrtc/api/objc/RTCIceCandidate.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCIceCandidate.h
@@ -10,8 +10,11 @@
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
NS_ASSUME_NONNULL_BEGIN
+RTC_EXPORT
@interface RTCIceCandidate : NSObject
/**
diff --git a/chromium/third_party/webrtc/api/objc/RTCIceServer.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCIceServer.h
index 487588ebc48..ff5489cb412 100644
--- a/chromium/third_party/webrtc/api/objc/RTCIceServer.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCIceServer.h
@@ -10,8 +10,11 @@
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
NS_ASSUME_NONNULL_BEGIN
+RTC_EXPORT
@interface RTCIceServer : NSObject
/** URI(s) for this server represented as NSStrings. */
diff --git a/chromium/third_party/webrtc/base/objc/RTCLogging.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCLogging.h
index fbc4ffade27..e4f19203829 100644
--- a/chromium/third_party/webrtc/base/objc/RTCLogging.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCLogging.h
@@ -10,6 +10,8 @@
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
// Subset of rtc::LoggingSeverity.
typedef NS_ENUM(NSInteger, RTCLoggingSeverity) {
RTCLoggingSeverityVerbose,
@@ -18,24 +20,16 @@ typedef NS_ENUM(NSInteger, RTCLoggingSeverity) {
RTCLoggingSeverityError,
};
-#if defined(__cplusplus)
-extern "C" void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string);
-extern "C" void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity);
-extern "C" NSString* RTCFileName(const char* filePath);
-#else
-
// Wrapper for C++ LOG(sev) macros.
// Logs the log string to the webrtc logstream for the given severity.
-extern void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string);
+RTC_EXTERN void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string);
// Wrapper for rtc::LogMessage::LogToDebug.
// Sets the minimum severity to be logged to console.
-extern void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity);
+RTC_EXTERN void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity);
// Returns the filename with the path prefix removed.
-extern NSString* RTCFileName(const char* filePath);
-
-#endif
+RTC_EXTERN NSString* RTCFileName(const char* filePath);
// Some convenience macros.
diff --git a/chromium/third_party/webrtc/base/objc/RTCMacros.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMacros.h
index 4979a2b87f5..f9f15c37d39 100644
--- a/chromium/third_party/webrtc/base/objc/RTCMacros.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMacros.h
@@ -11,10 +11,12 @@
#ifndef WEBRTC_BASE_OBJC_RTC_MACROS_H_
#define WEBRTC_BASE_OBJC_RTC_MACROS_H_
+#define RTC_EXPORT __attribute__((visibility("default")))
+
#if defined(__cplusplus)
-#define RTC_EXPORT extern "C"
+#define RTC_EXTERN extern "C" RTC_EXPORT
#else
-#define RTC_EXPORT extern
+#define RTC_EXTERN extern RTC_EXPORT
#endif
#ifdef __OBJC__
diff --git a/chromium/third_party/webrtc/api/objc/RTCMediaConstraints.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMediaConstraints.h
index a8ad39142e3..adc73f2f432 100644
--- a/chromium/third_party/webrtc/api/objc/RTCMediaConstraints.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMediaConstraints.h
@@ -10,8 +10,11 @@
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
NS_ASSUME_NONNULL_BEGIN
+RTC_EXPORT
@interface RTCMediaConstraints : NSObject
- (instancetype)init NS_UNAVAILABLE;
diff --git a/chromium/third_party/webrtc/api/objc/RTCMediaStream.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMediaStream.h
index 50ae7df925c..b97960d436c 100644
--- a/chromium/third_party/webrtc/api/objc/RTCMediaStream.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMediaStream.h
@@ -10,12 +10,15 @@
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
NS_ASSUME_NONNULL_BEGIN
@class RTCAudioTrack;
@class RTCPeerConnectionFactory;
@class RTCVideoTrack;
+RTC_EXPORT
@interface RTCMediaStream : NSObject
/** The audio tracks in this stream. */
diff --git a/chromium/third_party/webrtc/api/objc/RTCMediaStreamTrack.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMediaStreamTrack.h
index b8576bdf3bd..c42f0cfb1a0 100644
--- a/chromium/third_party/webrtc/api/objc/RTCMediaStreamTrack.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCMediaStreamTrack.h
@@ -10,6 +10,8 @@
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
/**
* Represents the state of the track. This exposes the same states in C++.
*/
@@ -20,6 +22,10 @@ typedef NS_ENUM(NSInteger, RTCMediaStreamTrackState) {
NS_ASSUME_NONNULL_BEGIN
+RTC_EXTERN NSString * const kRTCMediaStreamTrackKindAudio;
+RTC_EXTERN NSString * const kRTCMediaStreamTrackKindVideo;
+
+RTC_EXPORT
@interface RTCMediaStreamTrack : NSObject
/**
diff --git a/chromium/third_party/webrtc/api/objc/RTCNSGLVideoView.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h
index 27eb31e9af1..d9ed6fb9f26 100644
--- a/chromium/third_party/webrtc/api/objc/RTCNSGLVideoView.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h
@@ -8,13 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#if TARGET_OS_IPHONE
-#error "This file targets OSX."
-#endif
+#if !TARGET_OS_IPHONE
#import <AppKit/NSOpenGLView.h>
-#import "RTCVideoRenderer.h"
+#import <WebRTC/RTCVideoRenderer.h>
NS_ASSUME_NONNULL_BEGIN
@@ -32,3 +30,5 @@ NS_ASSUME_NONNULL_BEGIN
@end
NS_ASSUME_NONNULL_END
+
+#endif
diff --git a/chromium/third_party/webrtc/api/objc/RTCPeerConnection.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnection.h
index e0f9b78e8eb..2ba8661d13a 100644
--- a/chromium/third_party/webrtc/api/objc/RTCPeerConnection.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnection.h
@@ -10,6 +10,8 @@
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
@class RTCConfiguration;
@class RTCDataChannel;
@class RTCDataChannelConfiguration;
@@ -18,6 +20,8 @@
@class RTCMediaStream;
@class RTCMediaStreamTrack;
@class RTCPeerConnectionFactory;
+@class RTCRtpReceiver;
+@class RTCRtpSender;
@class RTCSessionDescription;
@class RTCStatsReport;
@@ -64,6 +68,7 @@ typedef NS_ENUM(NSInteger, RTCStatsOutputLevel) {
@class RTCPeerConnection;
+RTC_EXPORT
@protocol RTCPeerConnectionDelegate <NSObject>
/** Called when the SignalingState changed. */
@@ -99,7 +104,7 @@ typedef NS_ENUM(NSInteger, RTCStatsOutputLevel) {
@end
-
+RTC_EXPORT
@interface RTCPeerConnection : NSObject
/** The object that will be notifed about events such as state changes and
@@ -115,6 +120,18 @@ typedef NS_ENUM(NSInteger, RTCStatsOutputLevel) {
@property(nonatomic, readonly) RTCIceConnectionState iceConnectionState;
@property(nonatomic, readonly) RTCIceGatheringState iceGatheringState;
+/** Gets all RTCRtpSenders associated with this peer connection.
+ * Note: reading this property returns different instances of RTCRtpSender.
+ * Use isEqual: instead of == to compare RTCRtpSender instances.
+ */
+@property(nonatomic, readonly) NSArray<RTCRtpSender *> *senders;
+
+/** Gets all RTCRtpReceivers associated with this peer connection.
+ * Note: reading this property returns different instances of RTCRtpReceiver.
+ * Use isEqual: instead of == to compare RTCRtpReceiver instances.
+ */
+@property(nonatomic, readonly) NSArray<RTCRtpReceiver *> *receivers;
+
- (instancetype)init NS_UNAVAILABLE;
/** Sets the PeerConnection's global configuration to |configuration|.
@@ -161,6 +178,16 @@ typedef NS_ENUM(NSInteger, RTCStatsOutputLevel) {
@end
+@interface RTCPeerConnection (Media)
+
+/**
+ * Create an RTCRtpSender with the specified kind and media stream ID.
+ * See RTCMediaStreamTrack.h for available kinds.
+ */
+- (RTCRtpSender *)senderWithKind:(NSString *)kind streamId:(NSString *)streamId;
+
+@end
+
@interface RTCPeerConnection (DataChannel)
/** Create a new data channel with the given label and configuration. */
diff --git a/chromium/third_party/webrtc/api/objc/RTCPeerConnectionFactory.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnectionFactory.h
index f98a551846b..f21c107581e 100644
--- a/chromium/third_party/webrtc/api/objc/RTCPeerConnectionFactory.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCPeerConnectionFactory.h
@@ -10,6 +10,8 @@
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
NS_ASSUME_NONNULL_BEGIN
@class RTCAVFoundationVideoSource;
@@ -22,6 +24,7 @@ NS_ASSUME_NONNULL_BEGIN
@class RTCVideoTrack;
@protocol RTCPeerConnectionDelegate;
+RTC_EXPORT
@interface RTCPeerConnectionFactory : NSObject
- (instancetype)init NS_DESIGNATED_INITIALIZER;
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpCodecParameters.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpCodecParameters.h
new file mode 100644
index 00000000000..fef8429803c
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpCodecParameters.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import <WebRTC/RTCMacros.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_EXTERN const NSString * const kRTCRtxCodecMimeType;
+RTC_EXTERN const NSString * const kRTCRedCodecMimeType;
+RTC_EXTERN const NSString * const kRTCUlpfecCodecMimeType;
+RTC_EXTERN const NSString * const kRTCOpusCodecMimeType;
+RTC_EXTERN const NSString * const kRTCIsacCodecMimeType;
+RTC_EXTERN const NSString * const kRTCL16CodecMimeType;
+RTC_EXTERN const NSString * const kRTCG722CodecMimeType;
+RTC_EXTERN const NSString * const kRTCIlbcCodecMimeType;
+RTC_EXTERN const NSString * const kRTCPcmuCodecMimeType;
+RTC_EXTERN const NSString * const kRTCPcmaCodecMimeType;
+RTC_EXTERN const NSString * const kRTCDtmfCodecMimeType;
+RTC_EXTERN const NSString * const kRTCComfortNoiseCodecMimeType;
+RTC_EXTERN const NSString * const kRTCVp8CodecMimeType;
+RTC_EXTERN const NSString * const kRTCVp9CodecMimeType;
+RTC_EXTERN const NSString * const kRTCH264CodecMimeType;
+
+/** Defined in http://w3c.github.io/webrtc-pc/#idl-def-RTCRtpCodecParameters */
+RTC_EXPORT
+@interface RTCRtpCodecParameters : NSObject
+
+/** The RTP payload type. */
+@property(nonatomic, assign) int payloadType;
+
+/**
+ * The codec MIME type. Valid types are listed in:
+ * http://www.iana.org/assignments/rtp-parameters/rtp-parameters.xhtml#rtp-parameters-2
+ *
+ * Several supported types are represented by the constants above.
+ */
+@property(nonatomic, nonnull) NSString *mimeType;
+
+/** The codec clock rate expressed in Hertz. */
+@property(nonatomic, assign) int clockRate;
+
+/** The number of channels (mono=1, stereo=2). */
+@property(nonatomic, assign) int channels;
+
+- (instancetype)init NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpEncodingParameters.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpEncodingParameters.h
new file mode 100644
index 00000000000..65b8deafc08
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpEncodingParameters.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import <WebRTC/RTCMacros.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_EXPORT
+@interface RTCRtpEncodingParameters : NSObject
+
+/** Controls whether the encoding is currently transmitted. */
+@property(nonatomic, assign) BOOL isActive;
+
+/** The maximum bitrate to use for the encoding, or nil if there is no
+ * limit.
+ */
+@property(nonatomic, copy, nullable) NSNumber *maxBitrateBps;
+
+- (instancetype)init NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpParameters.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpParameters.h
new file mode 100644
index 00000000000..bdebf84884c
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpParameters.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import <WebRTC/RTCMacros.h>
+#import <WebRTC/RTCRtpCodecParameters.h>
+#import <WebRTC/RTCRtpEncodingParameters.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_EXPORT
+@interface RTCRtpParameters : NSObject
+
+/** The currently active encodings in the order of preference. */
+@property(nonatomic, copy) NSArray<RTCRtpEncodingParameters *> *encodings;
+
+/** The negotiated set of send codecs in order of preference. */
+@property(nonatomic, copy) NSArray<RTCRtpCodecParameters *> *codecs;
+
+- (instancetype)init NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpReceiver.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpReceiver.h
new file mode 100644
index 00000000000..e98a8f3bea1
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpReceiver.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import <WebRTC/RTCMacros.h>
+#import <WebRTC/RTCMediaStreamTrack.h>
+#import <WebRTC/RTCRtpParameters.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_EXPORT
+@protocol RTCRtpReceiver <NSObject>
+
+/** A unique identifier for this receiver. */
+@property(nonatomic, readonly) NSString *receiverId;
+
+/** The currently active RTCRtpParameters, as defined in
+ * https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters.
+ *
+ * The WebRTC specification only defines RTCRtpParameters in terms of senders,
+ * but this API also applies them to receivers, similar to ORTC:
+ * http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*.
+ */
+@property(nonatomic, readonly) RTCRtpParameters *parameters;
+
+/** The RTCMediaStreamTrack associated with the receiver.
+ * Note: reading this property returns a new instance of
+ * RTCMediaStreamTrack. Use isEqual: instead of == to compare
+ * RTCMediaStreamTrack instances.
+ */
+@property(nonatomic, readonly) RTCMediaStreamTrack *track;
+
+@end
+
+RTC_EXPORT
+@interface RTCRtpReceiver : NSObject <RTCRtpReceiver>
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpSender.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpSender.h
new file mode 100644
index 00000000000..d910c6ceb28
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCRtpSender.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import <WebRTC/RTCMacros.h>
+#import <WebRTC/RTCMediaStreamTrack.h>
+#import <WebRTC/RTCRtpParameters.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_EXPORT
+@protocol RTCRtpSender <NSObject>
+
+/** A unique identifier for this sender. */
+@property(nonatomic, readonly) NSString *senderId;
+
+/** The currently active RTCRtpParameters, as defined in
+ * https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters.
+ */
+@property(nonatomic, copy) RTCRtpParameters *parameters;
+
+/** The RTCMediaStreamTrack associated with the sender.
+ * Note: reading this property returns a new instance of
+ * RTCMediaStreamTrack. Use isEqual: instead of == to compare
+ * RTCMediaStreamTrack instances.
+ */
+@property(nonatomic, copy, nullable) RTCMediaStreamTrack *track;
+
+@end
+
+RTC_EXPORT
+@interface RTCRtpSender : NSObject <RTCRtpSender>
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/chromium/third_party/webrtc/base/objc/RTCSSLAdapter.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCSSLAdapter.h
index 34db8432c2f..1341a5c003c 100644
--- a/chromium/third_party/webrtc/base/objc/RTCSSLAdapter.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCSSLAdapter.h
@@ -10,11 +10,11 @@
#import <Foundation/Foundation.h>
-#import "webrtc/base/objc/RTCMacros.h"
+#import <WebRTC/RTCMacros.h>
/**
* Initialize and clean up the SSL library. Failure is fatal. These call the
* corresponding functions in webrtc/base/ssladapter.h.
*/
-RTC_EXPORT BOOL RTCInitializeSSL();
-RTC_EXPORT BOOL RTCCleanupSSL();
+RTC_EXTERN BOOL RTCInitializeSSL();
+RTC_EXTERN BOOL RTCCleanupSSL();
diff --git a/chromium/third_party/webrtc/api/objc/RTCSessionDescription.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCSessionDescription.h
index 2635633f39f..41439804a8a 100644
--- a/chromium/third_party/webrtc/api/objc/RTCSessionDescription.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCSessionDescription.h
@@ -10,6 +10,8 @@
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
/**
* Represents the session description type. This exposes the same types that are
* in C++, which doesn't include the rollback type that is in the W3C spec.
@@ -22,6 +24,7 @@ typedef NS_ENUM(NSInteger, RTCSdpType) {
NS_ASSUME_NONNULL_BEGIN
+RTC_EXPORT
@interface RTCSessionDescription : NSObject
/** The type of session description. */
diff --git a/chromium/third_party/webrtc/api/objc/RTCStatsReport.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCStatsReport.h
index f3f8907dfb3..5a559679991 100644
--- a/chromium/third_party/webrtc/api/objc/RTCStatsReport.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCStatsReport.h
@@ -10,9 +10,12 @@
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
NS_ASSUME_NONNULL_BEGIN
/** This does not currently conform to the spec. */
+RTC_EXPORT
@interface RTCStatsReport : NSObject
/** Time since 1970-01-01T00:00:00Z in milliseconds. */
diff --git a/chromium/third_party/webrtc/base/objc/RTCTracing.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCTracing.h
index 5b8e2f6ac37..136479118b5 100644
--- a/chromium/third_party/webrtc/base/objc/RTCTracing.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCTracing.h
@@ -10,12 +10,12 @@
#import <Foundation/Foundation.h>
-#import "webrtc/base/objc/RTCMacros.h"
+#import <WebRTC/RTCMacros.h>
-RTC_EXPORT void RTCSetupInternalTracer();
+RTC_EXTERN void RTCSetupInternalTracer();
/** Starts capture to specified file. Must be a valid writable path.
* Returns YES if capture starts.
*/
-RTC_EXPORT BOOL RTCStartInternalCapture(NSString *filePath);
-RTC_EXPORT void RTCStopInternalCapture();
-RTC_EXPORT void RTCShutdownInternalTracer();
+RTC_EXTERN BOOL RTCStartInternalCapture(NSString *filePath);
+RTC_EXTERN void RTCStopInternalCapture();
+RTC_EXTERN void RTCShutdownInternalTracer();
diff --git a/chromium/third_party/webrtc/api/objc/RTCVideoFrame.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h
index b44bf7331d1..efb666e4bf2 100644
--- a/chromium/third_party/webrtc/api/objc/RTCVideoFrame.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h
@@ -11,9 +11,12 @@
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
NS_ASSUME_NONNULL_BEGIN
// RTCVideoFrame is an ObjectiveC version of cricket::VideoFrame.
+RTC_EXPORT
@interface RTCVideoFrame : NSObject
/** Width without rotation applied. */
diff --git a/chromium/third_party/webrtc/api/objc/RTCVideoRenderer.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoRenderer.h
index 2fe4efb45d6..5e2e820918a 100644
--- a/chromium/third_party/webrtc/api/objc/RTCVideoRenderer.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoRenderer.h
@@ -13,10 +13,13 @@
#import <UIKit/UIKit.h>
#endif
+#import <WebRTC/RTCMacros.h>
+
NS_ASSUME_NONNULL_BEGIN
@class RTCVideoFrame;
+RTC_EXPORT
@protocol RTCVideoRenderer <NSObject>
/** The size of the frame. */
diff --git a/chromium/third_party/webrtc/api/objc/RTCVideoSource.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoSource.h
index a98fa6c6bba..e234c3e0896 100644
--- a/chromium/third_party/webrtc/api/objc/RTCVideoSource.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoSource.h
@@ -10,6 +10,8 @@
#import <Foundation/Foundation.h>
+#import <WebRTC/RTCMacros.h>
+
typedef NS_ENUM(NSInteger, RTCSourceState) {
RTCSourceStateInitializing,
RTCSourceStateLive,
@@ -19,6 +21,7 @@ typedef NS_ENUM(NSInteger, RTCSourceState) {
NS_ASSUME_NONNULL_BEGIN
+RTC_EXPORT
@interface RTCVideoSource : NSObject
/** The current state of the RTCVideoSource. */
diff --git a/chromium/third_party/webrtc/api/objc/RTCVideoTrack.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoTrack.h
index 1d883760681..899d7c34780 100644
--- a/chromium/third_party/webrtc/api/objc/RTCVideoTrack.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoTrack.h
@@ -8,7 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCMediaStreamTrack.h"
+#import <WebRTC/RTCMediaStreamTrack.h>
+
+#import <WebRTC/RTCMacros.h>
NS_ASSUME_NONNULL_BEGIN
@@ -16,6 +18,7 @@ NS_ASSUME_NONNULL_BEGIN
@class RTCPeerConnectionFactory;
@class RTCVideoSource;
+RTC_EXPORT
@interface RTCVideoTrack : RTCMediaStreamTrack
/** The video source for this video track. */
diff --git a/chromium/third_party/webrtc/build/ios/SDK/Framework/WebRTC/WebRTC.h b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/WebRTC.h
index 9f2d8454e97..bf08a0d3478 100644
--- a/chromium/third_party/webrtc/build/ios/SDK/Framework/WebRTC/WebRTC.h
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Headers/WebRTC/WebRTC.h
@@ -8,16 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import <Foundation/Foundation.h>
-
-//! Project version number for WebRTC.
-FOUNDATION_EXPORT double WebRTCVersionNumber;
-
-//! Project version string for WebRTC.
-FOUNDATION_EXPORT const unsigned char WebRTCVersionString[];
-
-#import <WebRTC/RTCAudioTrack.h>
#import <WebRTC/RTCAVFoundationVideoSource.h>
+#import <WebRTC/RTCAudioTrack.h>
#import <WebRTC/RTCCameraPreviewView.h>
#import <WebRTC/RTCConfiguration.h>
#import <WebRTC/RTCDataChannel.h>
@@ -33,15 +25,18 @@ FOUNDATION_EXPORT const unsigned char WebRTCVersionString[];
#import <WebRTC/RTCMediaConstraints.h>
#import <WebRTC/RTCMediaStream.h>
#import <WebRTC/RTCMediaStreamTrack.h>
-#import <WebRTC/RTCOpenGLVideoRenderer.h>
#import <WebRTC/RTCPeerConnection.h>
#import <WebRTC/RTCPeerConnectionFactory.h>
-#import <WebRTC/RTCSessionDescription.h>
+#import <WebRTC/RTCRtpCodecParameters.h>
+#import <WebRTC/RTCRtpEncodingParameters.h>
+#import <WebRTC/RTCRtpParameters.h>
+#import <WebRTC/RTCRtpReceiver.h>
+#import <WebRTC/RTCRtpSender.h>
#import <WebRTC/RTCSSLAdapter.h>
+#import <WebRTC/RTCSessionDescription.h>
#import <WebRTC/RTCStatsReport.h>
#import <WebRTC/RTCTracing.h>
#import <WebRTC/RTCVideoFrame.h>
#import <WebRTC/RTCVideoRenderer.h>
-#import <WebRTC/RTCVideoRendererAdapter.h>
#import <WebRTC/RTCVideoSource.h>
#import <WebRTC/RTCVideoTrack.h>
diff --git a/chromium/third_party/webrtc/build/ios/SDK/Framework/WebRTC/Info.plist b/chromium/third_party/webrtc/sdk/objc/Framework/Info.plist
index d3de8eefb69..38c437e7fed 100644
--- a/chromium/third_party/webrtc/build/ios/SDK/Framework/WebRTC/Info.plist
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Info.plist
@@ -5,13 +5,13 @@
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
- <string>$(EXECUTABLE_NAME)</string>
+ <string>WebRTC</string>
<key>CFBundleIdentifier</key>
- <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+ <string>org.webrtc.WebRTC</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
- <string>$(PRODUCT_NAME)</string>
+ <string>WebRTC</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
@@ -19,7 +19,7 @@
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
- <string>$(CURRENT_PROJECT_VERSION)</string>
+ <string>1.0</string>
<key>NSPrincipalClass</key>
<string></string>
</dict>
diff --git a/chromium/third_party/webrtc/sdk/objc/Framework/Modules/module.modulemap b/chromium/third_party/webrtc/sdk/objc/Framework/Modules/module.modulemap
new file mode 100644
index 00000000000..cd485a4e81e
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/Modules/module.modulemap
@@ -0,0 +1,6 @@
+framework module WebRTC {
+ umbrella header "WebRTC.h"
+
+ export *
+ module * { export * }
+}
diff --git a/chromium/third_party/webrtc/api/objctests/RTCConfigurationTest.mm b/chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCConfigurationTest.mm
index 8cfa04d993c..f565adbefd0 100644
--- a/chromium/third_party/webrtc/api/objctests/RTCConfigurationTest.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCConfigurationTest.mm
@@ -14,10 +14,10 @@
#include "webrtc/base/gunit.h"
-#import "webrtc/api/objc/RTCConfiguration.h"
-#import "webrtc/api/objc/RTCConfiguration+Private.h"
-#import "webrtc/api/objc/RTCIceServer.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
+#import "RTCConfiguration+Private.h"
+#import "WebRTC/RTCConfiguration.h"
+#import "WebRTC/RTCIceServer.h"
@interface RTCConfigurationTest : NSObject
- (void)testConversionToNativeConfiguration;
@@ -41,25 +41,30 @@
config.audioJitterBufferMaxPackets = maxPackets;
config.iceConnectionReceivingTimeout = timeout;
config.iceBackupCandidatePairPingInterval = interval;
+ config.continualGatheringPolicy =
+ RTCContinualGatheringPolicyGatherContinually;
- webrtc::PeerConnectionInterface::RTCConfiguration nativeConfig =
- config.nativeConfiguration;
- EXPECT_EQ(1u, nativeConfig.servers.size());
+ std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration>
+ nativeConfig([config createNativeConfiguration]);
+ EXPECT_TRUE(nativeConfig.get());
+ EXPECT_EQ(1u, nativeConfig->servers.size());
webrtc::PeerConnectionInterface::IceServer nativeServer =
- nativeConfig.servers.front();
+ nativeConfig->servers.front();
EXPECT_EQ(1u, nativeServer.urls.size());
EXPECT_EQ("stun:stun1.example.net", nativeServer.urls.front());
- EXPECT_EQ(webrtc::PeerConnectionInterface::kRelay, nativeConfig.type);
+ EXPECT_EQ(webrtc::PeerConnectionInterface::kRelay, nativeConfig->type);
EXPECT_EQ(webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle,
- nativeConfig.bundle_policy);
+ nativeConfig->bundle_policy);
EXPECT_EQ(webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate,
- nativeConfig.rtcp_mux_policy);
+ nativeConfig->rtcp_mux_policy);
EXPECT_EQ(webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled,
- nativeConfig.tcp_candidate_policy);
- EXPECT_EQ(maxPackets, nativeConfig.audio_jitter_buffer_max_packets);
- EXPECT_EQ(timeout, nativeConfig.ice_connection_receiving_timeout);
- EXPECT_EQ(interval, nativeConfig.ice_backup_candidate_pair_ping_interval);
+ nativeConfig->tcp_candidate_policy);
+ EXPECT_EQ(maxPackets, nativeConfig->audio_jitter_buffer_max_packets);
+ EXPECT_EQ(timeout, nativeConfig->ice_connection_receiving_timeout);
+ EXPECT_EQ(interval, nativeConfig->ice_backup_candidate_pair_ping_interval);
+ EXPECT_EQ(webrtc::PeerConnectionInterface::GATHER_CONTINUALLY,
+ nativeConfig->continual_gathering_policy);
}
@end
diff --git a/chromium/third_party/webrtc/api/objctests/RTCDataChannelConfigurationTest.mm b/chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCDataChannelConfigurationTest.mm
index 1b3f21b8f90..275898dba85 100644
--- a/chromium/third_party/webrtc/api/objctests/RTCDataChannelConfigurationTest.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCDataChannelConfigurationTest.mm
@@ -12,9 +12,9 @@
#include "webrtc/base/gunit.h"
-#import "webrtc/api/objc/RTCDataChannelConfiguration.h"
-#import "webrtc/api/objc/RTCDataChannelConfiguration+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
+#import "RTCDataChannelConfiguration+Private.h"
+#import "WebRTC/RTCDataChannelConfiguration.h"
@interface RTCDataChannelConfigurationTest : NSObject
- (void)testConversionToNativeDataChannelInit;
diff --git a/chromium/third_party/webrtc/api/objctests/RTCIceCandidateTest.mm b/chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCIceCandidateTest.mm
index 391db44ae10..20a9d739a96 100644
--- a/chromium/third_party/webrtc/api/objctests/RTCIceCandidateTest.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCIceCandidateTest.mm
@@ -10,11 +10,13 @@
#import <Foundation/Foundation.h>
+#include <memory>
+
#include "webrtc/base/gunit.h"
-#import "webrtc/api/objc/RTCIceCandidate.h"
-#import "webrtc/api/objc/RTCIceCandidate+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
+#import "RTCIceCandidate+Private.h"
+#import "WebRTC/RTCIceCandidate.h"
@interface RTCIceCandidateTest : NSObject
- (void)testCandidate;
@@ -32,7 +34,7 @@
sdpMLineIndex:0
sdpMid:@"audio"];
- rtc::scoped_ptr<webrtc::IceCandidateInterface> nativeCandidate =
+ std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate =
candidate.nativeCandidate;
EXPECT_EQ("audio", nativeCandidate->sdp_mid());
EXPECT_EQ(0, nativeCandidate->sdp_mline_index());
diff --git a/chromium/third_party/webrtc/api/objctests/RTCIceServerTest.mm b/chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCIceServerTest.mm
index 5116ba0c18c..a5159dcaad3 100644
--- a/chromium/third_party/webrtc/api/objctests/RTCIceServerTest.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCIceServerTest.mm
@@ -14,9 +14,9 @@
#include "webrtc/base/gunit.h"
-#import "webrtc/api/objc/RTCIceServer.h"
-#import "webrtc/api/objc/RTCIceServer+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
+#import "RTCIceServer+Private.h"
+#import "WebRTC/RTCIceServer.h"
@interface RTCIceServerTest : NSObject
- (void)testOneURLServer;
diff --git a/chromium/third_party/webrtc/api/objctests/RTCMediaConstraintsTest.mm b/chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCMediaConstraintsTest.mm
index f73a6ca895d..3413dfc039a 100644
--- a/chromium/third_party/webrtc/api/objctests/RTCMediaConstraintsTest.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCMediaConstraintsTest.mm
@@ -10,11 +10,13 @@
#import <Foundation/Foundation.h>
+#include <memory>
+
#include "webrtc/base/gunit.h"
-#import "webrtc/api/objc/RTCMediaConstraints.h"
-#import "webrtc/api/objc/RTCMediaConstraints+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
+#import "RTCMediaConstraints+Private.h"
+#import "WebRTC/RTCMediaConstraints.h"
@interface RTCMediaConstraintsTest : NSObject
- (void)testMediaConstraints;
@@ -29,7 +31,7 @@
RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
initWithMandatoryConstraints:mandatory
optionalConstraints:optional];
- rtc::scoped_ptr<webrtc::MediaConstraints> nativeConstraints =
+ std::unique_ptr<webrtc::MediaConstraints> nativeConstraints =
[constraints nativeConstraints];
webrtc::MediaConstraintsInterface::Constraints nativeMandatory =
diff --git a/chromium/third_party/webrtc/api/objctests/RTCSessionDescriptionTest.mm b/chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCSessionDescriptionTest.mm
index 37ac65cf944..90f4698b301 100644
--- a/chromium/third_party/webrtc/api/objctests/RTCSessionDescriptionTest.mm
+++ b/chromium/third_party/webrtc/sdk/objc/Framework/UnitTests/RTCSessionDescriptionTest.mm
@@ -12,9 +12,9 @@
#include "webrtc/base/gunit.h"
-#import "webrtc/api/objc/RTCSessionDescription.h"
-#import "webrtc/api/objc/RTCSessionDescription+Private.h"
-#import "webrtc/base/objc/NSString+StdString.h"
+#import "NSString+StdString.h"
+#import "RTCSessionDescription+Private.h"
+#import "WebRTC/RTCSessionDescription.h"
@interface RTCSessionDescriptionTest : NSObject
- (void)testSessionDescriptionConversion;
diff --git a/chromium/third_party/webrtc/build/WebRTC-Prefix.pch b/chromium/third_party/webrtc/sdk/objc/WebRTC-Prefix.pch
index 506d4fbb75e..c56cb48ab87 100644
--- a/chromium/third_party/webrtc/build/WebRTC-Prefix.pch
+++ b/chromium/third_party/webrtc/sdk/objc/WebRTC-Prefix.pch
@@ -18,10 +18,12 @@
#error "This file requires nullability support."
#endif
+#import <Foundation/Foundation.h>
+
// The following nonnull macros were introduced in OSX SDK 10.10.3. However,
// the bots appear to be running something older. We define them here if they
-// aren't already defined in NSObjCRuntime.h
-#include <Foundation/NSObjCRuntime.h>
+// aren't already defined in NSObjCRuntime.h, which is included by
+// Foundation/Foundation.h.
#if !defined(NS_ASSUME_NONNULL_BEGIN)
#define NS_ASSUME_NONNULL_BEGIN
diff --git a/chromium/third_party/webrtc/build/ios/SDK/WebRTC.podspec b/chromium/third_party/webrtc/sdk/objc/WebRTC.podspec
index 523b840a59d..37ab166b0c5 100644
--- a/chromium/third_party/webrtc/build/ios/SDK/WebRTC.podspec
+++ b/chromium/third_party/webrtc/sdk/objc/WebRTC.podspec
@@ -1,6 +1,6 @@
Pod::Spec.new do |s|
s.name = "WebRTC"
- s.version = "0.0.1"
+ s.version = "${FRAMEWORK_VERSION_NUMBER}"
s.summary = "WebRTC SDK for iOS"
s.description = <<-DESC
WebRTC is a free, open project that provides browsers and mobile
diff --git a/chromium/third_party/webrtc/sdk/sdk.gyp b/chromium/third_party/webrtc/sdk/sdk.gyp
new file mode 100644
index 00000000000..5bfbb249ea6
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/sdk.gyp
@@ -0,0 +1,274 @@
+# Copyright 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'includes': [
+ '../build/common.gypi',
+ 'sdk.gypi',
+ ],
+ 'conditions': [
+ ['OS=="ios" or (OS=="mac" and mac_deployment_target=="10.7")', {
+ 'targets': [
+ {
+ 'target_name': 'rtc_sdk_common_objc',
+ 'type': 'static_library',
+ 'includes': [ '../build/objc_common.gypi' ],
+ 'dependencies': [
+ '../base/base.gyp:rtc_base',
+ ],
+ 'include_dirs': [
+ 'objc/Framework/Classes',
+ 'objc/Framework/Headers',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ 'objc/Framework/Classes',
+ 'objc/Framework/Headers',
+ ],
+ },
+ 'sources': [
+ 'objc/Framework/Classes/NSString+StdString.h',
+ 'objc/Framework/Classes/NSString+StdString.mm',
+ 'objc/Framework/Classes/RTCDispatcher.m',
+ 'objc/Framework/Classes/RTCFieldTrials.mm',
+ 'objc/Framework/Classes/RTCLogging.mm',
+ 'objc/Framework/Classes/RTCSSLAdapter.mm',
+ 'objc/Framework/Classes/RTCTracing.mm',
+ 'objc/Framework/Headers/WebRTC/RTCDispatcher.h',
+ 'objc/Framework/Headers/WebRTC/RTCFieldTrials.h',
+ 'objc/Framework/Headers/WebRTC/RTCLogging.h',
+ 'objc/Framework/Headers/WebRTC/RTCMacros.h',
+ 'objc/Framework/Headers/WebRTC/RTCSSLAdapter.h',
+ 'objc/Framework/Headers/WebRTC/RTCTracing.h',
+ ],
+ 'conditions': [
+ ['OS=="ios"', {
+ 'sources': [
+ 'objc/Framework/Classes/RTCCameraPreviewView.m',
+ 'objc/Framework/Classes/RTCUIApplication.h',
+ 'objc/Framework/Classes/RTCUIApplication.mm',
+ 'objc/Framework/Headers/WebRTC/RTCCameraPreviewView.h',
+ ],
+ 'link_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework AVFoundation',
+ ],
+ },
+ },
+ }], # OS=="ios"
+ ['build_with_chromium==0', {
+ 'sources': [
+ 'objc/Framework/Classes/RTCFileLogger.mm',
+ 'objc/Framework/Headers/WebRTC/RTCFileLogger.h',
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'rtc_sdk_peerconnection_objc',
+ 'type': 'static_library',
+ 'includes': [ '../build/objc_common.gypi' ],
+ 'dependencies': [
+ '<(webrtc_root)/api/api.gyp:libjingle_peerconnection',
+ 'rtc_sdk_common_objc',
+ ],
+ 'include_dirs': [
+ 'objc/Framework/Classes',
+ 'objc/Framework/Headers',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ 'objc/Framework/Classes',
+ 'objc/Framework/Headers',
+ ],
+ },
+ 'link_settings': {
+ 'libraries': [
+ '-lstdc++',
+ ],
+ }, # link_settings
+ 'sources': [
+ 'objc/Framework/Classes/RTCAudioTrack+Private.h',
+ 'objc/Framework/Classes/RTCAudioTrack.mm',
+ 'objc/Framework/Classes/RTCConfiguration+Private.h',
+ 'objc/Framework/Classes/RTCConfiguration.mm',
+ 'objc/Framework/Classes/RTCDataChannel+Private.h',
+ 'objc/Framework/Classes/RTCDataChannel.mm',
+ 'objc/Framework/Classes/RTCDataChannelConfiguration+Private.h',
+ 'objc/Framework/Classes/RTCDataChannelConfiguration.mm',
+ 'objc/Framework/Classes/RTCIceCandidate+Private.h',
+ 'objc/Framework/Classes/RTCIceCandidate.mm',
+ 'objc/Framework/Classes/RTCIceServer+Private.h',
+ 'objc/Framework/Classes/RTCIceServer.mm',
+ 'objc/Framework/Classes/RTCMediaConstraints+Private.h',
+ 'objc/Framework/Classes/RTCMediaConstraints.mm',
+ 'objc/Framework/Classes/RTCMediaStream+Private.h',
+ 'objc/Framework/Classes/RTCMediaStream.mm',
+ 'objc/Framework/Classes/RTCMediaStreamTrack+Private.h',
+ 'objc/Framework/Classes/RTCMediaStreamTrack.mm',
+ 'objc/Framework/Classes/RTCOpenGLVideoRenderer.h',
+ 'objc/Framework/Classes/RTCOpenGLVideoRenderer.mm',
+ 'objc/Framework/Classes/RTCPeerConnection+DataChannel.mm',
+ 'objc/Framework/Classes/RTCPeerConnection+Private.h',
+ 'objc/Framework/Classes/RTCPeerConnection+Stats.mm',
+ 'objc/Framework/Classes/RTCPeerConnection.mm',
+ 'objc/Framework/Classes/RTCPeerConnectionFactory+Private.h',
+ 'objc/Framework/Classes/RTCPeerConnectionFactory.mm',
+ 'objc/Framework/Classes/RTCRtpCodecParameters+Private.h',
+ 'objc/Framework/Classes/RTCRtpCodecParameters.mm',
+ 'objc/Framework/Classes/RTCRtpEncodingParameters+Private.h',
+ 'objc/Framework/Classes/RTCRtpEncodingParameters.mm',
+ 'objc/Framework/Classes/RTCRtpParameters+Private.h',
+ 'objc/Framework/Classes/RTCRtpParameters.mm',
+ 'objc/Framework/Classes/RTCRtpReceiver+Private.h',
+ 'objc/Framework/Classes/RTCRtpReceiver.mm',
+ 'objc/Framework/Classes/RTCRtpSender+Private.h',
+ 'objc/Framework/Classes/RTCRtpSender.mm',
+ 'objc/Framework/Classes/RTCSessionDescription+Private.h',
+ 'objc/Framework/Classes/RTCSessionDescription.mm',
+ 'objc/Framework/Classes/RTCStatsReport+Private.h',
+ 'objc/Framework/Classes/RTCStatsReport.mm',
+ 'objc/Framework/Classes/RTCVideoFrame+Private.h',
+ 'objc/Framework/Classes/RTCVideoFrame.mm',
+ 'objc/Framework/Classes/RTCVideoRendererAdapter+Private.h',
+ 'objc/Framework/Classes/RTCVideoRendererAdapter.h',
+ 'objc/Framework/Classes/RTCVideoRendererAdapter.mm',
+ 'objc/Framework/Classes/RTCVideoSource+Private.h',
+ 'objc/Framework/Classes/RTCVideoSource.mm',
+ 'objc/Framework/Classes/RTCVideoTrack+Private.h',
+ 'objc/Framework/Classes/RTCVideoTrack.mm',
+ 'objc/Framework/Headers/WebRTC/RTCAudioTrack.h',
+ 'objc/Framework/Headers/WebRTC/RTCConfiguration.h',
+ 'objc/Framework/Headers/WebRTC/RTCDataChannel.h',
+ 'objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h',
+ 'objc/Framework/Headers/WebRTC/RTCIceCandidate.h',
+ 'objc/Framework/Headers/WebRTC/RTCIceServer.h',
+ 'objc/Framework/Headers/WebRTC/RTCMediaConstraints.h',
+ 'objc/Framework/Headers/WebRTC/RTCMediaStream.h',
+ 'objc/Framework/Headers/WebRTC/RTCMediaStreamTrack.h',
+ 'objc/Framework/Headers/WebRTC/RTCPeerConnection.h',
+ 'objc/Framework/Headers/WebRTC/RTCPeerConnectionFactory.h',
+ 'objc/Framework/Headers/WebRTC/RTCRtpCodecParameters.h',
+ 'objc/Framework/Headers/WebRTC/RTCRtpEncodingParameters.h',
+ 'objc/Framework/Headers/WebRTC/RTCRtpParameters.h',
+ 'objc/Framework/Headers/WebRTC/RTCRtpReceiver.h',
+ 'objc/Framework/Headers/WebRTC/RTCRtpSender.h',
+ 'objc/Framework/Headers/WebRTC/RTCSessionDescription.h',
+ 'objc/Framework/Headers/WebRTC/RTCStatsReport.h',
+ 'objc/Framework/Headers/WebRTC/RTCVideoFrame.h',
+ 'objc/Framework/Headers/WebRTC/RTCVideoRenderer.h',
+ 'objc/Framework/Headers/WebRTC/RTCVideoSource.h',
+ 'objc/Framework/Headers/WebRTC/RTCVideoTrack.h',
+ ], # sources
+ 'conditions': [
+ ['OS=="ios"', {
+ 'sources': [
+ 'objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h',
+ 'objc/Framework/Classes/RTCAVFoundationVideoSource.mm',
+ 'objc/Framework/Classes/RTCEAGLVideoView.m',
+ 'objc/Framework/Classes/avfoundationvideocapturer.h',
+ 'objc/Framework/Classes/avfoundationvideocapturer.mm',
+ 'objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h',
+ 'objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h',
+ ],
+ 'link_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework CoreGraphics',
+ '-framework GLKit',
+ '-framework OpenGLES',
+ '-framework QuartzCore',
+ ],
+ },
+ }, # link_settings
+ }], # OS=="ios"
+ ['OS=="mac"', {
+ 'sources': [
+ 'objc/Framework/Classes/RTCNSGLVideoView.m',
+ 'objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h',
+ ],
+ 'link_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework OpenGL',
+ ],
+ },
+ },
+ }],
+ ], # conditions
+ }, # rtc_sdk_peerconnection_objc
+ {
+ 'target_name': 'rtc_sdk_framework_objc',
+ 'type': 'shared_library',
+ 'product_name': 'WebRTC',
+ 'mac_bundle': 1,
+ 'includes': [ '../build/objc_common.gypi' ],
+ # Slightly hacky, but we need to re-declare files here that are C
+ # interfaces because otherwise they will be dead-stripped during
+ # linking (ObjC classes cannot be dead-stripped). We might consider
+ # just only using ObjC interfaces.
+ 'sources': [
+ 'objc/Framework/Classes/RTCFieldTrials.mm',
+ 'objc/Framework/Classes/RTCLogging.mm',
+ 'objc/Framework/Classes/RTCSSLAdapter.mm',
+ 'objc/Framework/Classes/RTCTracing.mm',
+ 'objc/Framework/Headers/WebRTC/RTCFieldTrials.h',
+ 'objc/Framework/Headers/WebRTC/RTCLogging.h',
+ 'objc/Framework/Headers/WebRTC/RTCSSLAdapter.h',
+ 'objc/Framework/Headers/WebRTC/RTCTracing.h',
+ 'objc/Framework/Headers/WebRTC/WebRTC.h',
+ 'objc/Framework/Modules/module.modulemap',
+ ],
+ 'mac_framework_headers': [
+ '<!@(find -E objc/Framework/Headers -regex ".*(h)")',
+ ],
+ 'dependencies': [
+ '<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
+ 'rtc_sdk_peerconnection_objc',
+ ],
+ 'xcode_settings': {
+ 'CODE_SIGNING_REQUIRED': 'NO',
+ 'CODE_SIGN_IDENTITY': '',
+ 'DEFINES_MODULE': 'YES',
+ 'INFOPLIST_FILE': 'objc/Framework/Info.plist',
+ 'LD_DYLIB_INSTALL_NAME': '@rpath/WebRTC.framework/WebRTC',
+ 'MODULEMAP_FILE': '<(webrtc_root)/sdk/Framework/Modules/module.modulemap',
+ },
+ 'link_settings': {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': [
+ '-framework AVFoundation',
+ '-framework AudioToolbox',
+ '-framework CoreGraphics',
+ '-framework CoreMedia',
+ '-framework GLKit',
+ '-framework VideoToolbox',
+ ],
+ },
+ }, # link_settings
+ 'conditions': [
+ # TODO(tkchin): Generate WebRTC.h based off of
+ # mac_framework_headers instead of hard-coding. Ok for now since we
+ # only really care about dynamic lib on iOS outside of chromium.
+ ['OS!="mac"', {
+ 'mac_framework_headers!': [
+ 'objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h',
+ ],
+ }],
+ ['build_with_chromium==1', {
+ 'mac_framework_headers!': [
+ 'objc/Framework/Headers/WebRTC/RTCFileLogger.h',
+ ],
+ }],
+ ], # conditions
+ }, # rtc_sdk_framework_objc
+ ], # targets
+ }], # OS=="ios" or (OS=="mac" and mac_deployment_target=="10.7")
+ ],
+}
diff --git a/chromium/third_party/webrtc/sdk/sdk.gypi b/chromium/third_party/webrtc/sdk/sdk.gypi
new file mode 100644
index 00000000000..8f8ee97a9d2
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/sdk.gypi
@@ -0,0 +1,26 @@
+# Copyright 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'target_defaults': {
+ 'configurations': {
+ 'Profile': {
+ 'xcode_settings': {
+ 'DEBUG_INFORMARTION_FORMAT': 'dwarf-with-dsym',
+ # We manually strip using strip -S and strip -x. We need to run
+ # dsymutil ourselves so we need symbols around before that.
+ 'DEPLOYMENT_POSTPROCESSING': 'NO',
+ 'GCC_OPTIMIZATION_LEVEL': 's',
+ 'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'NO',
+ 'USE_HEADERMAP': 'YES',
+ },
+ },
+ },
+ },
+}
diff --git a/chromium/third_party/webrtc/sdk/sdk_tests.gyp b/chromium/third_party/webrtc/sdk/sdk_tests.gyp
new file mode 100644
index 00000000000..70e8e7b901f
--- /dev/null
+++ b/chromium/third_party/webrtc/sdk/sdk_tests.gyp
@@ -0,0 +1,42 @@
+# Copyright 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'includes': [ '../build/common.gypi', ],
+ 'conditions': [
+ ['OS=="ios" or (OS=="mac" and mac_deployment_target=="10.7")', {
+ 'targets': [
+ {
+ 'target_name': 'rtc_sdk_peerconnection_objc_tests',
+ 'type': 'executable',
+ 'includes': [
+ '../build/objc_common.gypi',
+ ],
+ 'dependencies': [
+ '<(webrtc_root)/base/base_tests.gyp:rtc_base_tests_utils',
+ '<(webrtc_root)/sdk/sdk.gyp:rtc_sdk_peerconnection_objc',
+ ],
+ 'sources': [
+ 'objc/Framework/UnitTests/RTCConfigurationTest.mm',
+ 'objc/Framework/UnitTests/RTCDataChannelConfigurationTest.mm',
+ 'objc/Framework/UnitTests/RTCIceCandidateTest.mm',
+ 'objc/Framework/UnitTests/RTCIceServerTest.mm',
+ 'objc/Framework/UnitTests/RTCMediaConstraintsTest.mm',
+ 'objc/Framework/UnitTests/RTCSessionDescriptionTest.mm',
+ ],
+ 'xcode_settings': {
+ # |-ObjC| flag needed to make sure category method implementations
+ # are included:
+ # https://developer.apple.com/library/mac/qa/qa1490/_index.html
+ 'OTHER_LDFLAGS': ['-ObjC'],
+ },
+ },
+ ],
+ }], # OS=="ios" or (OS=="mac" and mac_deployment_target=="10.7")
+ ],
+}
diff --git a/chromium/third_party/webrtc/stream.h b/chromium/third_party/webrtc/stream.h
deleted file mode 100644
index 5afab0f200c..00000000000
--- a/chromium/third_party/webrtc/stream.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#ifndef WEBRTC_STREAM_H_
-#define WEBRTC_STREAM_H_
-
-#include "webrtc/common_types.h"
-
-namespace webrtc {
-
-enum NetworkState {
- kNetworkUp,
- kNetworkDown,
-};
-
-// Common base class for streams.
-class Stream {
- public:
- // Starts stream activity.
- // When a stream is active, it can receive, process and deliver packets.
- virtual void Start() = 0;
- // Stops stream activity.
- // When a stream is stopped, it can't receive, process or deliver packets.
- virtual void Stop() = 0;
- // Called to notify that network state has changed, so that the stream can
- // respond, e.g. by pausing or resuming activity.
- virtual void SignalNetworkState(NetworkState state) = 0;
- // Called when a RTCP packet is received.
- virtual bool DeliverRtcp(const uint8_t* packet, size_t length) = 0;
-
- protected:
- virtual ~Stream() {}
-};
-
-// Common base class for receive streams.
-class ReceiveStream : public Stream {
- public:
- // Called when a RTP packet is received.
- virtual bool DeliverRtp(const uint8_t* packet,
- size_t length,
- const PacketTime& packet_time) = 0;
-};
-
-// Common base class for send streams.
-// A tag class that denotes send stream type.
-class SendStream : public Stream {};
-
-} // namespace webrtc
-
-#endif // WEBRTC_STREAM_H_
diff --git a/chromium/third_party/webrtc/supplement.gypi b/chromium/third_party/webrtc/supplement.gypi
index 2cd5400f224..f1974d73d83 100644
--- a/chromium/third_party/webrtc/supplement.gypi
+++ b/chromium/third_party/webrtc/supplement.gypi
@@ -13,9 +13,6 @@
'build_with_chromium': 0,
'conditions': [
['OS=="ios"', {
- # Default to using BoringSSL on iOS.
- 'use_openssl%': 1,
-
# Set target_subarch for if not already set. This is needed because the
# Chromium iOS toolchain relies on target_subarch being set.
'conditions': [
@@ -27,6 +24,11 @@
}],
],
}],
+ ['OS=="android"', {
+ # MJPEG capture is not used on Android. Disable to reduce
+ # libjingle_peerconnection_so file size.
+ 'libyuv_disable_jpeg%': 1,
+ }],
],
},
'target_defaults': {
diff --git a/chromium/third_party/webrtc/system_wrappers/BUILD.gn b/chromium/third_party/webrtc/system_wrappers/BUILD.gn
index aff29de502a..66e6df5cc1a 100644
--- a/chromium/third_party/webrtc/system_wrappers/BUILD.gn
+++ b/chromium/third_party/webrtc/system_wrappers/BUILD.gn
@@ -34,7 +34,6 @@ static_library("system_wrappers") {
"include/static_instance.h",
"include/stl_util.h",
"include/stringize_macros.h",
- "include/tick_util.h",
"include/timestamp_extrapolator.h",
"include/trace.h",
"include/utf_util_win.h",
@@ -65,7 +64,6 @@ static_library("system_wrappers") {
"source/rw_lock_winxp_win.h",
"source/sleep.cc",
"source/sort.cc",
- "source/tick_util.cc",
"source/timestamp_extrapolator.cc",
"source/trace_impl.cc",
"source/trace_impl.h",
diff --git a/chromium/third_party/webrtc/system_wrappers/include/aligned_malloc.h b/chromium/third_party/webrtc/system_wrappers/include/aligned_malloc.h
index 277abec020d..bdd82ccfd8c 100644
--- a/chromium/third_party/webrtc/system_wrappers/include/aligned_malloc.h
+++ b/chromium/third_party/webrtc/system_wrappers/include/aligned_malloc.h
@@ -46,8 +46,8 @@ T* AlignedMalloc(size_t size, size_t alignment) {
return reinterpret_cast<T*>(AlignedMalloc(size, alignment));
}
-// Deleter for use with scoped_ptr. E.g., use as
-// scoped_ptr<Foo, AlignedFreeDeleter> foo;
+// Deleter for use with unique_ptr. E.g., use as
+// std::unique_ptr<Foo, AlignedFreeDeleter> foo;
struct AlignedFreeDeleter {
inline void operator()(void* ptr) const {
AlignedFree(ptr);
diff --git a/chromium/third_party/webrtc/system_wrappers/include/clock.h b/chromium/third_party/webrtc/system_wrappers/include/clock.h
index f443057bea3..a209770261f 100644
--- a/chromium/third_party/webrtc/system_wrappers/include/clock.h
+++ b/chromium/third_party/webrtc/system_wrappers/include/clock.h
@@ -11,7 +11,8 @@
#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CLOCK_H_
#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_CLOCK_H_
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
#include "webrtc/typedefs.h"
@@ -76,7 +77,7 @@ class SimulatedClock : public Clock {
private:
int64_t time_us_;
- rtc::scoped_ptr<RWLockWrapper> lock_;
+ std::unique_ptr<RWLockWrapper> lock_;
};
}; // namespace webrtc
diff --git a/chromium/third_party/webrtc/system_wrappers/include/data_log_impl.h b/chromium/third_party/webrtc/system_wrappers/include/data_log_impl.h
index 35519609b99..6d59fa8c66b 100644
--- a/chromium/third_party/webrtc/system_wrappers/include/data_log_impl.h
+++ b/chromium/third_party/webrtc/system_wrappers/include/data_log_impl.h
@@ -18,12 +18,12 @@
#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_DATA_LOG_IMPL_H_
#include <map>
+#include <memory>
#include <sstream>
#include <string>
#include <vector>
#include "webrtc/base/platform_thread.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -139,16 +139,16 @@ class DataLogImpl {
// Collection of tables indexed by the table name as std::string.
typedef std::map<std::string, LogTable*> TableMap;
- typedef rtc::scoped_ptr<CriticalSectionWrapper> CritSectScopedPtr;
+ typedef std::unique_ptr<CriticalSectionWrapper> CritSectScopedPtr;
static CritSectScopedPtr crit_sect_;
static DataLogImpl* instance_;
int counter_;
TableMap tables_;
EventWrapper* flush_event_;
- // This is a scoped_ptr so that we don't have to create threads in the no-op
+ // This is a unique_ptr so that we don't have to create threads in the no-op
// impl.
- rtc::scoped_ptr<rtc::PlatformThread> file_writer_thread_;
+ std::unique_ptr<rtc::PlatformThread> file_writer_thread_;
RWLockWrapper* tables_lock_;
};
diff --git a/chromium/third_party/webrtc/system_wrappers/include/metrics.h b/chromium/third_party/webrtc/system_wrappers/include/metrics.h
index 1576f919107..d5e549285fa 100644
--- a/chromium/third_party/webrtc/system_wrappers/include/metrics.h
+++ b/chromium/third_party/webrtc/system_wrappers/include/metrics.h
@@ -69,6 +69,9 @@
#define RTC_HISTOGRAM_COUNTS_200(name, sample) \
RTC_HISTOGRAM_COUNTS(name, sample, 1, 200, 50)
+#define RTC_HISTOGRAM_COUNTS_500(name, sample) \
+ RTC_HISTOGRAM_COUNTS(name, sample, 1, 500, 50)
+
#define RTC_HISTOGRAM_COUNTS_1000(name, sample) \
RTC_HISTOGRAM_COUNTS(name, sample, 1, 1000, 50)
@@ -89,6 +92,9 @@
#define RTC_LOGGED_HISTOGRAM_COUNTS_200(name, sample) \
RTC_LOGGED_HISTOGRAM_COUNTS(name, sample, 1, 200, 50)
+#define RTC_LOGGED_HISTOGRAM_COUNTS_500(name, sample) \
+ RTC_LOGGED_HISTOGRAM_COUNTS(name, sample, 1, 500, 50)
+
#define RTC_LOGGED_HISTOGRAM_COUNTS_1000(name, sample) \
RTC_LOGGED_HISTOGRAM_COUNTS(name, sample, 1, 1000, 50)
@@ -175,6 +181,10 @@
RTC_HISTOGRAMS_COMMON(index, name, sample, \
RTC_HISTOGRAM_COUNTS(name, sample, 1, 200, 50))
+#define RTC_HISTOGRAMS_COUNTS_500(index, name, sample) \
+ RTC_HISTOGRAMS_COMMON(index, name, sample, \
+ RTC_HISTOGRAM_COUNTS(name, sample, 1, 500, 50))
+
#define RTC_HISTOGRAMS_COUNTS_1000(index, name, sample) \
RTC_HISTOGRAMS_COMMON(index, name, sample, \
RTC_HISTOGRAM_COUNTS(name, sample, 1, 1000, 50))
@@ -204,6 +214,10 @@
RTC_HISTOGRAMS_COMMON(index, name, sample, \
RTC_LOGGED_HISTOGRAM_COUNTS(name, sample, 1, 200, 50))
+#define RTC_LOGGED_HISTOGRAMS_COUNTS_500(index, name, sample) \
+ RTC_HISTOGRAMS_COMMON(index, name, sample, \
+ RTC_LOGGED_HISTOGRAM_COUNTS(name, sample, 1, 500, 50))
+
#define RTC_LOGGED_HISTOGRAMS_COUNTS_1000(index, name, sample) \
RTC_HISTOGRAMS_COMMON(index, name, sample, \
RTC_LOGGED_HISTOGRAM_COUNTS(name, sample, 1, 1000, 50))
diff --git a/chromium/third_party/webrtc/system_wrappers/include/tick_util.h b/chromium/third_party/webrtc/system_wrappers/include/tick_util.h
deleted file mode 100644
index 52f9b4ae4d3..00000000000
--- a/chromium/third_party/webrtc/system_wrappers/include/tick_util.h
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// System independant wrapper for polling elapsed time in ms and us.
-// The implementation works in the tick domain which can be mapped over to the
-// time domain.
-#ifndef WEBRTC_SYSTEM_WRAPPERS_INCLUDE_TICK_UTIL_H_
-#define WEBRTC_SYSTEM_WRAPPERS_INCLUDE_TICK_UTIL_H_
-
-#if _WIN32
-// Note: The Windows header must always be included before mmsystem.h
-#include <windows.h>
-#include <mmsystem.h>
-#elif WEBRTC_LINUX
-#include <time.h>
-#elif WEBRTC_MAC
-#include <mach/mach_time.h>
-#include <string.h>
-#else
-#include <sys/time.h>
-#include <time.h>
-#endif
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class TickInterval;
-
-// Class representing the current time.
-class TickTime {
- public:
- TickTime();
- explicit TickTime(int64_t ticks);
-
- // Current time in the tick domain.
- static TickTime Now();
-
- // Now in the time domain in ms.
- static int64_t MillisecondTimestamp();
-
- // Now in the time domain in us.
- static int64_t MicrosecondTimestamp();
-
- // Returns the number of ticks in the tick domain.
- int64_t Ticks() const;
-
- static int64_t MillisecondsToTicks(const int64_t ms);
-
- static int64_t TicksToMilliseconds(const int64_t ticks);
-
- static int64_t TicksToMicroseconds(const int64_t ticks);
-
- // Returns a TickTime that is ticks later than the passed TickTime.
- friend TickTime operator+(const TickTime lhs, const int64_t ticks);
- TickTime& operator+=(const int64_t& ticks);
-
- // Returns a TickInterval that is the difference in ticks beween rhs and lhs.
- friend TickInterval operator-(const TickTime& lhs, const TickTime& rhs);
-
- private:
- static int64_t QueryOsForTicks();
-
- int64_t ticks_;
-};
-
-// Represents a time delta in ticks.
-class TickInterval {
- public:
- TickInterval();
- explicit TickInterval(int64_t interval);
-
- int64_t Milliseconds() const;
- int64_t Microseconds() const;
-
- // Returns the sum of two TickIntervals as a TickInterval.
- friend TickInterval operator+(const TickInterval& lhs,
- const TickInterval& rhs);
- TickInterval& operator+=(const TickInterval& rhs);
-
- // Returns a TickInterval corresponding to rhs - lhs.
- friend TickInterval operator-(const TickInterval& lhs,
- const TickInterval& rhs);
- TickInterval& operator-=(const TickInterval& rhs);
-
- friend bool operator>(const TickInterval& lhs, const TickInterval& rhs);
- friend bool operator<=(const TickInterval& lhs, const TickInterval& rhs);
- friend bool operator<(const TickInterval& lhs, const TickInterval& rhs);
- friend bool operator>=(const TickInterval& lhs, const TickInterval& rhs);
-
- private:
- friend class TickTime;
- friend TickInterval operator-(const TickTime& lhs, const TickTime& rhs);
-
- private:
- int64_t interval_;
-};
-
-inline int64_t TickInterval::Milliseconds() const {
- return TickTime::TicksToMilliseconds(interval_);
-}
-
-inline int64_t TickInterval::Microseconds() const {
- return TickTime::TicksToMicroseconds(interval_);
-}
-
-inline TickInterval operator+(const TickInterval& lhs,
- const TickInterval& rhs) {
- return TickInterval(lhs.interval_ + rhs.interval_);
-}
-
-inline TickInterval operator-(const TickInterval& lhs,
- const TickInterval& rhs) {
- return TickInterval(lhs.interval_ - rhs.interval_);
-}
-
-inline TickInterval operator-(const TickTime& lhs, const TickTime& rhs) {
- return TickInterval(lhs.ticks_ - rhs.ticks_);
-}
-
-inline TickTime operator+(const TickTime lhs, const int64_t ticks) {
- TickTime time = lhs;
- time.ticks_ += ticks;
- return time;
-}
-
-inline bool operator>(const TickInterval& lhs, const TickInterval& rhs) {
- return lhs.interval_ > rhs.interval_;
-}
-
-inline bool operator<=(const TickInterval& lhs, const TickInterval& rhs) {
- return lhs.interval_ <= rhs.interval_;
-}
-
-inline bool operator<(const TickInterval& lhs, const TickInterval& rhs) {
- return lhs.interval_ <= rhs.interval_;
-}
-
-inline bool operator>=(const TickInterval& lhs, const TickInterval& rhs) {
- return lhs.interval_ >= rhs.interval_;
-}
-
-inline TickTime::TickTime()
- : ticks_(0) {
-}
-
-inline TickTime::TickTime(int64_t ticks)
- : ticks_(ticks) {
-}
-
-inline TickTime TickTime::Now() {
- return TickTime(QueryOsForTicks());
-}
-
-inline int64_t TickTime::Ticks() const {
- return ticks_;
-}
-
-inline TickTime& TickTime::operator+=(const int64_t& ticks) {
- ticks_ += ticks;
- return *this;
-}
-
-inline TickInterval::TickInterval() : interval_(0) {
-}
-
-inline TickInterval::TickInterval(const int64_t interval)
- : interval_(interval) {
-}
-
-inline TickInterval& TickInterval::operator+=(const TickInterval& rhs) {
- interval_ += rhs.interval_;
- return *this;
-}
-
-inline TickInterval& TickInterval::operator-=(const TickInterval& rhs) {
- interval_ -= rhs.interval_;
- return *this;
-}
-
-} // namespace webrtc
-
-#endif // WEBRTC_SYSTEM_WRAPPERS_INCLUDE_TICK_UTIL_H_
diff --git a/chromium/third_party/webrtc/system_wrappers/include/utf_util_win.h b/chromium/third_party/webrtc/system_wrappers/include/utf_util_win.h
index 0e3f2d01c60..ac91fe30fe1 100644
--- a/chromium/third_party/webrtc/system_wrappers/include/utf_util_win.h
+++ b/chromium/third_party/webrtc/system_wrappers/include/utf_util_win.h
@@ -15,16 +15,17 @@
#ifdef WIN32
#include <windows.h>
+
+#include <memory>
#include <string>
-#include "webrtc/base/scoped_ptr.h"
namespace webrtc {
inline std::wstring ToUtf16(const char* utf8, size_t len) {
int len16 = ::MultiByteToWideChar(CP_UTF8, 0, utf8, static_cast<int>(len),
NULL, 0);
- rtc::scoped_ptr<wchar_t[]> ws(new wchar_t[len16]);
+ std::unique_ptr<wchar_t[]> ws(new wchar_t[len16]);
::MultiByteToWideChar(CP_UTF8, 0, utf8, static_cast<int>(len), ws.get(),
len16);
return std::wstring(ws.get(), len16);
@@ -37,7 +38,7 @@ inline std::wstring ToUtf16(const std::string& str) {
inline std::string ToUtf8(const wchar_t* wide, size_t len) {
int len8 = ::WideCharToMultiByte(CP_UTF8, 0, wide, static_cast<int>(len),
NULL, 0, NULL, NULL);
- rtc::scoped_ptr<char[]> ns(new char[len8]);
+ std::unique_ptr<char[]> ns(new char[len8]);
::WideCharToMultiByte(CP_UTF8, 0, wide, static_cast<int>(len), ns.get(), len8,
NULL, NULL);
return std::string(ns.get(), len8);
diff --git a/chromium/third_party/webrtc/system_wrappers/source/aligned_malloc_unittest.cc b/chromium/third_party/webrtc/system_wrappers/source/aligned_malloc_unittest.cc
index 3933c2ac05d..ed6cd424db9 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/aligned_malloc_unittest.cc
+++ b/chromium/third_party/webrtc/system_wrappers/source/aligned_malloc_unittest.cc
@@ -10,6 +10,8 @@
#include "webrtc/system_wrappers/include/aligned_malloc.h"
+#include <memory>
+
#if _WIN32
#include <windows.h>
#else
@@ -17,14 +19,13 @@
#endif
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
// Returns true if |size| and |alignment| are valid combinations.
bool CorrectUsage(size_t size, size_t alignment) {
- rtc::scoped_ptr<char, AlignedFreeDeleter> scoped(
+ std::unique_ptr<char, AlignedFreeDeleter> scoped(
static_cast<char*>(AlignedMalloc(size, alignment)));
if (scoped.get() == NULL) {
return false;
@@ -37,7 +38,7 @@ TEST(AlignedMalloc, GetRightAlign) {
const size_t size = 100;
const size_t alignment = 32;
const size_t left_misalignment = 1;
- rtc::scoped_ptr<char, AlignedFreeDeleter> scoped(
+ std::unique_ptr<char, AlignedFreeDeleter> scoped(
static_cast<char*>(AlignedMalloc(size, alignment)));
EXPECT_TRUE(scoped.get() != NULL);
const uintptr_t aligned_address = reinterpret_cast<uintptr_t> (scoped.get());
diff --git a/chromium/third_party/webrtc/system_wrappers/source/clock.cc b/chromium/third_party/webrtc/system_wrappers/source/clock.cc
index 926b95a3dfd..05dabd8538c 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/clock.cc
+++ b/chromium/third_party/webrtc/system_wrappers/source/clock.cc
@@ -20,8 +20,8 @@
#endif
#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/system_wrappers/include/rw_lock_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
@@ -37,13 +37,13 @@ class RealTimeClock : public Clock {
// Return a timestamp in milliseconds relative to some arbitrary source; the
// source is fixed for this clock.
int64_t TimeInMilliseconds() const override {
- return TickTime::MillisecondTimestamp();
+ return rtc::TimeMillis();
}
// Return a timestamp in microseconds relative to some arbitrary source; the
// source is fixed for this clock.
int64_t TimeInMicroseconds() const override {
- return TickTime::MicrosecondTimestamp();
+ return rtc::TimeMicros();
}
// Retrieve an NTP absolute timestamp in seconds and fractions of a second.
diff --git a/chromium/third_party/webrtc/system_wrappers/source/condition_variable_unittest.cc b/chromium/third_party/webrtc/system_wrappers/source/condition_variable_unittest.cc
index 4b1b6cc6089..5fc6bc6b73e 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/condition_variable_unittest.cc
+++ b/chromium/third_party/webrtc/system_wrappers/source/condition_variable_unittest.cc
@@ -17,9 +17,8 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/platform_thread.h"
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
@@ -192,9 +191,9 @@ TEST(CondVarWaitTest, WaitingWaits) {
InitializeCriticalSection(&crit_sect);
ConditionVariableEventWin cond_var;
EnterCriticalSection(&crit_sect);
- int64_t start_ms = TickTime::MillisecondTimestamp();
+ int64_t start_ms = rtc::TimeMillis();
EXPECT_FALSE(cond_var.SleepCS(&crit_sect, kVeryShortWaitMs));
- int64_t end_ms = TickTime::MillisecondTimestamp();
+ int64_t end_ms = rtc::TimeMillis();
EXPECT_LE(start_ms + kVeryShortWaitMs, end_ms)
<< "actual elapsed:" << end_ms - start_ms;
LeaveCriticalSection(&crit_sect);
diff --git a/chromium/third_party/webrtc/system_wrappers/source/event_timer_posix.h b/chromium/third_party/webrtc/system_wrappers/source/event_timer_posix.h
index af3715ee8b2..599eb55ee16 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/event_timer_posix.h
+++ b/chromium/third_party/webrtc/system_wrappers/source/event_timer_posix.h
@@ -13,6 +13,8 @@
#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include <memory>
+
#include <pthread.h>
#include <time.h>
@@ -49,9 +51,9 @@ class EventTimerPosix : public EventTimerWrapper {
pthread_mutex_t mutex_;
bool event_set_;
- // TODO(pbos): Remove scoped_ptr and use PlatformThread directly.
- rtc::scoped_ptr<rtc::PlatformThread> timer_thread_;
- rtc::scoped_ptr<EventTimerPosix> timer_event_;
+ // TODO(pbos): Remove unique_ptr and use PlatformThread directly.
+ std::unique_ptr<rtc::PlatformThread> timer_thread_;
+ std::unique_ptr<EventTimerPosix> timer_event_;
timespec created_at_;
bool periodic_;
diff --git a/chromium/third_party/webrtc/system_wrappers/source/file_impl.h b/chromium/third_party/webrtc/system_wrappers/source/file_impl.h
index 06ba58200bb..51103d648ba 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/file_impl.h
+++ b/chromium/third_party/webrtc/system_wrappers/source/file_impl.h
@@ -13,7 +13,8 @@
#include <stdio.h>
-#include "webrtc/base/scoped_ptr.h"
+#include <memory>
+
#include "webrtc/system_wrappers/include/file_wrapper.h"
namespace webrtc {
@@ -52,7 +53,7 @@ class FileWrapperImpl : public FileWrapper {
int CloseFileImpl();
int FlushImpl();
- rtc::scoped_ptr<RWLockWrapper> rw_lock_;
+ std::unique_ptr<RWLockWrapper> rw_lock_;
FILE* id_;
bool managed_file_handle_;
diff --git a/chromium/third_party/webrtc/system_wrappers/source/logging_unittest.cc b/chromium/third_party/webrtc/system_wrappers/source/logging_unittest.cc
index 695b03f93a0..118c3425bf2 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/logging_unittest.cc
+++ b/chromium/third_party/webrtc/system_wrappers/source/logging_unittest.cc
@@ -13,7 +13,6 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/arraysize.h"
#include "webrtc/base/event.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp.cc b/chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp.cc
index 706d861b55c..6504737fd54 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp.cc
+++ b/chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp.cc
@@ -71,8 +71,9 @@ bool UpdateRtcpList(uint32_t ntp_secs,
for (RtcpList::iterator it = rtcp_list->begin();
it != rtcp_list->end(); ++it) {
- if (measurement.ntp_secs == (*it).ntp_secs &&
- measurement.ntp_frac == (*it).ntp_frac) {
+ if ((measurement.ntp_secs == (*it).ntp_secs &&
+ measurement.ntp_frac == (*it).ntp_frac) ||
+ (measurement.rtp_timestamp == (*it).rtp_timestamp)) {
// This RTCP has already been added to the list.
return true;
}
diff --git a/chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp_unittest.cc b/chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp_unittest.cc
index d2929f5cbc0..78e4a52716d 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp_unittest.cc
+++ b/chromium/third_party/webrtc/system_wrappers/source/rtp_to_ntp_unittest.cc
@@ -135,4 +135,44 @@ TEST(WrapAroundTests, OldRtp_OldRtcpWrapped) {
int64_t timestamp_in_ms = -1;
EXPECT_FALSE(RtpToNtpMs(timestamp, rtcp, &timestamp_in_ms));
}
+
+TEST(UpdateRtcpListTests, InjectRtcpSrWithEqualNtp) {
+ RtcpList rtcp;
+ uint32_t ntp_sec = 0;
+ uint32_t ntp_frac = 2;
+ uint32_t timestamp = 0x12345678;
+
+ bool new_sr;
+ EXPECT_TRUE(UpdateRtcpList(ntp_sec, ntp_frac, timestamp, &rtcp, &new_sr));
+ EXPECT_TRUE(new_sr);
+
+ ++timestamp;
+ EXPECT_TRUE(UpdateRtcpList(ntp_sec, ntp_frac, timestamp, &rtcp, &new_sr));
+ EXPECT_FALSE(new_sr);
+}
+
+TEST(UpdateRtcpListTests, InjectRtcpSrWithEqualTimestamp) {
+ RtcpList rtcp;
+ uint32_t ntp_sec = 0;
+ uint32_t ntp_frac = 2;
+ uint32_t timestamp = 0x12345678;
+
+ bool new_sr;
+ EXPECT_TRUE(UpdateRtcpList(ntp_sec, ntp_frac, timestamp, &rtcp, &new_sr));
+ EXPECT_TRUE(new_sr);
+
+ ++ntp_frac;
+ EXPECT_TRUE(UpdateRtcpList(ntp_sec, ntp_frac, timestamp, &rtcp, &new_sr));
+ EXPECT_FALSE(new_sr);
+}
+
+TEST(UpdateRtcpListTests, InjectRtcpSrWithZeroNtpFails) {
+ RtcpList rtcp;
+ uint32_t ntp_sec = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t timestamp = 0x12345678;
+
+ bool new_sr;
+ EXPECT_FALSE(UpdateRtcpList(ntp_sec, ntp_frac, timestamp, &rtcp, &new_sr));
+}
}; // namespace webrtc
diff --git a/chromium/third_party/webrtc/system_wrappers/source/tick_util.cc b/chromium/third_party/webrtc/system_wrappers/source/tick_util.cc
deleted file mode 100644
index 0485e429211..00000000000
--- a/chromium/third_party/webrtc/system_wrappers/source/tick_util.cc
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/system_wrappers/include/tick_util.h"
-
-#include "webrtc/base/timeutils.h"
-
-namespace webrtc {
-
-int64_t TickTime::MillisecondTimestamp() {
- return TicksToMilliseconds(TickTime::Now().Ticks());
-}
-
-int64_t TickTime::MicrosecondTimestamp() {
- return TicksToMicroseconds(TickTime::Now().Ticks());
-}
-
-int64_t TickTime::MillisecondsToTicks(const int64_t ms) {
- return ms * rtc::kNumNanosecsPerMillisec;
-}
-
-int64_t TickTime::TicksToMilliseconds(const int64_t ticks) {
- return ticks / rtc::kNumNanosecsPerMillisec;
-}
-
-int64_t TickTime::TicksToMicroseconds(const int64_t ticks) {
- return ticks / rtc::kNumNanosecsPerMicrosec;
-}
-
-// Gets the native system tick count, converted to nanoseconds.
-int64_t TickTime::QueryOsForTicks() {
- return rtc::TimeNanos();
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/system_wrappers/source/trace_impl.h b/chromium/third_party/webrtc/system_wrappers/source/trace_impl.h
index c6d81d5b0bf..182f5809a50 100644
--- a/chromium/third_party/webrtc/system_wrappers/source/trace_impl.h
+++ b/chromium/third_party/webrtc/system_wrappers/source/trace_impl.h
@@ -11,8 +11,9 @@
#ifndef WEBRTC_SYSTEM_WRAPPERS_SOURCE_TRACE_IMPL_H_
#define WEBRTC_SYSTEM_WRAPPERS_SOURCE_TRACE_IMPL_H_
+#include <memory>
+
#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/system_wrappers/include/static_instance.h"
@@ -97,7 +98,7 @@ class TraceImpl : public Trace {
uint32_t row_count_text_ GUARDED_BY(crit_);
uint32_t file_count_text_ GUARDED_BY(crit_);
- const rtc::scoped_ptr<FileWrapper> trace_file_ GUARDED_BY(crit_);
+ const std::unique_ptr<FileWrapper> trace_file_ GUARDED_BY(crit_);
rtc::CriticalSection crit_;
};
diff --git a/chromium/third_party/webrtc/system_wrappers/system_wrappers.gyp b/chromium/third_party/webrtc/system_wrappers/system_wrappers.gyp
index cd6d3ea6714..b61ecf28ee1 100644
--- a/chromium/third_party/webrtc/system_wrappers/system_wrappers.gyp
+++ b/chromium/third_party/webrtc/system_wrappers/system_wrappers.gyp
@@ -42,7 +42,6 @@
'include/static_instance.h',
'include/stl_util.h',
'include/stringize_macros.h',
- 'include/tick_util.h',
'include/timestamp_extrapolator.h',
'include/trace.h',
'include/utf_util_win.h',
@@ -77,7 +76,6 @@
'source/rw_lock_winxp_win.h',
'source/sleep.cc',
'source/sort.cc',
- 'source/tick_util.cc',
'source/timestamp_extrapolator.cc',
'source/trace_impl.cc',
'source/trace_impl.h',
diff --git a/chromium/third_party/webrtc/system_wrappers/system_wrappers_tests.gyp b/chromium/third_party/webrtc/system_wrappers/system_wrappers_tests.gyp
index 863818b1bf5..174c96c948f 100644
--- a/chromium/third_party/webrtc/system_wrappers/system_wrappers_tests.gyp
+++ b/chromium/third_party/webrtc/system_wrappers/system_wrappers_tests.gyp
@@ -56,7 +56,7 @@
},
],
'conditions': [
- ['include_tests==1 and OS=="android"', {
+ ['OS=="android"', {
'targets': [
{
'target_name': 'system_wrappers_unittests_apk_target',
@@ -66,7 +66,28 @@
],
},
],
- }],
+ 'conditions': [
+ ['test_isolation_mode != "noop"',
+ {
+ 'targets': [
+ {
+ 'target_name': 'system_wrappers_unittests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):system_wrappers_unittests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'system_wrappers_unittests_apk.isolate',
+ ],
+ },
+ ],
+ },
+ ],
+ ],
+ }], # OS=="android"
['test_isolation_mode != "noop"', {
'targets': [
{
diff --git a/chromium/third_party/webrtc/system_wrappers/system_wrappers_unittests_apk.isolate b/chromium/third_party/webrtc/system_wrappers/system_wrappers_unittests_apk.isolate
new file mode 100644
index 00000000000..974064a71f8
--- /dev/null
+++ b/chromium/third_party/webrtc/system_wrappers/system_wrappers_unittests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'system_wrappers_unittests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_system_wrappers_unittests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/system_wrappers_unittests_apk/',
+ '<(PRODUCT_DIR)/bin/run_system_wrappers_unittests',
+ 'system_wrappers_unittests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/system_wrappers/test/TestSort/TestSort.cc b/chromium/third_party/webrtc/system_wrappers/test/TestSort/TestSort.cc
index b2b9f857553..8c585abb674 100644
--- a/chromium/third_party/webrtc/system_wrappers/test/TestSort/TestSort.cc
+++ b/chromium/third_party/webrtc/system_wrappers/test/TestSort/TestSort.cc
@@ -13,8 +13,8 @@
#include <algorithm>
+#include "webrtc/base/timeutils.h"
#include "webrtc/system_wrappers/include/sort.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
// Excellent work polluting the global namespace Visual Studio...
#undef max
@@ -144,7 +144,7 @@ void RunSortTest(webrtc::Type sortType, bool keySort)
printf("Running %s Sort() tests...\n", TypeEnumToString(sortType));
}
- TickInterval accTicks;
+ int64_t accTicks;
for (int i = 0; i < NumOfTests; i++)
{
for (int j = 0; j < DataLength; j++)
@@ -159,7 +159,7 @@ void RunSortTest(webrtc::Type sortType, bool keySort)
memcpy(keyRef, key, sizeof(key));
retVal = 0;
- TickTime t0 = TickTime::Now();
+ int64_t t0 = rtc::TimeNanos();
if (keySort)
{
retVal = webrtc::KeySort(data, key, DataLength, sizeof(LotsOfData<KeyType>),
@@ -176,7 +176,7 @@ void RunSortTest(webrtc::Type sortType, bool keySort)
//std::sort(key, key + DataLength);
//qsort(key, DataLength, sizeof(KeyType), Compare<KeyType>);
}
- TickTime t1 = TickTime::Now();
+ int64_t t1 = rtc::TimeNanos();
accTicks += (t1 - t0);
if (retVal != 0)
@@ -236,7 +236,7 @@ void RunSortTest(webrtc::Type sortType, bool keySort)
printf("Compliance test passed over %d iterations\n", NumOfTests);
- int64_t executeTime = accTicks.Milliseconds();
+ int64_t executeTime = accTicks / rtc::kNumNanosecsPerMillisec;
printf("Execute time: %.2f s\n\n", (float)executeTime / 1000);
}
diff --git a/chromium/third_party/webrtc/test/call_test.cc b/chromium/third_party/webrtc/test/call_test.cc
index a9a502d92c5..768c007c3cb 100644
--- a/chromium/third_party/webrtc/test/call_test.cc
+++ b/chromium/third_party/webrtc/test/call_test.cc
@@ -15,7 +15,6 @@
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
-#include "webrtc/voice_engine/include/voe_network.h"
namespace webrtc {
namespace test {
@@ -78,9 +77,6 @@ void CallTest::RunBaseTest(BaseTest* test) {
if (test->ShouldCreateReceivers()) {
CreateMatchingReceiveConfigs(receive_transport_.get());
}
- if (num_audio_streams_ > 0)
- SetupVoiceEngineTransports(send_transport_.get(), receive_transport_.get());
-
if (num_video_streams_ > 0) {
test->ModifyVideoConfigs(&video_send_config_, &video_receive_configs_,
&video_encoder_config_);
@@ -275,6 +271,10 @@ void CallTest::CreateVideoStreams() {
}
}
+void CallTest::SetFakeVideoCaptureRotation(VideoRotation rotation) {
+ frame_generator_capturer_->SetFakeRotation(rotation);
+}
+
void CallTest::CreateAudioStreams() {
audio_send_stream_ = sender_call_->CreateAudioSendStream(audio_send_config_);
for (size_t i = 0; i < audio_receive_configs_.size(); ++i) {
@@ -306,7 +306,6 @@ void CallTest::CreateVoiceEngines() {
CreateFakeAudioDevices();
voe_send_.voice_engine = VoiceEngine::Create();
voe_send_.base = VoEBase::GetInterface(voe_send_.voice_engine);
- voe_send_.network = VoENetwork::GetInterface(voe_send_.voice_engine);
voe_send_.codec = VoECodec::GetInterface(voe_send_.voice_engine);
EXPECT_EQ(0, voe_send_.base->Init(fake_send_audio_device_.get(), nullptr));
Config voe_config;
@@ -316,35 +315,17 @@ void CallTest::CreateVoiceEngines() {
voe_recv_.voice_engine = VoiceEngine::Create();
voe_recv_.base = VoEBase::GetInterface(voe_recv_.voice_engine);
- voe_recv_.network = VoENetwork::GetInterface(voe_recv_.voice_engine);
voe_recv_.codec = VoECodec::GetInterface(voe_recv_.voice_engine);
EXPECT_EQ(0, voe_recv_.base->Init(fake_recv_audio_device_.get(), nullptr));
voe_recv_.channel_id = voe_recv_.base->CreateChannel();
EXPECT_GE(voe_recv_.channel_id, 0);
}
-void CallTest::SetupVoiceEngineTransports(PacketTransport* send_transport,
- PacketTransport* recv_transport) {
- voe_send_.transport_adapter.reset(
- new internal::TransportAdapter(send_transport));
- voe_send_.transport_adapter->Enable();
- EXPECT_EQ(0, voe_send_.network->RegisterExternalTransport(
- voe_send_.channel_id, *voe_send_.transport_adapter.get()));
-
- voe_recv_.transport_adapter.reset(
- new internal::TransportAdapter(recv_transport));
- voe_recv_.transport_adapter->Enable();
- EXPECT_EQ(0, voe_recv_.network->RegisterExternalTransport(
- voe_recv_.channel_id, *voe_recv_.transport_adapter.get()));
-}
-
void CallTest::DestroyVoiceEngines() {
voe_recv_.base->DeleteChannel(voe_recv_.channel_id);
voe_recv_.channel_id = -1;
voe_recv_.base->Release();
voe_recv_.base = nullptr;
- voe_recv_.network->Release();
- voe_recv_.network = nullptr;
voe_recv_.codec->Release();
voe_recv_.codec = nullptr;
@@ -352,8 +333,6 @@ void CallTest::DestroyVoiceEngines() {
voe_send_.channel_id = -1;
voe_send_.base->Release();
voe_send_.base = nullptr;
- voe_send_.network->Release();
- voe_send_.network = nullptr;
voe_send_.codec->Release();
voe_send_.codec = nullptr;
diff --git a/chromium/third_party/webrtc/test/call_test.h b/chromium/third_party/webrtc/test/call_test.h
index eb33c0f1c40..ebc2bb2a544 100644
--- a/chromium/third_party/webrtc/test/call_test.h
+++ b/chromium/third_party/webrtc/test/call_test.h
@@ -14,7 +14,6 @@
#include <vector>
#include "webrtc/call.h"
-#include "webrtc/call/transport_adapter.h"
#include "webrtc/test/fake_audio_device.h"
#include "webrtc/test/fake_decoder.h"
#include "webrtc/test/fake_encoder.h"
@@ -25,7 +24,6 @@ namespace webrtc {
class VoEBase;
class VoECodec;
-class VoENetwork;
namespace test {
@@ -80,25 +78,26 @@ class CallTest : public ::testing::Test {
void Start();
void Stop();
void DestroyStreams();
+ void SetFakeVideoCaptureRotation(VideoRotation rotation);
Clock* const clock_;
- rtc::scoped_ptr<Call> sender_call_;
- rtc::scoped_ptr<PacketTransport> send_transport_;
+ std::unique_ptr<Call> sender_call_;
+ std::unique_ptr<PacketTransport> send_transport_;
VideoSendStream::Config video_send_config_;
VideoEncoderConfig video_encoder_config_;
VideoSendStream* video_send_stream_;
AudioSendStream::Config audio_send_config_;
AudioSendStream* audio_send_stream_;
- rtc::scoped_ptr<Call> receiver_call_;
- rtc::scoped_ptr<PacketTransport> receive_transport_;
+ std::unique_ptr<Call> receiver_call_;
+ std::unique_ptr<PacketTransport> receive_transport_;
std::vector<VideoReceiveStream::Config> video_receive_configs_;
std::vector<VideoReceiveStream*> video_receive_streams_;
std::vector<AudioReceiveStream::Config> audio_receive_configs_;
std::vector<AudioReceiveStream*> audio_receive_streams_;
- rtc::scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer_;
+ std::unique_ptr<test::FrameGeneratorCapturer> frame_generator_capturer_;
test::FakeEncoder fake_encoder_;
std::vector<std::unique_ptr<VideoDecoder>> allocated_decoders_;
size_t num_video_streams_;
@@ -112,30 +111,24 @@ class CallTest : public ::testing::Test {
VoiceEngineState()
: voice_engine(nullptr),
base(nullptr),
- network(nullptr),
codec(nullptr),
- channel_id(-1),
- transport_adapter(nullptr) {}
+ channel_id(-1) {}
VoiceEngine* voice_engine;
VoEBase* base;
- VoENetwork* network;
VoECodec* codec;
int channel_id;
- rtc::scoped_ptr<internal::TransportAdapter> transport_adapter;
};
void CreateVoiceEngines();
- void SetupVoiceEngineTransports(PacketTransport* send_transport,
- PacketTransport* recv_transport);
void DestroyVoiceEngines();
VoiceEngineState voe_send_;
VoiceEngineState voe_recv_;
// The audio devices must outlive the voice engines.
- rtc::scoped_ptr<test::FakeAudioDevice> fake_send_audio_device_;
- rtc::scoped_ptr<test::FakeAudioDevice> fake_recv_audio_device_;
+ std::unique_ptr<test::FakeAudioDevice> fake_send_audio_device_;
+ std::unique_ptr<test::FakeAudioDevice> fake_recv_audio_device_;
};
class BaseTest : public RtpRtcpObserver {
diff --git a/chromium/third_party/webrtc/test/channel_transport/udp_transport_impl.cc b/chromium/third_party/webrtc/test/channel_transport/udp_transport_impl.cc
index c7049aa8a2d..897c8085226 100644
--- a/chromium/third_party/webrtc/test/channel_transport/udp_transport_impl.cc
+++ b/chromium/third_party/webrtc/test/channel_transport/udp_transport_impl.cc
@@ -2922,6 +2922,10 @@ bool UdpTransport::IsIpAddressValid(const char* ipadr, const bool ipV6)
// Store index of dots and count number of dots.
iDotPos[nDots++] = i;
}
+ else if (isdigit(ipadr[i]) == 0)
+ {
+ return false;
+ }
}
bool allUnder256 = false;
@@ -2942,7 +2946,7 @@ bool UdpTransport::IsIpAddressValid(const char* ipadr, const bool ipV6)
memset(nr,0,4);
strncpy(nr,&ipadr[0],iDotPos[0]);
int32_t num = atoi(nr);
- if (num > 255)
+ if (num > 255 || num < 0)
{
break;
}
@@ -2956,7 +2960,7 @@ bool UdpTransport::IsIpAddressValid(const char* ipadr, const bool ipV6)
memset(nr,0,4);
strncpy(nr,&ipadr[iDotPos[0]+1], iDotPos[1] - iDotPos[0] - 1);
int32_t num = atoi(nr);
- if (num > 255)
+ if (num > 255 || num < 0)
break;
} else {
break;
@@ -2966,20 +2970,27 @@ bool UdpTransport::IsIpAddressValid(const char* ipadr, const bool ipV6)
{
char nr[4];
memset(nr,0,4);
- strncpy(nr,&ipadr[iDotPos[1]+1], iDotPos[1] - iDotPos[0] - 1);
+ strncpy(nr,&ipadr[iDotPos[1]+1], iDotPos[2] - iDotPos[1] - 1);
int32_t num = atoi(nr);
- if (num > 255)
+ if (num > 255 || num < 0)
break;
+ } else {
+ break;
+ }
+ if (len - iDotPos[2] <= 4)
+ {
+ char nr[4];
memset(nr,0,4);
strncpy(nr,&ipadr[iDotPos[2]+1], len - iDotPos[2] -1);
- num = atoi(nr);
- if (num > 255)
+ int32_t num = atoi(nr);
+ if (num > 255 || num < 0)
break;
else
allUnder256 = true;
- } else
+ } else {
break;
+ }
} while(false);
if (nDots != 3 || !allUnder256)
diff --git a/chromium/third_party/webrtc/test/configurable_frame_size_encoder.h b/chromium/third_party/webrtc/test/configurable_frame_size_encoder.h
index 3794e8db088..d269441667b 100644
--- a/chromium/third_party/webrtc/test/configurable_frame_size_encoder.h
+++ b/chromium/third_party/webrtc/test/configurable_frame_size_encoder.h
@@ -11,9 +11,9 @@
#ifndef WEBRTC_TEST_CONFIGURABLE_FRAME_SIZE_ENCODER_H_
#define WEBRTC_TEST_CONFIGURABLE_FRAME_SIZE_ENCODER_H_
+#include <memory>
#include <vector>
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/video_encoder.h"
namespace webrtc {
@@ -49,7 +49,7 @@ class ConfigurableFrameSizeEncoder : public VideoEncoder {
EncodedImageCallback* callback_;
const size_t max_frame_size_;
size_t current_frame_size_;
- rtc::scoped_ptr<uint8_t[]> buffer_;
+ std::unique_ptr<uint8_t[]> buffer_;
};
} // namespace test
diff --git a/chromium/third_party/webrtc/test/direct_transport.h b/chromium/third_party/webrtc/test/direct_transport.h
index d68bc7184eb..e1844763d95 100644
--- a/chromium/third_party/webrtc/test/direct_transport.h
+++ b/chromium/third_party/webrtc/test/direct_transport.h
@@ -17,7 +17,6 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/event.h"
#include "webrtc/base/platform_thread.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/test/fake_network_pipe.h"
#include "webrtc/transport.h"
diff --git a/chromium/third_party/webrtc/test/fake_audio_device.h b/chromium/third_party/webrtc/test/fake_audio_device.h
index 180abf6c924..77a74bac8f6 100644
--- a/chromium/third_party/webrtc/test/fake_audio_device.h
+++ b/chromium/third_party/webrtc/test/fake_audio_device.h
@@ -10,11 +10,11 @@
#ifndef WEBRTC_TEST_FAKE_AUDIO_DEVICE_H_
#define WEBRTC_TEST_FAKE_AUDIO_DEVICE_H_
+#include <memory>
#include <string>
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/platform_thread.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_device/include/fake_audio_device.h"
#include "webrtc/test/drifting_clock.h"
#include "webrtc/typedefs.h"
@@ -59,11 +59,11 @@ class FakeAudioDevice : public FakeAudioDeviceModule {
int64_t last_playout_ms_;
DriftingClock clock_;
- rtc::scoped_ptr<EventTimerWrapper> tick_;
+ std::unique_ptr<EventTimerWrapper> tick_;
rtc::CriticalSection lock_;
rtc::PlatformThread thread_;
- rtc::scoped_ptr<ModuleFileUtility> file_utility_;
- rtc::scoped_ptr<FileWrapper> input_stream_;
+ std::unique_ptr<ModuleFileUtility> file_utility_;
+ std::unique_ptr<FileWrapper> input_stream_;
};
} // namespace test
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/test/fake_network_pipe.h b/chromium/third_party/webrtc/test/fake_network_pipe.h
index d488d492c8d..608ff008c99 100644
--- a/chromium/third_party/webrtc/test/fake_network_pipe.h
+++ b/chromium/third_party/webrtc/test/fake_network_pipe.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_TEST_FAKE_NETWORK_PIPE_H_
#define WEBRTC_TEST_FAKE_NETWORK_PIPE_H_
+#include <memory>
#include <set>
#include <string.h>
#include <queue>
@@ -18,7 +19,6 @@
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/random.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -50,7 +50,7 @@ class NetworkPacket {
private:
// The packet data.
- rtc::scoped_ptr<uint8_t[]> data_;
+ std::unique_ptr<uint8_t[]> data_;
// Length of data_.
size_t data_length_;
// The time the packet was sent out on the network.
diff --git a/chromium/third_party/webrtc/test/fake_network_pipe_unittest.cc b/chromium/third_party/webrtc/test/fake_network_pipe_unittest.cc
index 233c5972b3e..0bd46df7fcf 100644
--- a/chromium/third_party/webrtc/test/fake_network_pipe_unittest.cc
+++ b/chromium/third_party/webrtc/test/fake_network_pipe_unittest.cc
@@ -8,10 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/test/fake_network_pipe.h"
@@ -71,7 +72,7 @@ class FakeNetworkPipeTest : public ::testing::Test {
void SendPackets(FakeNetworkPipe* pipe, int number_packets, int packet_size) {
RTC_DCHECK_GE(packet_size, static_cast<int>(sizeof(int)));
- rtc::scoped_ptr<uint8_t[]> packet(new uint8_t[packet_size]);
+ std::unique_ptr<uint8_t[]> packet(new uint8_t[packet_size]);
for (int i = 0; i < number_packets; ++i) {
// Set a sequence number for the packets by
// using the first bytes in the packet.
@@ -85,7 +86,7 @@ class FakeNetworkPipeTest : public ::testing::Test {
}
SimulatedClock fake_clock_;
- rtc::scoped_ptr<TestReceiver> receiver_;
+ std::unique_ptr<TestReceiver> receiver_;
};
void DeleteMemory(uint8_t* data, int length) { delete [] data; }
@@ -95,7 +96,7 @@ TEST_F(FakeNetworkPipeTest, CapacityTest) {
FakeNetworkPipe::Config config;
config.queue_length_packets = 20;
config.link_capacity_kbps = 80;
- rtc::scoped_ptr<FakeNetworkPipe> pipe(
+ std::unique_ptr<FakeNetworkPipe> pipe(
new FakeNetworkPipe(&fake_clock_, config));
pipe->SetReceiver(receiver_.get());
@@ -135,7 +136,7 @@ TEST_F(FakeNetworkPipeTest, ExtraDelayTest) {
config.queue_length_packets = 20;
config.queue_delay_ms = 100;
config.link_capacity_kbps = 80;
- rtc::scoped_ptr<FakeNetworkPipe> pipe(
+ std::unique_ptr<FakeNetworkPipe> pipe(
new FakeNetworkPipe(&fake_clock_, config));
pipe->SetReceiver(receiver_.get());
@@ -169,7 +170,7 @@ TEST_F(FakeNetworkPipeTest, QueueLengthTest) {
FakeNetworkPipe::Config config;
config.queue_length_packets = 2;
config.link_capacity_kbps = 80;
- rtc::scoped_ptr<FakeNetworkPipe> pipe(
+ std::unique_ptr<FakeNetworkPipe> pipe(
new FakeNetworkPipe(&fake_clock_, config));
pipe->SetReceiver(receiver_.get());
@@ -193,7 +194,7 @@ TEST_F(FakeNetworkPipeTest, StatisticsTest) {
config.queue_length_packets = 2;
config.queue_delay_ms = 20;
config.link_capacity_kbps = 80;
- rtc::scoped_ptr<FakeNetworkPipe> pipe(
+ std::unique_ptr<FakeNetworkPipe> pipe(
new FakeNetworkPipe(&fake_clock_, config));
pipe->SetReceiver(receiver_.get());
@@ -223,7 +224,7 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithEmptyPipeTest) {
FakeNetworkPipe::Config config;
config.queue_length_packets = 20;
config.link_capacity_kbps = 80;
- rtc::scoped_ptr<FakeNetworkPipe> pipe(
+ std::unique_ptr<FakeNetworkPipe> pipe(
new FakeNetworkPipe(&fake_clock_, config));
pipe->SetReceiver(receiver_.get());
@@ -282,7 +283,7 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithPacketsInPipeTest) {
FakeNetworkPipe::Config config;
config.queue_length_packets = 20;
config.link_capacity_kbps = 80;
- rtc::scoped_ptr<FakeNetworkPipe> pipe(
+ std::unique_ptr<FakeNetworkPipe> pipe(
new FakeNetworkPipe(&fake_clock_, config));
pipe->SetReceiver(receiver_.get());
@@ -337,7 +338,7 @@ TEST_F(FakeNetworkPipeTest, DisallowReorderingThenAllowReordering) {
config.link_capacity_kbps = 800;
config.queue_delay_ms = 100;
config.delay_standard_deviation_ms = 10;
- rtc::scoped_ptr<FakeNetworkPipe> pipe(
+ std::unique_ptr<FakeNetworkPipe> pipe(
new FakeNetworkPipe(&fake_clock_, config));
ReorderTestReceiver* receiver = new ReorderTestReceiver();
receiver_.reset(receiver);
diff --git a/chromium/third_party/webrtc/test/fake_texture_frame.h b/chromium/third_party/webrtc/test/fake_texture_frame.h
index 9575fae469d..15b70d8f7b5 100644
--- a/chromium/third_party/webrtc/test/fake_texture_frame.h
+++ b/chromium/third_party/webrtc/test/fake_texture_frame.h
@@ -42,9 +42,9 @@ class FakeNativeHandleBuffer : public NativeHandleBuffer {
new rtc::RefCountedObject<I420Buffer>(width_, height_));
int half_height = (height_ + 1) / 2;
int half_width = (width_ + 1) / 2;
- memset(buffer->MutableData(kYPlane), 0, height_ * width_);
- memset(buffer->MutableData(kUPlane), 0, half_height * half_width);
- memset(buffer->MutableData(kVPlane), 0, half_height * half_width);
+ memset(buffer->MutableDataY(), 0, height_ * width_);
+ memset(buffer->MutableDataU(), 0, half_height * half_width);
+ memset(buffer->MutableDataV(), 0, half_height * half_width);
return buffer;
}
};
diff --git a/chromium/third_party/webrtc/test/frame_generator.cc b/chromium/third_party/webrtc/test/frame_generator.cc
index 3287abab816..ed7e95a1267 100644
--- a/chromium/third_party/webrtc/test/frame_generator.cc
+++ b/chromium/third_party/webrtc/test/frame_generator.cc
@@ -13,6 +13,8 @@
#include <stdio.h>
#include <string.h>
+#include <memory>
+
#include "webrtc/base/checks.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/system_wrappers/include/clock.h"
@@ -39,9 +41,12 @@ class ChromaGenerator : public FrameGenerator {
uint8_t u = fabs(sin(angle_)) * 0xFF;
uint8_t v = fabs(cos(angle_)) * 0xFF;
- memset(frame_.buffer(kYPlane), 0x80, frame_.allocated_size(kYPlane));
- memset(frame_.buffer(kUPlane), u, frame_.allocated_size(kUPlane));
- memset(frame_.buffer(kVPlane), v, frame_.allocated_size(kVPlane));
+ memset(frame_.video_frame_buffer()->MutableDataY(), 0x80,
+ frame_.allocated_size(kYPlane));
+ memset(frame_.video_frame_buffer()->MutableDataU(), u,
+ frame_.allocated_size(kUPlane));
+ memset(frame_.video_frame_buffer()->MutableDataV(), v,
+ frame_.allocated_size(kVPlane));
return &frame_;
}
@@ -121,7 +126,7 @@ class YuvFileGenerator : public FrameGenerator {
const size_t width_;
const size_t height_;
const size_t frame_size_;
- const rtc::scoped_ptr<uint8_t[]> frame_buffer_;
+ const std::unique_ptr<uint8_t[]> frame_buffer_;
const int frame_display_count_;
int current_display_count_;
VideoFrame last_read_frame_;
@@ -200,24 +205,24 @@ class ScrollingImageFrameGenerator : public FrameGenerator {
int pixels_scrolled_y =
static_cast<int>(scroll_margin_y * scroll_factor + 0.5);
- int offset_y = (current_source_frame_->stride(PlaneType::kYPlane) *
+ int offset_y = (current_source_frame_->video_frame_buffer()->StrideY() *
pixels_scrolled_y) +
pixels_scrolled_x;
- int offset_u = (current_source_frame_->stride(PlaneType::kUPlane) *
+ int offset_u = (current_source_frame_->video_frame_buffer()->StrideU() *
(pixels_scrolled_y / 2)) +
(pixels_scrolled_x / 2);
- int offset_v = (current_source_frame_->stride(PlaneType::kVPlane) *
+ int offset_v = (current_source_frame_->video_frame_buffer()->StrideV() *
(pixels_scrolled_y / 2)) +
(pixels_scrolled_x / 2);
current_frame_.CreateFrame(
- &current_source_frame_->buffer(PlaneType::kYPlane)[offset_y],
- &current_source_frame_->buffer(PlaneType::kUPlane)[offset_u],
- &current_source_frame_->buffer(PlaneType::kVPlane)[offset_v],
+ &current_source_frame_->video_frame_buffer()->DataY()[offset_y],
+ &current_source_frame_->video_frame_buffer()->DataU()[offset_u],
+ &current_source_frame_->video_frame_buffer()->DataV()[offset_v],
kTargetWidth, kTargetHeight,
- current_source_frame_->stride(PlaneType::kYPlane),
- current_source_frame_->stride(PlaneType::kUPlane),
- current_source_frame_->stride(PlaneType::kVPlane),
+ current_source_frame_->video_frame_buffer()->StrideY(),
+ current_source_frame_->video_frame_buffer()->StrideU(),
+ current_source_frame_->video_frame_buffer()->StrideV(),
kVideoRotation_0);
}
diff --git a/chromium/third_party/webrtc/test/frame_generator_capturer.cc b/chromium/third_party/webrtc/test/frame_generator_capturer.cc
index 35ce6168a2e..95ac624c421 100644
--- a/chromium/third_party/webrtc/test/frame_generator_capturer.cc
+++ b/chromium/third_party/webrtc/test/frame_generator_capturer.cc
@@ -80,6 +80,11 @@ FrameGeneratorCapturer::~FrameGeneratorCapturer() {
thread_.Stop();
}
+void FrameGeneratorCapturer::SetFakeRotation(VideoRotation rotation) {
+ rtc::CritScope cs(&lock_);
+ fake_rotation_ = rotation;
+}
+
bool FrameGeneratorCapturer::Init() {
// This check is added because frame_generator_ might be file based and should
// not crash because a file moved.
@@ -104,6 +109,7 @@ void FrameGeneratorCapturer::InsertFrame() {
if (sending_) {
VideoFrame* frame = frame_generator_->NextFrame();
frame->set_ntp_time_ms(clock_->CurrentNtpInMilliseconds());
+ frame->set_rotation(fake_rotation_);
if (first_frame_capture_time_ == -1) {
first_frame_capture_time_ = frame->ntp_time_ms();
}
diff --git a/chromium/third_party/webrtc/test/frame_generator_capturer.h b/chromium/third_party/webrtc/test/frame_generator_capturer.h
index 6bd0e0b3272..1d6fb626640 100644
--- a/chromium/third_party/webrtc/test/frame_generator_capturer.h
+++ b/chromium/third_party/webrtc/test/frame_generator_capturer.h
@@ -10,11 +10,12 @@
#ifndef WEBRTC_TEST_FRAME_GENERATOR_CAPTURER_H_
#define WEBRTC_TEST_FRAME_GENERATOR_CAPTURER_H_
+#include <memory>
#include <string>
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/platform_thread.h"
-#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/common_video/rotation.h"
#include "webrtc/test/video_capturer.h"
#include "webrtc/typedefs.h"
@@ -46,6 +47,7 @@ class FrameGeneratorCapturer : public VideoCapturer {
void Start() override;
void Stop() override;
void ForceFrame();
+ void SetFakeRotation(VideoRotation rotation);
int64_t first_frame_capture_time() const { return first_frame_capture_time_; }
@@ -62,12 +64,13 @@ class FrameGeneratorCapturer : public VideoCapturer {
Clock* const clock_;
bool sending_;
- rtc::scoped_ptr<EventTimerWrapper> tick_;
+ std::unique_ptr<EventTimerWrapper> tick_;
rtc::CriticalSection lock_;
rtc::PlatformThread thread_;
- rtc::scoped_ptr<FrameGenerator> frame_generator_;
+ std::unique_ptr<FrameGenerator> frame_generator_;
int target_fps_;
+ VideoRotation fake_rotation_ = kVideoRotation_0;
int64_t first_frame_capture_time_;
};
diff --git a/chromium/third_party/webrtc/test/frame_generator_unittest.cc b/chromium/third_party/webrtc/test/frame_generator_unittest.cc
index 6376e2c221c..2cffdcaa8c3 100644
--- a/chromium/third_party/webrtc/test/frame_generator_unittest.cc
+++ b/chromium/third_party/webrtc/test/frame_generator_unittest.cc
@@ -9,10 +9,11 @@
*/
#include <stdio.h>
+
+#include <memory>
#include <string>
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/test/frame_generator.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -46,7 +47,7 @@ class FrameGeneratorTest : public ::testing::Test {
protected:
void WriteYuvFile(FILE* file, uint8_t y, uint8_t u, uint8_t v) {
assert(file);
- rtc::scoped_ptr<uint8_t[]> plane_buffer(new uint8_t[y_size]);
+ std::unique_ptr<uint8_t[]> plane_buffer(new uint8_t[y_size]);
memset(plane_buffer.get(), y, y_size);
fwrite(plane_buffer.get(), 1, y_size, file);
memset(plane_buffer.get(), u, uv_size);
@@ -58,17 +59,17 @@ class FrameGeneratorTest : public ::testing::Test {
void CheckFrameAndMutate(VideoFrame* frame, uint8_t y, uint8_t u, uint8_t v) {
// Check that frame is valid, has the correct color and timestamp are clean.
ASSERT_NE(nullptr, frame);
- uint8_t* buffer;
+ const uint8_t* buffer;
ASSERT_EQ(y_size, frame->allocated_size(PlaneType::kYPlane));
- buffer = frame->buffer(PlaneType::kYPlane);
+ buffer = frame->video_frame_buffer()->DataY();
for (int i = 0; i < y_size; ++i)
ASSERT_EQ(y, buffer[i]);
ASSERT_EQ(uv_size, frame->allocated_size(PlaneType::kUPlane));
- buffer = frame->buffer(PlaneType::kUPlane);
+ buffer = frame->video_frame_buffer()->DataU();
for (int i = 0; i < uv_size; ++i)
ASSERT_EQ(u, buffer[i]);
ASSERT_EQ(uv_size, frame->allocated_size(PlaneType::kVPlane));
- buffer = frame->buffer(PlaneType::kVPlane);
+ buffer = frame->video_frame_buffer()->DataV();
for (int i = 0; i < uv_size; ++i)
ASSERT_EQ(v, buffer[i]);
EXPECT_EQ(0, frame->ntp_time_ms());
@@ -88,7 +89,7 @@ class FrameGeneratorTest : public ::testing::Test {
};
TEST_F(FrameGeneratorTest, SingleFrameFile) {
- rtc::scoped_ptr<FrameGenerator> generator(FrameGenerator::CreateFromYuvFile(
+ std::unique_ptr<FrameGenerator> generator(FrameGenerator::CreateFromYuvFile(
std::vector<std::string>(1, one_frame_filename_), kFrameWidth,
kFrameHeight, 1));
CheckFrameAndMutate(generator->NextFrame(), 255, 255, 255);
@@ -96,7 +97,7 @@ TEST_F(FrameGeneratorTest, SingleFrameFile) {
}
TEST_F(FrameGeneratorTest, TwoFrameFile) {
- rtc::scoped_ptr<FrameGenerator> generator(FrameGenerator::CreateFromYuvFile(
+ std::unique_ptr<FrameGenerator> generator(FrameGenerator::CreateFromYuvFile(
std::vector<std::string>(1, two_frame_filename_), kFrameWidth,
kFrameHeight, 1));
CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
@@ -109,7 +110,7 @@ TEST_F(FrameGeneratorTest, MultipleFrameFiles) {
files.push_back(two_frame_filename_);
files.push_back(one_frame_filename_);
- rtc::scoped_ptr<FrameGenerator> generator(
+ std::unique_ptr<FrameGenerator> generator(
FrameGenerator::CreateFromYuvFile(files, kFrameWidth, kFrameHeight, 1));
CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
CheckFrameAndMutate(generator->NextFrame(), 127, 127, 127);
@@ -119,7 +120,7 @@ TEST_F(FrameGeneratorTest, MultipleFrameFiles) {
TEST_F(FrameGeneratorTest, TwoFrameFileWithRepeat) {
const int kRepeatCount = 3;
- rtc::scoped_ptr<FrameGenerator> generator(FrameGenerator::CreateFromYuvFile(
+ std::unique_ptr<FrameGenerator> generator(FrameGenerator::CreateFromYuvFile(
std::vector<std::string>(1, two_frame_filename_), kFrameWidth,
kFrameHeight, kRepeatCount));
for (int i = 0; i < kRepeatCount; ++i)
@@ -134,7 +135,7 @@ TEST_F(FrameGeneratorTest, MultipleFrameFilesWithRepeat) {
std::vector<std::string> files;
files.push_back(two_frame_filename_);
files.push_back(one_frame_filename_);
- rtc::scoped_ptr<FrameGenerator> generator(FrameGenerator::CreateFromYuvFile(
+ std::unique_ptr<FrameGenerator> generator(FrameGenerator::CreateFromYuvFile(
files, kFrameWidth, kFrameHeight, kRepeatCount));
for (int i = 0; i < kRepeatCount; ++i)
CheckFrameAndMutate(generator->NextFrame(), 0, 0, 0);
diff --git a/chromium/third_party/webrtc/test/frame_utils.cc b/chromium/third_party/webrtc/test/frame_utils.cc
index 0f411447458..0fad3adec85 100644
--- a/chromium/third_party/webrtc/test/frame_utils.cc
+++ b/chromium/third_party/webrtc/test/frame_utils.cc
@@ -16,53 +16,60 @@ namespace test {
bool EqualPlane(const uint8_t* data1,
const uint8_t* data2,
- int stride,
+ int stride1,
+ int stride2,
int width,
int height) {
for (int y = 0; y < height; ++y) {
if (memcmp(data1, data2, width) != 0)
return false;
- data1 += stride;
- data2 += stride;
+ data1 += stride1;
+ data2 += stride2;
}
return true;
}
+
bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2) {
- if (f1.width() != f2.width() || f1.height() != f2.height() ||
- f1.stride(webrtc::kYPlane) != f2.stride(webrtc::kYPlane) ||
- f1.stride(webrtc::kUPlane) != f2.stride(webrtc::kUPlane) ||
- f1.stride(webrtc::kVPlane) != f2.stride(webrtc::kVPlane) ||
- f1.timestamp() != f2.timestamp() ||
+ if (f1.timestamp() != f2.timestamp() ||
f1.ntp_time_ms() != f2.ntp_time_ms() ||
f1.render_time_ms() != f2.render_time_ms()) {
return false;
}
- const int half_width = (f1.width() + 1) / 2;
- const int half_height = (f1.height() + 1) / 2;
- return EqualPlane(f1.buffer(webrtc::kYPlane), f2.buffer(webrtc::kYPlane),
- f1.stride(webrtc::kYPlane), f1.width(), f1.height()) &&
- EqualPlane(f1.buffer(webrtc::kUPlane), f2.buffer(webrtc::kUPlane),
- f1.stride(webrtc::kUPlane), half_width, half_height) &&
- EqualPlane(f1.buffer(webrtc::kVPlane), f2.buffer(webrtc::kVPlane),
- f1.stride(webrtc::kVPlane), half_width, half_height);
+ return FrameBufsEqual(f1.video_frame_buffer(), f2.video_frame_buffer());
}
bool FrameBufsEqual(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f1,
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f2) {
- if (f1->width() != f2->width() || f1->height() != f2->height() ||
- f1->stride(webrtc::kYPlane) != f2->stride(webrtc::kYPlane) ||
- f1->stride(webrtc::kUPlane) != f2->stride(webrtc::kUPlane) ||
- f1->stride(webrtc::kVPlane) != f2->stride(webrtc::kVPlane)) {
+ if (f1 == f2) {
+ return true;
+ }
+ // Exlude nullptr (except if both are nullptr, as above)
+ if (!f1 || !f2) {
+ return false;
+ }
+
+ if (f1->width() != f2->width() || f1->height() != f2->height()) {
+ return false;
+ }
+ // Exclude native handle
+ if (f1->native_handle()) {
+ return f1->native_handle() == f2->native_handle();
+ }
+
+ if (f2->native_handle()) {
return false;
}
const int half_width = (f1->width() + 1) / 2;
const int half_height = (f1->height() + 1) / 2;
- return EqualPlane(f1->data(webrtc::kYPlane), f2->data(webrtc::kYPlane),
- f1->stride(webrtc::kYPlane), f1->width(), f1->height()) &&
- EqualPlane(f1->data(webrtc::kUPlane), f2->data(webrtc::kUPlane),
- f1->stride(webrtc::kUPlane), half_width, half_height) &&
- EqualPlane(f1->data(webrtc::kVPlane), f2->data(webrtc::kVPlane),
- f1->stride(webrtc::kVPlane), half_width, half_height);
+ return EqualPlane(f1->DataY(), f2->DataY(),
+ f1->StrideY(), f2->StrideY(),
+ f1->width(), f1->height()) &&
+ EqualPlane(f1->DataU(), f2->DataU(),
+ f1->StrideU(), f2->StrideU(),
+ half_width, half_height) &&
+ EqualPlane(f1->DataV(), f2->DataV(),
+ f1->StrideV(), f2->StrideV(),
+ half_width, half_height);
}
} // namespace test
diff --git a/chromium/third_party/webrtc/test/frame_utils.h b/chromium/third_party/webrtc/test/frame_utils.h
index 668d9994ab7..aef3c9ff2af 100644
--- a/chromium/third_party/webrtc/test/frame_utils.h
+++ b/chromium/third_party/webrtc/test/frame_utils.h
@@ -20,10 +20,19 @@ namespace test {
bool EqualPlane(const uint8_t* data1,
const uint8_t* data2,
- int stride,
+ int stride1,
+ int stride2,
int width,
int height);
+static inline bool EqualPlane(const uint8_t* data1,
+ const uint8_t* data2,
+ int stride,
+ int width,
+ int height) {
+ return EqualPlane(data1, data2, stride, stride, width, height);
+}
+
bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2);
bool FrameBufsEqual(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f1,
diff --git a/chromium/third_party/webrtc/test/fuzzers/BUILD.gn b/chromium/third_party/webrtc/test/fuzzers/BUILD.gn
index c46cc1e2c0a..3e59339299f 100644
--- a/chromium/third_party/webrtc/test/fuzzers/BUILD.gn
+++ b/chromium/third_party/webrtc/test/fuzzers/BUILD.gn
@@ -75,6 +75,15 @@ webrtc_fuzzer_test("rtcp_receiver_fuzzer") {
]
}
+webrtc_fuzzer_test("rtp_packet_fuzzer") {
+ sources = [
+ "rtp_packet_fuzzer.cc",
+ ]
+ deps = [
+ "../../modules/rtp_rtcp/",
+ ]
+}
+
source_set("audio_decoder_fuzzer") {
public_configs = [ "../..:common_inherited_config" ]
sources = [
diff --git a/chromium/third_party/webrtc/test/fuzzers/producer_fec_fuzzer.cc b/chromium/third_party/webrtc/test/fuzzers/producer_fec_fuzzer.cc
index 77336e6f1e0..53f74938699 100644
--- a/chromium/third_party/webrtc/test/fuzzers/producer_fec_fuzzer.cc
+++ b/chromium/third_party/webrtc/test/fuzzers/producer_fec_fuzzer.cc
@@ -7,8 +7,10 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
+
+#include <memory>
+
#include "webrtc/base/checks.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/producer_fec.h"
@@ -20,9 +22,8 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
size_t i = 0;
if (size < 4)
return;
- FecProtectionParams params = {data[i++] % 128, data[i++] % 2 == 1,
- static_cast<int>(data[i++] % 10),
- kFecMaskBursty};
+ FecProtectionParams params = {
+ data[i++] % 128, static_cast<int>(data[i++] % 10), kFecMaskBursty};
producer.SetFecParameters(&params, 0);
uint16_t seq_num = data[i++];
@@ -31,14 +32,14 @@ void FuzzOneInput(const uint8_t* data, size_t size) {
size_t payload_size = data[i++] % 10;
if (i + payload_size + rtp_header_length + 2 > size)
break;
- rtc::scoped_ptr<uint8_t[]> packet(
+ std::unique_ptr<uint8_t[]> packet(
new uint8_t[payload_size + rtp_header_length]);
memcpy(packet.get(), &data[i], payload_size + rtp_header_length);
ByteWriter<uint16_t>::WriteBigEndian(&packet[2], seq_num++);
i += payload_size + rtp_header_length;
// Make sure sequence numbers are increasing.
const int kRedPayloadType = 98;
- rtc::scoped_ptr<RedPacket> red_packet(producer.BuildRedPacket(
+ std::unique_ptr<RedPacket> red_packet(producer.BuildRedPacket(
packet.get(), payload_size, rtp_header_length, kRedPayloadType));
const bool protect = data[i++] % 2 == 1;
if (protect) {
diff --git a/chromium/third_party/webrtc/test/fuzzers/rtcp_receiver_fuzzer.cc b/chromium/third_party/webrtc/test/fuzzers/rtcp_receiver_fuzzer.cc
index 944c79d7d3b..b7a4fdfdf8f 100644
--- a/chromium/third_party/webrtc/test/fuzzers/rtcp_receiver_fuzzer.cc
+++ b/chromium/third_party/webrtc/test/fuzzers/rtcp_receiver_fuzzer.cc
@@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/base/checks.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_receiver.h"
#include "webrtc/system_wrappers/include/clock.h"
diff --git a/chromium/third_party/webrtc/test/fuzzers/rtp_packet_fuzzer.cc b/chromium/third_party/webrtc/test/fuzzers/rtp_packet_fuzzer.cc
new file mode 100644
index 00000000000..a9efdb96ecf
--- /dev/null
+++ b/chromium/third_party/webrtc/test/fuzzers/rtp_packet_fuzzer.cc
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/modules/rtp_rtcp/source/rtp_packet_received.h"
+
+namespace webrtc {
+
+void FuzzOneInput(const uint8_t* data, size_t size) {
+ RtpPacketReceived packet;
+
+ packet.Parse(data, size);
+
+ // Call packet accessors because they have extra checks.
+ packet.Marker();
+ packet.PayloadType();
+ packet.SequenceNumber();
+ packet.Timestamp();
+ packet.Ssrc();
+ packet.Csrcs();
+}
+
+} // namespace webrtc
+
diff --git a/chromium/third_party/webrtc/test/layer_filtering_transport.cc b/chromium/third_party/webrtc/test/layer_filtering_transport.cc
index 41d63ad6e7d..cba1a57eb45 100644
--- a/chromium/third_party/webrtc/test/layer_filtering_transport.cc
+++ b/chromium/third_party/webrtc/test/layer_filtering_transport.cc
@@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <memory>
+
#include "webrtc/base/checks.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
@@ -62,7 +64,7 @@ bool LayerFilteringTransport::SendRtp(const uint8_t* packet,
const size_t payload_data_length = payload_length - header.paddingLength;
const bool is_vp8 = header.payloadType == vp8_video_payload_type_;
- rtc::scoped_ptr<RtpDepacketizer> depacketizer(
+ std::unique_ptr<RtpDepacketizer> depacketizer(
RtpDepacketizer::Create(is_vp8 ? kRtpVideoVp8 : kRtpVideoVp9));
RtpDepacketizer::ParsedPayload parsed_payload;
if (depacketizer->Parse(&parsed_payload, payload, payload_data_length)) {
diff --git a/chromium/third_party/webrtc/test/mock_voe_channel_proxy.h b/chromium/third_party/webrtc/test/mock_voe_channel_proxy.h
index c2211f85547..a27a739006e 100644
--- a/chromium/third_party/webrtc/test/mock_voe_channel_proxy.h
+++ b/chromium/third_party/webrtc/test/mock_voe_channel_proxy.h
@@ -44,6 +44,12 @@ class MockVoEChannelProxy : public voe::ChannelProxy {
MOCK_CONST_METHOD0(GetDelayEstimate, uint32_t());
MOCK_METHOD1(SetSendTelephoneEventPayloadType, bool(int payload_type));
MOCK_METHOD2(SendTelephoneEventOutband, bool(int event, int duration_ms));
+ MOCK_METHOD1(RegisterExternalTransport, void(Transport* transport));
+ MOCK_METHOD0(DeRegisterExternalTransport, void());
+ MOCK_METHOD3(ReceivedRTPPacket, bool(const uint8_t* packet,
+ size_t length,
+ const PacketTime& packet_time));
+ MOCK_METHOD2(ReceivedRTCPPacket, bool(const uint8_t* packet, size_t length));
};
} // namespace test
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/test/mock_voice_engine.h b/chromium/third_party/webrtc/test/mock_voice_engine.h
index fac088b0e17..b9eb05fb8be 100644
--- a/chromium/third_party/webrtc/test/mock_voice_engine.h
+++ b/chromium/third_party/webrtc/test/mock_voice_engine.h
@@ -24,6 +24,10 @@ namespace test {
// able to get the various interfaces as usual, via T::GetInterface().
class MockVoiceEngine : public VoiceEngineImpl {
public:
+ // TODO(nisse): Valid overrides commented out, because the gmock
+ // methods don't use any override declarations, and we want to avoid
+ // warnings from -Winconsistent-missing-override. See
+ // http://crbug.com/428099.
MockVoiceEngine() : VoiceEngineImpl(new Config(), true) {
// Increase ref count so this object isn't automatically deleted whenever
// interfaces are Release():d.
@@ -36,7 +40,7 @@ class MockVoiceEngine : public VoiceEngineImpl {
return new testing::NiceMock<MockVoEChannelProxy>();
}));
}
- ~MockVoiceEngine() override {
+ ~MockVoiceEngine() /* override */ {
// Decrease ref count before base class d-tor is called; otherwise it will
// trigger an assertion.
--_ref_count;
@@ -45,7 +49,8 @@ class MockVoiceEngine : public VoiceEngineImpl {
MOCK_METHOD1(ChannelProxyFactory, voe::ChannelProxy*(int channel_id));
// VoiceEngineImpl
- std::unique_ptr<voe::ChannelProxy> GetChannelProxy(int channel_id) override {
+ std::unique_ptr<voe::ChannelProxy> GetChannelProxy(
+ int channel_id) /* override */ {
return std::unique_ptr<voe::ChannelProxy>(ChannelProxyFactory(channel_id));
}
diff --git a/chromium/third_party/webrtc/test/rtp_file_reader.cc b/chromium/third_party/webrtc/test/rtp_file_reader.cc
index 1413f007979..d437d419248 100644
--- a/chromium/third_party/webrtc/test/rtp_file_reader.cc
+++ b/chromium/third_party/webrtc/test/rtp_file_reader.cc
@@ -17,8 +17,8 @@
#include <vector>
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/format_macros.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
namespace webrtc {
@@ -130,7 +130,7 @@ class RtpDumpReader : public RtpFileReaderImpl {
}
bool Init(const std::string& filename,
- const std::set<uint32_t>& ssrc_filter) {
+ const std::set<uint32_t>& ssrc_filter) override {
file_ = fopen(filename.c_str(), "rb");
if (file_ == NULL) {
printf("ERROR: Can't open file: %s\n", filename.c_str());
diff --git a/chromium/third_party/webrtc/test/rtp_file_reader_unittest.cc b/chromium/third_party/webrtc/test/rtp_file_reader_unittest.cc
index 15a456ccf61..fceac3836d6 100644
--- a/chromium/third_party/webrtc/test/rtp_file_reader_unittest.cc
+++ b/chromium/third_party/webrtc/test/rtp_file_reader_unittest.cc
@@ -9,9 +9,9 @@
*/
#include <map>
+#include <memory>
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/test/rtp_file_reader.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -43,7 +43,7 @@ class TestRtpFileReader : public ::testing::Test {
}
private:
- rtc::scoped_ptr<test::RtpFileReader> rtp_packet_source_;
+ std::unique_ptr<test::RtpFileReader> rtp_packet_source_;
bool headers_only_file_;
};
@@ -94,7 +94,7 @@ class TestPcapFileReader : public ::testing::Test {
}
private:
- rtc::scoped_ptr<test::RtpFileReader> rtp_packet_source_;
+ std::unique_ptr<test::RtpFileReader> rtp_packet_source_;
};
TEST_F(TestPcapFileReader, TestEthernetIIFrame) {
diff --git a/chromium/third_party/webrtc/test/rtp_file_writer_unittest.cc b/chromium/third_party/webrtc/test/rtp_file_writer_unittest.cc
index 2c7c88cc3f3..3287f976c6c 100644
--- a/chromium/third_party/webrtc/test/rtp_file_writer_unittest.cc
+++ b/chromium/third_party/webrtc/test/rtp_file_writer_unittest.cc
@@ -10,8 +10,9 @@
#include <string.h>
+#include <memory>
+
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/test/rtp_file_reader.h"
#include "webrtc/test/rtp_file_writer.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -43,7 +44,7 @@ class RtpFileWriterTest : public ::testing::Test {
void VerifyFileContents(int expected_packets) {
ASSERT_TRUE(rtp_writer_.get() == NULL)
<< "Must call CloseOutputFile before VerifyFileContents";
- rtc::scoped_ptr<test::RtpFileReader> rtp_reader(
+ std::unique_ptr<test::RtpFileReader> rtp_reader(
test::RtpFileReader::Create(test::RtpFileReader::kRtpDump, filename_));
ASSERT_TRUE(rtp_reader.get() != NULL);
test::RtpPacket packet;
@@ -61,7 +62,7 @@ class RtpFileWriterTest : public ::testing::Test {
}
private:
- rtc::scoped_ptr<test::RtpFileWriter> rtp_writer_;
+ std::unique_ptr<test::RtpFileWriter> rtp_writer_;
std::string filename_;
};
diff --git a/chromium/third_party/webrtc/test/rtp_rtcp_observer.h b/chromium/third_party/webrtc/test/rtp_rtcp_observer.h
index 5eb88d3f0d3..ab865611673 100644
--- a/chromium/third_party/webrtc/test/rtp_rtcp_observer.h
+++ b/chromium/third_party/webrtc/test/rtp_rtcp_observer.h
@@ -11,6 +11,7 @@
#define WEBRTC_TEST_RTP_RTCP_OBSERVER_H_
#include <map>
+#include <memory>
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
@@ -69,7 +70,7 @@ class RtpRtcpObserver {
}
rtc::Event observation_complete_;
- const rtc::scoped_ptr<RtpHeaderParser> parser_;
+ const std::unique_ptr<RtpHeaderParser> parser_;
private:
const int timeout_ms_;
diff --git a/chromium/third_party/webrtc/test/test.gyp b/chromium/third_party/webrtc/test/test.gyp
index ee3283ea6ad..a04a128a0eb 100644
--- a/chromium/third_party/webrtc/test/test.gyp
+++ b/chromium/third_party/webrtc/test/test.gyp
@@ -301,10 +301,9 @@
'<(DEPTH)/testing/gmock.gyp:gmock',
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- '<(webrtc_root)/base/base.gyp:rtc_base',
+ '<(webrtc_root)/base/base.gyp:rtc_base_approved',
'<(webrtc_root)/common.gyp:webrtc_common',
'<(webrtc_root)/modules/modules.gyp:media_file',
- '<(webrtc_root)/modules/modules.gyp:video_render',
'<(webrtc_root)/webrtc.gyp:webrtc',
'rtp_test_utils',
'test_support',
@@ -401,7 +400,7 @@
},
],
'conditions': [
- ['include_tests==1 and OS=="android"', {
+ ['OS=="android"', {
'targets': [
{
'target_name': 'test_support_unittests_apk_target',
@@ -411,7 +410,28 @@
],
},
],
- }],
+ 'conditions': [
+ ['test_isolation_mode != "noop"',
+ {
+ 'targets': [
+ {
+ 'target_name': 'test_support_unittests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):test_support_unittests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'test_support_unittests_apk.isolate',
+ ],
+ },
+ ],
+ },
+ ],
+ ],
+ }], # OS=="android"
['test_isolation_mode != "noop"', {
'targets': [
{
diff --git a/chromium/third_party/webrtc/test/test_suite.h b/chromium/third_party/webrtc/test/test_suite.h
index dab2acd3881..4b27f9f2c32 100644
--- a/chromium/third_party/webrtc/test/test_suite.h
+++ b/chromium/third_party/webrtc/test/test_suite.h
@@ -17,8 +17,9 @@
// instantiate this class in your main function and call its Run method to run
// any gtest based tests that are linked into your executable.
+#include <memory>
+
#include "webrtc/base/constructormagic.h"
-#include "webrtc/base/scoped_ptr.h"
namespace webrtc {
namespace test {
@@ -41,7 +42,7 @@ class TestSuite {
RTC_DISALLOW_COPY_AND_ASSIGN(TestSuite);
private:
- rtc::scoped_ptr<TraceToStderr> trace_to_stderr_;
+ std::unique_ptr<TraceToStderr> trace_to_stderr_;
};
} // namespace test
diff --git a/chromium/third_party/webrtc/test/test_support_unittests_apk.isolate b/chromium/third_party/webrtc/test/test_support_unittests_apk.isolate
new file mode 100644
index 00000000000..e52980b3270
--- /dev/null
+++ b/chromium/third_party/webrtc/test/test_support_unittests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'test_support_unittests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_test_support_unittests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/test_support_unittests_apk/',
+ '<(PRODUCT_DIR)/bin/run_test_support_unittests',
+ 'test_support_unittests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/test/testsupport/fileutils.cc b/chromium/third_party/webrtc/test/testsupport/fileutils.cc
index 2fab425a316..d99b990918c 100644
--- a/chromium/third_party/webrtc/test/testsupport/fileutils.cc
+++ b/chromium/third_party/webrtc/test/testsupport/fileutils.cc
@@ -23,7 +23,6 @@
#else
#include <unistd.h>
-#include "webrtc/base/scoped_ptr.h"
#define GET_CURRENT_DIR getcwd
#endif
@@ -36,6 +35,8 @@
#include <stdlib.h>
#include <string.h>
+#include <memory>
+
#include "webrtc/typedefs.h" // For architecture defines
namespace webrtc {
@@ -183,7 +184,7 @@ std::string TempFilename(const std::string &dir, const std::string &prefix) {
return "";
#else
int len = dir.size() + prefix.size() + 2 + 6;
- rtc::scoped_ptr<char[]> tempname(new char[len]);
+ std::unique_ptr<char[]> tempname(new char[len]);
snprintf(tempname.get(), len, "%s/%sXXXXXX", dir.c_str(),
prefix.c_str());
diff --git a/chromium/third_party/webrtc/tools/BUILD.gn b/chromium/third_party/webrtc/tools/BUILD.gn
index 603e99944c6..ebeacc24602 100644
--- a/chromium/third_party/webrtc/tools/BUILD.gn
+++ b/chromium/third_party/webrtc/tools/BUILD.gn
@@ -46,6 +46,7 @@ executable("frame_analyzer") {
deps = [
":command_line_parser",
":video_quality_analysis",
+ "//build/win:default_exe_manifest",
]
}
@@ -58,6 +59,7 @@ executable("rgba_to_i420_converter") {
deps = [
":command_line_parser",
"../common_video",
+ "//build/win:default_exe_manifest",
]
}
@@ -77,6 +79,7 @@ if (!build_with_chromium) {
deps = [
":command_line_parser",
"../test:test_support_main",
+ "//build/win:default_exe_manifest",
"//testing/gtest",
]
}
diff --git a/chromium/third_party/webrtc/tools/agc/activity_metric.cc b/chromium/third_party/webrtc/tools/agc/activity_metric.cc
index 258d02377ee..6c1f756ec58 100644
--- a/chromium/third_party/webrtc/tools/agc/activity_metric.cc
+++ b/chromium/third_party/webrtc/tools/agc/activity_metric.cc
@@ -14,10 +14,10 @@
#include <stdlib.h>
#include <algorithm>
+#include <memory>
#include "gflags/gflags.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_processing/agc/agc.h"
#include "webrtc/modules/audio_processing/agc/histogram.h"
#include "webrtc/modules/audio_processing/agc/utility.h"
@@ -155,10 +155,10 @@ class AgcStat {
int video_index_;
double activity_threshold_;
double video_vad_[kMaxNumFrames];
- rtc::scoped_ptr<Histogram> audio_content_;
- rtc::scoped_ptr<VadAudioProc> audio_processing_;
- rtc::scoped_ptr<PitchBasedVad> vad_;
- rtc::scoped_ptr<StandaloneVad> standalone_vad_;
+ std::unique_ptr<Histogram> audio_content_;
+ std::unique_ptr<VadAudioProc> audio_processing_;
+ std::unique_ptr<PitchBasedVad> vad_;
+ std::unique_ptr<StandaloneVad> standalone_vad_;
FILE* audio_content_fid_;
};
diff --git a/chromium/third_party/webrtc/tools/agc/agc_harness.cc b/chromium/third_party/webrtc/tools/agc/agc_harness.cc
index 0d35d4b56aa..17919629b91 100644
--- a/chromium/third_party/webrtc/tools/agc/agc_harness.cc
+++ b/chromium/third_party/webrtc/tools/agc/agc_harness.cc
@@ -10,10 +10,11 @@
// Refer to kUsage below for a description.
+#include <memory>
+
#include "gflags/gflags.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/format_macros.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/system_wrappers/include/sleep.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/test/channel_transport/channel_transport.h"
@@ -217,13 +218,13 @@ class AgcVoiceEngine {
int channel_;
int capture_idx_;
int render_idx_;
- rtc::scoped_ptr<test::VoiceChannelTransport> channel_transport_;
+ std::unique_ptr<test::VoiceChannelTransport> channel_transport_;
};
void RunHarness() {
- rtc::scoped_ptr<AgcVoiceEngine> voe1(new AgcVoiceEngine(
+ std::unique_ptr<AgcVoiceEngine> voe1(new AgcVoiceEngine(
FLAGS_legacy_agc, 2000, 2000, FLAGS_capture1, FLAGS_render1));
- rtc::scoped_ptr<AgcVoiceEngine> voe2;
+ std::unique_ptr<AgcVoiceEngine> voe2;
if (FLAGS_parallel) {
voe2.reset(new AgcVoiceEngine(!FLAGS_legacy_agc, 3000, 3000, FLAGS_capture2,
FLAGS_render2));
diff --git a/chromium/third_party/webrtc/tools/e2e_quality/audio/audio_e2e_harness.cc b/chromium/third_party/webrtc/tools/e2e_quality/audio/audio_e2e_harness.cc
index 2594fd1317b..fb07b569faf 100644
--- a/chromium/third_party/webrtc/tools/e2e_quality/audio/audio_e2e_harness.cc
+++ b/chromium/third_party/webrtc/tools/e2e_quality/audio/audio_e2e_harness.cc
@@ -12,10 +12,11 @@
// and runs forever. Some parameters can be configured through command-line
// flags.
+#include <memory>
+
#include "gflags/gflags.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/test/channel_transport/channel_transport.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
#include "webrtc/voice_engine/include/voe_base.h"
@@ -48,7 +49,7 @@ void RunHarness() {
int channel = base->CreateChannel();
ASSERT_NE(-1, channel);
- rtc::scoped_ptr<VoiceChannelTransport> voice_channel_transport(
+ std::unique_ptr<VoiceChannelTransport> voice_channel_transport(
new VoiceChannelTransport(network, channel));
ASSERT_EQ(0, voice_channel_transport->SetSendDestination("127.0.0.1", 1234));
diff --git a/chromium/third_party/webrtc/tools/force_mic_volume_max/force_mic_volume_max.cc b/chromium/third_party/webrtc/tools/force_mic_volume_max/force_mic_volume_max.cc
index b6b15968665..2bab2881bbe 100644
--- a/chromium/third_party/webrtc/tools/force_mic_volume_max/force_mic_volume_max.cc
+++ b/chromium/third_party/webrtc/tools/force_mic_volume_max/force_mic_volume_max.cc
@@ -12,7 +12,6 @@
#include <stdio.h>
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/test/channel_transport/channel_transport.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
#include "webrtc/voice_engine/include/voe_base.h"
diff --git a/chromium/third_party/webrtc/tools/frame_editing/frame_editing_lib.cc b/chromium/third_party/webrtc/tools/frame_editing/frame_editing_lib.cc
index 90855a354c0..bb6a75edaab 100644
--- a/chromium/third_party/webrtc/tools/frame_editing/frame_editing_lib.cc
+++ b/chromium/third_party/webrtc/tools/frame_editing/frame_editing_lib.cc
@@ -11,9 +11,9 @@
#include <stdio.h>
#include <stdlib.h>
+#include <memory>
#include <string>
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/tools/frame_editing/frame_editing_lib.h"
#include "webrtc/typedefs.h"
@@ -39,7 +39,7 @@ int EditFrames(const string& in_path, int width, int height,
// Frame size of I420.
size_t frame_length = CalcBufferSize(kI420, width, height);
- rtc::scoped_ptr<uint8_t[]> temp_buffer(new uint8_t[frame_length]);
+ std::unique_ptr<uint8_t[]> temp_buffer(new uint8_t[frame_length]);
FILE* out_fid = fopen(out_path.c_str(), "wb");
diff --git a/chromium/third_party/webrtc/tools/frame_editing/frame_editing_unittest.cc b/chromium/third_party/webrtc/tools/frame_editing/frame_editing_unittest.cc
index 31991b71757..e1ba1de0fcb 100644
--- a/chromium/third_party/webrtc/tools/frame_editing/frame_editing_unittest.cc
+++ b/chromium/third_party/webrtc/tools/frame_editing/frame_editing_unittest.cc
@@ -12,9 +12,9 @@
#include <stdlib.h>
#include <fstream>
+#include <memory>
#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/tools/frame_editing/frame_editing_lib.h"
@@ -56,8 +56,8 @@ class FrameEditingTest : public ::testing::Test {
// Compares the frames in both streams to the end of one of the streams.
void CompareToTheEnd(FILE* test_video_fid,
FILE* ref_video_fid,
- rtc::scoped_ptr<int[]>* ref_buffer,
- rtc::scoped_ptr<int[]>* test_buffer) {
+ std::unique_ptr<int[]>* ref_buffer,
+ std::unique_ptr<int[]>* test_buffer) {
while (!feof(test_video_fid) && !feof(ref_video_fid)) {
num_bytes_read_ = fread(ref_buffer->get(), 1, kFrameSize, ref_video_fid);
if (!feof(ref_video_fid)) {
@@ -81,8 +81,8 @@ class FrameEditingTest : public ::testing::Test {
FILE* original_fid_;
FILE* edited_fid_;
size_t num_bytes_read_;
- rtc::scoped_ptr<int[]> original_buffer_;
- rtc::scoped_ptr<int[]> edited_buffer_;
+ std::unique_ptr<int[]> original_buffer_;
+ std::unique_ptr<int[]> edited_buffer_;
int num_frames_read_;
};
diff --git a/chromium/third_party/webrtc/tools/tools.gyp b/chromium/third_party/webrtc/tools/tools.gyp
index b69f7cb5b86..5a08ab99bd6 100644
--- a/chromium/third_party/webrtc/tools/tools.gyp
+++ b/chromium/third_party/webrtc/tools/tools.gyp
@@ -189,6 +189,27 @@
],
},
],
+ 'conditions': [
+ ['test_isolation_mode != "noop"',
+ {
+ 'targets': [
+ {
+ 'target_name': 'tools_unittests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):tools_unittests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'tools_unittests_apk.isolate',
+ ],
+ },
+ ],
+ },
+ ],
+ ],
}],
['test_isolation_mode != "noop"', {
'targets': [
diff --git a/chromium/third_party/webrtc/tools/tools_unittests_apk.isolate b/chromium/third_party/webrtc/tools/tools_unittests_apk.isolate
new file mode 100644
index 00000000000..a4ecb07903e
--- /dev/null
+++ b/chromium/third_party/webrtc/tools/tools_unittests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'tools_unittests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_tools_unittests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/tools_unittests_apk/',
+ '<(PRODUCT_DIR)/bin/run_tools_unittests',
+ 'tools_unittests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/typedefs.h b/chromium/third_party/webrtc/typedefs.h
index 6a3f441e22b..e65b77a65f8 100644
--- a/chromium/third_party/webrtc/typedefs.h
+++ b/chromium/third_party/webrtc/typedefs.h
@@ -57,8 +57,7 @@
// TODO(zhongwei.yao): WEBRTC_CPU_DETECTION is only used in one place; we should
// probably just remove it.
-#if (defined(WEBRTC_ARCH_X86_FAMILY) && !defined(__SSE2__)) || \
- defined(WEBRTC_DETECT_NEON)
+#if (defined(WEBRTC_ARCH_X86_FAMILY) && !defined(__SSE2__))
#define WEBRTC_CPU_DETECTION
#endif
diff --git a/chromium/third_party/webrtc/video/BUILD.gn b/chromium/third_party/webrtc/video/BUILD.gn
index 4f1b7ae1976..9e350f0cdba 100644
--- a/chromium/third_party/webrtc/video/BUILD.gn
+++ b/chromium/third_party/webrtc/video/BUILD.gn
@@ -24,6 +24,10 @@ source_set("video") {
"receive_statistics_proxy.h",
"report_block_stats.cc",
"report_block_stats.h",
+ "rtp_stream_receiver.cc",
+ "rtp_stream_receiver.h",
+ "send_delay_stats.cc",
+ "send_delay_stats.h",
"send_statistics_proxy.cc",
"send_statistics_proxy.h",
"stream_synchronization.cc",
@@ -36,12 +40,10 @@ source_set("video") {
"video_receive_stream.h",
"video_send_stream.cc",
"video_send_stream.h",
- "vie_channel.cc",
- "vie_channel.h",
+ "video_stream_decoder.cc",
+ "video_stream_decoder.h",
"vie_encoder.cc",
"vie_encoder.h",
- "vie_receiver.cc",
- "vie_receiver.h",
"vie_remb.cc",
"vie_remb.h",
"vie_sync_module.cc",
@@ -71,7 +73,6 @@ source_set("video") {
"../modules/video_capture:video_capture_module",
"../modules/video_coding",
"../modules/video_processing",
- "../modules/video_render:video_render_module",
"../system_wrappers",
"../voice_engine",
]
diff --git a/chromium/third_party/webrtc/video/DEPS b/chromium/third_party/webrtc/video/DEPS
index 426f47c4236..7e53144d33d 100644
--- a/chromium/third_party/webrtc/video/DEPS
+++ b/chromium/third_party/webrtc/video/DEPS
@@ -2,6 +2,7 @@ include_rules = [
"+webrtc/base",
"+webrtc/call",
"+webrtc/common_video",
+ "+webrtc/media/base",
"+webrtc/modules/bitrate_controller",
"+webrtc/modules/congestion_controller",
"+webrtc/modules/pacing",
@@ -11,7 +12,6 @@ include_rules = [
"+webrtc/modules/video_coding",
"+webrtc/modules/video_capture",
"+webrtc/modules/video_processing",
- "+webrtc/modules/video_render",
"+webrtc/system_wrappers",
"+webrtc/voice_engine",
]
diff --git a/chromium/third_party/webrtc/video/call_stats.cc b/chromium/third_party/webrtc/video/call_stats.cc
index a1bf221d4e2..59efcfa13de 100644
--- a/chromium/third_party/webrtc/video/call_stats.cc
+++ b/chromium/third_party/webrtc/video/call_stats.cc
@@ -13,9 +13,9 @@
#include <algorithm>
#include "webrtc/base/checks.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/include/metrics.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
namespace webrtc {
namespace {
diff --git a/chromium/third_party/webrtc/video/call_stats_unittest.cc b/chromium/third_party/webrtc/video/call_stats_unittest.cc
index 6e2e1bca786..8b3cde07a8e 100644
--- a/chromium/third_party/webrtc/video/call_stats_unittest.cc
+++ b/chromium/third_party/webrtc/video/call_stats_unittest.cc
@@ -15,7 +15,6 @@
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/include/metrics.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/histogram.h"
#include "webrtc/video/call_stats.h"
diff --git a/chromium/third_party/webrtc/video/encoded_frame_callback_adapter.cc b/chromium/third_party/webrtc/video/encoded_frame_callback_adapter.cc
index 84283608a04..974149170ab 100644
--- a/chromium/third_party/webrtc/video/encoded_frame_callback_adapter.cc
+++ b/chromium/third_party/webrtc/video/encoded_frame_callback_adapter.cc
@@ -26,9 +26,11 @@ int32_t EncodedFrameCallbackAdapter::Encoded(
const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader* fragmentation) {
- RTC_DCHECK(observer_);
+ if (!observer_)
+ return 0;
const EncodedFrame frame(encodedImage._buffer, encodedImage._length,
encodedImage._frameType);
+
observer_->EncodedFrameCallback(frame);
return 0;
}
diff --git a/chromium/third_party/webrtc/video/encoded_frame_callback_adapter.h b/chromium/third_party/webrtc/video/encoded_frame_callback_adapter.h
index b10c4f1645d..83fe4bd3187 100644
--- a/chromium/third_party/webrtc/video/encoded_frame_callback_adapter.h
+++ b/chromium/third_party/webrtc/video/encoded_frame_callback_adapter.h
@@ -11,8 +11,8 @@
#ifndef WEBRTC_VIDEO_ENCODED_FRAME_CALLBACK_ADAPTER_H_
#define WEBRTC_VIDEO_ENCODED_FRAME_CALLBACK_ADAPTER_H_
+#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
-#include "webrtc/frame_callback.h"
namespace webrtc {
namespace internal {
@@ -27,7 +27,7 @@ class EncodedFrameCallbackAdapter : public EncodedImageCallback {
const RTPFragmentationHeader* fragmentation);
private:
- EncodedFrameObserver* observer_;
+ EncodedFrameObserver* const observer_;
};
} // namespace internal
diff --git a/chromium/third_party/webrtc/video/encoder_state_feedback.cc b/chromium/third_party/webrtc/video/encoder_state_feedback.cc
index 0240e487a6a..28508d09a15 100644
--- a/chromium/third_party/webrtc/video/encoder_state_feedback.cc
+++ b/chromium/third_party/webrtc/video/encoder_state_feedback.cc
@@ -13,23 +13,18 @@
#include "webrtc/base/checks.h"
#include "webrtc/video/vie_encoder.h"
-namespace webrtc {
+static const int kMinKeyFrameRequestIntervalMs = 300;
-EncoderStateFeedback::EncoderStateFeedback() : vie_encoder_(nullptr) {}
+namespace webrtc {
-void EncoderStateFeedback::Init(const std::vector<uint32_t>& ssrcs,
- ViEEncoder* encoder) {
+EncoderStateFeedback::EncoderStateFeedback(Clock* clock,
+ const std::vector<uint32_t>& ssrcs,
+ ViEEncoder* encoder)
+ : clock_(clock),
+ ssrcs_(ssrcs),
+ vie_encoder_(encoder),
+ time_last_intra_request_ms_(ssrcs.size(), -1) {
RTC_DCHECK(!ssrcs.empty());
- rtc::CritScope lock(&crit_);
- ssrcs_ = ssrcs;
- vie_encoder_ = encoder;
-}
-
-void EncoderStateFeedback::TearDown() {
- rtc::CritScope lock(&crit_);
- RTC_DCHECK(vie_encoder_);
- ssrcs_.clear();
- vie_encoder_ = nullptr;
}
bool EncoderStateFeedback::HasSsrc(uint32_t ssrc) {
@@ -40,41 +35,59 @@ bool EncoderStateFeedback::HasSsrc(uint32_t ssrc) {
return false;
}
+size_t EncoderStateFeedback::GetStreamIndex(uint32_t ssrc) {
+ for (size_t i = 0; i < ssrcs_.size(); ++i) {
+ if (ssrcs_[i] == ssrc)
+ return i;
+ }
+ RTC_NOTREACHED() << "Unknown ssrc " << ssrc;
+ return 0;
+}
+
void EncoderStateFeedback::OnReceivedIntraFrameRequest(uint32_t ssrc) {
- rtc::CritScope lock(&crit_);
if (!HasSsrc(ssrc))
return;
- RTC_DCHECK(vie_encoder_);
- vie_encoder_->OnReceivedIntraFrameRequest(ssrc);
+ size_t index = GetStreamIndex(ssrc);
+ {
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ rtc::CritScope lock(&crit_);
+ if (time_last_intra_request_ms_[index] + kMinKeyFrameRequestIntervalMs >
+ now_ms) {
+ return;
+ }
+ time_last_intra_request_ms_[index] = now_ms;
+ }
+
+ vie_encoder_->OnReceivedIntraFrameRequest(index);
}
void EncoderStateFeedback::OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) {
- rtc::CritScope lock(&crit_);
if (!HasSsrc(ssrc))
return;
- RTC_DCHECK(vie_encoder_);
- vie_encoder_->OnReceivedSLI(ssrc, picture_id);
+ vie_encoder_->OnReceivedSLI(picture_id);
}
void EncoderStateFeedback::OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) {
- rtc::CritScope lock(&crit_);
if (!HasSsrc(ssrc))
return;
- RTC_DCHECK(vie_encoder_);
- vie_encoder_->OnReceivedRPSI(ssrc, picture_id);
+ vie_encoder_->OnReceivedRPSI(picture_id);
}
// Sending SSRCs for this encoder should never change since they are configured
-// once and not reconfigured.
+// once and not reconfigured, however, OnLocalSsrcChanged is called when the
+// RtpModules are created with a different SSRC than what will be used in the
+// end.
+// TODO(perkj): Can we make sure the RTP module is created with the right SSRC
+// from the beginning so this method is not triggered during creation ?
void EncoderStateFeedback::OnLocalSsrcChanged(uint32_t old_ssrc,
uint32_t new_ssrc) {
if (!RTC_DCHECK_IS_ON)
return;
- rtc::CritScope lock(&crit_);
- if (ssrcs_.empty()) // Encoder not yet attached (or detached for teardown).
+
+ if (old_ssrc == 0) // old_ssrc == 0 during creation.
return;
// SSRC shouldn't change to something we haven't already registered with the
// encoder.
diff --git a/chromium/third_party/webrtc/video/encoder_state_feedback.h b/chromium/third_party/webrtc/video/encoder_state_feedback.h
index 6326bed9c13..c9fb9cc7476 100644
--- a/chromium/third_party/webrtc/video/encoder_state_feedback.h
+++ b/chromium/third_party/webrtc/video/encoder_state_feedback.h
@@ -7,10 +7,6 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
-
-// TODO(mflodman) ViEEncoder has a time check to not send key frames too often,
-// move the logic to this class.
-
#ifndef WEBRTC_VIDEO_ENCODER_STATE_FEEDBACK_H_
#define WEBRTC_VIDEO_ENCODER_STATE_FEEDBACK_H_
@@ -18,6 +14,7 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -26,28 +23,24 @@ class ViEEncoder;
class EncoderStateFeedback : public RtcpIntraFrameObserver {
public:
- EncoderStateFeedback();
-
- // Adds an encoder to receive feedback for a set of SSRCs.
- void Init(const std::vector<uint32_t>& ssrc, ViEEncoder* encoder);
-
- // Removes the registered encoder. Necessary since RTP modules outlive
- // ViEEncoder.
- // TODO(pbos): Make sure RTP modules are not running when tearing down
- // ViEEncoder, then remove this function.
- void TearDown();
-
+ EncoderStateFeedback(Clock* clock,
+ const std::vector<uint32_t>& ssrcs,
+ ViEEncoder* encoder);
void OnReceivedIntraFrameRequest(uint32_t ssrc) override;
void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id) override;
void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id) override;
void OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) override;
private:
- bool HasSsrc(uint32_t ssrc) EXCLUSIVE_LOCKS_REQUIRED(crit_);
- rtc::CriticalSection crit_;
+ bool HasSsrc(uint32_t ssrc);
+ size_t GetStreamIndex(uint32_t ssrc);
- std::vector<uint32_t> ssrcs_ GUARDED_BY(crit_);
- ViEEncoder* vie_encoder_ GUARDED_BY(crit_);
+ Clock* const clock_;
+ const std::vector<uint32_t> ssrcs_;
+ ViEEncoder* const vie_encoder_;
+
+ rtc::CriticalSection crit_;
+ std::vector<int64_t> time_last_intra_request_ms_ GUARDED_BY(crit_);
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/encoder_state_feedback_unittest.cc b/chromium/third_party/webrtc/video/encoder_state_feedback_unittest.cc
index 3341cf06674..7e6ec71d6eb 100644
--- a/chromium/third_party/webrtc/video/encoder_state_feedback_unittest.cc
+++ b/chromium/third_party/webrtc/video/encoder_state_feedback_unittest.cc
@@ -8,16 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-
-// This file includes unit tests for EncoderStateFeedback.
#include "webrtc/video/encoder_state_feedback.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-
-#include "webrtc/modules/bitrate_controller/include/bitrate_controller.h"
-#include "webrtc/modules/pacing/paced_sender.h"
-#include "webrtc/modules/pacing/packet_router.h"
#include "webrtc/modules/utility/include/mock/mock_process_thread.h"
#include "webrtc/video/vie_encoder.h"
@@ -27,52 +21,58 @@ namespace webrtc {
class MockVieEncoder : public ViEEncoder {
public:
- explicit MockVieEncoder(ProcessThread* process_thread, PacedSender* pacer)
- : ViEEncoder(1,
- std::vector<uint32_t>(),
- process_thread,
- nullptr,
- nullptr,
- nullptr,
- pacer,
- nullptr) {}
+ explicit MockVieEncoder(ProcessThread* process_thread)
+ : ViEEncoder(1, process_thread, nullptr, nullptr) {}
~MockVieEncoder() {}
- MOCK_METHOD1(OnReceivedIntraFrameRequest,
- void(uint32_t));
- MOCK_METHOD2(OnReceivedSLI,
- void(uint32_t ssrc, uint8_t picture_id));
- MOCK_METHOD2(OnReceivedRPSI,
- void(uint32_t ssrc, uint64_t picture_id));
+ MOCK_METHOD1(OnReceivedIntraFrameRequest, void(size_t));
+ MOCK_METHOD1(OnReceivedSLI, void(uint8_t picture_id));
+ MOCK_METHOD1(OnReceivedRPSI, void(uint64_t picture_id));
};
-TEST(VieKeyRequestTest, CreateAndTriggerRequests) {
- static const uint32_t kSsrc = 1234;
- NiceMock<MockProcessThread> process_thread;
- PacketRouter router;
- PacedSender pacer(Clock::GetRealTimeClock(), &router,
- BitrateController::kDefaultStartBitrateKbps,
- PacedSender::kDefaultPaceMultiplier *
- BitrateController::kDefaultStartBitrateKbps,
- 0);
- MockVieEncoder encoder(&process_thread, &pacer);
+class VieKeyRequestTest : public ::testing::Test {
+ public:
+ VieKeyRequestTest()
+ : encoder_(&process_thread_),
+ simulated_clock_(123456789),
+ encoder_state_feedback_(
+ &simulated_clock_,
+ std::vector<uint32_t>(1, VieKeyRequestTest::kSsrc),
+ &encoder_) {}
- EncoderStateFeedback encoder_state_feedback;
- encoder_state_feedback.Init(std::vector<uint32_t>(1, kSsrc), &encoder);
+ protected:
+ const uint32_t kSsrc = 1234;
+ NiceMock<MockProcessThread> process_thread_;
+ MockVieEncoder encoder_;
+ SimulatedClock simulated_clock_;
+ EncoderStateFeedback encoder_state_feedback_;
+};
- EXPECT_CALL(encoder, OnReceivedIntraFrameRequest(kSsrc))
- .Times(1);
- encoder_state_feedback.OnReceivedIntraFrameRequest(kSsrc);
+TEST_F(VieKeyRequestTest, CreateAndTriggerRequests) {
+ EXPECT_CALL(encoder_, OnReceivedIntraFrameRequest(0)).Times(1);
+ encoder_state_feedback_.OnReceivedIntraFrameRequest(kSsrc);
const uint8_t sli_picture_id = 3;
- EXPECT_CALL(encoder, OnReceivedSLI(kSsrc, sli_picture_id))
- .Times(1);
- encoder_state_feedback.OnReceivedSLI(kSsrc, sli_picture_id);
+ EXPECT_CALL(encoder_, OnReceivedSLI(sli_picture_id)).Times(1);
+ encoder_state_feedback_.OnReceivedSLI(kSsrc, sli_picture_id);
const uint64_t rpsi_picture_id = 9;
- EXPECT_CALL(encoder, OnReceivedRPSI(kSsrc, rpsi_picture_id))
- .Times(1);
- encoder_state_feedback.OnReceivedRPSI(kSsrc, rpsi_picture_id);
+ EXPECT_CALL(encoder_, OnReceivedRPSI(rpsi_picture_id)).Times(1);
+ encoder_state_feedback_.OnReceivedRPSI(kSsrc, rpsi_picture_id);
+}
+
+TEST_F(VieKeyRequestTest, TooManyOnReceivedIntraFrameRequest) {
+ EXPECT_CALL(encoder_, OnReceivedIntraFrameRequest(0)).Times(1);
+ encoder_state_feedback_.OnReceivedIntraFrameRequest(kSsrc);
+ encoder_state_feedback_.OnReceivedIntraFrameRequest(kSsrc);
+ simulated_clock_.AdvanceTimeMilliseconds(10);
+ encoder_state_feedback_.OnReceivedIntraFrameRequest(kSsrc);
+
+ EXPECT_CALL(encoder_, OnReceivedIntraFrameRequest(0)).Times(1);
+ simulated_clock_.AdvanceTimeMilliseconds(300);
+ encoder_state_feedback_.OnReceivedIntraFrameRequest(kSsrc);
+ encoder_state_feedback_.OnReceivedIntraFrameRequest(kSsrc);
+ encoder_state_feedback_.OnReceivedIntraFrameRequest(kSsrc);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/end_to_end_tests.cc b/chromium/third_party/webrtc/video/end_to_end_tests.cc
index e000b3f8b5f..b60d2a629a4 100644
--- a/chromium/third_party/webrtc/video/end_to_end_tests.cc
+++ b/chromium/third_party/webrtc/video/end_to_end_tests.cc
@@ -19,10 +19,9 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/event.h"
-#include "webrtc/base/timeutils.h"
#include "webrtc/call.h"
#include "webrtc/call/transport_adapter.h"
-#include "webrtc/frame_callback.h"
+#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
@@ -266,107 +265,103 @@ TEST_F(EndToEndTest, TransmitsFirstFrame) {
DestroyStreams();
}
-TEST_F(EndToEndTest, SendsAndReceivesVP9) {
- class VP9Observer : public test::EndToEndTest,
+class CodecObserver : public test::EndToEndTest,
public rtc::VideoSinkInterface<VideoFrame> {
- public:
- VP9Observer()
- : EndToEndTest(2 * kDefaultTimeoutMs),
- encoder_(VideoEncoder::Create(VideoEncoder::kVp9)),
- decoder_(VP9Decoder::Create()),
- frame_counter_(0) {}
+ public:
+ CodecObserver(int no_frames_to_wait_for,
+ VideoRotation rotation_to_test,
+ const std::string& payload_name,
+ webrtc::VideoEncoder* encoder,
+ webrtc::VideoDecoder* decoder)
+ : EndToEndTest(2 * webrtc::EndToEndTest::kDefaultTimeoutMs),
+ no_frames_to_wait_for_(no_frames_to_wait_for),
+ expected_rotation_(rotation_to_test),
+ payload_name_(payload_name),
+ encoder_(encoder),
+ decoder_(decoder),
+ frame_counter_(0) {}
- void PerformTest() override {
- EXPECT_TRUE(Wait())
- << "Timed out while waiting for enough frames to be decoded.";
- }
+ void PerformTest() override {
+ EXPECT_TRUE(Wait())
+ << "Timed out while waiting for enough frames to be decoded.";
+ }
- void ModifyVideoConfigs(
- VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
- send_config->encoder_settings.encoder = encoder_.get();
- send_config->encoder_settings.payload_name = "VP9";
- send_config->encoder_settings.payload_type = 124;
- encoder_config->streams[0].min_bitrate_bps = 50000;
- encoder_config->streams[0].target_bitrate_bps =
- encoder_config->streams[0].max_bitrate_bps = 2000000;
+ void ModifyVideoConfigs(
+ VideoSendStream::Config* send_config,
+ std::vector<VideoReceiveStream::Config>* receive_configs,
+ VideoEncoderConfig* encoder_config) override {
+ send_config->encoder_settings.encoder = encoder_.get();
+ send_config->encoder_settings.payload_name = payload_name_;
+ send_config->encoder_settings.payload_type = 126;
+ encoder_config->streams[0].min_bitrate_bps = 50000;
+ encoder_config->streams[0].target_bitrate_bps =
+ encoder_config->streams[0].max_bitrate_bps = 2000000;
+
+ (*receive_configs)[0].renderer = this;
+ (*receive_configs)[0].decoders.resize(1);
+ (*receive_configs)[0].decoders[0].payload_type =
+ send_config->encoder_settings.payload_type;
+ (*receive_configs)[0].decoders[0].payload_name =
+ send_config->encoder_settings.payload_name;
+ (*receive_configs)[0].decoders[0].decoder = decoder_.get();
+ }
- (*receive_configs)[0].renderer = this;
- (*receive_configs)[0].decoders.resize(1);
- (*receive_configs)[0].decoders[0].payload_type =
- send_config->encoder_settings.payload_type;
- (*receive_configs)[0].decoders[0].payload_name =
- send_config->encoder_settings.payload_name;
- (*receive_configs)[0].decoders[0].decoder = decoder_.get();
- }
+ void OnFrame(const VideoFrame& video_frame) override {
+ EXPECT_EQ(expected_rotation_, video_frame.rotation());
+ if (++frame_counter_ == no_frames_to_wait_for_)
+ observation_complete_.Set();
+ }
- void OnFrame(const VideoFrame& video_frame) override {
- const int kRequiredFrames = 500;
- if (++frame_counter_ == kRequiredFrames)
- observation_complete_.Set();
- }
+ void OnFrameGeneratorCapturerCreated(
+ test::FrameGeneratorCapturer* frame_generator_capturer) override {
+ frame_generator_capturer->SetFakeRotation(expected_rotation_);
+ }
- private:
- std::unique_ptr<webrtc::VideoEncoder> encoder_;
- std::unique_ptr<webrtc::VideoDecoder> decoder_;
- int frame_counter_;
- } test;
+ private:
+ int no_frames_to_wait_for_;
+ VideoRotation expected_rotation_;
+ std::string payload_name_;
+ std::unique_ptr<webrtc::VideoEncoder> encoder_;
+ std::unique_ptr<webrtc::VideoDecoder> decoder_;
+ int frame_counter_;
+};
+TEST_F(EndToEndTest, SendsAndReceivesVP8Rotation90) {
+ CodecObserver test(5, kVideoRotation_90, "VP8",
+ VideoEncoder::Create(VideoEncoder::kVp8),
+ VP8Decoder::Create());
RunBaseTest(&test);
}
-#if defined(WEBRTC_END_TO_END_H264_TESTS)
-
-TEST_F(EndToEndTest, SendsAndReceivesH264) {
- class H264Observer : public test::EndToEndTest,
- public rtc::VideoSinkInterface<VideoFrame> {
- public:
- H264Observer()
- : EndToEndTest(2 * kDefaultTimeoutMs),
- encoder_(VideoEncoder::Create(VideoEncoder::kH264)),
- decoder_(H264Decoder::Create()),
- frame_counter_(0) {}
-
- void PerformTest() override {
- EXPECT_TRUE(Wait())
- << "Timed out while waiting for enough frames to be decoded.";
- }
-
- void ModifyVideoConfigs(
- VideoSendStream::Config* send_config,
- std::vector<VideoReceiveStream::Config>* receive_configs,
- VideoEncoderConfig* encoder_config) override {
- send_config->rtp.nack.rtp_history_ms =
- (*receive_configs)[0].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
- send_config->encoder_settings.encoder = encoder_.get();
- send_config->encoder_settings.payload_name = "H264";
- send_config->encoder_settings.payload_type = 126;
- encoder_config->streams[0].min_bitrate_bps = 50000;
- encoder_config->streams[0].target_bitrate_bps =
- encoder_config->streams[0].max_bitrate_bps = 2000000;
+#if !defined(RTC_DISABLE_VP9)
+TEST_F(EndToEndTest, SendsAndReceivesVP9) {
+ CodecObserver test(500, kVideoRotation_0, "VP9",
+ VideoEncoder::Create(VideoEncoder::kVp9),
+ VP9Decoder::Create());
+ RunBaseTest(&test);
+}
- (*receive_configs)[0].renderer = this;
- (*receive_configs)[0].decoders.resize(1);
- (*receive_configs)[0].decoders[0].payload_type =
- send_config->encoder_settings.payload_type;
- (*receive_configs)[0].decoders[0].payload_name =
- send_config->encoder_settings.payload_name;
- (*receive_configs)[0].decoders[0].decoder = decoder_.get();
- }
+TEST_F(EndToEndTest, SendsAndReceivesVP9VideoRotation90) {
+ CodecObserver test(5, kVideoRotation_90, "VP9",
+ VideoEncoder::Create(VideoEncoder::kVp9),
+ VP9Decoder::Create());
+ RunBaseTest(&test);
+}
+#endif // !defined(RTC_DISABLE_VP9)
- void OnFrame(const VideoFrame& video_frame) override {
- const int kRequiredFrames = 500;
- if (++frame_counter_ == kRequiredFrames)
- observation_complete_.Set();
- }
+#if defined(WEBRTC_END_TO_END_H264_TESTS)
- private:
- std::unique_ptr<webrtc::VideoEncoder> encoder_;
- std::unique_ptr<webrtc::VideoDecoder> decoder_;
- int frame_counter_;
- } test;
+TEST_F(EndToEndTest, SendsAndReceivesH264) {
+ CodecObserver test(500, kVideoRotation_0, "H264",
+ VideoEncoder::Create(VideoEncoder::kH264),
+ H264Decoder::Create());
+ RunBaseTest(&test);
+}
+TEST_F(EndToEndTest, SendsAndReceivesH264VideoRotation90) {
+ CodecObserver test(5, kVideoRotation_90, "H264",
+ VideoEncoder::Create(VideoEncoder::kH264),
+ H264Decoder::Create());
RunBaseTest(&test);
}
@@ -864,107 +859,6 @@ TEST_F(EndToEndTest, DecodesRetransmittedFrameByRedOverRtx) {
DecodesRetransmittedFrame(true, true);
}
-TEST_F(EndToEndTest, UsesFrameCallbacks) {
- static const int kWidth = 320;
- static const int kHeight = 240;
-
- class Renderer : public rtc::VideoSinkInterface<VideoFrame> {
- public:
- Renderer() : event_(false, false) {}
-
- void OnFrame(const VideoFrame& video_frame) override {
- EXPECT_EQ(0, *video_frame.buffer(kYPlane))
- << "Rendered frame should have zero luma which is applied by the "
- "pre-render callback.";
- event_.Set();
- }
-
- bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
- rtc::Event event_;
- } renderer;
-
- class TestFrameCallback : public I420FrameCallback {
- public:
- TestFrameCallback(int expected_luma_byte, int next_luma_byte)
- : event_(false, false),
- expected_luma_byte_(expected_luma_byte),
- next_luma_byte_(next_luma_byte) {}
-
- bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
-
- private:
- virtual void FrameCallback(VideoFrame* frame) {
- EXPECT_EQ(kWidth, frame->width())
- << "Width not as expected, callback done before resize?";
- EXPECT_EQ(kHeight, frame->height())
- << "Height not as expected, callback done before resize?";
-
- // Previous luma specified, observed luma should be fairly close.
- if (expected_luma_byte_ != -1) {
- EXPECT_NEAR(expected_luma_byte_, *frame->buffer(kYPlane), 10);
- }
-
- memset(frame->buffer(kYPlane),
- next_luma_byte_,
- frame->allocated_size(kYPlane));
-
- event_.Set();
- }
-
- rtc::Event event_;
- int expected_luma_byte_;
- int next_luma_byte_;
- };
-
- TestFrameCallback pre_encode_callback(-1, 255); // Changes luma to 255.
- TestFrameCallback pre_render_callback(255, 0); // Changes luma from 255 to 0.
-
- CreateCalls(Call::Config(), Call::Config());
-
- test::DirectTransport sender_transport(sender_call_.get());
- test::DirectTransport receiver_transport(receiver_call_.get());
- sender_transport.SetReceiver(receiver_call_->Receiver());
- receiver_transport.SetReceiver(sender_call_->Receiver());
-
- CreateSendConfig(1, 0, &sender_transport);
- std::unique_ptr<VideoEncoder> encoder(
- VideoEncoder::Create(VideoEncoder::kVp8));
- video_send_config_.encoder_settings.encoder = encoder.get();
- video_send_config_.encoder_settings.payload_name = "VP8";
- ASSERT_EQ(1u, video_encoder_config_.streams.size()) << "Test setup error.";
- video_encoder_config_.streams[0].width = kWidth;
- video_encoder_config_.streams[0].height = kHeight;
- video_send_config_.pre_encode_callback = &pre_encode_callback;
-
- CreateMatchingReceiveConfigs(&receiver_transport);
- video_receive_configs_[0].pre_render_callback = &pre_render_callback;
- video_receive_configs_[0].renderer = &renderer;
-
- CreateVideoStreams();
- Start();
-
- // Create frames that are smaller than the send width/height, this is done to
- // check that the callbacks are done after processing video.
- std::unique_ptr<test::FrameGenerator> frame_generator(
- test::FrameGenerator::CreateChromaGenerator(kWidth / 2, kHeight / 2));
- video_send_stream_->Input()->IncomingCapturedFrame(
- *frame_generator->NextFrame());
-
- EXPECT_TRUE(pre_encode_callback.Wait())
- << "Timed out while waiting for pre-encode callback.";
- EXPECT_TRUE(pre_render_callback.Wait())
- << "Timed out while waiting for pre-render callback.";
- EXPECT_TRUE(renderer.Wait())
- << "Timed out while waiting for the frame to render.";
-
- Stop();
-
- sender_transport.StopSending();
- receiver_transport.StopSending();
-
- DestroyStreams();
-}
-
void EndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
static const int kPacketsToDrop = 1;
@@ -1872,7 +1766,7 @@ TEST_F(EndToEndTest, RembWithSendSideBwe) {
~BweObserver() {}
- test::PacketTransport* CreateReceiveTransport() {
+ test::PacketTransport* CreateReceiveTransport() override {
receive_transport_ = new test::PacketTransport(
nullptr, this, test::PacketTransport::kReceiver,
FakeNetworkPipe::Config());
@@ -2254,6 +2148,9 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx,
"WebRTC.Video.DecodedFramesPerSecond"));
EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.RenderFramesPerSecond"));
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.JitterBufferDelayInMs"));
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.TargetDelayInMs"));
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.CurrentDelayInMs"));
EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.OnewayDelayInMs"));
EXPECT_EQ(
1, test::NumHistogramSamples("WebRTC.Video.RenderSqrtPixelsPerSecond"));
@@ -2280,6 +2177,7 @@ void EndToEndTest::VerifyHistogramStats(bool use_rtx,
EXPECT_EQ(1, test::NumHistogramSamples(video_prefix + "SendSideDelayInMs"));
EXPECT_EQ(1,
test::NumHistogramSamples(video_prefix + "SendSideDelayMaxInMs"));
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.SendDelayInMs"));
int num_rtx_samples = use_rtx ? 1 : 0;
EXPECT_EQ(num_rtx_samples, test::NumHistogramSamples(
@@ -2352,7 +2250,7 @@ void EndToEndTest::TestXrReceiverReferenceTimeReport(bool enable_rrtr) {
return SEND_PACKET;
}
// Send stream should send SR packets (and DLRR packets if enabled).
- virtual Action OnSendRtcp(const uint8_t* packet, size_t length) {
+ Action OnSendRtcp(const uint8_t* packet, size_t length) override {
rtc::CritScope lock(&crit_);
RTCPUtility::RTCPParserV2 parser(packet, length, true);
EXPECT_TRUE(parser.IsValid());
@@ -2571,7 +2469,8 @@ TEST_F(EndToEndTest, ReportsSetEncoderRates) {
TEST_F(EndToEndTest, GetStats) {
static const int kStartBitrateBps = 3000000;
static const int kExpectedRenderDelayMs = 20;
- class StatsObserver : public test::EndToEndTest, public I420FrameCallback {
+ class StatsObserver : public test::EndToEndTest,
+ public rtc::VideoSinkInterface<VideoFrame> {
public:
StatsObserver()
: EndToEndTest(kLongTimeoutMs),
@@ -2601,11 +2500,9 @@ TEST_F(EndToEndTest, GetStats) {
return SEND_PACKET;
}
- void FrameCallback(VideoFrame* video_frame) override {
+ void OnFrame(const VideoFrame& video_frame) override {
// Ensure that we have at least 5ms send side delay.
- int64_t render_time = video_frame->render_time_ms();
- if (render_time > 0)
- video_frame->set_render_time_ms(render_time - 5);
+ SleepMs(5);
}
bool CheckReceiveStats() {
@@ -3038,7 +2935,9 @@ void EndToEndTest::TestRtpStatePreservation(bool use_rtx) {
} else {
// Verify timestamps are reasonably close.
uint32_t latest_observed = timestamp_it->second;
- int32_t timestamp_gap = rtc::TimeDiff(timestamp, latest_observed);
+ // Wraparound handling is unnecessary here as long as an int variable
+ // is used to store the result.
+ int32_t timestamp_gap = timestamp - latest_observed;
EXPECT_LE(std::abs(timestamp_gap), kMaxTimestampGap)
<< "Gap in timestamps (" << latest_observed << " -> "
<< timestamp << ") too large for SSRC: " << ssrc << ".";
@@ -3442,6 +3341,13 @@ TEST_F(EndToEndTest, NewVideoSendStreamsRespectVideoNetworkDown) {
class UnusedEncoder : public test::FakeEncoder {
public:
UnusedEncoder() : FakeEncoder(Clock::GetRealTimeClock()) {}
+
+ int32_t InitEncode(const VideoCodec* config,
+ int32_t number_of_cores,
+ size_t max_payload_size) override {
+ EXPECT_GT(config->startBitrate, 0u);
+ return 0;
+ }
int32_t Encode(const VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<FrameType>* frame_types) override {
diff --git a/chromium/third_party/webrtc/video/full_stack.cc b/chromium/third_party/webrtc/video/full_stack.cc
index 8a97ba8a9a1..0399ed2ed73 100644
--- a/chromium/third_party/webrtc/video/full_stack.cc
+++ b/chromium/third_party/webrtc/video/full_stack.cc
@@ -56,6 +56,7 @@ class FullStackTest : public VideoQualityTest {
// logs // bool
// };
+#if !defined(RTC_DISABLE_VP9)
TEST_F(FullStackTest, ForemanCifWithoutPacketLossVp9) {
ForemanCifWithoutPacketLoss("VP9");
}
@@ -63,6 +64,7 @@ TEST_F(FullStackTest, ForemanCifWithoutPacketLossVp9) {
TEST_F(FullStackTest, ForemanCifPlr5Vp9) {
ForemanCifPlr5("VP9");
}
+#endif // !defined(RTC_DISABLE_VP9)
TEST_F(FullStackTest, ParisQcifWithoutPacketLoss) {
VideoQualityTest::Params paris_qcif = {
@@ -200,6 +202,7 @@ TEST_F(FullStackTest, ScreenshareSlidesVP8_2TL_VeryLossyNet) {
RunTest(screenshare);
}
+#if !defined(RTC_DISABLE_VP9)
TEST_F(FullStackTest, ScreenshareSlidesVP9_2SL) {
VideoQualityTest::Params screenshare = {
{1850, 1110, 5, 50000, 200000, 2000000, "VP9", 1, 0, 400000},
@@ -211,4 +214,5 @@ TEST_F(FullStackTest, ScreenshareSlidesVP9_2SL) {
{std::vector<VideoStream>(), 0, 2, 1}};
RunTest(screenshare);
}
+#endif // !defined(RTC_DISABLE_VP9)
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/overuse_frame_detector.cc b/chromium/third_party/webrtc/video/overuse_frame_detector.cc
index 1ce97771ee1..8498008f974 100644
--- a/chromium/third_party/webrtc/video/overuse_frame_detector.cc
+++ b/chromium/third_party/webrtc/video/overuse_frame_detector.cc
@@ -20,13 +20,13 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/exp_filter.h"
#include "webrtc/base/logging.h"
-#include "webrtc/frame_callback.h"
+#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/video_frame.h"
-#if defined(WEBRTC_MAC)
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
#include <mach/mach.h>
-#endif
+#endif // defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
namespace webrtc {
@@ -56,7 +56,7 @@ CpuOveruseOptions::CpuOveruseOptions()
min_frame_samples(120),
min_process_count(3),
high_threshold_consecutive_count(2) {
-#if defined(WEBRTC_MAC)
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
// This is proof-of-concept code for letting the physical core count affect
// the interval into which we attempt to scale. For now, the code is Mac OS
// specific, since that's the platform were we saw most problems.
@@ -90,8 +90,8 @@ CpuOveruseOptions::CpuOveruseOptions()
high_encode_usage_threshold_percent = 20; // Roughly 1/4 of 100%.
else if (n_physical_cores == 2)
high_encode_usage_threshold_percent = 40; // Roughly 1/4 of 200%.
+#endif // defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
-#endif // WEBRTC_MAC
// Note that we make the interval 2x+epsilon wide, since libyuv scaling steps
// are close to that (when squared). This wide interval makes sure that
// scaling up or down does not jump all the way across the interval.
diff --git a/chromium/third_party/webrtc/video/overuse_frame_detector_unittest.cc b/chromium/third_party/webrtc/video/overuse_frame_detector_unittest.cc
index 06cff38bf61..67d05325e3f 100644
--- a/chromium/third_party/webrtc/video/overuse_frame_detector_unittest.cc
+++ b/chromium/third_party/webrtc/video/overuse_frame_detector_unittest.cc
@@ -53,7 +53,7 @@ class CpuOveruseObserverImpl : public CpuOveruseObserver {
class OveruseFrameDetectorTest : public ::testing::Test,
public CpuOveruseMetricsObserver {
protected:
- virtual void SetUp() {
+ void SetUp() override {
clock_.reset(new SimulatedClock(1234));
observer_.reset(new MockCpuOveruseObserver());
options_.min_process_count = 0;
diff --git a/chromium/third_party/webrtc/video/payload_router.cc b/chromium/third_party/webrtc/video/payload_router.cc
index 968d82df62a..3be5882cdbf 100644
--- a/chromium/third_party/webrtc/video/payload_router.cc
+++ b/chromium/third_party/webrtc/video/payload_router.cc
@@ -13,11 +13,88 @@
#include "webrtc/base/checks.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
namespace webrtc {
-PayloadRouter::PayloadRouter()
- : active_(false), num_sending_modules_(0) {}
+namespace {
+// Map information from info into rtp.
+void CopyCodecSpecific(const CodecSpecificInfo* info, RTPVideoHeader* rtp) {
+ RTC_DCHECK(info);
+ switch (info->codecType) {
+ case kVideoCodecVP8: {
+ rtp->codec = kRtpVideoVp8;
+ rtp->codecHeader.VP8.InitRTPVideoHeaderVP8();
+ rtp->codecHeader.VP8.pictureId = info->codecSpecific.VP8.pictureId;
+ rtp->codecHeader.VP8.nonReference = info->codecSpecific.VP8.nonReference;
+ rtp->codecHeader.VP8.temporalIdx = info->codecSpecific.VP8.temporalIdx;
+ rtp->codecHeader.VP8.layerSync = info->codecSpecific.VP8.layerSync;
+ rtp->codecHeader.VP8.tl0PicIdx = info->codecSpecific.VP8.tl0PicIdx;
+ rtp->codecHeader.VP8.keyIdx = info->codecSpecific.VP8.keyIdx;
+ rtp->simulcastIdx = info->codecSpecific.VP8.simulcastIdx;
+ return;
+ }
+ case kVideoCodecVP9: {
+ rtp->codec = kRtpVideoVp9;
+ rtp->codecHeader.VP9.InitRTPVideoHeaderVP9();
+ rtp->codecHeader.VP9.inter_pic_predicted =
+ info->codecSpecific.VP9.inter_pic_predicted;
+ rtp->codecHeader.VP9.flexible_mode =
+ info->codecSpecific.VP9.flexible_mode;
+ rtp->codecHeader.VP9.ss_data_available =
+ info->codecSpecific.VP9.ss_data_available;
+ rtp->codecHeader.VP9.picture_id = info->codecSpecific.VP9.picture_id;
+ rtp->codecHeader.VP9.tl0_pic_idx = info->codecSpecific.VP9.tl0_pic_idx;
+ rtp->codecHeader.VP9.temporal_idx = info->codecSpecific.VP9.temporal_idx;
+ rtp->codecHeader.VP9.spatial_idx = info->codecSpecific.VP9.spatial_idx;
+ rtp->codecHeader.VP9.temporal_up_switch =
+ info->codecSpecific.VP9.temporal_up_switch;
+ rtp->codecHeader.VP9.inter_layer_predicted =
+ info->codecSpecific.VP9.inter_layer_predicted;
+ rtp->codecHeader.VP9.gof_idx = info->codecSpecific.VP9.gof_idx;
+ rtp->codecHeader.VP9.num_spatial_layers =
+ info->codecSpecific.VP9.num_spatial_layers;
+
+ if (info->codecSpecific.VP9.ss_data_available) {
+ rtp->codecHeader.VP9.spatial_layer_resolution_present =
+ info->codecSpecific.VP9.spatial_layer_resolution_present;
+ if (info->codecSpecific.VP9.spatial_layer_resolution_present) {
+ for (size_t i = 0; i < info->codecSpecific.VP9.num_spatial_layers;
+ ++i) {
+ rtp->codecHeader.VP9.width[i] = info->codecSpecific.VP9.width[i];
+ rtp->codecHeader.VP9.height[i] = info->codecSpecific.VP9.height[i];
+ }
+ }
+ rtp->codecHeader.VP9.gof.CopyGofInfoVP9(info->codecSpecific.VP9.gof);
+ }
+
+ rtp->codecHeader.VP9.num_ref_pics = info->codecSpecific.VP9.num_ref_pics;
+ for (int i = 0; i < info->codecSpecific.VP9.num_ref_pics; ++i)
+ rtp->codecHeader.VP9.pid_diff[i] = info->codecSpecific.VP9.p_diff[i];
+ return;
+ }
+ case kVideoCodecH264:
+ rtp->codec = kRtpVideoH264;
+ return;
+ case kVideoCodecGeneric:
+ rtp->codec = kRtpVideoGeneric;
+ rtp->simulcastIdx = info->codecSpecific.generic.simulcast_idx;
+ return;
+ default:
+ return;
+ }
+}
+
+} // namespace
+
+PayloadRouter::PayloadRouter(const std::vector<RtpRtcp*>& rtp_modules,
+ int payload_type)
+ : active_(false),
+ num_sending_modules_(1),
+ rtp_modules_(rtp_modules),
+ payload_type_(payload_type) {
+ UpdateModuleSendingState();
+}
PayloadRouter::~PayloadRouter() {}
@@ -26,12 +103,6 @@ size_t PayloadRouter::DefaultMaxPayloadLength() {
return IP_PACKET_SIZE - kIpUdpSrtpLength;
}
-void PayloadRouter::Init(
- const std::vector<RtpRtcp*>& rtp_modules) {
- RTC_DCHECK(rtp_modules_.empty());
- rtp_modules_ = rtp_modules;
-}
-
void PayloadRouter::set_active(bool active) {
rtc::CritScope lock(&crit_);
if (active_ == active)
@@ -45,10 +116,12 @@ bool PayloadRouter::active() {
return active_ && !rtp_modules_.empty();
}
-void PayloadRouter::SetSendingRtpModules(size_t num_sending_modules) {
- RTC_DCHECK_LE(num_sending_modules, rtp_modules_.size());
+void PayloadRouter::SetSendStreams(const std::vector<VideoStream>& streams) {
+ RTC_DCHECK_LE(streams.size(), rtp_modules_.size());
rtc::CritScope lock(&crit_);
- num_sending_modules_ = num_sending_modules;
+ num_sending_modules_ = streams.size();
+ streams_ = streams;
+ // TODO(perkj): Should SetSendStreams also call SetTargetSendBitrate?
UpdateModuleSendingState();
}
@@ -64,39 +137,51 @@ void PayloadRouter::UpdateModuleSendingState() {
}
}
-bool PayloadRouter::RoutePayload(FrameType frame_type,
- int8_t payload_type,
- uint32_t time_stamp,
- int64_t capture_time_ms,
- const uint8_t* payload_data,
- size_t payload_length,
- const RTPFragmentationHeader* fragmentation,
- const RTPVideoHeader* rtp_video_hdr) {
+int32_t PayloadRouter::Encoded(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const RTPFragmentationHeader* fragmentation) {
rtc::CritScope lock(&crit_);
RTC_DCHECK(!rtp_modules_.empty());
if (!active_ || num_sending_modules_ == 0)
- return false;
+ return -1;
int stream_idx = 0;
- if (rtp_video_hdr) {
- RTC_DCHECK_LT(rtp_video_hdr->simulcastIdx, rtp_modules_.size());
- // The simulcast index might actually be larger than the number of modules
- // in case the encoder was processing a frame during a codec reconfig.
- if (rtp_video_hdr->simulcastIdx >= num_sending_modules_)
- return false;
- stream_idx = rtp_video_hdr->simulcastIdx;
- }
+
+ RTPVideoHeader rtp_video_header;
+ memset(&rtp_video_header, 0, sizeof(RTPVideoHeader));
+ if (codec_specific_info)
+ CopyCodecSpecific(codec_specific_info, &rtp_video_header);
+ rtp_video_header.rotation = encoded_image.rotation_;
+
+ RTC_DCHECK_LT(rtp_video_header.simulcastIdx, rtp_modules_.size());
+ // The simulcast index might actually be larger than the number of modules
+ // in case the encoder was processing a frame during a codec reconfig.
+ if (rtp_video_header.simulcastIdx >= num_sending_modules_)
+ return -1;
+ stream_idx = rtp_video_header.simulcastIdx;
+
return rtp_modules_[stream_idx]->SendOutgoingData(
- frame_type, payload_type, time_stamp, capture_time_ms, payload_data,
- payload_length, fragmentation, rtp_video_hdr) == 0 ? true : false;
+ encoded_image._frameType, payload_type_, encoded_image._timeStamp,
+ encoded_image.capture_time_ms_, encoded_image._buffer,
+ encoded_image._length, fragmentation, &rtp_video_header);
}
-void PayloadRouter::SetTargetSendBitrates(
- const std::vector<uint32_t>& stream_bitrates) {
+void PayloadRouter::SetTargetSendBitrate(uint32_t bitrate_bps) {
rtc::CritScope lock(&crit_);
- RTC_DCHECK_LE(stream_bitrates.size(), rtp_modules_.size());
- for (size_t i = 0; i < stream_bitrates.size(); ++i) {
- rtp_modules_[i]->SetTargetSendBitrate(stream_bitrates[i]);
+ RTC_DCHECK_LE(streams_.size(), rtp_modules_.size());
+
+ // TODO(sprang): Rebase https://codereview.webrtc.org/1913073002/ on top of
+ // this.
+ int bitrate_remainder = bitrate_bps;
+ for (size_t i = 0; i < streams_.size() && bitrate_remainder > 0; ++i) {
+ int stream_bitrate = 0;
+ if (streams_[i].max_bitrate_bps > bitrate_remainder) {
+ stream_bitrate = bitrate_remainder;
+ } else {
+ stream_bitrate = streams_[i].max_bitrate_bps;
+ }
+ bitrate_remainder -= stream_bitrate;
+ rtp_modules_[i]->SetTargetSendBitrate(stream_bitrate);
}
}
diff --git a/chromium/third_party/webrtc/video/payload_router.h b/chromium/third_party/webrtc/video/payload_router.h
index 9eaf7163224..ce65bae6f81 100644
--- a/chromium/third_party/webrtc/video/payload_router.h
+++ b/chromium/third_party/webrtc/video/payload_router.h
@@ -17,6 +17,8 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_types.h"
+#include "webrtc/config.h"
+#include "webrtc/video_encoder.h"
#include "webrtc/system_wrappers/include/atomic32.h"
namespace webrtc {
@@ -27,37 +29,29 @@ struct RTPVideoHeader;
// PayloadRouter routes outgoing data to the correct sending RTP module, based
// on the simulcast layer in RTPVideoHeader.
-class PayloadRouter {
+class PayloadRouter : public EncodedImageCallback {
public:
- PayloadRouter();
+ // Rtp modules are assumed to be sorted in simulcast index order.
+ explicit PayloadRouter(const std::vector<RtpRtcp*>& rtp_modules,
+ int payload_type);
~PayloadRouter();
static size_t DefaultMaxPayloadLength();
-
- // Rtp modules are assumed to be sorted in simulcast index order.
- void Init(const std::vector<RtpRtcp*>& rtp_modules);
-
- void SetSendingRtpModules(size_t num_sending_modules);
+ void SetSendStreams(const std::vector<VideoStream>& streams);
// PayloadRouter will only route packets if being active, all packets will be
// dropped otherwise.
void set_active(bool active);
bool active();
- // Input parameters according to the signature of RtpRtcp::SendOutgoingData.
- // Returns true if the packet was routed / sent, false otherwise.
- bool RoutePayload(FrameType frame_type,
- int8_t payload_type,
- uint32_t time_stamp,
- int64_t capture_time_ms,
- const uint8_t* payload_data,
- size_t payload_size,
- const RTPFragmentationHeader* fragmentation,
- const RTPVideoHeader* rtp_video_hdr);
-
- // Configures current target bitrate per module. 'stream_bitrates' is assumed
- // to be in the same order as 'SetSendingRtpModules'.
- void SetTargetSendBitrates(const std::vector<uint32_t>& stream_bitrates);
+ // Implements EncodedImageCallback.
+ // Returns 0 if the packet was routed / sent, -1 otherwise.
+ int32_t Encoded(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const RTPFragmentationHeader* fragmentation) override;
+
+ // Configures current target bitrate.
+ void SetTargetSendBitrate(uint32_t bitrate_bps);
// Returns the maximum allowed data payload length, given the configured MTU
// and RTP headers.
@@ -66,13 +60,15 @@ class PayloadRouter {
private:
void UpdateModuleSendingState() EXCLUSIVE_LOCKS_REQUIRED(crit_);
- // TODO(pbos): Set once and for all on construction and make const.
- std::vector<RtpRtcp*> rtp_modules_;
-
rtc::CriticalSection crit_;
bool active_ GUARDED_BY(crit_);
+ std::vector<VideoStream> streams_ GUARDED_BY(crit_);
size_t num_sending_modules_ GUARDED_BY(crit_);
+ // Rtp modules are assumed to be sorted in simulcast index order. Not owned.
+ const std::vector<RtpRtcp*> rtp_modules_;
+ const int payload_type_;
+
RTC_DISALLOW_COPY_AND_ASSIGN(PayloadRouter);
};
diff --git a/chromium/third_party/webrtc/video/payload_router_unittest.cc b/chromium/third_party/webrtc/video/payload_router_unittest.cc
index 5b3dc9340a5..5b6612124c2 100644
--- a/chromium/third_party/webrtc/video/payload_router_unittest.cc
+++ b/chromium/third_party/webrtc/video/payload_router_unittest.cc
@@ -14,6 +14,7 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/video/payload_router.h"
using ::testing::_;
@@ -23,135 +24,148 @@ using ::testing::Return;
namespace webrtc {
-class PayloadRouterTest : public ::testing::Test {
- protected:
- virtual void SetUp() {
- payload_router_.reset(new PayloadRouter());
- }
- std::unique_ptr<PayloadRouter> payload_router_;
-};
-
-TEST_F(PayloadRouterTest, SendOnOneModule) {
- MockRtpRtcp rtp;
+TEST(PayloadRouterTest, SendOnOneModule) {
+ NiceMock<MockRtpRtcp> rtp;
std::vector<RtpRtcp*> modules(1, &rtp);
-
- payload_router_->Init(modules);
- payload_router_->SetSendingRtpModules(modules.size());
+ std::vector<VideoStream> streams(1);
uint8_t payload = 'a';
- FrameType frame_type = kVideoFrameKey;
int8_t payload_type = 96;
-
- EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1,
- nullptr, nullptr))
+ EncodedImage encoded_image;
+ encoded_image._timeStamp = 1;
+ encoded_image.capture_time_ms_ = 2;
+ encoded_image._frameType = kVideoFrameKey;
+ encoded_image._buffer = &payload;
+ encoded_image._length = 1;
+
+ PayloadRouter payload_router(modules, payload_type);
+ payload_router.SetSendStreams(streams);
+
+ EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, payload_type,
+ encoded_image._timeStamp,
+ encoded_image.capture_time_ms_, &payload,
+ encoded_image._length, nullptr, _))
.Times(0);
- EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
- &payload, 1, nullptr, nullptr));
+ EXPECT_EQ(-1, payload_router.Encoded(encoded_image, nullptr, nullptr));
- payload_router_->set_active(true);
- EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1,
- nullptr, nullptr))
+ payload_router.set_active(true);
+ EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, payload_type,
+ encoded_image._timeStamp,
+ encoded_image.capture_time_ms_, &payload,
+ encoded_image._length, nullptr, _))
.Times(1);
- EXPECT_TRUE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
- &payload, 1, nullptr, nullptr));
+ EXPECT_EQ(0, payload_router.Encoded(encoded_image, nullptr, nullptr));
- payload_router_->set_active(false);
- EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1,
- nullptr, nullptr))
+ payload_router.set_active(false);
+ EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, payload_type,
+ encoded_image._timeStamp,
+ encoded_image.capture_time_ms_, &payload,
+ encoded_image._length, nullptr, _))
.Times(0);
- EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
- &payload, 1, nullptr, nullptr));
+ EXPECT_EQ(-1, payload_router.Encoded(encoded_image, nullptr, nullptr));
- payload_router_->set_active(true);
- EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1,
- nullptr, nullptr))
+ payload_router.set_active(true);
+ EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, payload_type,
+ encoded_image._timeStamp,
+ encoded_image.capture_time_ms_, &payload,
+ encoded_image._length, nullptr, _))
.Times(1);
- EXPECT_TRUE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
- &payload, 1, nullptr, nullptr));
-
- payload_router_->SetSendingRtpModules(0);
- EXPECT_CALL(rtp, SendOutgoingData(frame_type, payload_type, 0, 0, _, 1,
- nullptr, nullptr))
+ EXPECT_EQ(0, payload_router.Encoded(encoded_image, nullptr, nullptr));
+
+ streams.clear();
+ payload_router.SetSendStreams(streams);
+ EXPECT_CALL(rtp, SendOutgoingData(encoded_image._frameType, payload_type,
+ encoded_image._timeStamp,
+ encoded_image.capture_time_ms_, &payload,
+ encoded_image._length, nullptr, _))
.Times(0);
- EXPECT_FALSE(payload_router_->RoutePayload(frame_type, payload_type, 0, 0,
- &payload, 1, nullptr, nullptr));
+ EXPECT_EQ(-1, payload_router.Encoded(encoded_image, nullptr, nullptr));
}
-TEST_F(PayloadRouterTest, SendSimulcast) {
- MockRtpRtcp rtp_1;
- MockRtpRtcp rtp_2;
+TEST(PayloadRouterTest, SendSimulcast) {
+ NiceMock<MockRtpRtcp> rtp_1;
+ NiceMock<MockRtpRtcp> rtp_2;
std::vector<RtpRtcp*> modules;
modules.push_back(&rtp_1);
modules.push_back(&rtp_2);
+ std::vector<VideoStream> streams(2);
- payload_router_->Init(modules);
- payload_router_->SetSendingRtpModules(modules.size());
-
- uint8_t payload_1 = 'a';
- FrameType frame_type_1 = kVideoFrameKey;
- int8_t payload_type_1 = 96;
- RTPVideoHeader rtp_hdr_1;
- rtp_hdr_1.simulcastIdx = 0;
-
- payload_router_->set_active(true);
- EXPECT_CALL(rtp_1, SendOutgoingData(frame_type_1, payload_type_1, 0, 0, _, 1,
- nullptr, &rtp_hdr_1))
+ int8_t payload_type = 96;
+ uint8_t payload = 'a';
+ EncodedImage encoded_image;
+ encoded_image._timeStamp = 1;
+ encoded_image.capture_time_ms_ = 2;
+ encoded_image._frameType = kVideoFrameKey;
+ encoded_image._buffer = &payload;
+ encoded_image._length = 1;
+
+ PayloadRouter payload_router(modules, payload_type);
+ payload_router.SetSendStreams(streams);
+
+ CodecSpecificInfo codec_info_1;
+ memset(&codec_info_1, 0, sizeof(CodecSpecificInfo));
+ codec_info_1.codecType = kVideoCodecVP8;
+ codec_info_1.codecSpecific.VP8.simulcastIdx = 0;
+
+ payload_router.set_active(true);
+ EXPECT_CALL(rtp_1, SendOutgoingData(encoded_image._frameType, payload_type,
+ encoded_image._timeStamp,
+ encoded_image.capture_time_ms_, &payload,
+ encoded_image._length, nullptr, _))
.Times(1);
EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
.Times(0);
- EXPECT_TRUE(payload_router_->RoutePayload(
- frame_type_1, payload_type_1, 0, 0, &payload_1, 1, nullptr, &rtp_hdr_1));
-
- uint8_t payload_2 = 'b';
- FrameType frame_type_2 = kVideoFrameDelta;
- int8_t payload_type_2 = 97;
- RTPVideoHeader rtp_hdr_2;
- rtp_hdr_2.simulcastIdx = 1;
- EXPECT_CALL(rtp_2, SendOutgoingData(frame_type_2, payload_type_2, 0, 0, _, 1,
- nullptr, &rtp_hdr_2))
+ EXPECT_EQ(0, payload_router.Encoded(encoded_image, &codec_info_1, nullptr));
+
+ CodecSpecificInfo codec_info_2;
+ memset(&codec_info_2, 0, sizeof(CodecSpecificInfo));
+ codec_info_2.codecType = kVideoCodecVP8;
+ codec_info_2.codecSpecific.VP8.simulcastIdx = 1;
+
+ EXPECT_CALL(rtp_2, SendOutgoingData(encoded_image._frameType, payload_type,
+ encoded_image._timeStamp,
+ encoded_image.capture_time_ms_, &payload,
+ encoded_image._length, nullptr, _))
.Times(1);
EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
.Times(0);
- EXPECT_TRUE(payload_router_->RoutePayload(
- frame_type_2, payload_type_2, 0, 0, &payload_2, 1, nullptr, &rtp_hdr_2));
+ EXPECT_EQ(0, payload_router.Encoded(encoded_image, &codec_info_2, nullptr));
// Inactive.
- payload_router_->set_active(false);
+ payload_router.set_active(false);
EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
.Times(0);
EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
.Times(0);
- EXPECT_FALSE(payload_router_->RoutePayload(
- frame_type_1, payload_type_1, 0, 0, &payload_1, 1, nullptr, &rtp_hdr_1));
- EXPECT_FALSE(payload_router_->RoutePayload(
- frame_type_2, payload_type_2, 0, 0, &payload_2, 1, nullptr, &rtp_hdr_2));
+ EXPECT_EQ(-1, payload_router.Encoded(encoded_image, &codec_info_1, nullptr));
+ EXPECT_EQ(-1, payload_router.Encoded(encoded_image, &codec_info_2, nullptr));
// Invalid simulcast index.
- payload_router_->SetSendingRtpModules(1);
- payload_router_->set_active(true);
+ streams.pop_back(); // Remove a stream.
+ payload_router.SetSendStreams(streams);
+ payload_router.set_active(true);
EXPECT_CALL(rtp_1, SendOutgoingData(_, _, _, _, _, _, _, _))
.Times(0);
EXPECT_CALL(rtp_2, SendOutgoingData(_, _, _, _, _, _, _, _))
.Times(0);
- rtp_hdr_1.simulcastIdx = 1;
- EXPECT_FALSE(payload_router_->RoutePayload(
- frame_type_1, payload_type_1, 0, 0, &payload_1, 1, nullptr, &rtp_hdr_1));
+ codec_info_2.codecSpecific.VP8.simulcastIdx = 1;
+ EXPECT_EQ(-1, payload_router.Encoded(encoded_image, &codec_info_2, nullptr));
}
-TEST_F(PayloadRouterTest, MaxPayloadLength) {
+TEST(PayloadRouterTest, MaxPayloadLength) {
// Without any limitations from the modules, verify we get the max payload
// length for IP/UDP/SRTP with a MTU of 150 bytes.
const size_t kDefaultMaxLength = 1500 - 20 - 8 - 12 - 4;
- EXPECT_EQ(kDefaultMaxLength, payload_router_->DefaultMaxPayloadLength());
- EXPECT_EQ(kDefaultMaxLength, payload_router_->MaxPayloadLength());
-
- MockRtpRtcp rtp_1;
- MockRtpRtcp rtp_2;
+ NiceMock<MockRtpRtcp> rtp_1;
+ NiceMock<MockRtpRtcp> rtp_2;
std::vector<RtpRtcp*> modules;
modules.push_back(&rtp_1);
modules.push_back(&rtp_2);
- payload_router_->Init(modules);
- payload_router_->SetSendingRtpModules(modules.size());
+ PayloadRouter payload_router(modules, 42);
+
+ EXPECT_EQ(kDefaultMaxLength, PayloadRouter::DefaultMaxPayloadLength());
+ std::vector<VideoStream> streams(2);
+ payload_router.SetSendStreams(streams);
// Modules return a higher length than the default value.
EXPECT_CALL(rtp_1, MaxDataPayloadLength())
@@ -160,7 +174,7 @@ TEST_F(PayloadRouterTest, MaxPayloadLength) {
EXPECT_CALL(rtp_2, MaxDataPayloadLength())
.Times(1)
.WillOnce(Return(kDefaultMaxLength + 10));
- EXPECT_EQ(kDefaultMaxLength, payload_router_->MaxPayloadLength());
+ EXPECT_EQ(kDefaultMaxLength, payload_router.MaxPayloadLength());
// The modules return a value lower than default.
const size_t kTestMinPayloadLength = 1001;
@@ -170,33 +184,27 @@ TEST_F(PayloadRouterTest, MaxPayloadLength) {
EXPECT_CALL(rtp_2, MaxDataPayloadLength())
.Times(1)
.WillOnce(Return(kTestMinPayloadLength));
- EXPECT_EQ(kTestMinPayloadLength, payload_router_->MaxPayloadLength());
+ EXPECT_EQ(kTestMinPayloadLength, payload_router.MaxPayloadLength());
}
-TEST_F(PayloadRouterTest, SetTargetSendBitrates) {
- MockRtpRtcp rtp_1;
- MockRtpRtcp rtp_2;
+TEST(PayloadRouterTest, SetTargetSendBitrates) {
+ NiceMock<MockRtpRtcp> rtp_1;
+ NiceMock<MockRtpRtcp> rtp_2;
std::vector<RtpRtcp*> modules;
modules.push_back(&rtp_1);
modules.push_back(&rtp_2);
- payload_router_->Init(modules);
- payload_router_->SetSendingRtpModules(modules.size());
+ PayloadRouter payload_router(modules, 42);
+ std::vector<VideoStream> streams(2);
+ streams[0].max_bitrate_bps = 10000;
+ streams[1].max_bitrate_bps = 100000;
+ payload_router.SetSendStreams(streams);
const uint32_t bitrate_1 = 10000;
const uint32_t bitrate_2 = 76543;
- std::vector<uint32_t> bitrates(2, bitrate_1);
- bitrates[1] = bitrate_2;
EXPECT_CALL(rtp_1, SetTargetSendBitrate(bitrate_1))
.Times(1);
EXPECT_CALL(rtp_2, SetTargetSendBitrate(bitrate_2))
.Times(1);
- payload_router_->SetTargetSendBitrates(bitrates);
-
- bitrates.resize(1);
- EXPECT_CALL(rtp_1, SetTargetSendBitrate(bitrate_1))
- .Times(1);
- EXPECT_CALL(rtp_2, SetTargetSendBitrate(bitrate_2))
- .Times(0);
- payload_router_->SetTargetSendBitrates(bitrates);
+ payload_router.SetTargetSendBitrate(bitrate_1 + bitrate_2);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/receive_statistics_proxy.cc b/chromium/third_party/webrtc/video/receive_statistics_proxy.cc
index ff419249d4f..4757efa81b6 100644
--- a/chromium/third_party/webrtc/video/receive_statistics_proxy.cc
+++ b/chromium/third_party/webrtc/video/receive_statistics_proxy.cc
@@ -27,8 +27,8 @@ ReceiveStatisticsProxy::ReceiveStatisticsProxy(
// 1000ms window, scale 1000 for ms to s.
decode_fps_estimator_(1000, 1000),
renders_fps_estimator_(1000, 1000),
- render_fps_tracker_(100u, 10u),
- render_pixel_tracker_(100u, 10u) {
+ render_fps_tracker_(100, 10u),
+ render_pixel_tracker_(100, 10u) {
stats_.ssrc = config.rtp.remote_ssrc;
for (auto it : config.rtp.rtx)
rtx_stats_[it.second.ssrc] = StreamDataCounters();
@@ -63,8 +63,10 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
height);
}
int sync_offset_ms = sync_offset_counter_.Avg(kMinRequiredSamples);
- if (sync_offset_ms != -1)
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs", sync_offset_ms);
+ if (sync_offset_ms != -1) {
+ RTC_LOGGED_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs",
+ sync_offset_ms);
+ }
int qp = qp_counters_.vp8.Avg(kMinRequiredSamples);
if (qp != -1)
@@ -77,6 +79,22 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
if (decode_ms != -1)
RTC_LOGGED_HISTOGRAM_COUNTS_1000("WebRTC.Video.DecodeTimeInMs", decode_ms);
+ int jb_delay_ms = jitter_buffer_delay_counter_.Avg(kMinRequiredDecodeSamples);
+ if (jb_delay_ms != -1) {
+ RTC_LOGGED_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs",
+ jb_delay_ms);
+ }
+ int target_delay_ms = target_delay_counter_.Avg(kMinRequiredDecodeSamples);
+ if (target_delay_ms != -1) {
+ RTC_LOGGED_HISTOGRAM_COUNTS_10000("WebRTC.Video.TargetDelayInMs",
+ target_delay_ms);
+ }
+ int current_delay_ms = current_delay_counter_.Avg(kMinRequiredDecodeSamples);
+ if (current_delay_ms != -1) {
+ RTC_LOGGED_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs",
+ current_delay_ms);
+ }
+
int delay_ms = delay_counter_.Avg(kMinRequiredDecodeSamples);
if (delay_ms != -1)
RTC_LOGGED_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", delay_ms);
@@ -170,6 +188,9 @@ void ReceiveStatisticsProxy::OnDecoderTiming(int decode_ms,
stats_.min_playout_delay_ms = min_playout_delay_ms;
stats_.render_delay_ms = render_delay_ms;
decode_time_counter_.Add(decode_ms);
+ jitter_buffer_delay_counter_.Add(jitter_buffer_ms);
+ target_delay_counter_.Add(target_delay_ms);
+ current_delay_counter_.Add(current_delay_ms);
// Network delay (rtt/2) + target_delay_ms (jitter delay + decode time +
// render delay).
delay_counter_.Add(target_delay_ms + rtt_ms / 2);
diff --git a/chromium/third_party/webrtc/video/receive_statistics_proxy.h b/chromium/third_party/webrtc/video/receive_statistics_proxy.h
index dc612750883..f88bbf51a27 100644
--- a/chromium/third_party/webrtc/video/receive_statistics_proxy.h
+++ b/chromium/third_party/webrtc/video/receive_statistics_proxy.h
@@ -19,10 +19,10 @@
#include "webrtc/base/ratetracker.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_types.h"
-#include "webrtc/frame_callback.h"
+#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/video/report_block_stats.h"
-#include "webrtc/video/vie_channel.h"
+#include "webrtc/video/video_stream_decoder.h"
#include "webrtc/video_receive_stream.h"
namespace webrtc {
@@ -108,6 +108,9 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
SampleCounter render_height_counter_ GUARDED_BY(crit_);
SampleCounter sync_offset_counter_ GUARDED_BY(crit_);
SampleCounter decode_time_counter_ GUARDED_BY(crit_);
+ SampleCounter jitter_buffer_delay_counter_ GUARDED_BY(crit_);
+ SampleCounter target_delay_counter_ GUARDED_BY(crit_);
+ SampleCounter current_delay_counter_ GUARDED_BY(crit_);
SampleCounter delay_counter_ GUARDED_BY(crit_);
ReportBlockStats report_block_stats_ GUARDED_BY(crit_);
QpCounters qp_counters_; // Only accessed on the decoding thread.
diff --git a/chromium/third_party/webrtc/video/rtp_stream_receiver.cc b/chromium/third_party/webrtc/video/rtp_stream_receiver.cc
new file mode 100644
index 00000000000..c305df54135
--- /dev/null
+++ b/chromium/third_party/webrtc/video/rtp_stream_receiver.cc
@@ -0,0 +1,542 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/rtp_stream_receiver.h"
+
+#include <vector>
+
+#include "webrtc/base/logging.h"
+#include "webrtc/common_types.h"
+#include "webrtc/config.h"
+#include "webrtc/modules/pacing/packet_router.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/rtp_rtcp/include/fec_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
+#include "webrtc/modules/video_coding/video_coding_impl.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/system_wrappers/include/timestamp_extrapolator.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/video/receive_statistics_proxy.h"
+#include "webrtc/video/vie_remb.h"
+
+namespace webrtc {
+
+std::unique_ptr<RtpRtcp> CreateRtpRtcpModule(
+ ReceiveStatistics* receive_statistics,
+ Transport* outgoing_transport,
+ RtcpRttStats* rtt_stats,
+ RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ RtpPacketSender* paced_sender,
+ TransportSequenceNumberAllocator* transport_sequence_number_allocator) {
+ RtpRtcp::Configuration configuration;
+ configuration.audio = false;
+ configuration.receiver_only = true;
+ configuration.receive_statistics = receive_statistics;
+ configuration.outgoing_transport = outgoing_transport;
+ configuration.intra_frame_callback = nullptr;
+ configuration.rtt_stats = rtt_stats;
+ configuration.rtcp_packet_type_counter_observer =
+ rtcp_packet_type_counter_observer;
+ configuration.paced_sender = paced_sender;
+ configuration.transport_sequence_number_allocator =
+ transport_sequence_number_allocator;
+ configuration.send_bitrate_observer = nullptr;
+ configuration.send_frame_count_observer = nullptr;
+ configuration.send_side_delay_observer = nullptr;
+ configuration.send_packet_observer = nullptr;
+ configuration.bandwidth_callback = nullptr;
+ configuration.transport_feedback_callback = nullptr;
+
+ std::unique_ptr<RtpRtcp> rtp_rtcp(RtpRtcp::CreateRtpRtcp(configuration));
+ rtp_rtcp->SetSendingStatus(false);
+ rtp_rtcp->SetSendingMediaStatus(false);
+ rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
+
+ return rtp_rtcp;
+}
+
+static const int kPacketLogIntervalMs = 10000;
+
+RtpStreamReceiver::RtpStreamReceiver(
+ vcm::VideoReceiver* video_receiver,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ Transport* transport,
+ RtcpRttStats* rtt_stats,
+ PacedSender* paced_sender,
+ PacketRouter* packet_router,
+ VieRemb* remb,
+ const VideoReceiveStream::Config& config,
+ ReceiveStatisticsProxy* receive_stats_proxy,
+ ProcessThread* process_thread)
+ : clock_(Clock::GetRealTimeClock()),
+ config_(config),
+ video_receiver_(video_receiver),
+ remote_bitrate_estimator_(remote_bitrate_estimator),
+ packet_router_(packet_router),
+ remb_(remb),
+ process_thread_(process_thread),
+ ntp_estimator_(clock_),
+ rtp_payload_registry_(RTPPayloadStrategy::CreateStrategy(false)),
+ rtp_header_parser_(RtpHeaderParser::Create()),
+ rtp_receiver_(RtpReceiver::CreateVideoReceiver(clock_,
+ this,
+ this,
+ &rtp_payload_registry_)),
+ rtp_receive_statistics_(ReceiveStatistics::Create(clock_)),
+ fec_receiver_(FecReceiver::Create(this)),
+ receiving_(false),
+ restored_packet_in_use_(false),
+ last_packet_log_ms_(-1),
+ rtp_rtcp_(CreateRtpRtcpModule(rtp_receive_statistics_.get(),
+ transport,
+ rtt_stats,
+ receive_stats_proxy,
+ remote_bitrate_estimator_,
+ paced_sender,
+ packet_router)) {
+ packet_router_->AddRtpModule(rtp_rtcp_.get());
+ rtp_receive_statistics_->RegisterRtpStatisticsCallback(receive_stats_proxy);
+ rtp_receive_statistics_->RegisterRtcpStatisticsCallback(receive_stats_proxy);
+
+ RTC_DCHECK(config.rtp.rtcp_mode != RtcpMode::kOff)
+ << "A stream should not be configured with RTCP disabled. This value is "
+ "reserved for internal usage.";
+ RTC_DCHECK(config_.rtp.remote_ssrc != 0);
+ // TODO(pbos): What's an appropriate local_ssrc for receive-only streams?
+ RTC_DCHECK(config_.rtp.local_ssrc != 0);
+ RTC_DCHECK(config_.rtp.remote_ssrc != config_.rtp.local_ssrc);
+
+ rtp_rtcp_->SetRTCPStatus(config.rtp.rtcp_mode);
+ rtp_rtcp_->SetSSRC(config.rtp.local_ssrc);
+ rtp_rtcp_->SetKeyFrameRequestMethod(kKeyFrameReqPliRtcp);
+ if (config.rtp.remb) {
+ rtp_rtcp_->SetREMBStatus(true);
+ remb_->AddReceiveChannel(rtp_rtcp_.get());
+ }
+
+ for (size_t i = 0; i < config.rtp.extensions.size(); ++i) {
+ EnableReceiveRtpHeaderExtension(config.rtp.extensions[i].name,
+ config.rtp.extensions[i].id);
+ }
+
+ static const int kMaxPacketAgeToNack = 450;
+ const int max_reordering_threshold = (config.rtp.nack.rtp_history_ms > 0)
+ ? kMaxPacketAgeToNack : kDefaultMaxReorderingThreshold;
+ rtp_receive_statistics_->SetMaxReorderingThreshold(max_reordering_threshold);
+
+ // TODO(pbos): Support multiple RTX, per video payload.
+ for (const auto& kv : config_.rtp.rtx) {
+ RTC_DCHECK(kv.second.ssrc != 0);
+ RTC_DCHECK(kv.second.payload_type != 0);
+
+ rtp_payload_registry_.SetRtxSsrc(kv.second.ssrc);
+ rtp_payload_registry_.SetRtxPayloadType(kv.second.payload_type,
+ kv.first);
+ }
+
+ // If set to true, the RTX payload type mapping supplied in
+ // |SetRtxPayloadType| will be used when restoring RTX packets. Without it,
+ // RTX packets will always be restored to the last non-RTX packet payload type
+ // received.
+ // TODO(holmer): When Chrome no longer depends on this being false by default,
+ // always use the mapping and remove this whole codepath.
+ rtp_payload_registry_.set_use_rtx_payload_mapping_on_restore(
+ config_.rtp.use_rtx_payload_mapping_on_restore);
+
+ if (IsFecEnabled()) {
+ VideoCodec ulpfec_codec = {};
+ ulpfec_codec.codecType = kVideoCodecULPFEC;
+ strncpy(ulpfec_codec.plName, "ulpfec", sizeof(ulpfec_codec.plName));
+ ulpfec_codec.plType = config_.rtp.fec.ulpfec_payload_type;
+ RTC_CHECK(SetReceiveCodec(ulpfec_codec));
+
+ VideoCodec red_codec = {};
+ red_codec.codecType = kVideoCodecRED;
+ strncpy(red_codec.plName, "red", sizeof(red_codec.plName));
+ red_codec.plType = config_.rtp.fec.red_payload_type;
+ RTC_CHECK(SetReceiveCodec(red_codec));
+ if (config_.rtp.fec.red_rtx_payload_type != -1) {
+ rtp_payload_registry_.SetRtxPayloadType(
+ config_.rtp.fec.red_rtx_payload_type,
+ config_.rtp.fec.red_payload_type);
+ }
+
+ rtp_rtcp_->SetGenericFECStatus(true,
+ config_.rtp.fec.red_payload_type,
+ config_.rtp.fec.ulpfec_payload_type);
+ }
+
+ if (config.rtp.rtcp_xr.receiver_reference_time_report)
+ rtp_rtcp_->SetRtcpXrRrtrStatus(true);
+
+ // Stats callback for CNAME changes.
+ rtp_rtcp_->RegisterRtcpStatisticsCallback(receive_stats_proxy);
+
+ process_thread_->RegisterModule(rtp_receive_statistics_.get());
+ process_thread_->RegisterModule(rtp_rtcp_.get());
+}
+
+RtpStreamReceiver::~RtpStreamReceiver() {
+ process_thread_->DeRegisterModule(rtp_receive_statistics_.get());
+ process_thread_->DeRegisterModule(rtp_rtcp_.get());
+
+ packet_router_->RemoveRtpModule(rtp_rtcp_.get());
+ rtp_rtcp_->SetREMBStatus(false);
+ remb_->RemoveReceiveChannel(rtp_rtcp_.get());
+ UpdateHistograms();
+}
+
+bool RtpStreamReceiver::SetReceiveCodec(const VideoCodec& video_codec) {
+ int8_t old_pltype = -1;
+ if (rtp_payload_registry_.ReceivePayloadType(
+ video_codec.plName, kVideoPayloadTypeFrequency, 0,
+ video_codec.maxBitrate, &old_pltype) != -1) {
+ rtp_payload_registry_.DeRegisterReceivePayload(old_pltype);
+ }
+
+ return rtp_receiver_->RegisterReceivePayload(
+ video_codec.plName, video_codec.plType, kVideoPayloadTypeFrequency,
+ 0, 0) == 0;
+}
+
+uint32_t RtpStreamReceiver::GetRemoteSsrc() const {
+ return rtp_receiver_->SSRC();
+}
+
+int RtpStreamReceiver::GetCsrcs(uint32_t* csrcs) const {
+ return rtp_receiver_->CSRCs(csrcs);
+}
+
+RtpReceiver* RtpStreamReceiver::GetRtpReceiver() const {
+ return rtp_receiver_.get();
+}
+
+int32_t RtpStreamReceiver::OnReceivedPayloadData(
+ const uint8_t* payload_data,
+ const size_t payload_size,
+ const WebRtcRTPHeader* rtp_header) {
+ RTC_DCHECK(video_receiver_);
+ WebRtcRTPHeader rtp_header_with_ntp = *rtp_header;
+ rtp_header_with_ntp.ntp_time_ms =
+ ntp_estimator_.Estimate(rtp_header->header.timestamp);
+ if (video_receiver_->IncomingPacket(payload_data, payload_size,
+ rtp_header_with_ntp) != 0) {
+ // Check this...
+ return -1;
+ }
+ return 0;
+}
+
+bool RtpStreamReceiver::OnRecoveredPacket(const uint8_t* rtp_packet,
+ size_t rtp_packet_length) {
+ RTPHeader header;
+ if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
+ return false;
+ }
+ header.payload_type_frequency = kVideoPayloadTypeFrequency;
+ bool in_order = IsPacketInOrder(header);
+ return ReceivePacket(rtp_packet, rtp_packet_length, header, in_order);
+}
+
+// TODO(pbos): Remove as soon as audio can handle a changing payload type
+// without this callback.
+int32_t RtpStreamReceiver::OnInitializeDecoder(
+ const int8_t payload_type,
+ const char payload_name[RTP_PAYLOAD_NAME_SIZE],
+ const int frequency,
+ const size_t channels,
+ const uint32_t rate) {
+ RTC_NOTREACHED();
+ return 0;
+}
+
+void RtpStreamReceiver::OnIncomingSSRCChanged(const uint32_t ssrc) {
+ rtp_rtcp_->SetRemoteSSRC(ssrc);
+}
+
+bool RtpStreamReceiver::DeliverRtp(const uint8_t* rtp_packet,
+ size_t rtp_packet_length,
+ const PacketTime& packet_time) {
+ RTC_DCHECK(remote_bitrate_estimator_);
+ {
+ rtc::CritScope lock(&receive_cs_);
+ if (!receiving_) {
+ return false;
+ }
+ }
+
+ RTPHeader header;
+ if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length,
+ &header)) {
+ return false;
+ }
+ size_t payload_length = rtp_packet_length - header.headerLength;
+ int64_t arrival_time_ms;
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ if (packet_time.timestamp != -1)
+ arrival_time_ms = (packet_time.timestamp + 500) / 1000;
+ else
+ arrival_time_ms = now_ms;
+
+ {
+ // Periodically log the RTP header of incoming packets.
+ rtc::CritScope lock(&receive_cs_);
+ if (now_ms - last_packet_log_ms_ > kPacketLogIntervalMs) {
+ std::stringstream ss;
+ ss << "Packet received on SSRC: " << header.ssrc << " with payload type: "
+ << static_cast<int>(header.payloadType) << ", timestamp: "
+ << header.timestamp << ", sequence number: " << header.sequenceNumber
+ << ", arrival time: " << arrival_time_ms;
+ if (header.extension.hasTransmissionTimeOffset)
+ ss << ", toffset: " << header.extension.transmissionTimeOffset;
+ if (header.extension.hasAbsoluteSendTime)
+ ss << ", abs send time: " << header.extension.absoluteSendTime;
+ LOG(LS_INFO) << ss.str();
+ last_packet_log_ms_ = now_ms;
+ }
+ }
+
+ remote_bitrate_estimator_->IncomingPacket(arrival_time_ms, payload_length,
+ header);
+ header.payload_type_frequency = kVideoPayloadTypeFrequency;
+
+ bool in_order = IsPacketInOrder(header);
+ rtp_payload_registry_.SetIncomingPayloadType(header);
+ bool ret = ReceivePacket(rtp_packet, rtp_packet_length, header, in_order);
+ // Update receive statistics after ReceivePacket.
+ // Receive statistics will be reset if the payload type changes (make sure
+ // that the first packet is included in the stats).
+ rtp_receive_statistics_->IncomingPacket(
+ header, rtp_packet_length, IsPacketRetransmitted(header, in_order));
+ return ret;
+}
+
+int32_t RtpStreamReceiver::RequestKeyFrame() {
+ return rtp_rtcp_->RequestKeyFrame();
+}
+
+int32_t RtpStreamReceiver::SliceLossIndicationRequest(
+ const uint64_t picture_id) {
+ return rtp_rtcp_->SendRTCPSliceLossIndication(
+ static_cast<uint8_t>(picture_id));
+}
+
+bool RtpStreamReceiver::IsFecEnabled() const {
+ return config_.rtp.fec.red_payload_type != -1 &&
+ config_.rtp.fec.ulpfec_payload_type != -1;
+}
+
+bool RtpStreamReceiver::IsRetransmissionsEnabled() const {
+ return config_.rtp.nack.rtp_history_ms > 0;
+}
+
+void RtpStreamReceiver::RequestPacketRetransmit(
+ const std::vector<uint16_t>& sequence_numbers) {
+ rtp_rtcp_->SendNack(sequence_numbers);
+}
+
+int32_t RtpStreamReceiver::ResendPackets(const uint16_t* sequence_numbers,
+ uint16_t length) {
+ return rtp_rtcp_->SendNACK(sequence_numbers, length);
+}
+
+bool RtpStreamReceiver::ReceivePacket(const uint8_t* packet,
+ size_t packet_length,
+ const RTPHeader& header,
+ bool in_order) {
+ if (rtp_payload_registry_.IsEncapsulated(header)) {
+ return ParseAndHandleEncapsulatingHeader(packet, packet_length, header);
+ }
+ const uint8_t* payload = packet + header.headerLength;
+ assert(packet_length >= header.headerLength);
+ size_t payload_length = packet_length - header.headerLength;
+ PayloadUnion payload_specific;
+ if (!rtp_payload_registry_.GetPayloadSpecifics(header.payloadType,
+ &payload_specific)) {
+ return false;
+ }
+ return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
+ payload_specific, in_order);
+}
+
+bool RtpStreamReceiver::ParseAndHandleEncapsulatingHeader(
+ const uint8_t* packet, size_t packet_length, const RTPHeader& header) {
+ if (rtp_payload_registry_.IsRed(header)) {
+ int8_t ulpfec_pt = rtp_payload_registry_.ulpfec_payload_type();
+ if (packet[header.headerLength] == ulpfec_pt) {
+ rtp_receive_statistics_->FecPacketReceived(header, packet_length);
+ // Notify video_receiver about received FEC packets to avoid NACKing these
+ // packets.
+ NotifyReceiverOfFecPacket(header);
+ }
+ if (fec_receiver_->AddReceivedRedPacket(
+ header, packet, packet_length, ulpfec_pt) != 0) {
+ return false;
+ }
+ return fec_receiver_->ProcessReceivedFec() == 0;
+ } else if (rtp_payload_registry_.IsRtx(header)) {
+ if (header.headerLength + header.paddingLength == packet_length) {
+ // This is an empty packet and should be silently dropped before trying to
+ // parse the RTX header.
+ return true;
+ }
+ // Remove the RTX header and parse the original RTP header.
+ if (packet_length < header.headerLength)
+ return false;
+ if (packet_length > sizeof(restored_packet_))
+ return false;
+ rtc::CritScope lock(&receive_cs_);
+ if (restored_packet_in_use_) {
+ LOG(LS_WARNING) << "Multiple RTX headers detected, dropping packet.";
+ return false;
+ }
+ if (!rtp_payload_registry_.RestoreOriginalPacket(
+ restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
+ header)) {
+ LOG(LS_WARNING) << "Incoming RTX packet: Invalid RTP header ssrc: "
+ << header.ssrc << " payload type: "
+ << static_cast<int>(header.payloadType);
+ return false;
+ }
+ restored_packet_in_use_ = true;
+ bool ret = OnRecoveredPacket(restored_packet_, packet_length);
+ restored_packet_in_use_ = false;
+ return ret;
+ }
+ return false;
+}
+
+void RtpStreamReceiver::NotifyReceiverOfFecPacket(const RTPHeader& header) {
+ int8_t last_media_payload_type =
+ rtp_payload_registry_.last_received_media_payload_type();
+ if (last_media_payload_type < 0) {
+ LOG(LS_WARNING) << "Failed to get last media payload type.";
+ return;
+ }
+ // Fake an empty media packet.
+ WebRtcRTPHeader rtp_header = {};
+ rtp_header.header = header;
+ rtp_header.header.payloadType = last_media_payload_type;
+ rtp_header.header.paddingLength = 0;
+ PayloadUnion payload_specific;
+ if (!rtp_payload_registry_.GetPayloadSpecifics(last_media_payload_type,
+ &payload_specific)) {
+ LOG(LS_WARNING) << "Failed to get payload specifics.";
+ return;
+ }
+ rtp_header.type.Video.codec = payload_specific.Video.videoCodecType;
+ rtp_header.type.Video.rotation = kVideoRotation_0;
+ if (header.extension.hasVideoRotation) {
+ rtp_header.type.Video.rotation =
+ ConvertCVOByteToVideoRotation(header.extension.videoRotation);
+ }
+ OnReceivedPayloadData(nullptr, 0, &rtp_header);
+}
+
+bool RtpStreamReceiver::DeliverRtcp(const uint8_t* rtcp_packet,
+ size_t rtcp_packet_length) {
+ {
+ rtc::CritScope lock(&receive_cs_);
+ if (!receiving_) {
+ return false;
+ }
+ }
+
+ rtp_rtcp_->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
+
+ int64_t rtt = 0;
+ rtp_rtcp_->RTT(rtp_receiver_->SSRC(), &rtt, nullptr, nullptr, nullptr);
+ if (rtt == 0) {
+ // Waiting for valid rtt.
+ return true;
+ }
+ uint32_t ntp_secs = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t rtp_timestamp = 0;
+ if (rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, nullptr, nullptr,
+ &rtp_timestamp) != 0) {
+ // Waiting for RTCP.
+ return true;
+ }
+ ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
+
+ return true;
+}
+
+void RtpStreamReceiver::SignalNetworkState(NetworkState state) {
+ rtp_rtcp_->SetRTCPStatus(state == kNetworkUp ? config_.rtp.rtcp_mode
+ : RtcpMode::kOff);
+}
+
+void RtpStreamReceiver::StartReceive() {
+ rtc::CritScope lock(&receive_cs_);
+ receiving_ = true;
+}
+
+void RtpStreamReceiver::StopReceive() {
+ rtc::CritScope lock(&receive_cs_);
+ receiving_ = false;
+}
+
+bool RtpStreamReceiver::IsPacketInOrder(const RTPHeader& header) const {
+ StreamStatistician* statistician =
+ rtp_receive_statistics_->GetStatistician(header.ssrc);
+ if (!statistician)
+ return false;
+ return statistician->IsPacketInOrder(header.sequenceNumber);
+}
+
+bool RtpStreamReceiver::IsPacketRetransmitted(const RTPHeader& header,
+ bool in_order) const {
+ // Retransmissions are handled separately if RTX is enabled.
+ if (rtp_payload_registry_.RtxEnabled())
+ return false;
+ StreamStatistician* statistician =
+ rtp_receive_statistics_->GetStatistician(header.ssrc);
+ if (!statistician)
+ return false;
+ // Check if this is a retransmission.
+ int64_t min_rtt = 0;
+ rtp_rtcp_->RTT(rtp_receiver_->SSRC(), nullptr, nullptr, &min_rtt, nullptr);
+ return !in_order &&
+ statistician->IsRetransmitOfOldPacket(header, min_rtt);
+}
+
+void RtpStreamReceiver::UpdateHistograms() {
+ FecPacketCounter counter = fec_receiver_->GetPacketCounter();
+ if (counter.num_packets > 0) {
+ RTC_LOGGED_HISTOGRAM_PERCENTAGE(
+ "WebRTC.Video.ReceivedFecPacketsInPercent",
+ static_cast<int>(counter.num_fec_packets * 100 / counter.num_packets));
+ }
+ if (counter.num_fec_packets > 0) {
+ RTC_LOGGED_HISTOGRAM_PERCENTAGE(
+ "WebRTC.Video.RecoveredMediaPacketsInPercentOfFec",
+ static_cast<int>(counter.num_recovered_packets * 100 /
+ counter.num_fec_packets));
+ }
+}
+
+void RtpStreamReceiver::EnableReceiveRtpHeaderExtension(
+ const std::string& extension, int id) {
+ // One-byte-extension local identifiers are in the range 1-14 inclusive.
+ RTC_DCHECK_GE(id, 1);
+ RTC_DCHECK_LE(id, 14);
+ RTC_DCHECK(RtpExtension::IsSupportedForVideo(extension));
+ RTC_CHECK(rtp_header_parser_->RegisterRtpHeaderExtension(
+ StringToRtpExtensionType(extension), id));
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/vie_receiver.h b/chromium/third_party/webrtc/video/rtp_stream_receiver.h
index 999e66d68b1..4b176f8845c 100644
--- a/chromium/third_party/webrtc/video/vie_receiver.h
+++ b/chromium/third_party/webrtc/video/rtp_stream_receiver.h
@@ -8,63 +8,70 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_VIDEO_VIE_RECEIVER_H_
-#define WEBRTC_VIDEO_VIE_RECEIVER_H_
+#ifndef WEBRTC_VIDEO_RTP_STREAM_RECEIVER_H_
+#define WEBRTC_VIDEO_RTP_STREAM_RECEIVER_H_
#include <list>
#include <memory>
#include <string>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
+#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/typedefs.h"
+#include "webrtc/video_receive_stream.h"
namespace webrtc {
class FecReceiver;
+class PacedSender;
+class PacketRouter;
+class ProcessThread;
class RemoteNtpTimeEstimator;
class ReceiveStatistics;
+class ReceiveStatisticsProxy;
class RemoteBitrateEstimator;
+class RtcpRttStats;
class RtpHeaderParser;
class RTPPayloadRegistry;
class RtpReceiver;
-class RtpRtcp;
-class VideoCodingModule;
+class Transport;
+class VieRemb;
-class ViEReceiver : public RtpData {
+namespace vcm {
+class VideoReceiver;
+} // namespace vcm
+
+class RtpStreamReceiver : public RtpData, public RtpFeedback,
+ public VCMFrameTypeCallback,
+ public VCMPacketRequestCallback {
public:
- ViEReceiver(VideoCodingModule* module_vcm,
- RemoteBitrateEstimator* remote_bitrate_estimator,
- RtpFeedback* rtp_feedback);
- ~ViEReceiver();
+ RtpStreamReceiver(vcm::VideoReceiver* video_receiver,
+ RemoteBitrateEstimator* remote_bitrate_estimator,
+ Transport* transport,
+ RtcpRttStats* rtt_stats,
+ PacedSender* paced_sender,
+ PacketRouter* packet_router,
+ VieRemb* remb,
+ const VideoReceiveStream::Config& config,
+ ReceiveStatisticsProxy* receive_stats_proxy,
+ ProcessThread* process_thread);
+ ~RtpStreamReceiver();
bool SetReceiveCodec(const VideoCodec& video_codec);
- void SetNackStatus(bool enable, int max_nack_reordering_threshold);
- void SetRtxPayloadType(int payload_type, int associated_payload_type);
- // If set to true, the RTX payload type mapping supplied in
- // |SetRtxPayloadType| will be used when restoring RTX packets. Without it,
- // RTX packets will always be restored to the last non-RTX packet payload type
- // received.
- void SetUseRtxPayloadMappingOnRestore(bool val);
- void SetRtxSsrc(uint32_t ssrc);
- bool GetRtxSsrc(uint32_t* ssrc) const;
-
- bool IsFecEnabled() const;
-
uint32_t GetRemoteSsrc() const;
int GetCsrcs(uint32_t* csrcs) const;
- void Init(const std::vector<RtpRtcp*>& modules);
-
RtpReceiver* GetRtpReceiver() const;
-
- void EnableReceiveRtpHeaderExtension(const std::string& extension, int id);
+ RtpRtcp* rtp_rtcp() const { return rtp_rtcp_.get(); }
void StartReceive();
void StopReceive();
@@ -74,13 +81,35 @@ class ViEReceiver : public RtpData {
const PacketTime& packet_time);
bool DeliverRtcp(const uint8_t* rtcp_packet, size_t rtcp_packet_length);
+ void SignalNetworkState(NetworkState state);
+
// Implements RtpData.
int32_t OnReceivedPayloadData(const uint8_t* payload_data,
const size_t payload_size,
const WebRtcRTPHeader* rtp_header) override;
bool OnRecoveredPacket(const uint8_t* packet, size_t packet_length) override;
- ReceiveStatistics* GetReceiveStatistics() const;
+ // Implements RtpFeedback.
+ int32_t OnInitializeDecoder(const int8_t payload_type,
+ const char payload_name[RTP_PAYLOAD_NAME_SIZE],
+ const int frequency,
+ const size_t channels,
+ const uint32_t rate) override;
+ void OnIncomingSSRCChanged(const uint32_t ssrc) override;
+ void OnIncomingCSRCChanged(const uint32_t CSRC, const bool added) override {}
+
+ // Implements VCMFrameTypeCallback.
+ int32_t RequestKeyFrame() override;
+ int32_t SliceLossIndicationRequest(const uint64_t picture_id) override;
+
+ bool IsFecEnabled() const;
+ bool IsRetransmissionsEnabled() const;
+ // Don't use, still experimental.
+ void RequestPacketRetransmit(const std::vector<uint16_t>& sequence_numbers);
+
+ // Implements VCMPacketRequestCallback.
+ int32_t ResendPackets(const uint16_t* sequenceNumbers,
+ uint16_t length) override;
private:
bool ReceivePacket(const uint8_t* packet,
@@ -96,13 +125,15 @@ class ViEReceiver : public RtpData {
bool IsPacketInOrder(const RTPHeader& header) const;
bool IsPacketRetransmitted(const RTPHeader& header, bool in_order) const;
void UpdateHistograms();
+ void EnableReceiveRtpHeaderExtension(const std::string& extension, int id);
Clock* const clock_;
- VideoCodingModule* const vcm_;
+ const VideoReceiveStream::Config config_;
+ vcm::VideoReceiver* const video_receiver_;
RemoteBitrateEstimator* const remote_bitrate_estimator_;
-
- // TODO(pbos): Make const and set on construction.
- std::vector<RtpRtcp*> rtp_rtcp_;
+ PacketRouter* const packet_router_;
+ VieRemb* const remb_;
+ ProcessThread* const process_thread_;
RemoteNtpTimeEstimator ntp_estimator_;
RTPPayloadRegistry rtp_payload_registry_;
@@ -117,8 +148,10 @@ class ViEReceiver : public RtpData {
uint8_t restored_packet_[IP_PACKET_SIZE] GUARDED_BY(receive_cs_);
bool restored_packet_in_use_ GUARDED_BY(receive_cs_);
int64_t last_packet_log_ms_ GUARDED_BY(receive_cs_);
+
+ const std::unique_ptr<RtpRtcp> rtp_rtcp_;
};
} // namespace webrtc
-#endif // WEBRTC_VIDEO_VIE_RECEIVER_H_
+#endif // WEBRTC_VIDEO_RTP_STREAM_RECEIVER_H_
diff --git a/chromium/third_party/webrtc/video/screenshare_loopback.cc b/chromium/third_party/webrtc/video/screenshare_loopback.cc
index 91002ef9043..912aaaddc45 100644
--- a/chromium/third_party/webrtc/video/screenshare_loopback.cc
+++ b/chromium/third_party/webrtc/video/screenshare_loopback.cc
@@ -41,12 +41,14 @@ int MinBitrateKbps() {
return static_cast<int>(FLAGS_min_bitrate);
}
-DEFINE_int32(start_bitrate, 200, "Call start bitrate in kbps.");
+DEFINE_int32(start_bitrate,
+ Call::Config::kDefaultStartBitrateBps / 1000,
+ "Call start bitrate in kbps.");
int StartBitrateKbps() {
return static_cast<int>(FLAGS_start_bitrate);
}
-DEFINE_int32(target_bitrate, 2000, "Stream target bitrate in kbps.");
+DEFINE_int32(target_bitrate, 200, "Stream target bitrate in kbps.");
int TargetBitrateKbps() {
return static_cast<int>(FLAGS_target_bitrate);
}
diff --git a/chromium/third_party/webrtc/video/send_delay_stats.cc b/chromium/third_party/webrtc/video/send_delay_stats.cc
new file mode 100644
index 00000000000..87010665198
--- /dev/null
+++ b/chromium/third_party/webrtc/video/send_delay_stats.cc
@@ -0,0 +1,118 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/send_delay_stats.h"
+
+#include "webrtc/base/logging.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+
+namespace webrtc {
+namespace {
+// Packet with a larger delay are removed and excluded from the delay stats.
+// Set to larger than max histogram delay which is 10000.
+const int64_t kMaxSentPacketDelayMs = 11000;
+const size_t kMaxPacketMapSize = 2000;
+
+// Limit for the maximum number of streams to calculate stats for.
+const size_t kMaxSsrcMapSize = 50;
+const int kMinRequiredSamples = 200;
+} // namespace
+
+SendDelayStats::SendDelayStats(Clock* clock)
+ : clock_(clock), num_old_packets_(0), num_skipped_packets_(0) {}
+
+SendDelayStats::~SendDelayStats() {
+ if (num_old_packets_ > 0 || num_skipped_packets_ > 0) {
+ LOG(LS_WARNING) << "Delay stats: number of old packets " << num_old_packets_
+ << ", skipped packets " << num_skipped_packets_
+ << ". Number of streams " << send_delay_counters_.size();
+ }
+ UpdateHistograms();
+}
+
+void SendDelayStats::UpdateHistograms() {
+ rtc::CritScope lock(&crit_);
+ for (const auto& it : send_delay_counters_) {
+ int send_delay_ms = it.second.Avg(kMinRequiredSamples);
+ if (send_delay_ms != -1) {
+ RTC_LOGGED_HISTOGRAM_COUNTS_10000("WebRTC.Video.SendDelayInMs",
+ send_delay_ms);
+ }
+ }
+}
+
+void SendDelayStats::AddSsrcs(const VideoSendStream::Config& config) {
+ rtc::CritScope lock(&crit_);
+ if (ssrcs_.size() > kMaxSsrcMapSize)
+ return;
+ for (const auto& ssrc : config.rtp.ssrcs)
+ ssrcs_.insert(ssrc);
+}
+
+void SendDelayStats::OnSendPacket(uint16_t packet_id,
+ int64_t capture_time_ms,
+ uint32_t ssrc) {
+ // Packet sent to transport.
+ rtc::CritScope lock(&crit_);
+ if (ssrcs_.find(ssrc) == ssrcs_.end())
+ return;
+
+ int64_t now = clock_->TimeInMilliseconds();
+ RemoveOld(now, &packets_);
+
+ if (packets_.size() > kMaxPacketMapSize) {
+ ++num_skipped_packets_;
+ return;
+ }
+ packets_.insert(
+ std::make_pair(packet_id, Packet(ssrc, capture_time_ms, now)));
+}
+
+bool SendDelayStats::OnSentPacket(int packet_id, int64_t time_ms) {
+ // Packet leaving socket.
+ if (packet_id == -1)
+ return false;
+
+ rtc::CritScope lock(&crit_);
+ auto it = packets_.find(packet_id);
+ if (it == packets_.end())
+ return false;
+
+ // TODO(asapersson): Remove SendSideDelayUpdated(), use capture -> sent.
+ // Elapsed time from send (to transport) -> sent (leaving socket).
+ int diff_ms = time_ms - it->second.send_time_ms;
+ send_delay_counters_[it->second.ssrc].Add(diff_ms);
+ packets_.erase(it);
+ return true;
+}
+
+void SendDelayStats::RemoveOld(int64_t now, PacketMap* packets) {
+ while (!packets->empty()) {
+ auto it = packets->begin();
+ if (now - it->second.capture_time_ms < kMaxSentPacketDelayMs)
+ break;
+
+ packets->erase(it);
+ ++num_old_packets_;
+ }
+}
+
+void SendDelayStats::SampleCounter::Add(int sample) {
+ sum += sample;
+ ++num_samples;
+}
+
+int SendDelayStats::SampleCounter::Avg(int min_required_samples) const {
+ if (num_samples < min_required_samples || num_samples == 0)
+ return -1;
+ return (sum + (num_samples / 2)) / num_samples;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/send_delay_stats.h b/chromium/third_party/webrtc/video/send_delay_stats.h
new file mode 100644
index 00000000000..20a97814c72
--- /dev/null
+++ b/chromium/third_party/webrtc/video/send_delay_stats.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_SEND_DELAY_STATS_H_
+#define WEBRTC_VIDEO_SEND_DELAY_STATS_H_
+
+#include <map>
+#include <memory>
+#include <set>
+
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/system_wrappers/include/clock.h"
+#include "webrtc/video_send_stream.h"
+
+namespace webrtc {
+
+class SendDelayStats : public SendPacketObserver {
+ public:
+ explicit SendDelayStats(Clock* clock);
+ virtual ~SendDelayStats();
+
+ // Adds the configured ssrcs for the rtp streams.
+ // Stats will be calculated for these streams.
+ void AddSsrcs(const VideoSendStream::Config& config);
+
+ // Called when a packet is sent (leaving socket).
+ bool OnSentPacket(int packet_id, int64_t time_ms);
+
+ protected:
+ // From SendPacketObserver.
+ // Called when a packet is sent to the transport.
+ void OnSendPacket(uint16_t packet_id,
+ int64_t capture_time_ms,
+ uint32_t ssrc) override;
+
+ private:
+ // Map holding sent packets (mapped by sequence number).
+ struct SequenceNumberOlderThan {
+ bool operator()(uint16_t seq1, uint16_t seq2) const {
+ return IsNewerSequenceNumber(seq2, seq1);
+ }
+ };
+ struct Packet {
+ Packet(uint32_t ssrc, int64_t capture_time_ms, int64_t send_time_ms)
+ : ssrc(ssrc),
+ capture_time_ms(capture_time_ms),
+ send_time_ms(send_time_ms) {}
+ uint32_t ssrc;
+ int64_t capture_time_ms;
+ int64_t send_time_ms;
+ };
+ typedef std::map<uint16_t, Packet, SequenceNumberOlderThan> PacketMap;
+
+ class SampleCounter {
+ public:
+ SampleCounter() : sum(0), num_samples(0) {}
+ ~SampleCounter() {}
+ void Add(int sample);
+ int Avg(int min_required_samples) const;
+
+ private:
+ int sum;
+ int num_samples;
+ };
+
+ void UpdateHistograms();
+ void RemoveOld(int64_t now, PacketMap* packets)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ Clock* const clock_;
+ rtc::CriticalSection crit_;
+
+ PacketMap packets_ GUARDED_BY(crit_);
+ size_t num_old_packets_ GUARDED_BY(crit_);
+ size_t num_skipped_packets_ GUARDED_BY(crit_);
+
+ std::set<uint32_t> ssrcs_ GUARDED_BY(crit_);
+ std::map<uint32_t, SampleCounter> send_delay_counters_
+ GUARDED_BY(crit_); // Mapped by SSRC.
+};
+
+} // namespace webrtc
+#endif // WEBRTC_VIDEO_SEND_DELAY_STATS_H_
diff --git a/chromium/third_party/webrtc/video/send_delay_stats_unittest.cc b/chromium/third_party/webrtc/video/send_delay_stats_unittest.cc
new file mode 100644
index 00000000000..44c62c3a452
--- /dev/null
+++ b/chromium/third_party/webrtc/video/send_delay_stats_unittest.cc
@@ -0,0 +1,122 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/send_delay_stats.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/test/histogram.h"
+
+namespace webrtc {
+namespace {
+const uint32_t kSsrc1 = 17;
+const uint32_t kSsrc2 = 42;
+const uint32_t kRtxSsrc1 = 18;
+const uint32_t kRtxSsrc2 = 43;
+const uint16_t kPacketId = 2345;
+const int64_t kMaxPacketDelayMs = 11000;
+const int kMinRequiredSamples = 200;
+} // namespace
+
+class SendDelayStatsTest : public ::testing::Test {
+ public:
+ SendDelayStatsTest() : clock_(1234), config_(CreateConfig()) {}
+ virtual ~SendDelayStatsTest() {}
+
+ protected:
+ virtual void SetUp() {
+ stats_.reset(new SendDelayStats(&clock_));
+ stats_->AddSsrcs(config_);
+ }
+
+ VideoSendStream::Config CreateConfig() {
+ VideoSendStream::Config config(nullptr);
+ config.rtp.ssrcs.push_back(kSsrc1);
+ config.rtp.ssrcs.push_back(kSsrc2);
+ config.rtp.rtx.ssrcs.push_back(kRtxSsrc1);
+ config.rtp.rtx.ssrcs.push_back(kRtxSsrc2);
+ return config;
+ }
+
+ void OnSendPacket(uint16_t id, uint32_t ssrc) {
+ OnSendPacket(id, ssrc, clock_.TimeInMilliseconds());
+ }
+
+ void OnSendPacket(uint16_t id, uint32_t ssrc, int64_t capture_ms) {
+ SendPacketObserver* observer = stats_.get();
+ observer->OnSendPacket(id, capture_ms, ssrc);
+ }
+
+ bool OnSentPacket(uint16_t id) {
+ return stats_->OnSentPacket(id, clock_.TimeInMilliseconds());
+ }
+
+ SimulatedClock clock_;
+ VideoSendStream::Config config_;
+ std::unique_ptr<SendDelayStats> stats_;
+};
+
+TEST_F(SendDelayStatsTest, SentPacketFound) {
+ EXPECT_FALSE(OnSentPacket(kPacketId));
+ OnSendPacket(kPacketId, kSsrc1);
+ EXPECT_TRUE(OnSentPacket(kPacketId)); // Packet found.
+ EXPECT_FALSE(OnSentPacket(kPacketId)); // Packet removed when found.
+}
+
+TEST_F(SendDelayStatsTest, SentPacketNotFoundForNonRegisteredSsrc) {
+ OnSendPacket(kPacketId, kSsrc1);
+ EXPECT_TRUE(OnSentPacket(kPacketId));
+ OnSendPacket(kPacketId + 1, kSsrc2);
+ EXPECT_TRUE(OnSentPacket(kPacketId + 1));
+ OnSendPacket(kPacketId + 2, kRtxSsrc1); // RTX SSRC not registered.
+ EXPECT_FALSE(OnSentPacket(kPacketId + 2));
+}
+
+TEST_F(SendDelayStatsTest, SentPacketFoundWithMaxSendDelay) {
+ OnSendPacket(kPacketId, kSsrc1);
+ clock_.AdvanceTimeMilliseconds(kMaxPacketDelayMs - 1);
+ OnSendPacket(kPacketId + 1, kSsrc1); // kPacketId -> not old/removed.
+ EXPECT_TRUE(OnSentPacket(kPacketId)); // Packet found.
+ EXPECT_TRUE(OnSentPacket(kPacketId + 1)); // Packet found.
+}
+
+TEST_F(SendDelayStatsTest, OldPacketsRemoved) {
+ const int64_t kCaptureTimeMs = clock_.TimeInMilliseconds();
+ OnSendPacket(0xffffu, kSsrc1, kCaptureTimeMs);
+ OnSendPacket(0u, kSsrc1, kCaptureTimeMs);
+ OnSendPacket(1u, kSsrc1, kCaptureTimeMs + 1);
+ clock_.AdvanceTimeMilliseconds(kMaxPacketDelayMs); // 0xffff, 0 -> old.
+ OnSendPacket(2u, kSsrc1, kCaptureTimeMs + 2);
+
+ EXPECT_FALSE(OnSentPacket(0xffffu)); // Old removed.
+ EXPECT_FALSE(OnSentPacket(0u)); // Old removed.
+ EXPECT_TRUE(OnSentPacket(1u));
+ EXPECT_TRUE(OnSentPacket(2u));
+}
+
+TEST_F(SendDelayStatsTest, HistogramsAreUpdated) {
+ test::ClearHistograms();
+ const int64_t kDelayMs1 = 5;
+ const int64_t kDelayMs2 = 10;
+ uint16_t id = 0;
+ for (int i = 0; i < kMinRequiredSamples; ++i) {
+ OnSendPacket(++id, kSsrc1);
+ clock_.AdvanceTimeMilliseconds(kDelayMs1);
+ EXPECT_TRUE(OnSentPacket(id));
+ OnSendPacket(++id, kSsrc2);
+ clock_.AdvanceTimeMilliseconds(kDelayMs2);
+ EXPECT_TRUE(OnSentPacket(id));
+ }
+ stats_.reset();
+ EXPECT_EQ(2, test::NumHistogramSamples("WebRTC.Video.SendDelayInMs"));
+ EXPECT_EQ(kDelayMs2, test::LastHistogramSample("WebRTC.Video.SendDelayInMs"));
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/send_statistics_proxy.cc b/chromium/third_party/webrtc/video/send_statistics_proxy.cc
index 86694c930e7..d8c11a8c358 100644
--- a/chromium/third_party/webrtc/video/send_statistics_proxy.cc
+++ b/chromium/third_party/webrtc/video/send_statistics_proxy.cc
@@ -17,6 +17,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/system_wrappers/include/metrics.h"
namespace webrtc {
@@ -96,8 +97,8 @@ SendStatisticsProxy::UmaSamplesContainer::UmaSamplesContainer(
clock_(clock),
max_sent_width_per_timestamp_(0),
max_sent_height_per_timestamp_(0),
- input_frame_rate_tracker_(100u, 10u),
- sent_frame_rate_tracker_(100u, 10u),
+ input_frame_rate_tracker_(100, 10u),
+ sent_frame_rate_tracker_(100, 10u),
first_rtcp_stats_time_ms_(-1),
first_rtp_stats_time_ms_(-1),
start_stats_(stats) {}
@@ -194,26 +195,46 @@ void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms(
}
for (const auto& it : qp_counters_) {
- int qp = it.second.vp8.Avg(kMinRequiredSamples);
- if (qp != -1) {
+ int qp_vp8 = it.second.vp8.Avg(kMinRequiredSamples);
+ if (qp_vp8 != -1) {
int spatial_idx = it.first;
if (spatial_idx == -1) {
RTC_LOGGED_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.Vp8",
- qp);
+ qp_vp8);
} else if (spatial_idx == 0) {
- RTC_LOGGED_HISTOGRAMS_COUNTS_200(kIndex,
- uma_prefix_ + "Encoded.Qp.Vp8.S0", qp);
+ RTC_LOGGED_HISTOGRAMS_COUNTS_200(
+ kIndex, uma_prefix_ + "Encoded.Qp.Vp8.S0", qp_vp8);
} else if (spatial_idx == 1) {
- RTC_LOGGED_HISTOGRAMS_COUNTS_200(kIndex,
- uma_prefix_ + "Encoded.Qp.Vp8.S1", qp);
+ RTC_LOGGED_HISTOGRAMS_COUNTS_200(
+ kIndex, uma_prefix_ + "Encoded.Qp.Vp8.S1", qp_vp8);
} else if (spatial_idx == 2) {
- RTC_LOGGED_HISTOGRAMS_COUNTS_200(kIndex,
- uma_prefix_ + "Encoded.Qp.Vp8.S2", qp);
+ RTC_LOGGED_HISTOGRAMS_COUNTS_200(
+ kIndex, uma_prefix_ + "Encoded.Qp.Vp8.S2", qp_vp8);
} else {
LOG(LS_WARNING) << "QP stats not recorded for VP8 spatial idx "
<< spatial_idx;
}
}
+ int qp_vp9 = it.second.vp9.Avg(kMinRequiredSamples);
+ if (qp_vp9 != -1) {
+ int spatial_idx = it.first;
+ if (spatial_idx == -1) {
+ RTC_LOGGED_HISTOGRAMS_COUNTS_500(kIndex, uma_prefix_ + "Encoded.Qp.Vp9",
+ qp_vp9);
+ } else if (spatial_idx == 0) {
+ RTC_LOGGED_HISTOGRAMS_COUNTS_500(
+ kIndex, uma_prefix_ + "Encoded.Qp.Vp9.S0", qp_vp9);
+ } else if (spatial_idx == 1) {
+ RTC_LOGGED_HISTOGRAMS_COUNTS_500(
+ kIndex, uma_prefix_ + "Encoded.Qp.Vp9.S1", qp_vp9);
+ } else if (spatial_idx == 2) {
+ RTC_LOGGED_HISTOGRAMS_COUNTS_500(
+ kIndex, uma_prefix_ + "Encoded.Qp.Vp9.S2", qp_vp9);
+ } else {
+ LOG(LS_WARNING) << "QP stats not recorded for VP9 spatial layer "
+ << spatial_idx;
+ }
+ }
}
if (first_rtcp_stats_time_ms_ != -1) {
@@ -321,16 +342,14 @@ void SendStatisticsProxy::SetContentType(
}
}
-void SendStatisticsProxy::OnEncoderImplementationName(
- const char* implementation_name) {
- rtc::CritScope lock(&crit_);
- stats_.encoder_implementation_name = implementation_name;
-}
-
-void SendStatisticsProxy::OnOutgoingRate(uint32_t framerate, uint32_t bitrate) {
+void SendStatisticsProxy::OnEncoderStatsUpdate(
+ uint32_t framerate,
+ uint32_t bitrate,
+ const std::string& encoder_name) {
rtc::CritScope lock(&crit_);
stats_.encode_frame_rate = framerate;
stats_.media_bitrate_bps = bitrate;
+ stats_.encoder_implementation_name = encoder_name;
}
void SendStatisticsProxy::OnEncodedFrameTimeMeasured(
@@ -406,8 +425,17 @@ void SendStatisticsProxy::OnSetRates(uint32_t bitrate_bps, int framerate) {
void SendStatisticsProxy::OnSendEncodedImage(
const EncodedImage& encoded_image,
- const RTPVideoHeader* rtp_video_header) {
- size_t simulcast_idx = rtp_video_header ? rtp_video_header->simulcastIdx : 0;
+ const CodecSpecificInfo* codec_info) {
+ size_t simulcast_idx = 0;
+
+ if (codec_info) {
+ if (codec_info->codecType == kVideoCodecVP8) {
+ simulcast_idx = codec_info->codecSpecific.VP8.simulcastIdx;
+ } else if (codec_info->codecType == kVideoCodecGeneric) {
+ simulcast_idx = codec_info->codecSpecific.generic.simulcast_idx;
+ }
+ }
+
if (simulcast_idx >= config_.rtp.ssrcs.size()) {
LOG(LS_ERROR) << "Encoded image outside simulcast range (" << simulcast_idx
<< " >= " << config_.rtp.ssrcs.size() << ").";
@@ -449,11 +477,18 @@ void SendStatisticsProxy::OnSendEncodedImage(
}
}
- if (encoded_image.qp_ != -1 && rtp_video_header &&
- rtp_video_header->codec == kRtpVideoVp8) {
- int spatial_idx =
- (config_.rtp.ssrcs.size() == 1) ? -1 : static_cast<int>(simulcast_idx);
- uma_container_->qp_counters_[spatial_idx].vp8.Add(encoded_image.qp_);
+ if (encoded_image.qp_ != -1 && codec_info) {
+ if (codec_info->codecType == kVideoCodecVP8) {
+ int spatial_idx = (config_.rtp.ssrcs.size() == 1)
+ ? -1
+ : static_cast<int>(simulcast_idx);
+ uma_container_->qp_counters_[spatial_idx].vp8.Add(encoded_image.qp_);
+ } else if (codec_info->codecType == kVideoCodecVP9) {
+ int spatial_idx = (codec_info->codecSpecific.VP9.num_spatial_layers == 1)
+ ? -1
+ : codec_info->codecSpecific.VP9.spatial_idx;
+ uma_container_->qp_counters_[spatial_idx].vp9.Add(encoded_image.qp_);
+ }
}
// TODO(asapersson): This is incorrect if simulcast layers are encoded on
diff --git a/chromium/third_party/webrtc/video/send_statistics_proxy.h b/chromium/third_party/webrtc/video/send_statistics_proxy.h
index 5a479a1e804..74d5261ebd0 100644
--- a/chromium/third_party/webrtc/video/send_statistics_proxy.h
+++ b/chromium/third_party/webrtc/video/send_statistics_proxy.h
@@ -49,12 +49,13 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
VideoSendStream::Stats GetStats();
virtual void OnSendEncodedImage(const EncodedImage& encoded_image,
- const RTPVideoHeader* rtp_video_header);
+ const CodecSpecificInfo* codec_info);
// Used to update incoming frame rate.
void OnIncomingFrame(int width, int height);
- void OnEncoderImplementationName(const char* implementation_name);
- void OnOutgoingRate(uint32_t framerate, uint32_t bitrate);
+ void OnEncoderStatsUpdate(uint32_t framerate,
+ uint32_t bitrate,
+ const std::string& encoder_name);
void OnSuspendChange(bool is_suspended);
void OnInactiveSsrc(uint32_t ssrc);
@@ -126,7 +127,8 @@ class SendStatisticsProxy : public CpuOveruseMetricsObserver,
int64_t bitrate_update_ms;
};
struct QpCounters {
- SampleCounter vp8;
+ SampleCounter vp8; // QP range: 0-127
+ SampleCounter vp9; // QP range: 0-255
};
void PurgeOldStats() EXCLUSIVE_LOCKS_REQUIRED(crit_);
VideoSendStream::StreamStats* GetStatsEntry(uint32_t ssrc)
diff --git a/chromium/third_party/webrtc/video/send_statistics_proxy_unittest.cc b/chromium/third_party/webrtc/video/send_statistics_proxy_unittest.cc
index bb404fb8bed..636b9dc40cf 100644
--- a/chromium/third_party/webrtc/video/send_statistics_proxy_unittest.cc
+++ b/chromium/third_party/webrtc/video/send_statistics_proxy_unittest.cc
@@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-// This file includes unit tests for SendStatisticsProxy.
#include "webrtc/video/send_statistics_proxy.h"
#include <map>
@@ -21,11 +20,16 @@
#include "webrtc/test/histogram.h"
namespace webrtc {
+namespace {
+const uint32_t kFirstSsrc = 17;
+const uint32_t kSecondSsrc = 42;
+const uint32_t kFirstRtxSsrc = 18;
+const uint32_t kSecondRtxSsrc = 43;
-static const uint32_t kFirstSsrc = 17;
-static const uint32_t kSecondSsrc = 42;
-static const uint32_t kFirstRtxSsrc = 18;
-static const uint32_t kSecondRtxSsrc = 43;
+const int kMinRequiredSamples = 200;
+const int kQpIdx0 = 21;
+const int kQpIdx1 = 39;
+} // namespace
class SendStatisticsProxyTest : public ::testing::Test {
public:
@@ -106,10 +110,7 @@ class SendStatisticsProxyTest : public ::testing::Test {
TEST_F(SendStatisticsProxyTest, RtcpStatistics) {
RtcpStatisticsCallback* callback = statistics_proxy_.get();
- for (std::vector<uint32_t>::const_iterator it = config_.rtp.ssrcs.begin();
- it != config_.rtp.ssrcs.end();
- ++it) {
- const uint32_t ssrc = *it;
+ for (const auto& ssrc : config_.rtp.ssrcs) {
VideoSendStream::StreamStats& ssrc_stats = expected_.substreams[ssrc];
// Add statistics with some arbitrary, but unique, numbers.
@@ -120,10 +121,7 @@ TEST_F(SendStatisticsProxyTest, RtcpStatistics) {
ssrc_stats.rtcp_stats.jitter = offset + 3;
callback->StatisticsUpdated(ssrc_stats.rtcp_stats, ssrc);
}
- for (std::vector<uint32_t>::const_iterator it = config_.rtp.rtx.ssrcs.begin();
- it != config_.rtp.rtx.ssrcs.end();
- ++it) {
- const uint32_t ssrc = *it;
+ for (const auto& ssrc : config_.rtp.rtx.ssrcs) {
VideoSendStream::StreamStats& ssrc_stats = expected_.substreams[ssrc];
// Add statistics with some arbitrary, but unique, numbers.
@@ -142,11 +140,13 @@ TEST_F(SendStatisticsProxyTest, EncodedBitrateAndFramerate) {
int media_bitrate_bps = 500;
int encode_fps = 29;
- statistics_proxy_->OnOutgoingRate(encode_fps, media_bitrate_bps);
+ statistics_proxy_->OnEncoderStatsUpdate(encode_fps, media_bitrate_bps,
+ "encoder name");
VideoSendStream::Stats stats = statistics_proxy_->GetStats();
EXPECT_EQ(media_bitrate_bps, stats.media_bitrate_bps);
EXPECT_EQ(encode_fps, stats.encode_frame_rate);
+ EXPECT_EQ("encoder name", stats.encoder_implementation_name);
}
TEST_F(SendStatisticsProxyTest, Suspended) {
@@ -164,10 +164,7 @@ TEST_F(SendStatisticsProxyTest, Suspended) {
TEST_F(SendStatisticsProxyTest, FrameCounts) {
FrameCountObserver* observer = statistics_proxy_.get();
- for (std::vector<uint32_t>::const_iterator it = config_.rtp.ssrcs.begin();
- it != config_.rtp.ssrcs.end();
- ++it) {
- const uint32_t ssrc = *it;
+ for (const auto& ssrc : config_.rtp.ssrcs) {
// Add statistics with some arbitrary, but unique, numbers.
VideoSendStream::StreamStats& stats = expected_.substreams[ssrc];
uint32_t offset = ssrc * sizeof(VideoSendStream::StreamStats);
@@ -177,10 +174,7 @@ TEST_F(SendStatisticsProxyTest, FrameCounts) {
stats.frame_counts = frame_counts;
observer->FrameCountUpdated(frame_counts, ssrc);
}
- for (std::vector<uint32_t>::const_iterator it = config_.rtp.rtx.ssrcs.begin();
- it != config_.rtp.rtx.ssrcs.end();
- ++it) {
- const uint32_t ssrc = *it;
+ for (const auto& ssrc : config_.rtp.rtx.ssrcs) {
// Add statistics with some arbitrary, but unique, numbers.
VideoSendStream::StreamStats& stats = expected_.substreams[ssrc];
uint32_t offset = ssrc * sizeof(VideoSendStream::StreamStats);
@@ -197,10 +191,7 @@ TEST_F(SendStatisticsProxyTest, FrameCounts) {
TEST_F(SendStatisticsProxyTest, DataCounters) {
StreamDataCountersCallback* callback = statistics_proxy_.get();
- for (std::vector<uint32_t>::const_iterator it = config_.rtp.ssrcs.begin();
- it != config_.rtp.ssrcs.end();
- ++it) {
- const uint32_t ssrc = *it;
+ for (const auto& ssrc : config_.rtp.ssrcs) {
StreamDataCounters& counters = expected_.substreams[ssrc].rtp_stats;
// Add statistics with some arbitrary, but unique, numbers.
size_t offset = ssrc * sizeof(StreamDataCounters);
@@ -213,10 +204,7 @@ TEST_F(SendStatisticsProxyTest, DataCounters) {
counters.transmitted.packets = offset_uint32 + 5;
callback->DataCountersUpdated(counters, ssrc);
}
- for (std::vector<uint32_t>::const_iterator it = config_.rtp.rtx.ssrcs.begin();
- it != config_.rtp.rtx.ssrcs.end();
- ++it) {
- const uint32_t ssrc = *it;
+ for (const auto& ssrc : config_.rtp.rtx.ssrcs) {
StreamDataCounters& counters = expected_.substreams[ssrc].rtp_stats;
// Add statistics with some arbitrary, but unique, numbers.
size_t offset = ssrc * sizeof(StreamDataCounters);
@@ -236,10 +224,7 @@ TEST_F(SendStatisticsProxyTest, DataCounters) {
TEST_F(SendStatisticsProxyTest, Bitrate) {
BitrateStatisticsObserver* observer = statistics_proxy_.get();
- for (std::vector<uint32_t>::const_iterator it = config_.rtp.ssrcs.begin();
- it != config_.rtp.ssrcs.end();
- ++it) {
- const uint32_t ssrc = *it;
+ for (const auto& ssrc : config_.rtp.ssrcs) {
BitrateStatistics total;
BitrateStatistics retransmit;
// Use ssrc as bitrate_bps to get a unique value for each stream.
@@ -249,10 +234,7 @@ TEST_F(SendStatisticsProxyTest, Bitrate) {
expected_.substreams[ssrc].total_bitrate_bps = total.bitrate_bps;
expected_.substreams[ssrc].retransmit_bitrate_bps = retransmit.bitrate_bps;
}
- for (std::vector<uint32_t>::const_iterator it = config_.rtp.rtx.ssrcs.begin();
- it != config_.rtp.rtx.ssrcs.end();
- ++it) {
- const uint32_t ssrc = *it;
+ for (const auto& ssrc : config_.rtp.rtx.ssrcs) {
BitrateStatistics total;
BitrateStatistics retransmit;
// Use ssrc as bitrate_bps to get a unique value for each stream.
@@ -269,10 +251,7 @@ TEST_F(SendStatisticsProxyTest, Bitrate) {
TEST_F(SendStatisticsProxyTest, SendSideDelay) {
SendSideDelayObserver* observer = statistics_proxy_.get();
- for (std::vector<uint32_t>::const_iterator it = config_.rtp.ssrcs.begin();
- it != config_.rtp.ssrcs.end();
- ++it) {
- const uint32_t ssrc = *it;
+ for (const auto& ssrc : config_.rtp.ssrcs) {
// Use ssrc as avg_delay_ms and max_delay_ms to get a unique value for each
// stream.
int avg_delay_ms = ssrc;
@@ -281,10 +260,7 @@ TEST_F(SendStatisticsProxyTest, SendSideDelay) {
expected_.substreams[ssrc].avg_delay_ms = avg_delay_ms;
expected_.substreams[ssrc].max_delay_ms = max_delay_ms;
}
- for (std::vector<uint32_t>::const_iterator it = config_.rtp.rtx.ssrcs.begin();
- it != config_.rtp.rtx.ssrcs.end();
- ++it) {
- const uint32_t ssrc = *it;
+ for (const auto& ssrc : config_.rtp.rtx.ssrcs) {
// Use ssrc as avg_delay_ms and max_delay_ms to get a unique value for each
// stream.
int avg_delay_ms = ssrc;
@@ -310,7 +286,6 @@ TEST_F(SendStatisticsProxyTest, OnEncodedFrameTimeMeasured) {
TEST_F(SendStatisticsProxyTest, SwitchContentTypeUpdatesHistograms) {
test::ClearHistograms();
- const int kMinRequiredSamples = 200;
const int kWidth = 640;
const int kHeight = 480;
@@ -329,21 +304,17 @@ TEST_F(SendStatisticsProxyTest, SwitchContentTypeUpdatesHistograms) {
TEST_F(SendStatisticsProxyTest, VerifyQpHistogramStats_Vp8) {
test::ClearHistograms();
- const int kMinRequiredSamples = 200;
- const int kQpIdx0 = 21;
- const int kQpIdx1 = 39;
EncodedImage encoded_image;
-
- RTPVideoHeader rtp_video_header;
- rtp_video_header.codec = kRtpVideoVp8;
+ CodecSpecificInfo codec_info;
+ codec_info.codecType = kVideoCodecVP8;
for (int i = 0; i < kMinRequiredSamples; ++i) {
- rtp_video_header.simulcastIdx = 0;
+ codec_info.codecSpecific.VP8.simulcastIdx = 0;
encoded_image.qp_ = kQpIdx0;
- statistics_proxy_->OnSendEncodedImage(encoded_image, &rtp_video_header);
- rtp_video_header.simulcastIdx = 1;
+ statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
+ codec_info.codecSpecific.VP8.simulcastIdx = 1;
encoded_image.qp_ = kQpIdx1;
- statistics_proxy_->OnSendEncodedImage(encoded_image, &rtp_video_header);
+ statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
}
statistics_proxy_.reset();
EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.Encoded.Qp.Vp8.S0"));
@@ -361,23 +332,66 @@ TEST_F(SendStatisticsProxyTest, VerifyQpHistogramStats_Vp8OneSsrc) {
&fake_clock_, config, VideoEncoderConfig::ContentType::kRealtimeVideo));
test::ClearHistograms();
- const int kMinRequiredSamples = 200;
- const int kQpIdx0 = 21;
EncodedImage encoded_image;
-
- RTPVideoHeader rtp_video_header;
- rtp_video_header.codec = kRtpVideoVp8;
+ CodecSpecificInfo codec_info;
+ codec_info.codecType = kVideoCodecVP8;
for (int i = 0; i < kMinRequiredSamples; ++i) {
- rtp_video_header.simulcastIdx = 0;
+ codec_info.codecSpecific.VP8.simulcastIdx = 0;
encoded_image.qp_ = kQpIdx0;
- statistics_proxy_->OnSendEncodedImage(encoded_image, &rtp_video_header);
+ statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
}
statistics_proxy_.reset();
EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.Encoded.Qp.Vp8"));
EXPECT_EQ(kQpIdx0, test::LastHistogramSample("WebRTC.Video.Encoded.Qp.Vp8"));
}
+TEST_F(SendStatisticsProxyTest, VerifyQpHistogramStats_Vp9) {
+ test::ClearHistograms();
+ EncodedImage encoded_image;
+ CodecSpecificInfo codec_info;
+ codec_info.codecType = kVideoCodecVP9;
+ codec_info.codecSpecific.VP9.num_spatial_layers = 2;
+
+ for (int i = 0; i < kMinRequiredSamples; ++i) {
+ encoded_image.qp_ = kQpIdx0;
+ codec_info.codecSpecific.VP9.spatial_idx = 0;
+ statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
+ encoded_image.qp_ = kQpIdx1;
+ codec_info.codecSpecific.VP9.spatial_idx = 1;
+ statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
+ }
+ statistics_proxy_.reset();
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.Encoded.Qp.Vp9.S0"));
+ EXPECT_EQ(kQpIdx0,
+ test::LastHistogramSample("WebRTC.Video.Encoded.Qp.Vp9.S0"));
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.Encoded.Qp.Vp9.S1"));
+ EXPECT_EQ(kQpIdx1,
+ test::LastHistogramSample("WebRTC.Video.Encoded.Qp.Vp9.S1"));
+}
+
+TEST_F(SendStatisticsProxyTest, VerifyQpHistogramStats_Vp9OneSpatialLayer) {
+ VideoSendStream::Config config(nullptr);
+ config.rtp.ssrcs.push_back(kFirstSsrc);
+ statistics_proxy_.reset(new SendStatisticsProxy(
+ &fake_clock_, config, VideoEncoderConfig::ContentType::kRealtimeVideo));
+
+ test::ClearHistograms();
+ EncodedImage encoded_image;
+ CodecSpecificInfo codec_info;
+ codec_info.codecType = kVideoCodecVP9;
+ codec_info.codecSpecific.VP9.num_spatial_layers = 1;
+
+ for (int i = 0; i < kMinRequiredSamples; ++i) {
+ encoded_image.qp_ = kQpIdx0;
+ codec_info.codecSpecific.VP9.spatial_idx = 0;
+ statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
+ }
+ statistics_proxy_.reset();
+ EXPECT_EQ(1, test::NumHistogramSamples("WebRTC.Video.Encoded.Qp.Vp9"));
+ EXPECT_EQ(kQpIdx0, test::LastHistogramSample("WebRTC.Video.Encoded.Qp.Vp9"));
+}
+
TEST_F(SendStatisticsProxyTest, NoSubstreams) {
uint32_t excluded_ssrc =
std::max(
@@ -413,12 +427,13 @@ TEST_F(SendStatisticsProxyTest, EncodedResolutionTimesOut) {
encoded_image._encodedWidth = kEncodedWidth;
encoded_image._encodedHeight = kEncodedHeight;
- RTPVideoHeader rtp_video_header;
+ CodecSpecificInfo codec_info;
+ codec_info.codecType = kVideoCodecVP8;
+ codec_info.codecSpecific.VP8.simulcastIdx = 0;
- rtp_video_header.simulcastIdx = 0;
- statistics_proxy_->OnSendEncodedImage(encoded_image, &rtp_video_header);
- rtp_video_header.simulcastIdx = 1;
- statistics_proxy_->OnSendEncodedImage(encoded_image, &rtp_video_header);
+ statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
+ codec_info.codecSpecific.VP8.simulcastIdx = 1;
+ statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
VideoSendStream::Stats stats = statistics_proxy_->GetStats();
EXPECT_EQ(kEncodedWidth, stats.substreams[config_.rtp.ssrcs[0]].width);
@@ -440,8 +455,8 @@ TEST_F(SendStatisticsProxyTest, EncodedResolutionTimesOut) {
// Report stats for second SSRC to make sure it's not outdated along with the
// first SSRC.
- rtp_video_header.simulcastIdx = 1;
- statistics_proxy_->OnSendEncodedImage(encoded_image, &rtp_video_header);
+ codec_info.codecSpecific.VP8.simulcastIdx = 1;
+ statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
// Forward 1 ms, reach timeout, substream 0 should have no resolution
// reported, but substream 1 should.
@@ -460,12 +475,13 @@ TEST_F(SendStatisticsProxyTest, ClearsResolutionFromInactiveSsrcs) {
encoded_image._encodedWidth = kEncodedWidth;
encoded_image._encodedHeight = kEncodedHeight;
- RTPVideoHeader rtp_video_header;
+ CodecSpecificInfo codec_info;
+ codec_info.codecType = kVideoCodecVP8;
+ codec_info.codecSpecific.VP8.simulcastIdx = 0;
- rtp_video_header.simulcastIdx = 0;
- statistics_proxy_->OnSendEncodedImage(encoded_image, &rtp_video_header);
- rtp_video_header.simulcastIdx = 1;
- statistics_proxy_->OnSendEncodedImage(encoded_image, &rtp_video_header);
+ statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
+ codec_info.codecSpecific.VP8.simulcastIdx = 1;
+ statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info);
statistics_proxy_->OnInactiveSsrc(config_.rtp.ssrcs[1]);
VideoSendStream::Stats stats = statistics_proxy_->GetStats();
diff --git a/chromium/third_party/webrtc/video/video_capture_input.cc b/chromium/third_party/webrtc/video/video_capture_input.cc
index 16263ccaf35..8f574e21154 100644
--- a/chromium/third_party/webrtc/video/video_capture_input.cc
+++ b/chromium/third_party/webrtc/video/video_capture_input.cc
@@ -16,7 +16,6 @@
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_capture/video_capture_factory.h"
#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/video/overuse_frame_detector.h"
#include "webrtc/video/send_statistics_proxy.h"
#include "webrtc/video/vie_encoder.h"
@@ -84,10 +83,11 @@ void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) {
return;
}
- captured_frame_.ShallowCopy(incoming_frame);
+ captured_frame_.reset(new VideoFrame);
+ captured_frame_->ShallowCopy(incoming_frame);
last_captured_timestamp_ = incoming_frame.ntp_time_ms();
- overuse_detector_->FrameCaptured(captured_frame_);
+ overuse_detector_->FrameCaptured(*captured_frame_);
TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
"render_time", video_frame.render_time_ms());
@@ -97,11 +97,11 @@ void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) {
bool VideoCaptureInput::GetVideoFrame(VideoFrame* video_frame) {
rtc::CritScope lock(&crit_);
- if (captured_frame_.IsZeroSize())
+ if (!captured_frame_)
return false;
- *video_frame = captured_frame_;
- captured_frame_.Reset();
+ *video_frame = *captured_frame_;
+ captured_frame_.reset();
return true;
}
diff --git a/chromium/third_party/webrtc/video/video_capture_input.h b/chromium/third_party/webrtc/video/video_capture_input.h
index a47e8772c1d..5877f6c94fd 100644
--- a/chromium/third_party/webrtc/video/video_capture_input.h
+++ b/chromium/third_party/webrtc/video/video_capture_input.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_VIDEO_VIDEO_CAPTURE_INPUT_H_
#define WEBRTC_VIDEO_VIDEO_CAPTURE_INPUT_H_
+#include <memory>
#include <vector>
#include "webrtc/base/criticalsection.h"
@@ -53,7 +54,7 @@ class VideoCaptureInput : public webrtc::VideoCaptureInput {
SendStatisticsProxy* const stats_proxy_;
rtc::Event* const capture_event_;
- VideoFrame captured_frame_ GUARDED_BY(crit_);
+ std::unique_ptr<VideoFrame> captured_frame_ GUARDED_BY(crit_);
Clock* const clock_;
// Used to make sure incoming time stamp is increasing for every frame.
int64_t last_captured_timestamp_;
diff --git a/chromium/third_party/webrtc/video/video_capture_input_unittest.cc b/chromium/third_party/webrtc/video/video_capture_input_unittest.cc
index b36c2577f91..2da722b47d4 100644
--- a/chromium/third_party/webrtc/video/video_capture_input_unittest.cc
+++ b/chromium/third_party/webrtc/video/video_capture_input_unittest.cc
@@ -16,6 +16,7 @@
#include "webrtc/base/event.h"
#include "webrtc/base/refcount.h"
#include "webrtc/test/fake_texture_frame.h"
+#include "webrtc/test/frame_utils.h"
#include "webrtc/video/send_statistics_proxy.h"
// If an output frame does not arrive in 500ms, the test will fail.
@@ -23,9 +24,6 @@
namespace webrtc {
-bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2);
-bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2);
-bool EqualBufferFrames(const VideoFrame& frame1, const VideoFrame& frame2);
bool EqualFramesVector(const std::vector<std::unique_ptr<VideoFrame>>& frames1,
const std::vector<std::unique_ptr<VideoFrame>>& frames2);
std::unique_ptr<VideoFrame> CreateVideoFrame(uint8_t length);
@@ -54,9 +52,9 @@ class VideoCaptureInputTest : public ::testing::Test {
EXPECT_TRUE(capture_event_.Wait(FRAME_TIMEOUT_MS));
VideoFrame frame;
EXPECT_TRUE(input_->GetVideoFrame(&frame));
- if (!frame.native_handle()) {
- output_frame_ybuffers_.push_back(
- static_cast<const VideoFrame*>(&frame)->buffer(kYPlane));
+ ASSERT_TRUE(frame.video_frame_buffer());
+ if (!frame.video_frame_buffer()->native_handle()) {
+ output_frame_ybuffers_.push_back(frame.video_frame_buffer()->DataY());
}
output_frames_.push_back(
std::unique_ptr<VideoFrame>(new VideoFrame(frame)));
@@ -95,17 +93,18 @@ TEST_F(VideoCaptureInputTest, DoesNotRetainHandleNorCopyBuffer) {
rtc::Event* const event_;
};
- VideoFrame frame(
- new rtc::RefCountedObject<TestBuffer>(&frame_destroyed_event), 1, 1,
- kVideoRotation_0);
+ {
+ VideoFrame frame(
+ new rtc::RefCountedObject<TestBuffer>(&frame_destroyed_event), 1, 1,
+ kVideoRotation_0);
- AddInputFrame(&frame);
- WaitOutputFrame();
+ AddInputFrame(&frame);
+ WaitOutputFrame();
- EXPECT_EQ(output_frames_[0]->video_frame_buffer().get(),
- frame.video_frame_buffer().get());
- output_frames_.clear();
- frame.Reset();
+ EXPECT_EQ(output_frames_[0]->video_frame_buffer().get(),
+ frame.video_frame_buffer().get());
+ output_frames_.clear();
+ }
EXPECT_TRUE(frame_destroyed_event.Wait(FRAME_TIMEOUT_MS));
}
@@ -168,7 +167,9 @@ TEST_F(VideoCaptureInputTest, TestTextureFrames) {
i + 1, webrtc::kVideoRotation_0))));
AddInputFrame(input_frames_[i].get());
WaitOutputFrame();
- EXPECT_EQ(dummy_handle, output_frames_[i]->native_handle());
+ ASSERT_TRUE(output_frames_[i]->video_frame_buffer());
+ EXPECT_EQ(dummy_handle,
+ output_frames_[i]->video_frame_buffer()->native_handle());
}
EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
@@ -179,8 +180,7 @@ TEST_F(VideoCaptureInputTest, TestI420Frames) {
std::vector<const uint8_t*> ybuffer_pointers;
for (int i = 0; i < kNumFrame; ++i) {
input_frames_.push_back(CreateVideoFrame(static_cast<uint8_t>(i + 1)));
- const VideoFrame* const_input_frame = input_frames_[i].get();
- ybuffer_pointers.push_back(const_input_frame->buffer(kYPlane));
+ ybuffer_pointers.push_back(input_frames_[i]->video_frame_buffer()->DataY());
AddInputFrame(input_frames_[i].get());
WaitOutputFrame();
}
@@ -198,7 +198,9 @@ TEST_F(VideoCaptureInputTest, TestI420FrameAfterTextureFrame) {
dummy_handle, 1, 1, 1, 1, webrtc::kVideoRotation_0))));
AddInputFrame(input_frames_[0].get());
WaitOutputFrame();
- EXPECT_EQ(dummy_handle, output_frames_[0]->native_handle());
+ ASSERT_TRUE(output_frames_[0]->video_frame_buffer());
+ EXPECT_EQ(dummy_handle,
+ output_frames_[0]->video_frame_buffer()->native_handle());
input_frames_.push_back(CreateVideoFrame(2));
AddInputFrame(input_frames_[1].get());
@@ -222,43 +224,17 @@ TEST_F(VideoCaptureInputTest, TestTextureFrameAfterI420Frame) {
EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
}
-bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
- if (frame1.native_handle() || frame2.native_handle())
- return EqualTextureFrames(frame1, frame2);
- return EqualBufferFrames(frame1, frame2);
-}
-
-bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
- return ((frame1.native_handle() == frame2.native_handle()) &&
- (frame1.width() == frame2.width()) &&
- (frame1.height() == frame2.height()));
-}
-
-bool EqualBufferFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
- return ((frame1.width() == frame2.width()) &&
- (frame1.height() == frame2.height()) &&
- (frame1.stride(kYPlane) == frame2.stride(kYPlane)) &&
- (frame1.stride(kUPlane) == frame2.stride(kUPlane)) &&
- (frame1.stride(kVPlane) == frame2.stride(kVPlane)) &&
- (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
- (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
- (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)) &&
- (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane),
- frame1.allocated_size(kYPlane)) == 0) &&
- (memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane),
- frame1.allocated_size(kUPlane)) == 0) &&
- (memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane),
- frame1.allocated_size(kVPlane)) == 0));
-}
-
bool EqualFramesVector(
const std::vector<std::unique_ptr<VideoFrame>>& frames1,
const std::vector<std::unique_ptr<VideoFrame>>& frames2) {
if (frames1.size() != frames2.size())
return false;
for (size_t i = 0; i < frames1.size(); ++i) {
- if (!EqualFrames(*frames1[i], *frames2[i]))
+ // Compare frame buffers, since we don't care about differing timestamps.
+ if (!test::FrameBufsEqual(frames1[i]->video_frame_buffer(),
+ frames2[i]->video_frame_buffer())) {
return false;
+ }
}
return true;
}
diff --git a/chromium/third_party/webrtc/video/video_decoder.cc b/chromium/third_party/webrtc/video/video_decoder.cc
index 602799e075b..5bf503cd802 100644
--- a/chromium/third_party/webrtc/video/video_decoder.cc
+++ b/chromium/third_party/webrtc/video/video_decoder.cc
@@ -25,6 +25,7 @@ VideoDecoder* VideoDecoder::Create(VideoDecoder::DecoderType codec_type) {
case kVp8:
return VP8Decoder::Create();
case kVp9:
+ RTC_DCHECK(VP9Decoder::IsSupported());
return VP9Decoder::Create();
case kUnsupportedCodec:
LOG(LS_ERROR) << "Creating NullVideoDecoder for unsupported codec.";
diff --git a/chromium/third_party/webrtc/video/video_encoder.cc b/chromium/third_party/webrtc/video/video_encoder.cc
index e85e3d97a72..1534a97fdf4 100644
--- a/chromium/third_party/webrtc/video/video_encoder.cc
+++ b/chromium/third_party/webrtc/video/video_encoder.cc
@@ -25,6 +25,7 @@ VideoEncoder* VideoEncoder::Create(VideoEncoder::EncoderType codec_type) {
case kVp8:
return VP8Encoder::Create();
case kVp9:
+ RTC_DCHECK(VP9Encoder::IsSupported());
return VP9Encoder::Create();
case kUnsupportedCodec:
RTC_NOTREACHED();
@@ -191,10 +192,4 @@ const char* VideoEncoderSoftwareFallbackWrapper::ImplementationName() const {
return encoder_->ImplementationName();
}
-int VideoEncoderSoftwareFallbackWrapper::GetTargetFramerate() {
- if (fallback_encoder_)
- return fallback_encoder_->GetTargetFramerate();
- return encoder_->GetTargetFramerate();
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/video_encoder_unittest.cc b/chromium/third_party/webrtc/video/video_encoder_unittest.cc
index 0f28f891634..f2c3ea61c46 100644
--- a/chromium/third_party/webrtc/video/video_encoder_unittest.cc
+++ b/chromium/third_party/webrtc/video/video_encoder_unittest.cc
@@ -107,11 +107,11 @@ class VideoEncoderSoftwareFallbackWrapperTest : public ::testing::Test {
void VideoEncoderSoftwareFallbackWrapperTest::EncodeFrame() {
frame_.CreateEmptyFrame(kWidth, kHeight, kWidth, (kWidth + 1) / 2,
(kWidth + 1) / 2);
- memset(frame_.buffer(webrtc::kYPlane), 16,
+ memset(frame_.video_frame_buffer()->MutableDataY(), 16,
frame_.allocated_size(webrtc::kYPlane));
- memset(frame_.buffer(webrtc::kUPlane), 128,
+ memset(frame_.video_frame_buffer()->MutableDataU(), 128,
frame_.allocated_size(webrtc::kUPlane));
- memset(frame_.buffer(webrtc::kVPlane), 128,
+ memset(frame_.video_frame_buffer()->MutableDataV(), 128,
frame_.allocated_size(webrtc::kVPlane));
std::vector<FrameType> types(1, kVideoFrameKey);
diff --git a/chromium/third_party/webrtc/video/video_loopback.cc b/chromium/third_party/webrtc/video/video_loopback.cc
index 87aacc6755f..0974d5869f1 100644
--- a/chromium/third_party/webrtc/video/video_loopback.cc
+++ b/chromium/third_party/webrtc/video/video_loopback.cc
@@ -178,6 +178,8 @@ DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");
DEFINE_bool(allow_reordering, false, "Allow packet reordering to occur");
+DEFINE_bool(use_fec, false, "Use forward error correction.");
+
DEFINE_string(
force_fieldtrials,
"",
@@ -216,7 +218,9 @@ void Loopback() {
flags::MaxBitrateKbps() * 1000, flags::Codec(),
flags::NumTemporalLayers(), flags::SelectedTL(),
0, // No min transmit bitrate.
- call_bitrate_config, flags::FLAGS_send_side_bwe},
+ call_bitrate_config,
+ flags::FLAGS_send_side_bwe,
+ flags::FLAGS_use_fec},
{flags::Clip()},
{}, // Screenshare specific.
{"video", 0.0, 0.0, flags::DurationSecs(), flags::OutputFilename(),
diff --git a/chromium/third_party/webrtc/video/video_quality_test.cc b/chromium/third_party/webrtc/video/video_quality_test.cc
index 60f957c456a..78d8845ffdb 100644
--- a/chromium/third_party/webrtc/video/video_quality_test.cc
+++ b/chromium/third_party/webrtc/video/video_quality_test.cc
@@ -21,6 +21,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/event.h"
#include "webrtc/base/format_macros.h"
+#include "webrtc/base/optional.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/call.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
@@ -63,11 +64,14 @@ class VideoAnalyzer : public PacketReceiver,
graph_data_output_file_(graph_data_output_file),
graph_title_(graph_title),
ssrc_to_analyze_(ssrc_to_analyze),
+ pre_encode_proxy_(this),
encode_timing_proxy_(this),
frames_to_process_(duration_frames),
frames_recorded_(0),
frames_processed_(0),
dropped_frames_(0),
+ dropped_frames_before_first_encode_(0),
+ dropped_frames_before_rendering_(0),
last_render_time_(0),
rtp_timestamp_delta_(0),
avg_psnr_threshold_(avg_psnr_threshold),
@@ -143,18 +147,26 @@ class VideoAnalyzer : public PacketReceiver,
void IncomingCapturedFrame(const VideoFrame& video_frame) override {
VideoFrame copy = video_frame;
copy.set_timestamp(copy.ntp_time_ms() * 90);
-
{
rtc::CritScope lock(&crit_);
- if (first_send_frame_.IsZeroSize() && rtp_timestamp_delta_ == 0)
- first_send_frame_ = copy;
-
frames_.push_back(copy);
}
input_->IncomingCapturedFrame(video_frame);
}
+ void PreEncodeOnFrame(const VideoFrame& video_frame) {
+ rtc::CritScope lock(&crit_);
+ if (!first_send_timestamp_ && rtp_timestamp_delta_ == 0) {
+ while (frames_.front().timestamp() != video_frame.timestamp()) {
+ ++dropped_frames_before_first_encode_;
+ frames_.pop_front();
+ RTC_CHECK(!frames_.empty());
+ }
+ first_send_timestamp_ = rtc::Optional<uint32_t>(video_frame.timestamp());
+ }
+ }
+
bool SendRtp(const uint8_t* packet,
size_t length,
const PacketOptions& options) override {
@@ -169,8 +181,8 @@ class VideoAnalyzer : public PacketReceiver,
rtc::CritScope lock(&crit_);
if (rtp_timestamp_delta_ == 0) {
- rtp_timestamp_delta_ = header.timestamp - first_send_frame_.timestamp();
- first_send_frame_.Reset();
+ rtp_timestamp_delta_ = header.timestamp - *first_send_timestamp_;
+ first_send_timestamp_ = rtc::Optional<uint32_t>();
}
int64_t timestamp =
wrap_handler_.Unwrap(header.timestamp - rtp_timestamp_delta_);
@@ -203,9 +215,18 @@ class VideoAnalyzer : public PacketReceiver,
wrap_handler_.Unwrap(video_frame.timestamp() - rtp_timestamp_delta_);
while (wrap_handler_.Unwrap(frames_.front().timestamp()) < send_timestamp) {
+ if (last_rendered_frame_.IsZeroSize()) {
+ // No previous frame rendered, this one was dropped after sending but
+ // before rendering.
+ ++dropped_frames_before_rendering_;
+ frames_.pop_front();
+ RTC_CHECK(!frames_.empty());
+ continue;
+ }
AddFrameComparison(frames_.front(), last_rendered_frame_, true,
render_time_ms);
frames_.pop_front();
+ RTC_DCHECK(!frames_.empty());
}
VideoFrame reference_frame = frames_.front();
@@ -267,6 +288,9 @@ class VideoAnalyzer : public PacketReceiver,
stats_polling_thread_.Stop();
}
+ rtc::VideoSinkInterface<VideoFrame>* pre_encode_proxy() {
+ return &pre_encode_proxy_;
+ }
EncodedFrameObserver* encode_timing_proxy() { return &encode_timing_proxy_; }
VideoCaptureInput* input_;
@@ -350,11 +374,26 @@ class VideoAnalyzer : public PacketReceiver,
VideoAnalyzer* const parent_;
};
+ // This class receives the send-side OnFrame callback and is provided to not
+ // conflict with the receiver-side renderer callback.
+ class PreEncodeProxy : public rtc::VideoSinkInterface<VideoFrame> {
+ public:
+ explicit PreEncodeProxy(VideoAnalyzer* parent) : parent_(parent) {}
+
+ void OnFrame(const VideoFrame& video_frame) override {
+ parent_->PreEncodeOnFrame(video_frame);
+ }
+
+ private:
+ VideoAnalyzer* const parent_;
+ };
+
void AddFrameComparison(const VideoFrame& reference,
const VideoFrame& render,
bool dropped,
int64_t render_time_ms)
EXCLUSIVE_LOCKS_REQUIRED(crit_) {
+ RTC_DCHECK(!render.IsZeroSize());
int64_t reference_timestamp = wrap_handler_.Unwrap(reference.timestamp());
int64_t send_time_ms = send_times_[reference_timestamp];
send_times_.erase(reference_timestamp);
@@ -487,8 +526,6 @@ class VideoAnalyzer : public PacketReceiver,
PrintResult("psnr", psnr_, " dB");
PrintResult("ssim", ssim_, " score");
PrintResult("sender_time", sender_time_, " ms");
- printf("RESULT dropped_frames: %s = %d frames\n", test_label_.c_str(),
- dropped_frames_);
PrintResult("receiver_time", receiver_time_, " ms");
PrintResult("total_delay_incl_network", end_to_end_, " ms");
PrintResult("time_between_rendered_frames", rendered_delta_, " ms");
@@ -498,6 +535,13 @@ class VideoAnalyzer : public PacketReceiver,
PrintResult("encode_usage_percent", encode_usage_percent, " percent");
PrintResult("media_bitrate", media_bitrate_bps, " bps");
+ printf("RESULT dropped_frames: %s = %d frames\n", test_label_.c_str(),
+ dropped_frames_);
+ printf("RESULT dropped_frames_before_first_encode: %s = %d frames\n",
+ test_label_.c_str(), dropped_frames_before_first_encode_);
+ printf("RESULT dropped_frames_before_rendering: %s = %d frames\n",
+ test_label_.c_str(), dropped_frames_before_rendering_);
+
EXPECT_GT(psnr_.Mean(), avg_psnr_threshold_);
EXPECT_GT(ssim_.Mean(), avg_ssim_threshold_);
}
@@ -593,6 +637,7 @@ class VideoAnalyzer : public PacketReceiver,
FILE* const graph_data_output_file_;
const std::string graph_title_;
const uint32_t ssrc_to_analyze_;
+ PreEncodeProxy pre_encode_proxy_;
OnEncodeTimingProxy encode_timing_proxy_;
std::vector<Sample> samples_ GUARDED_BY(comparison_lock_);
std::map<int64_t, int> samples_encode_time_ms_ GUARDED_BY(comparison_lock_);
@@ -612,6 +657,8 @@ class VideoAnalyzer : public PacketReceiver,
int frames_recorded_;
int frames_processed_;
int dropped_frames_;
+ int dropped_frames_before_first_encode_;
+ int dropped_frames_before_rendering_;
int64_t last_render_time_;
uint32_t rtp_timestamp_delta_;
@@ -622,7 +669,7 @@ class VideoAnalyzer : public PacketReceiver,
std::map<int64_t, int64_t> send_times_ GUARDED_BY(crit_);
std::map<int64_t, int64_t> recv_times_ GUARDED_BY(crit_);
std::map<int64_t, size_t> encoded_frame_sizes_ GUARDED_BY(crit_);
- VideoFrame first_send_frame_ GUARDED_BY(crit_);
+ rtc::Optional<uint32_t> first_send_timestamp_ GUARDED_BY(crit_);
const double avg_psnr_threshold_;
const double avg_ssim_threshold_;
@@ -1005,6 +1052,7 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
SetupCommon(&analyzer, &recv_transport);
video_receive_configs_[params_.ss.selected_stream].renderer = &analyzer;
+ video_send_config_.pre_encode_callback = analyzer.pre_encode_proxy();
for (auto& config : video_receive_configs_)
config.pre_decode_callback = &analyzer;
RTC_DCHECK(!video_send_config_.post_encode_callback);
@@ -1079,6 +1127,15 @@ void VideoQualityTest::RunWithVideoRenderer(const Params& params) {
video_send_config_.local_renderer = local_preview.get();
video_receive_configs_[stream_id].renderer = loopback_video.get();
+ if (params.common.fec) {
+ video_send_config_.rtp.fec.red_payload_type = kRedPayloadType;
+ video_send_config_.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType;
+ video_receive_configs_[stream_id].rtp.fec.red_payload_type =
+ kRedPayloadType;
+ video_receive_configs_[stream_id].rtp.fec.ulpfec_payload_type =
+ kUlpfecPayloadType;
+ }
+
if (params_.screenshare.enabled)
SetupScreenshare();
diff --git a/chromium/third_party/webrtc/video/video_quality_test.h b/chromium/third_party/webrtc/video/video_quality_test.h
index b476004aae8..e138a89a8c2 100644
--- a/chromium/third_party/webrtc/video/video_quality_test.h
+++ b/chromium/third_party/webrtc/video/video_quality_test.h
@@ -41,6 +41,7 @@ class VideoQualityTest : public test::CallTest {
Call::Config::BitrateConfig call_bitrate_config;
bool send_side_bwe;
+ bool fec;
} common;
struct { // Video-specific settings.
std::string clip_name;
diff --git a/chromium/third_party/webrtc/video/video_receive_stream.cc b/chromium/third_party/webrtc/video/video_receive_stream.cc
index c0c3dea5f7a..1753db5d48f 100644
--- a/chromium/third_party/webrtc/video/video_receive_stream.cc
+++ b/chromium/third_party/webrtc/video/video_receive_stream.cc
@@ -21,14 +21,16 @@
#include "webrtc/modules/congestion_controller/include/congestion_controller.h"
#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/utility/ivf_file_writer.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/video/call_stats.h"
#include "webrtc/video/receive_statistics_proxy.h"
-#include "webrtc/video/vie_remb.h"
#include "webrtc/video_receive_stream.h"
namespace webrtc {
+static const bool kEnableFrameRecording = false;
+
static bool UseSendSideBwe(const VideoReceiveStream::Config& config) {
if (!config.rtp.transport_cc)
return false;
@@ -158,117 +160,37 @@ VideoReceiveStream::VideoReceiveStream(
decode_thread_(DecodeThreadFunction, this, "DecodingThread"),
congestion_controller_(congestion_controller),
call_stats_(call_stats),
- remb_(remb),
- vcm_(VideoCodingModule::Create(clock_,
- nullptr,
- nullptr,
- this,
- this)),
- incoming_video_stream_(0, config.disable_prerenderer_smoothing),
+ video_receiver_(clock_, nullptr, this, this, this),
+ incoming_video_stream_(config.disable_prerenderer_smoothing),
stats_proxy_(config_, clock_),
- vie_channel_(&transport_adapter_,
- process_thread,
- nullptr,
- vcm_.get(),
- nullptr,
- nullptr,
- nullptr,
- congestion_controller_->GetRemoteBitrateEstimator(
- UseSendSideBwe(config_)),
- call_stats_->rtcp_rtt_stats(),
- congestion_controller_->pacer(),
- congestion_controller_->packet_router(),
- 1,
- false),
- vie_receiver_(vie_channel_.vie_receiver()),
- vie_sync_(vcm_.get()),
- rtp_rtcp_(vie_channel_.rtp_rtcp().front()) {
+ rtp_stream_receiver_(&video_receiver_,
+ congestion_controller_->GetRemoteBitrateEstimator(
+ UseSendSideBwe(config_)),
+ &transport_adapter_,
+ call_stats_->rtcp_rtt_stats(),
+ congestion_controller_->pacer(),
+ congestion_controller_->packet_router(),
+ remb,
+ config,
+ &stats_proxy_,
+ process_thread_),
+ video_stream_decoder_(&video_receiver_,
+ &rtp_stream_receiver_,
+ &rtp_stream_receiver_,
+ rtp_stream_receiver_.IsRetransmissionsEnabled(),
+ rtp_stream_receiver_.IsFecEnabled(),
+ &stats_proxy_,
+ &incoming_video_stream_,
+ this),
+ vie_sync_(&video_receiver_) {
LOG(LS_INFO) << "VideoReceiveStream: " << config_.ToString();
RTC_DCHECK(process_thread_);
RTC_DCHECK(congestion_controller_);
RTC_DCHECK(call_stats_);
- RTC_DCHECK(remb_);
- RTC_CHECK(vie_channel_.Init() == 0);
// Register the channel to receive stats updates.
- call_stats_->RegisterStatsObserver(vie_channel_.GetStatsObserver());
-
- // TODO(pbos): This is not fine grained enough...
- vie_channel_.SetProtectionMode(config_.rtp.nack.rtp_history_ms > 0, false, -1,
- -1);
- RTC_DCHECK(config_.rtp.rtcp_mode != RtcpMode::kOff)
- << "A stream should not be configured with RTCP disabled. This value is "
- "reserved for internal usage.";
- rtp_rtcp_->SetRTCPStatus(config_.rtp.rtcp_mode);
-
- RTC_DCHECK(config_.rtp.remote_ssrc != 0);
- // TODO(pbos): What's an appropriate local_ssrc for receive-only streams?
- RTC_DCHECK(config_.rtp.local_ssrc != 0);
- RTC_DCHECK(config_.rtp.remote_ssrc != config_.rtp.local_ssrc);
- rtp_rtcp_->SetSSRC(config_.rtp.local_ssrc);
-
- // TODO(pbos): Support multiple RTX, per video payload.
- for (const auto& kv : config_.rtp.rtx) {
- RTC_DCHECK(kv.second.ssrc != 0);
- RTC_DCHECK(kv.second.payload_type != 0);
-
- vie_receiver_->SetRtxSsrc(kv.second.ssrc);
- vie_receiver_->SetRtxPayloadType(kv.second.payload_type, kv.first);
- }
- // TODO(holmer): When Chrome no longer depends on this being false by default,
- // always use the mapping and remove this whole codepath.
- vie_receiver_->SetUseRtxPayloadMappingOnRestore(
- config_.rtp.use_rtx_payload_mapping_on_restore);
-
- if (config_.rtp.remb) {
- rtp_rtcp_->SetREMBStatus(true);
- remb_->AddReceiveChannel(rtp_rtcp_);
- }
-
- for (size_t i = 0; i < config_.rtp.extensions.size(); ++i) {
- const std::string& extension = config_.rtp.extensions[i].name;
- int id = config_.rtp.extensions[i].id;
- // One-byte-extension local identifiers are in the range 1-14 inclusive.
- RTC_DCHECK_GE(id, 1);
- RTC_DCHECK_LE(id, 14);
- vie_receiver_->EnableReceiveRtpHeaderExtension(extension, id);
- }
-
- if (config_.rtp.fec.ulpfec_payload_type != -1) {
- // ULPFEC without RED doesn't make sense.
- RTC_DCHECK(config_.rtp.fec.red_payload_type != -1);
- VideoCodec codec;
- memset(&codec, 0, sizeof(codec));
- codec.codecType = kVideoCodecULPFEC;
- strncpy(codec.plName, "ulpfec", sizeof(codec.plName));
- codec.plType = config_.rtp.fec.ulpfec_payload_type;
- RTC_CHECK(vie_receiver_->SetReceiveCodec(codec));
- }
- if (config_.rtp.fec.red_payload_type != -1) {
- VideoCodec codec;
- memset(&codec, 0, sizeof(codec));
- codec.codecType = kVideoCodecRED;
- strncpy(codec.plName, "red", sizeof(codec.plName));
- codec.plType = config_.rtp.fec.red_payload_type;
- RTC_CHECK(vie_receiver_->SetReceiveCodec(codec));
- if (config_.rtp.fec.red_rtx_payload_type != -1) {
- vie_receiver_->SetRtxPayloadType(config_.rtp.fec.red_rtx_payload_type,
- config_.rtp.fec.red_payload_type);
- }
- }
-
- if (config.rtp.rtcp_xr.receiver_reference_time_report)
- rtp_rtcp_->SetRtcpXrRrtrStatus(true);
-
- vie_channel_.RegisterReceiveStatisticsProxy(&stats_proxy_);
- vie_receiver_->GetReceiveStatistics()->RegisterRtpStatisticsCallback(
- &stats_proxy_);
- vie_receiver_->GetReceiveStatistics()->RegisterRtcpStatisticsCallback(
- &stats_proxy_);
- // Stats callback for CNAME changes.
- rtp_rtcp_->RegisterRtcpStatisticsCallback(&stats_proxy_);
- vie_channel_.RegisterRtcpPacketTypeCounterObserver(&stats_proxy_);
+ call_stats_->RegisterStatsObserver(&video_stream_decoder_);
RTC_DCHECK(!config_.decoders.empty());
std::set<int> decoder_payload_types;
@@ -279,23 +201,20 @@ VideoReceiveStream::VideoReceiveStream(
<< "Duplicate payload type (" << decoder.payload_type
<< ") for different decoders.";
decoder_payload_types.insert(decoder.payload_type);
- vcm_->RegisterExternalDecoder(decoder.decoder, decoder.payload_type);
+ video_receiver_.RegisterExternalDecoder(decoder.decoder,
+ decoder.payload_type);
VideoCodec codec = CreateDecoderVideoCodec(decoder);
-
- RTC_CHECK(vie_receiver_->SetReceiveCodec(codec));
- RTC_CHECK_EQ(VCM_OK,
- vcm_->RegisterReceiveCodec(&codec, num_cpu_cores, false));
+ RTC_CHECK(rtp_stream_receiver_.SetReceiveCodec(codec));
+ RTC_CHECK_EQ(VCM_OK, video_receiver_.RegisterReceiveCodec(
+ &codec, num_cpu_cores, false));
}
- vcm_->SetRenderDelay(config.render_delay_ms);
+ video_receiver_.SetRenderDelay(config.render_delay_ms);
incoming_video_stream_.SetExpectedRenderDelay(config.render_delay_ms);
- vcm_->RegisterPreDecodeImageCallback(this);
incoming_video_stream_.SetExternalCallback(this);
- vie_channel_.SetIncomingVideoStream(&incoming_video_stream_);
- vie_channel_.RegisterPreRenderCallback(this);
- process_thread_->RegisterModule(vcm_.get());
+ process_thread_->RegisterModule(&video_receiver_);
process_thread_->RegisterModule(&vie_sync_);
}
@@ -304,24 +223,34 @@ VideoReceiveStream::~VideoReceiveStream() {
Stop();
process_thread_->DeRegisterModule(&vie_sync_);
- process_thread_->DeRegisterModule(vcm_.get());
+ process_thread_->DeRegisterModule(&video_receiver_);
- // Deregister external decoders so that they are no longer running during
+ // Deregister external decoders so they are no longer running during
// destruction. This effectively stops the VCM since the decoder thread is
// stopped, the VCM is deregistered and no asynchronous decoder threads are
// running.
for (const Decoder& decoder : config_.decoders)
- vcm_->RegisterExternalDecoder(nullptr, decoder.payload_type);
-
- vie_channel_.RegisterPreRenderCallback(nullptr);
- vcm_->RegisterPreDecodeImageCallback(nullptr);
+ video_receiver_.RegisterExternalDecoder(nullptr, decoder.payload_type);
- call_stats_->DeregisterStatsObserver(vie_channel_.GetStatsObserver());
- rtp_rtcp_->SetREMBStatus(false);
- remb_->RemoveReceiveChannel(rtp_rtcp_);
+ call_stats_->DeregisterStatsObserver(&video_stream_decoder_);
congestion_controller_->GetRemoteBitrateEstimator(UseSendSideBwe(config_))
- ->RemoveStream(vie_receiver_->GetRemoteSsrc());
+ ->RemoveStream(rtp_stream_receiver_.GetRemoteSsrc());
+}
+
+void VideoReceiveStream::SignalNetworkState(NetworkState state) {
+ rtp_stream_receiver_.SignalNetworkState(state);
+}
+
+
+bool VideoReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) {
+ return rtp_stream_receiver_.DeliverRtcp(packet, length);
+}
+
+bool VideoReceiveStream::DeliverRtp(const uint8_t* packet,
+ size_t length,
+ const PacketTime& packet_time) {
+ return rtp_stream_receiver_.DeliverRtp(packet, length, packet_time);
}
void VideoReceiveStream::Start() {
@@ -332,13 +261,13 @@ void VideoReceiveStream::Start() {
// Start the decode thread
decode_thread_.Start();
decode_thread_.SetPriority(rtc::kHighestPriority);
- vie_receiver_->StartReceive();
+ rtp_stream_receiver_.StartReceive();
}
void VideoReceiveStream::Stop() {
incoming_video_stream_.Stop();
- vie_receiver_->StopReceive();
- vcm_->TriggerDecoderShutdown();
+ rtp_stream_receiver_.StopReceive();
+ video_receiver_.TriggerDecoderShutdown();
decode_thread_.Stop();
transport_adapter_.Disable();
}
@@ -347,41 +276,31 @@ void VideoReceiveStream::SetSyncChannel(VoiceEngine* voice_engine,
int audio_channel_id) {
if (voice_engine && audio_channel_id != -1) {
VoEVideoSync* voe_sync_interface = VoEVideoSync::GetInterface(voice_engine);
- vie_sync_.ConfigureSync(audio_channel_id, voe_sync_interface, rtp_rtcp_,
- vie_receiver_->GetRtpReceiver());
+ vie_sync_.ConfigureSync(audio_channel_id, voe_sync_interface,
+ rtp_stream_receiver_.rtp_rtcp(),
+ rtp_stream_receiver_.GetRtpReceiver());
voe_sync_interface->Release();
- return;
+ } else {
+ vie_sync_.ConfigureSync(-1, nullptr, rtp_stream_receiver_.rtp_rtcp(),
+ rtp_stream_receiver_.GetRtpReceiver());
}
- vie_sync_.ConfigureSync(-1, nullptr, rtp_rtcp_,
- vie_receiver_->GetRtpReceiver());
}
VideoReceiveStream::Stats VideoReceiveStream::GetStats() const {
return stats_proxy_.GetStats();
}
-bool VideoReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) {
- return vie_receiver_->DeliverRtcp(packet, length);
-}
-
-bool VideoReceiveStream::DeliverRtp(const uint8_t* packet,
- size_t length,
- const PacketTime& packet_time) {
- return vie_receiver_->DeliverRtp(packet, length, packet_time);
-}
-
void VideoReceiveStream::FrameCallback(VideoFrame* video_frame) {
stats_proxy_.OnDecodedFrame();
// Post processing is not supported if the frame is backed by a texture.
- if (!video_frame->native_handle()) {
+ if (!video_frame->video_frame_buffer()->native_handle()) {
if (config_.pre_render_callback)
config_.pre_render_callback->FrameCallback(video_frame);
}
}
-int VideoReceiveStream::RenderFrame(const uint32_t /*stream_id*/,
- const VideoFrame& video_frame) {
+void VideoReceiveStream::OnFrame(const VideoFrame& video_frame) {
int64_t sync_offset_ms;
if (vie_sync_.GetStreamSyncOffsetInMs(video_frame, &sync_offset_ms))
stats_proxy_.OnSyncOffsetUpdated(sync_offset_ms);
@@ -390,8 +309,6 @@ int VideoReceiveStream::RenderFrame(const uint32_t /*stream_id*/,
config_.renderer->OnFrame(video_frame);
stats_proxy_.OnRenderedFrame(video_frame.width(), video_frame.height());
-
- return 0;
}
// TODO(asapersson): Consider moving callback from video_encoder.h or
@@ -406,12 +323,21 @@ int32_t VideoReceiveStream::Encoded(
encoded_frame_proxy_.Encoded(
encoded_image, codec_specific_info, fragmentation);
}
- return 0;
-}
+ if (kEnableFrameRecording) {
+ if (!ivf_writer_.get()) {
+ RTC_DCHECK(codec_specific_info);
+ std::ostringstream oss;
+ oss << "receive_bitstream_ssrc_" << config_.rtp.remote_ssrc << ".ivf";
+ ivf_writer_ =
+ IvfFileWriter::Open(oss.str(), codec_specific_info->codecType);
+ }
+ if (ivf_writer_.get()) {
+ bool ok = ivf_writer_->WriteFrame(encoded_image);
+ RTC_DCHECK(ok);
+ }
+ }
-void VideoReceiveStream::SignalNetworkState(NetworkState state) {
- rtp_rtcp_->SetRTCPStatus(state == kNetworkUp ? config_.rtp.rtcp_mode
- : RtcpMode::kOff);
+ return 0;
}
bool VideoReceiveStream::DecodeThreadFunction(void* ptr) {
@@ -421,16 +347,16 @@ bool VideoReceiveStream::DecodeThreadFunction(void* ptr) {
void VideoReceiveStream::Decode() {
static const int kMaxDecodeWaitTimeMs = 50;
- vcm_->Decode(kMaxDecodeWaitTimeMs);
+ video_receiver_.Decode(kMaxDecodeWaitTimeMs);
}
void VideoReceiveStream::SendNack(
const std::vector<uint16_t>& sequence_numbers) {
- rtp_rtcp_->SendNack(sequence_numbers);
+ rtp_stream_receiver_.RequestPacketRetransmit(sequence_numbers);
}
void VideoReceiveStream::RequestKeyFrame() {
- rtp_rtcp_->RequestKeyFrame();
+ rtp_stream_receiver_.RequestKeyFrame();
}
} // namespace internal
diff --git a/chromium/third_party/webrtc/video/video_receive_stream.h b/chromium/third_party/webrtc/video/video_receive_stream.h
index 3aca570f99e..0ea5385b319 100644
--- a/chromium/third_party/webrtc/video/video_receive_stream.h
+++ b/chromium/third_party/webrtc/video/video_receive_stream.h
@@ -18,19 +18,19 @@
#include "webrtc/call/transport_adapter.h"
#include "webrtc/common_video/include/incoming_video_stream.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
+#include "webrtc/modules/video_coding/video_coding_impl.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/video/encoded_frame_callback_adapter.h"
#include "webrtc/video/receive_statistics_proxy.h"
-#include "webrtc/video/vie_channel.h"
-#include "webrtc/video/vie_encoder.h"
-#include "webrtc/video_encoder.h"
+#include "webrtc/video/rtp_stream_receiver.h"
+#include "webrtc/video/video_stream_decoder.h"
#include "webrtc/video_receive_stream.h"
namespace webrtc {
class CallStats;
class CongestionController;
+class IvfFileWriter;
class ProcessThread;
class VoiceEngine;
class VieRemb;
@@ -39,7 +39,7 @@ namespace internal {
class VideoReceiveStream : public webrtc::VideoReceiveStream,
public I420FrameCallback,
- public VideoRenderCallback,
+ public rtc::VideoSinkInterface<VideoFrame>,
public EncodedImageCallback,
public NackSender,
public KeyFrameRequestSender {
@@ -53,24 +53,23 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
VieRemb* remb);
~VideoReceiveStream() override;
- // webrtc::ReceiveStream implementation.
- void Start() override;
- void Stop() override;
- void SignalNetworkState(NetworkState state) override;
- bool DeliverRtcp(const uint8_t* packet, size_t length) override;
+ void SignalNetworkState(NetworkState state);
+ bool DeliverRtcp(const uint8_t* packet, size_t length);
bool DeliverRtp(const uint8_t* packet,
size_t length,
- const PacketTime& packet_time) override;
+ const PacketTime& packet_time);
// webrtc::VideoReceiveStream implementation.
+ void Start() override;
+ void Stop() override;
+
webrtc::VideoReceiveStream::Stats GetStats() const override;
// Overrides I420FrameCallback.
void FrameCallback(VideoFrame* video_frame) override;
- // Overrides VideoRenderCallback.
- int RenderFrame(const uint32_t /*stream_id*/,
- const VideoFrame& video_frame) override;
+ // Overrides rtc::VideoSinkInterface<VideoFrame>.
+ void OnFrame(const VideoFrame& video_frame) override;
// Overrides EncodedImageCallback.
int32_t Encoded(const EncodedImage& encoded_image,
@@ -81,10 +80,10 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
void SetSyncChannel(VoiceEngine* voice_engine, int audio_channel_id);
- // NackSender
+ // Implements NackSender.
void SendNack(const std::vector<uint16_t>& sequence_numbers) override;
- // KeyFrameRequestSender
+ // Implements KeyFrameRequestSender.
void RequestKeyFrame() override;
private:
@@ -101,15 +100,15 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
CongestionController* const congestion_controller_;
CallStats* const call_stats_;
- VieRemb* const remb_;
- std::unique_ptr<VideoCodingModule> vcm_;
+ vcm::VideoReceiver video_receiver_;
IncomingVideoStream incoming_video_stream_;
ReceiveStatisticsProxy stats_proxy_;
- ViEChannel vie_channel_;
- ViEReceiver* const vie_receiver_;
+ RtpStreamReceiver rtp_stream_receiver_;
+ VideoStreamDecoder video_stream_decoder_;
ViESyncModule vie_sync_;
- RtpRtcp* const rtp_rtcp_;
+
+ std::unique_ptr<IvfFileWriter> ivf_writer_;
};
} // namespace internal
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/video_send_stream.cc b/chromium/third_party/webrtc/video/video_send_stream.cc
index 6b6b1af3464..ec8fbea3630 100644
--- a/chromium/third_party/webrtc/video/video_send_stream.cc
+++ b/chromium/third_party/webrtc/video/video_send_stream.cc
@@ -24,6 +24,7 @@
#include "webrtc/modules/pacing/packet_router.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/modules/video_coding/utility/ivf_file_writer.h"
#include "webrtc/video/call_stats.h"
#include "webrtc/video/video_capture_input.h"
#include "webrtc/video/vie_remb.h"
@@ -34,6 +35,54 @@ namespace webrtc {
class RtcpIntraFrameObserver;
class TransportFeedbackObserver;
+static const int kMinSendSidePacketHistorySize = 600;
+
+namespace {
+
+std::vector<RtpRtcp*> CreateRtpRtcpModules(
+ Transport* outgoing_transport,
+ RtcpIntraFrameObserver* intra_frame_callback,
+ RtcpBandwidthObserver* bandwidth_callback,
+ TransportFeedbackObserver* transport_feedback_callback,
+ RtcpRttStats* rtt_stats,
+ RtpPacketSender* paced_sender,
+ TransportSequenceNumberAllocator* transport_sequence_number_allocator,
+ SendStatisticsProxy* stats_proxy,
+ SendDelayStats* send_delay_stats,
+ size_t num_modules) {
+ RTC_DCHECK_GT(num_modules, 0u);
+ RtpRtcp::Configuration configuration;
+ ReceiveStatistics* null_receive_statistics = configuration.receive_statistics;
+ configuration.audio = false;
+ configuration.receiver_only = false;
+ configuration.receive_statistics = null_receive_statistics;
+ configuration.outgoing_transport = outgoing_transport;
+ configuration.intra_frame_callback = intra_frame_callback;
+ configuration.rtt_stats = rtt_stats;
+ configuration.rtcp_packet_type_counter_observer = stats_proxy;
+ configuration.paced_sender = paced_sender;
+ configuration.transport_sequence_number_allocator =
+ transport_sequence_number_allocator;
+ configuration.send_bitrate_observer = stats_proxy;
+ configuration.send_frame_count_observer = stats_proxy;
+ configuration.send_side_delay_observer = stats_proxy;
+ configuration.send_packet_observer = send_delay_stats;
+ configuration.bandwidth_callback = bandwidth_callback;
+ configuration.transport_feedback_callback = transport_feedback_callback;
+
+ std::vector<RtpRtcp*> modules;
+ for (size_t i = 0; i < num_modules; ++i) {
+ RtpRtcp* rtp_rtcp = RtpRtcp::CreateRtpRtcp(configuration);
+ rtp_rtcp->SetSendingStatus(false);
+ rtp_rtcp->SetSendingMediaStatus(false);
+ rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
+ modules.push_back(rtp_rtcp);
+ }
+ return modules;
+}
+
+} // namespace
+
std::string
VideoSendStream::Config::EncoderSettings::ToString() const {
std::stringstream ss;
@@ -150,6 +199,149 @@ CpuOveruseOptions GetCpuOveruseOptions(bool full_overuse_time) {
}
return options;
}
+
+VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config,
+ const std::string& payload_name,
+ int payload_type) {
+ const std::vector<VideoStream>& streams = config.streams;
+ static const int kEncoderMinBitrateKbps = 30;
+ RTC_DCHECK(!streams.empty());
+ RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0);
+
+ VideoCodec video_codec;
+ memset(&video_codec, 0, sizeof(video_codec));
+ video_codec.codecType = PayloadNameToCodecType(payload_name);
+
+ switch (config.content_type) {
+ case VideoEncoderConfig::ContentType::kRealtimeVideo:
+ video_codec.mode = kRealtimeVideo;
+ break;
+ case VideoEncoderConfig::ContentType::kScreen:
+ video_codec.mode = kScreensharing;
+ if (config.streams.size() == 1 &&
+ config.streams[0].temporal_layer_thresholds_bps.size() == 1) {
+ video_codec.targetBitrate =
+ config.streams[0].temporal_layer_thresholds_bps[0] / 1000;
+ }
+ break;
+ }
+
+ switch (video_codec.codecType) {
+ case kVideoCodecVP8: {
+ if (config.encoder_specific_settings) {
+ video_codec.codecSpecific.VP8 = *reinterpret_cast<const VideoCodecVP8*>(
+ config.encoder_specific_settings);
+ } else {
+ video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
+ }
+ video_codec.codecSpecific.VP8.numberOfTemporalLayers =
+ static_cast<unsigned char>(
+ streams.back().temporal_layer_thresholds_bps.size() + 1);
+ break;
+ }
+ case kVideoCodecVP9: {
+ if (config.encoder_specific_settings) {
+ video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>(
+ config.encoder_specific_settings);
+ if (video_codec.mode == kScreensharing) {
+ video_codec.codecSpecific.VP9.flexibleMode = true;
+ // For now VP9 screensharing use 1 temporal and 2 spatial layers.
+ RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers,
+ 1);
+ RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2);
+ }
+ } else {
+ video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings();
+ }
+ video_codec.codecSpecific.VP9.numberOfTemporalLayers =
+ static_cast<unsigned char>(
+ streams.back().temporal_layer_thresholds_bps.size() + 1);
+ break;
+ }
+ case kVideoCodecH264: {
+ if (config.encoder_specific_settings) {
+ video_codec.codecSpecific.H264 =
+ *reinterpret_cast<const VideoCodecH264*>(
+ config.encoder_specific_settings);
+ } else {
+ video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
+ }
+ break;
+ }
+ default:
+ // TODO(pbos): Support encoder_settings codec-agnostically.
+ RTC_DCHECK(!config.encoder_specific_settings)
+ << "Encoder-specific settings for codec type not wired up.";
+ break;
+ }
+
+ strncpy(video_codec.plName, payload_name.c_str(), kPayloadNameSize - 1);
+ video_codec.plName[kPayloadNameSize - 1] = '\0';
+ video_codec.plType = payload_type;
+ video_codec.numberOfSimulcastStreams =
+ static_cast<unsigned char>(streams.size());
+ video_codec.minBitrate = streams[0].min_bitrate_bps / 1000;
+ if (video_codec.minBitrate < kEncoderMinBitrateKbps)
+ video_codec.minBitrate = kEncoderMinBitrateKbps;
+ RTC_DCHECK_LE(streams.size(), static_cast<size_t>(kMaxSimulcastStreams));
+ if (video_codec.codecType == kVideoCodecVP9) {
+ // If the vector is empty, bitrates will be configured automatically.
+ RTC_DCHECK(config.spatial_layers.empty() ||
+ config.spatial_layers.size() ==
+ video_codec.codecSpecific.VP9.numberOfSpatialLayers);
+ RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers,
+ kMaxSimulcastStreams);
+ for (size_t i = 0; i < config.spatial_layers.size(); ++i)
+ video_codec.spatialLayers[i] = config.spatial_layers[i];
+ }
+ for (size_t i = 0; i < streams.size(); ++i) {
+ SimulcastStream* sim_stream = &video_codec.simulcastStream[i];
+ RTC_DCHECK_GT(streams[i].width, 0u);
+ RTC_DCHECK_GT(streams[i].height, 0u);
+ RTC_DCHECK_GT(streams[i].max_framerate, 0);
+ // Different framerates not supported per stream at the moment.
+ RTC_DCHECK_EQ(streams[i].max_framerate, streams[0].max_framerate);
+ RTC_DCHECK_GE(streams[i].min_bitrate_bps, 0);
+ RTC_DCHECK_GE(streams[i].target_bitrate_bps, streams[i].min_bitrate_bps);
+ RTC_DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps);
+ RTC_DCHECK_GE(streams[i].max_qp, 0);
+
+ sim_stream->width = static_cast<uint16_t>(streams[i].width);
+ sim_stream->height = static_cast<uint16_t>(streams[i].height);
+ sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000;
+ sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000;
+ sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000;
+ sim_stream->qpMax = streams[i].max_qp;
+ sim_stream->numberOfTemporalLayers = static_cast<unsigned char>(
+ streams[i].temporal_layer_thresholds_bps.size() + 1);
+
+ video_codec.width = std::max(video_codec.width,
+ static_cast<uint16_t>(streams[i].width));
+ video_codec.height = std::max(
+ video_codec.height, static_cast<uint16_t>(streams[i].height));
+ video_codec.minBitrate =
+ std::min(static_cast<uint16_t>(video_codec.minBitrate),
+ static_cast<uint16_t>(streams[i].min_bitrate_bps / 1000));
+ video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000;
+ video_codec.qpMax = std::max(video_codec.qpMax,
+ static_cast<unsigned int>(streams[i].max_qp));
+ }
+
+ if (video_codec.maxBitrate == 0) {
+ // Unset max bitrate -> cap to one bit per pixel.
+ video_codec.maxBitrate =
+ (video_codec.width * video_codec.height * video_codec.maxFramerate) /
+ 1000;
+ }
+ if (video_codec.maxBitrate < kEncoderMinBitrateKbps)
+ video_codec.maxBitrate = kEncoderMinBitrateKbps;
+
+ RTC_DCHECK_GT(streams[0].max_framerate, 0);
+ video_codec.maxFramerate = streams[0].max_framerate;
+
+ return video_codec;
+}
+
} // namespace
namespace internal {
@@ -159,6 +351,7 @@ VideoSendStream::VideoSendStream(
CallStats* call_stats,
CongestionController* congestion_controller,
BitrateAllocator* bitrate_allocator,
+ SendDelayStats* send_delay_stats,
VieRemb* remb,
const VideoSendStream::Config& config,
const VideoEncoderConfig& encoder_config,
@@ -183,31 +376,28 @@ VideoSendStream::VideoSendStream(
this,
config.post_encode_callback,
&stats_proxy_),
- vie_channel_(config.send_transport,
- module_process_thread_,
- &payload_router_,
- nullptr,
- &encoder_feedback_,
- congestion_controller_->GetBitrateController()
- ->CreateRtcpBandwidthObserver(),
- congestion_controller_->GetTransportFeedbackObserver(),
- nullptr,
- call_stats_->rtcp_rtt_stats(),
- congestion_controller_->pacer(),
- congestion_controller_->packet_router(),
- config_.rtp.ssrcs.size(),
- true),
- vie_receiver_(vie_channel_.vie_receiver()),
vie_encoder_(num_cpu_cores,
- config_.rtp.ssrcs,
module_process_thread_,
&stats_proxy_,
- config.pre_encode_callback,
- &overuse_detector_,
- congestion_controller_->pacer(),
- &payload_router_),
- vcm_(vie_encoder_.vcm()),
- rtp_rtcp_modules_(vie_channel_.rtp_rtcp()),
+ &overuse_detector_),
+ encoder_feedback_(Clock::GetRealTimeClock(),
+ config.rtp.ssrcs,
+ &vie_encoder_),
+ video_sender_(vie_encoder_.video_sender()),
+ bandwidth_observer_(congestion_controller_->GetBitrateController()
+ ->CreateRtcpBandwidthObserver()),
+ rtp_rtcp_modules_(CreateRtpRtcpModules(
+ config.send_transport,
+ &encoder_feedback_,
+ bandwidth_observer_.get(),
+ congestion_controller_->GetTransportFeedbackObserver(),
+ call_stats_->rtcp_rtt_stats(),
+ congestion_controller_->pacer(),
+ congestion_controller_->packet_router(),
+ &stats_proxy_,
+ send_delay_stats,
+ config_.rtp.ssrcs.size())),
+ payload_router_(rtp_rtcp_modules_, config.encoder_settings.payload_type),
input_(&encoder_wakeup_event_,
config_.local_renderer,
&stats_proxy_,
@@ -220,14 +410,14 @@ VideoSendStream::VideoSendStream(
RTC_DCHECK(congestion_controller_);
RTC_DCHECK(remb_);
- payload_router_.Init(rtp_rtcp_modules_);
- RTC_CHECK(vie_encoder_.Init());
- encoder_feedback_.Init(config_.rtp.ssrcs, &vie_encoder_);
- RTC_CHECK(vie_channel_.Init() == 0);
- vcm_->RegisterProtectionCallback(vie_channel_.vcm_protection_callback());
+ // RTP/RTCP initialization.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ module_process_thread_->RegisterModule(rtp_rtcp);
+ congestion_controller_->packet_router()->AddRtpModule(rtp_rtcp);
+ }
- call_stats_->RegisterStatsObserver(vie_channel_.GetStatsObserver());
+ video_sender_->RegisterProtectionCallback(this);
for (size_t i = 0; i < config_.rtp.extensions.size(); ++i) {
const std::string& extension = config_.rtp.extensions[i].name;
@@ -245,28 +435,7 @@ VideoSendStream::VideoSendStream(
remb_->AddRembSender(rtp_rtcp_modules_[0]);
rtp_rtcp_modules_[0]->SetREMBStatus(true);
- // Enable NACK, FEC or both.
- const bool enable_protection_nack = config_.rtp.nack.rtp_history_ms > 0;
- bool enable_protection_fec = config_.rtp.fec.red_payload_type != -1;
- // Payload types without picture ID cannot determine that a stream is complete
- // without retransmitting FEC, so using FEC + NACK for H.264 (for instance) is
- // a waste of bandwidth since FEC packets still have to be transmitted. Note
- // that this is not the case with FLEXFEC.
- if (enable_protection_nack &&
- !PayloadTypeSupportsSkippingFecPackets(
- config_.encoder_settings.payload_name)) {
- LOG(LS_WARNING) << "Transmitting payload type without picture ID using"
- "NACK+FEC is a waste of bandwidth since FEC packets "
- "also have to be retransmitted. Disabling FEC.";
- enable_protection_fec = false;
- }
- // TODO(changbin): Should set RTX for RED mapping in RTP sender in future.
- vie_channel_.SetProtectionMode(enable_protection_nack, enable_protection_fec,
- config_.rtp.fec.red_payload_type,
- config_.rtp.fec.ulpfec_payload_type);
- vie_encoder_.SetProtectionMethod(enable_protection_nack,
- enable_protection_fec);
-
+ ConfigureProtection();
ConfigureSsrcs();
// TODO(pbos): Should we set CNAME on all RTP modules?
@@ -288,27 +457,8 @@ VideoSendStream::VideoSendStream(
RTC_DCHECK(config.encoder_settings.encoder);
RTC_DCHECK_GE(config.encoder_settings.payload_type, 0);
RTC_DCHECK_LE(config.encoder_settings.payload_type, 127);
- RTC_CHECK_EQ(0, vie_encoder_.RegisterExternalEncoder(
- config.encoder_settings.encoder,
- config.encoder_settings.payload_type,
- config.encoder_settings.internal_source));
-
ReconfigureVideoEncoder(encoder_config);
- vie_channel_.RegisterSendSideDelayObserver(&stats_proxy_);
-
- if (config_.post_encode_callback)
- vie_encoder_.RegisterPostEncodeImageCallback(&encoded_frame_proxy_);
-
- if (config_.suspend_below_min_bitrate) {
- vcm_->SuspendBelowMinBitrate();
- bitrate_allocator_->EnforceMinBitrate(false);
- }
-
- vie_channel_.RegisterRtcpPacketTypeCounterObserver(&stats_proxy_);
- vie_channel_.RegisterSendBitrateObserver(&stats_proxy_);
- vie_channel_.RegisterSendFrameCountObserver(&stats_proxy_);
-
module_process_thread_->RegisterModule(&overuse_detector_);
encoder_thread_.Start();
@@ -330,45 +480,43 @@ VideoSendStream::~VideoSendStream() {
bitrate_allocator_->RemoveObserver(this);
module_process_thread_->DeRegisterModule(&overuse_detector_);
- vie_channel_.RegisterSendFrameCountObserver(nullptr);
- vie_channel_.RegisterSendBitrateObserver(nullptr);
- vie_channel_.RegisterRtcpPacketTypeCounterObserver(nullptr);
- vie_encoder_.DeRegisterExternalEncoder(config_.encoder_settings.payload_type);
-
- call_stats_->DeregisterStatsObserver(vie_channel_.GetStatsObserver());
rtp_rtcp_modules_[0]->SetREMBStatus(false);
remb_->RemoveRembSender(rtp_rtcp_modules_[0]);
- // ViEChannel outlives ViEEncoder so remove encoder from feedback before
- // destruction.
- encoder_feedback_.TearDown();
-
- congestion_controller_->GetRemoteBitrateEstimator(false)->RemoveStream(
- vie_receiver_->GetRemoteSsrc());
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ congestion_controller_->packet_router()->RemoveRtpModule(rtp_rtcp);
+ module_process_thread_->DeRegisterModule(rtp_rtcp);
+ delete rtp_rtcp;
+ }
}
-VideoCaptureInput* VideoSendStream::Input() {
- return &input_;
+bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
+ rtp_rtcp->IncomingRtcpPacket(packet, length);
+ return true;
}
void VideoSendStream::Start() {
if (payload_router_.active())
return;
- vie_encoder_.Pause();
+ TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Start");
payload_router_.set_active(true);
// Was not already started, trigger a keyframe.
vie_encoder_.SendKeyFrame();
- vie_encoder_.Restart();
- vie_receiver_->StartReceive();
+ vie_encoder_.Start();
}
void VideoSendStream::Stop() {
if (!payload_router_.active())
return;
- // TODO(pbos): Make sure the encoder stops here.
+ TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop");
+ vie_encoder_.Pause();
payload_router_.set_active(false);
- vie_receiver_->StopReceive();
+}
+
+VideoCaptureInput* VideoSendStream::Input() {
+ return &input_;
}
bool VideoSendStream::EncoderThreadFunction(void* obj) {
@@ -378,176 +526,186 @@ bool VideoSendStream::EncoderThreadFunction(void* obj) {
}
void VideoSendStream::EncoderProcess() {
+ RTC_CHECK_EQ(0, vie_encoder_.RegisterExternalEncoder(
+ config_.encoder_settings.encoder,
+ config_.encoder_settings.payload_type,
+ config_.encoder_settings.internal_source));
+
while (true) {
encoder_wakeup_event_.Wait(rtc::Event::kForever);
if (rtc::AtomicOps::AcquireLoad(&stop_encoder_thread_))
- return;
+ break;
+ rtc::Optional<EncoderSettings> encoder_settings;
+ {
+ rtc::CritScope lock(&encoder_settings_crit_);
+ if (pending_encoder_settings_) {
+ encoder_settings = pending_encoder_settings_;
+ pending_encoder_settings_ = rtc::Optional<EncoderSettings>();
+ }
+ }
+ if (encoder_settings) {
+ encoder_settings->video_codec.startBitrate =
+ bitrate_allocator_->AddObserver(
+ this, encoder_settings->video_codec.minBitrate * 1000,
+ encoder_settings->video_codec.maxBitrate * 1000,
+ !config_.suspend_below_min_bitrate) /
+ 1000;
+
+ payload_router_.SetSendStreams(encoder_settings->streams);
+ vie_encoder_.SetEncoder(encoder_settings->video_codec,
+ encoder_settings->min_transmit_bitrate_bps,
+ payload_router_.MaxPayloadLength(), this);
+
+ // vie_encoder_.SetEncoder must be called before this.
+ if (config_.suspend_below_min_bitrate)
+ video_sender_->SuspendBelowMinBitrate();
+
+ // Clear stats for disabled layers.
+ for (size_t i = encoder_settings->streams.size();
+ i < config_.rtp.ssrcs.size(); ++i) {
+ stats_proxy_.OnInactiveSsrc(config_.rtp.ssrcs[i]);
+ }
+
+ // We might've gotten new settings while configuring the encoder settings,
+ // restart from the top to see if that's the case before trying to encode
+ // a frame (which might correspond to the last frame size).
+ encoder_wakeup_event_.Set();
+ continue;
+ }
VideoFrame frame;
- if (input_.GetVideoFrame(&frame))
+ if (input_.GetVideoFrame(&frame)) {
+ // TODO(perkj): |pre_encode_callback| is only used by tests. Tests should
+ // register as a sink to the VideoSource instead.
+ if (config_.pre_encode_callback) {
+ config_.pre_encode_callback->OnFrame(frame);
+ }
vie_encoder_.EncodeVideoFrame(frame);
+ }
}
+ vie_encoder_.DeRegisterExternalEncoder(config_.encoder_settings.payload_type);
}
void VideoSendStream::ReconfigureVideoEncoder(
const VideoEncoderConfig& config) {
TRACE_EVENT0("webrtc", "VideoSendStream::(Re)configureVideoEncoder");
LOG(LS_INFO) << "(Re)configureVideoEncoder: " << config.ToString();
- const std::vector<VideoStream>& streams = config.streams;
- static const int kEncoderMinBitrateKbps = 30;
- RTC_DCHECK(!streams.empty());
- RTC_DCHECK_GE(config_.rtp.ssrcs.size(), streams.size());
- RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0);
+ RTC_DCHECK_GE(config_.rtp.ssrcs.size(), config.streams.size());
+ VideoCodec video_codec = VideoEncoderConfigToVideoCodec(
+ config, config_.encoder_settings.payload_name,
+ config_.encoder_settings.payload_type);
+ {
+ rtc::CritScope lock(&encoder_settings_crit_);
+ pending_encoder_settings_ = rtc::Optional<EncoderSettings>(
+ {video_codec, config.min_transmit_bitrate_bps, config.streams});
+ }
+ encoder_wakeup_event_.Set();
+}
- VideoCodec video_codec;
- memset(&video_codec, 0, sizeof(video_codec));
- video_codec.codecType =
- PayloadNameToCodecType(config_.encoder_settings.payload_name);
+VideoSendStream::Stats VideoSendStream::GetStats() {
+ return stats_proxy_.GetStats();
+}
- switch (config.content_type) {
- case VideoEncoderConfig::ContentType::kRealtimeVideo:
- video_codec.mode = kRealtimeVideo;
- break;
- case VideoEncoderConfig::ContentType::kScreen:
- video_codec.mode = kScreensharing;
- if (config.streams.size() == 1 &&
- config.streams[0].temporal_layer_thresholds_bps.size() == 1) {
- video_codec.targetBitrate =
- config.streams[0].temporal_layer_thresholds_bps[0] / 1000;
- }
- break;
- }
+void VideoSendStream::OveruseDetected() {
+ if (config_.overuse_callback)
+ config_.overuse_callback->OnLoadUpdate(LoadObserver::kOveruse);
+}
- if (video_codec.codecType == kVideoCodecVP8) {
- video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
- } else if (video_codec.codecType == kVideoCodecVP9) {
- video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings();
- } else if (video_codec.codecType == kVideoCodecH264) {
- video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
- }
+void VideoSendStream::NormalUsage() {
+ if (config_.overuse_callback)
+ config_.overuse_callback->OnLoadUpdate(LoadObserver::kUnderuse);
+}
- if (video_codec.codecType == kVideoCodecVP8) {
- if (config.encoder_specific_settings) {
- video_codec.codecSpecific.VP8 = *reinterpret_cast<const VideoCodecVP8*>(
- config.encoder_specific_settings);
- }
- video_codec.codecSpecific.VP8.numberOfTemporalLayers =
- static_cast<unsigned char>(
- streams.back().temporal_layer_thresholds_bps.size() + 1);
- } else if (video_codec.codecType == kVideoCodecVP9) {
- if (config.encoder_specific_settings) {
- video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>(
- config.encoder_specific_settings);
- if (video_codec.mode == kScreensharing) {
- video_codec.codecSpecific.VP9.flexibleMode = true;
- // For now VP9 screensharing use 1 temporal and 2 spatial layers.
- RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers, 1);
- RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2);
+int32_t VideoSendStream::Encoded(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const RTPFragmentationHeader* fragmentation) {
+ // |encoded_frame_proxy_| forwards frames to |config_.post_encode_callback|;
+ encoded_frame_proxy_.Encoded(encoded_image, codec_specific_info,
+ fragmentation);
+ int32_t return_value = payload_router_.Encoded(
+ encoded_image, codec_specific_info, fragmentation);
+
+ if (kEnableFrameRecording) {
+ int layer = codec_specific_info->codecType == kVideoCodecVP8
+ ? codec_specific_info->codecSpecific.VP8.simulcastIdx
+ : 0;
+ IvfFileWriter* file_writer;
+ {
+ if (file_writers_[layer] == nullptr) {
+ std::ostringstream oss;
+ oss << "send_bitstream_ssrc";
+ for (uint32_t ssrc : config_.rtp.ssrcs)
+ oss << "_" << ssrc;
+ oss << "_layer" << layer << ".ivf";
+ file_writers_[layer] =
+ IvfFileWriter::Open(oss.str(), codec_specific_info->codecType);
}
+ file_writer = file_writers_[layer].get();
}
- video_codec.codecSpecific.VP9.numberOfTemporalLayers =
- static_cast<unsigned char>(
- streams.back().temporal_layer_thresholds_bps.size() + 1);
- } else if (video_codec.codecType == kVideoCodecH264) {
- if (config.encoder_specific_settings) {
- video_codec.codecSpecific.H264 = *reinterpret_cast<const VideoCodecH264*>(
- config.encoder_specific_settings);
+ if (file_writer) {
+ bool ok = file_writer->WriteFrame(encoded_image);
+ RTC_DCHECK(ok);
}
- } else {
- // TODO(pbos): Support encoder_settings codec-agnostically.
- RTC_DCHECK(!config.encoder_specific_settings)
- << "Encoder-specific settings for codec type not wired up.";
}
- strncpy(video_codec.plName,
- config_.encoder_settings.payload_name.c_str(),
- kPayloadNameSize - 1);
- video_codec.plName[kPayloadNameSize - 1] = '\0';
- video_codec.plType = config_.encoder_settings.payload_type;
- video_codec.numberOfSimulcastStreams =
- static_cast<unsigned char>(streams.size());
- video_codec.minBitrate = streams[0].min_bitrate_bps / 1000;
- if (video_codec.minBitrate < kEncoderMinBitrateKbps)
- video_codec.minBitrate = kEncoderMinBitrateKbps;
- RTC_DCHECK_LE(streams.size(), static_cast<size_t>(kMaxSimulcastStreams));
- if (video_codec.codecType == kVideoCodecVP9) {
- // If the vector is empty, bitrates will be configured automatically.
- RTC_DCHECK(config.spatial_layers.empty() ||
- config.spatial_layers.size() ==
- video_codec.codecSpecific.VP9.numberOfSpatialLayers);
- RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers,
- kMaxSimulcastStreams);
- for (size_t i = 0; i < config.spatial_layers.size(); ++i)
- video_codec.spatialLayers[i] = config.spatial_layers[i];
+ return return_value;
+}
+
+void VideoSendStream::ConfigureProtection() {
+ // Enable NACK, FEC or both.
+ const bool enable_protection_nack = config_.rtp.nack.rtp_history_ms > 0;
+ bool enable_protection_fec = config_.rtp.fec.ulpfec_payload_type != -1;
+ // Payload types without picture ID cannot determine that a stream is complete
+ // without retransmitting FEC, so using FEC + NACK for H.264 (for instance) is
+ // a waste of bandwidth since FEC packets still have to be transmitted. Note
+ // that this is not the case with FLEXFEC.
+ if (enable_protection_nack &&
+ !PayloadTypeSupportsSkippingFecPackets(
+ config_.encoder_settings.payload_name)) {
+ LOG(LS_WARNING) << "Transmitting payload type without picture ID using"
+ "NACK+FEC is a waste of bandwidth since FEC packets "
+ "also have to be retransmitted. Disabling FEC.";
+ enable_protection_fec = false;
}
- for (size_t i = 0; i < streams.size(); ++i) {
- SimulcastStream* sim_stream = &video_codec.simulcastStream[i];
- RTC_DCHECK_GT(streams[i].width, 0u);
- RTC_DCHECK_GT(streams[i].height, 0u);
- RTC_DCHECK_GT(streams[i].max_framerate, 0);
- // Different framerates not supported per stream at the moment.
- RTC_DCHECK_EQ(streams[i].max_framerate, streams[0].max_framerate);
- RTC_DCHECK_GE(streams[i].min_bitrate_bps, 0);
- RTC_DCHECK_GE(streams[i].target_bitrate_bps, streams[i].min_bitrate_bps);
- RTC_DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps);
- RTC_DCHECK_GE(streams[i].max_qp, 0);
- sim_stream->width = static_cast<uint16_t>(streams[i].width);
- sim_stream->height = static_cast<uint16_t>(streams[i].height);
- sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000;
- sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000;
- sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000;
- sim_stream->qpMax = streams[i].max_qp;
- sim_stream->numberOfTemporalLayers = static_cast<unsigned char>(
- streams[i].temporal_layer_thresholds_bps.size() + 1);
+ // Set to valid uint8_ts to be castable later without signed overflows.
+ uint8_t payload_type_red = 0;
+ uint8_t payload_type_fec = 0;
- video_codec.width = std::max(video_codec.width,
- static_cast<uint16_t>(streams[i].width));
- video_codec.height = std::max(
- video_codec.height, static_cast<uint16_t>(streams[i].height));
- video_codec.minBitrate =
- std::min(static_cast<uint16_t>(video_codec.minBitrate),
- static_cast<uint16_t>(streams[i].min_bitrate_bps / 1000));
- video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000;
- video_codec.qpMax = std::max(video_codec.qpMax,
- static_cast<unsigned int>(streams[i].max_qp));
+ // TODO(changbin): Should set RTX for RED mapping in RTP sender in future.
+ // Validate payload types. If either RED or FEC payload types are set then
+ // both should be. If FEC is enabled then they both have to be set.
+ if (config_.rtp.fec.red_payload_type != -1) {
+ RTC_DCHECK_GE(config_.rtp.fec.red_payload_type, 0);
+ RTC_DCHECK_LE(config_.rtp.fec.red_payload_type, 127);
+ // TODO(holmer): We should only enable red if ulpfec is also enabled, but
+ // but due to an incompatibility issue with previous versions the receiver
+ // assumes rtx packets are containing red if it has been configured to
+ // receive red. Remove this in a few versions once the incompatibility
+ // issue is resolved (M53 timeframe).
+ payload_type_red = static_cast<uint8_t>(config_.rtp.fec.red_payload_type);
}
-
- if (video_codec.maxBitrate == 0) {
- // Unset max bitrate -> cap to one bit per pixel.
- video_codec.maxBitrate =
- (video_codec.width * video_codec.height * video_codec.maxFramerate) /
- 1000;
+ if (config_.rtp.fec.ulpfec_payload_type != -1) {
+ RTC_DCHECK_GE(config_.rtp.fec.ulpfec_payload_type, 0);
+ RTC_DCHECK_LE(config_.rtp.fec.ulpfec_payload_type, 127);
+ payload_type_fec =
+ static_cast<uint8_t>(config_.rtp.fec.ulpfec_payload_type);
}
- if (video_codec.maxBitrate < kEncoderMinBitrateKbps)
- video_codec.maxBitrate = kEncoderMinBitrateKbps;
-
- RTC_DCHECK_GT(streams[0].max_framerate, 0);
- video_codec.maxFramerate = streams[0].max_framerate;
-
- video_codec.startBitrate =
- bitrate_allocator_->AddObserver(this,
- video_codec.minBitrate * 1000,
- video_codec.maxBitrate * 1000) / 1000;
- vie_encoder_.SetEncoder(video_codec, config.min_transmit_bitrate_bps);
-}
-bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
- return vie_receiver_->DeliverRtcp(packet, length);
-}
-
-VideoSendStream::Stats VideoSendStream::GetStats() {
- return stats_proxy_.GetStats();
-}
-
-void VideoSendStream::OveruseDetected() {
- if (config_.overuse_callback)
- config_.overuse_callback->OnLoadUpdate(LoadObserver::kOveruse);
-}
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ // Set NACK.
+ rtp_rtcp->SetStorePacketsStatus(
+ enable_protection_nack || congestion_controller_->pacer(),
+ kMinSendSidePacketHistorySize);
+ // Set FEC.
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->SetGenericFECStatus(enable_protection_fec, payload_type_red,
+ payload_type_fec);
+ }
+ }
-void VideoSendStream::NormalUsage() {
- if (config_.overuse_callback)
- config_.overuse_callback->OnLoadUpdate(LoadObserver::kUnderuse);
+ vie_encoder_.SetProtectionMethod(enable_protection_nack,
+ enable_protection_fec);
}
void VideoSendStream::ConfigureSsrcs() {
@@ -560,7 +718,7 @@ void VideoSendStream::ConfigureSsrcs() {
// Restore RTP state if previous existed.
RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc);
if (it != suspended_ssrcs_.end())
- rtp_rtcp->SetRtpStateForSsrc(ssrc, it->second);
+ rtp_rtcp->SetRtpState(it->second);
}
// Set up RTX if available.
@@ -575,7 +733,7 @@ void VideoSendStream::ConfigureSsrcs() {
rtp_rtcp->SetRtxSsrc(ssrc);
RtpStateMap::iterator it = suspended_ssrcs_.find(ssrc);
if (it != suspended_ssrcs_.end())
- rtp_rtcp->SetRtpStateForSsrc(ssrc, it->second);
+ rtp_rtcp->SetRtxState(it->second);
}
// Configure RTX payload types.
@@ -598,29 +756,22 @@ std::map<uint32_t, RtpState> VideoSendStream::GetRtpStates() const {
std::map<uint32_t, RtpState> rtp_states;
for (size_t i = 0; i < config_.rtp.ssrcs.size(); ++i) {
uint32_t ssrc = config_.rtp.ssrcs[i];
- rtp_states[ssrc] = vie_channel_.GetRtpStateForSsrc(ssrc);
+ RTC_DCHECK_EQ(ssrc, rtp_rtcp_modules_[i]->SSRC());
+ rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtpState();
}
for (size_t i = 0; i < config_.rtp.rtx.ssrcs.size(); ++i) {
uint32_t ssrc = config_.rtp.rtx.ssrcs[i];
- rtp_states[ssrc] = vie_channel_.GetRtpStateForSsrc(ssrc);
+ rtp_states[ssrc] = rtp_rtcp_modules_[i]->GetRtxState();
}
return rtp_states;
}
void VideoSendStream::SignalNetworkState(NetworkState state) {
- // When network goes up, enable RTCP status before setting transmission state.
- // When it goes down, disable RTCP afterwards. This ensures that any packets
- // sent due to the network state changed will not be dropped.
- if (state == kNetworkUp) {
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->SetRTCPStatus(config_.rtp.rtcp_mode);
- }
- vie_encoder_.SetNetworkTransmissionState(state == kNetworkUp);
- if (state == kNetworkDown) {
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->SetRTCPStatus(RtcpMode::kOff);
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ rtp_rtcp->SetRTCPStatus(state == kNetworkUp ? config_.rtp.rtcp_mode
+ : RtcpMode::kOff);
}
}
@@ -631,8 +782,32 @@ int VideoSendStream::GetPaddingNeededBps() const {
void VideoSendStream::OnBitrateUpdated(uint32_t bitrate_bps,
uint8_t fraction_loss,
int64_t rtt) {
+ payload_router_.SetTargetSendBitrate(bitrate_bps);
vie_encoder_.OnBitrateUpdated(bitrate_bps, fraction_loss, rtt);
}
+int VideoSendStream::ProtectionRequest(const FecProtectionParams* delta_params,
+ const FecProtectionParams* key_params,
+ uint32_t* sent_video_rate_bps,
+ uint32_t* sent_nack_rate_bps,
+ uint32_t* sent_fec_rate_bps) {
+ *sent_video_rate_bps = 0;
+ *sent_nack_rate_bps = 0;
+ *sent_fec_rate_bps = 0;
+ for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
+ uint32_t not_used = 0;
+ uint32_t module_video_rate = 0;
+ uint32_t module_fec_rate = 0;
+ uint32_t module_nack_rate = 0;
+ rtp_rtcp->SetFecParameters(delta_params, key_params);
+ rtp_rtcp->BitrateSent(&not_used, &module_video_rate, &module_fec_rate,
+ &module_nack_rate);
+ *sent_video_rate_bps += module_video_rate;
+ *sent_nack_rate_bps += module_nack_rate;
+ *sent_fec_rate_bps += module_fec_rate;
+ }
+ return 0;
+}
+
} // namespace internal
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/video_send_stream.h b/chromium/third_party/webrtc/video/video_send_stream.h
index 8b3d064f3e3..fa6a7a7c16d 100644
--- a/chromium/third_party/webrtc/video/video_send_stream.h
+++ b/chromium/third_party/webrtc/video/video_send_stream.h
@@ -12,17 +12,19 @@
#define WEBRTC_VIDEO_VIDEO_SEND_STREAM_H_
#include <map>
+#include <memory>
#include <vector>
#include "webrtc/call/bitrate_allocator.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/call.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/video/encoded_frame_callback_adapter.h"
#include "webrtc/video/encoder_state_feedback.h"
#include "webrtc/video/payload_router.h"
+#include "webrtc/video/send_delay_stats.h"
#include "webrtc/video/send_statistics_proxy.h"
#include "webrtc/video/video_capture_input.h"
-#include "webrtc/video/vie_channel.h"
#include "webrtc/video/vie_encoder.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_send_stream.h"
@@ -32,23 +34,30 @@ namespace webrtc {
class BitrateAllocator;
class CallStats;
class CongestionController;
+class IvfFileWriter;
class ProcessThread;
class RtpRtcp;
-class ViEChannel;
class ViEEncoder;
class VieRemb;
+namespace vcm {
+class VideoSender;
+} // namespace vcm
+
namespace internal {
class VideoSendStream : public webrtc::VideoSendStream,
public webrtc::CpuOveruseObserver,
- public webrtc::BitrateAllocatorObserver {
+ public webrtc::BitrateAllocatorObserver,
+ public webrtc::VCMProtectionCallback,
+ protected webrtc::EncodedImageCallback {
public:
VideoSendStream(int num_cpu_cores,
ProcessThread* module_process_thread,
CallStats* call_stats,
CongestionController* congestion_controller,
BitrateAllocator* bitrate_allocator,
+ SendDelayStats* send_delay_stats,
VieRemb* remb,
const VideoSendStream::Config& config,
const VideoEncoderConfig& encoder_config,
@@ -56,13 +65,12 @@ class VideoSendStream : public webrtc::VideoSendStream,
~VideoSendStream() override;
- // webrtc::SendStream implementation.
- void Start() override;
- void Stop() override;
- void SignalNetworkState(NetworkState state) override;
- bool DeliverRtcp(const uint8_t* packet, size_t length) override;
+ void SignalNetworkState(NetworkState state);
+ bool DeliverRtcp(const uint8_t* packet, size_t length);
// webrtc::VideoSendStream implementation.
+ void Start() override;
+ void Stop() override;
VideoCaptureInput* Input() override;
void ReconfigureVideoEncoder(const VideoEncoderConfig& config) override;
Stats GetStats() override;
@@ -81,10 +89,31 @@ class VideoSendStream : public webrtc::VideoSendStream,
uint8_t fraction_loss,
int64_t rtt) override;
+ // Implements webrtc::VCMProtectionCallback.
+ int ProtectionRequest(const FecProtectionParams* delta_params,
+ const FecProtectionParams* key_params,
+ uint32_t* sent_video_rate_bps,
+ uint32_t* sent_nack_rate_bps,
+ uint32_t* sent_fec_rate_bps) override;
+
private:
+ struct EncoderSettings {
+ VideoCodec video_codec;
+ int min_transmit_bitrate_bps;
+ std::vector<VideoStream> streams;
+ };
+
+ // Implements EncodedImageCallback. The implementation routes encoded frames
+ // to the |payload_router_| and |config.pre_encode_callback| if set.
+ // Called on an arbitrary encoder callback thread.
+ int32_t Encoded(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const RTPFragmentationHeader* fragmentation) override;
+
static bool EncoderThreadFunction(void* obj);
void EncoderProcess();
+ void ConfigureProtection();
void ConfigureSsrcs();
SendStatisticsProxy stats_proxy_;
@@ -98,21 +127,26 @@ class VideoSendStream : public webrtc::VideoSendStream,
BitrateAllocator* const bitrate_allocator_;
VieRemb* const remb_;
+ static const bool kEnableFrameRecording = false;
+ static const int kMaxLayers = 3;
+ std::unique_ptr<IvfFileWriter> file_writers_[kMaxLayers];
+
rtc::PlatformThread encoder_thread_;
rtc::Event encoder_wakeup_event_;
volatile int stop_encoder_thread_;
+ rtc::CriticalSection encoder_settings_crit_;
+ rtc::Optional<EncoderSettings> pending_encoder_settings_
+ GUARDED_BY(encoder_settings_crit_);
OveruseFrameDetector overuse_detector_;
- PayloadRouter payload_router_;
- EncoderStateFeedback encoder_feedback_;
- ViEChannel vie_channel_;
- ViEReceiver* const vie_receiver_;
ViEEncoder vie_encoder_;
- VideoCodingModule* const vcm_;
- // TODO(pbos): Move RtpRtcp ownership to VideoSendStream.
- // RtpRtcp modules, currently owned by ViEChannel but ownership should
- // eventually move here.
+ EncoderStateFeedback encoder_feedback_;
+ vcm::VideoSender* const video_sender_;
+
+ const std::unique_ptr<RtcpBandwidthObserver> bandwidth_observer_;
+ // RtpRtcp modules, declared here as they use other members on construction.
const std::vector<RtpRtcp*> rtp_rtcp_modules_;
+ PayloadRouter payload_router_;
VideoCaptureInput input_;
};
} // namespace internal
diff --git a/chromium/third_party/webrtc/video/video_send_stream_tests.cc b/chromium/third_party/webrtc/video/video_send_stream_tests.cc
index 53f72de5025..1d4acfcb04f 100644
--- a/chromium/third_party/webrtc/video/video_send_stream_tests.cc
+++ b/chromium/third_party/webrtc/video/video_send_stream_tests.cc
@@ -21,7 +21,7 @@
#include "webrtc/base/platform_thread.h"
#include "webrtc/call.h"
#include "webrtc/call/transport_adapter.h"
-#include "webrtc/frame_callback.h"
+#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_sender.h"
@@ -32,6 +32,7 @@
#include "webrtc/test/call_test.h"
#include "webrtc/test/configurable_frame_size_encoder.h"
#include "webrtc/test/fake_texture_frame.h"
+#include "webrtc/test/frame_utils.h"
#include "webrtc/test/null_transport.h"
#include "webrtc/test/testsupport/perf_test.h"
#include "webrtc/video/send_statistics_proxy.h"
@@ -42,11 +43,6 @@ namespace webrtc {
enum VideoFormat { kGeneric, kVP8, };
-void ExpectEqualFrames(const VideoFrame& frame1, const VideoFrame& frame2);
-void ExpectEqualTextureFrames(const VideoFrame& frame1,
- const VideoFrame& frame2);
-void ExpectEqualBufferFrames(const VideoFrame& frame1,
- const VideoFrame& frame2);
void ExpectEqualFramesVector(const std::vector<VideoFrame>& frames1,
const std::vector<VideoFrame>& frames2);
VideoFrame CreateVideoFrame(int width, int height, uint8_t data);
@@ -312,11 +308,13 @@ class FecObserver : public test::EndToEndTest {
FecObserver(bool header_extensions_enabled,
bool use_nack,
bool expect_red,
+ bool expect_fec,
const std::string& codec)
: EndToEndTest(VideoSendStreamTest::kDefaultTimeoutMs),
payload_name_(codec),
use_nack_(use_nack),
expect_red_(expect_red),
+ expect_fec_(expect_fec),
send_count_(0),
received_media_(false),
received_fec_(false),
@@ -379,6 +377,7 @@ class FecObserver : public test::EndToEndTest {
if (encapsulated_payload_type != -1) {
if (encapsulated_payload_type ==
VideoSendStreamTest::kUlpfecPayloadType) {
+ EXPECT_TRUE(expect_fec_);
received_fec_ = true;
} else {
received_media_ = true;
@@ -386,7 +385,7 @@ class FecObserver : public test::EndToEndTest {
}
if (send_count_ > 100 && received_media_) {
- if (received_fec_ || !expect_red_)
+ if (received_fec_ || !expect_fec_)
observation_complete_.Set();
}
@@ -446,6 +445,7 @@ class FecObserver : public test::EndToEndTest {
const std::string payload_name_;
const bool use_nack_;
const bool expect_red_;
+ const bool expect_fec_;
int send_count_;
bool received_media_;
bool received_fec_;
@@ -454,12 +454,12 @@ class FecObserver : public test::EndToEndTest {
};
TEST_F(VideoSendStreamTest, SupportsFecWithExtensions) {
- FecObserver test(true, false, true, "VP8");
+ FecObserver test(true, false, true, true, "VP8");
RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, SupportsFecWithoutExtensions) {
- FecObserver test(false, false, true, "VP8");
+ FecObserver test(false, false, true, true, "VP8");
RunBaseTest(&test);
}
@@ -467,26 +467,28 @@ TEST_F(VideoSendStreamTest, SupportsFecWithoutExtensions) {
// since we'll still have to re-request FEC packets, effectively wasting
// bandwidth since the receiver has to wait for FEC retransmissions to determine
// that the received state is actually decodable.
-TEST_F(VideoSendStreamTest, DoesNotUtilizeRedForH264WithNackEnabled) {
- FecObserver test(false, true, false, "H264");
+TEST_F(VideoSendStreamTest, DoesNotUtilizeFecForH264WithNackEnabled) {
+ FecObserver test(false, true, true, false, "H264");
RunBaseTest(&test);
}
// Without retransmissions FEC for H264 is fine.
TEST_F(VideoSendStreamTest, DoesUtilizeRedForH264WithoutNackEnabled) {
- FecObserver test(false, false, true, "H264");
+ FecObserver test(false, false, true, true, "H264");
RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, DoesUtilizeRedForVp8WithNackEnabled) {
- FecObserver test(false, true, true, "VP8");
+ FecObserver test(false, true, true, true, "VP8");
RunBaseTest(&test);
}
+#if !defined(RTC_DISABLE_VP9)
TEST_F(VideoSendStreamTest, DoesUtilizeRedForVp9WithNackEnabled) {
- FecObserver test(false, true, true, "VP9");
+ FecObserver test(false, true, true, true, "VP9");
RunBaseTest(&test);
}
+#endif // !defined(RTC_DISABLE_VP9)
void VideoSendStreamTest::TestNackRetransmission(
uint32_t retransmit_ssrc,
@@ -712,7 +714,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format,
}
}
- virtual void EncodedFrameCallback(const EncodedFrame& encoded_frame) {
+ void EncodedFrameCallback(const EncodedFrame& encoded_frame) override {
// Increase frame size for next encoded frame, in the context of the
// encoder thread.
if (!use_fec_ &&
@@ -811,7 +813,8 @@ TEST_F(VideoSendStreamTest, FragmentsVp8AccordingToMaxPacketSizeWithFec) {
TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
static const int kSuspendTimeFrames = 60; // Suspend for 2 seconds @ 30 fps.
- class RembObserver : public test::SendTest, public I420FrameCallback {
+ class RembObserver : public test::SendTest,
+ public rtc::VideoSinkInterface<VideoFrame> {
public:
RembObserver()
: SendTest(kDefaultTimeoutMs),
@@ -862,8 +865,8 @@ TEST_F(VideoSendStreamTest, SuspendBelowMinBitrate) {
return SEND_PACKET;
}
- // This method implements the I420FrameCallback.
- void FrameCallback(VideoFrame* video_frame) override {
+ // This method implements the rtc::VideoSinkInterface
+ void OnFrame(const VideoFrame& video_frame) override {
rtc::CritScope lock(&crit_);
if (test_state_ == kDuringSuspend &&
++suspended_frame_count_ > kSuspendTimeFrames) {
@@ -1002,8 +1005,8 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) {
size_t GetNumVideoStreams() const override { return 3; }
- virtual void OnFrameGeneratorCapturerCreated(
- test::FrameGeneratorCapturer* frame_generator_capturer) {
+ void OnFrameGeneratorCapturerCreated(
+ test::FrameGeneratorCapturer* frame_generator_capturer) override {
rtc::CritScope lock(&crit_);
capturer_ = frame_generator_capturer;
}
@@ -1043,7 +1046,7 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) {
}
private:
- virtual Action OnSendRtp(const uint8_t* packet, size_t length) {
+ Action OnSendRtp(const uint8_t* packet, size_t length) override {
if (RtpHeaderParser::IsRtcp(packet, length))
return DROP_PACKET;
@@ -1115,18 +1118,22 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
class StartBitrateObserver : public test::FakeEncoder {
public:
StartBitrateObserver()
- : FakeEncoder(Clock::GetRealTimeClock()), start_bitrate_kbps_(0) {}
+ : FakeEncoder(Clock::GetRealTimeClock()),
+ start_bitrate_changed_(false, false),
+ start_bitrate_kbps_(0) {}
int32_t InitEncode(const VideoCodec* config,
int32_t number_of_cores,
size_t max_payload_size) override {
rtc::CritScope lock(&crit_);
start_bitrate_kbps_ = config->startBitrate;
+ start_bitrate_changed_.Set();
return FakeEncoder::InitEncode(config, number_of_cores, max_payload_size);
}
int32_t SetRates(uint32_t new_target_bitrate, uint32_t framerate) override {
rtc::CritScope lock(&crit_);
start_bitrate_kbps_ = new_target_bitrate;
+ start_bitrate_changed_.Set();
return FakeEncoder::SetRates(new_target_bitrate, framerate);
}
@@ -1135,8 +1142,14 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
return start_bitrate_kbps_;
}
+ bool WaitForStartBitrate() {
+ return start_bitrate_changed_.Wait(
+ VideoSendStreamTest::kDefaultTimeoutMs);
+ }
+
private:
rtc::CriticalSection crit_;
+ rtc::Event start_bitrate_changed_;
int start_bitrate_kbps_ GUARDED_BY(crit_);
};
@@ -1155,6 +1168,7 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
CreateVideoStreams();
+ EXPECT_TRUE(encoder.WaitForStartBitrate());
EXPECT_EQ(video_encoder_config_.streams[0].max_bitrate_bps / 1000,
encoder.GetStartBitrateKbps());
@@ -1165,6 +1179,7 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
// New bitrate should be reconfigured above the previous max. As there's no
// network connection this shouldn't be flaky, as no bitrate should've been
// reported in between.
+ EXPECT_TRUE(encoder.WaitForStartBitrate());
EXPECT_EQ(bitrate_config.start_bitrate_bps / 1000,
encoder.GetStartBitrateKbps());
@@ -1172,12 +1187,12 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) {
}
TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
- class FrameObserver : public I420FrameCallback {
+ class FrameObserver : public rtc::VideoSinkInterface<VideoFrame> {
public:
FrameObserver() : output_frame_event_(false, false) {}
- void FrameCallback(VideoFrame* video_frame) override {
- output_frames_.push_back(*video_frame);
+ void OnFrame(const VideoFrame& video_frame) override {
+ output_frames_.push_back(video_frame);
output_frame_event_.Set();
}
@@ -1244,49 +1259,13 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
DestroyStreams();
}
-void ExpectEqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
- if (frame1.native_handle() || frame2.native_handle())
- ExpectEqualTextureFrames(frame1, frame2);
- else
- ExpectEqualBufferFrames(frame1, frame2);
-}
-
-void ExpectEqualTextureFrames(const VideoFrame& frame1,
- const VideoFrame& frame2) {
- EXPECT_EQ(frame1.native_handle(), frame2.native_handle());
- EXPECT_EQ(frame1.width(), frame2.width());
- EXPECT_EQ(frame1.height(), frame2.height());
-}
-
-void ExpectEqualBufferFrames(const VideoFrame& frame1,
- const VideoFrame& frame2) {
- EXPECT_EQ(frame1.width(), frame2.width());
- EXPECT_EQ(frame1.height(), frame2.height());
- EXPECT_EQ(frame1.stride(kYPlane), frame2.stride(kYPlane));
- EXPECT_EQ(frame1.stride(kUPlane), frame2.stride(kUPlane));
- EXPECT_EQ(frame1.stride(kVPlane), frame2.stride(kVPlane));
- ASSERT_EQ(frame1.allocated_size(kYPlane), frame2.allocated_size(kYPlane));
- EXPECT_EQ(0,
- memcmp(frame1.buffer(kYPlane),
- frame2.buffer(kYPlane),
- frame1.allocated_size(kYPlane)));
- ASSERT_EQ(frame1.allocated_size(kUPlane), frame2.allocated_size(kUPlane));
- EXPECT_EQ(0,
- memcmp(frame1.buffer(kUPlane),
- frame2.buffer(kUPlane),
- frame1.allocated_size(kUPlane)));
- ASSERT_EQ(frame1.allocated_size(kVPlane), frame2.allocated_size(kVPlane));
- EXPECT_EQ(0,
- memcmp(frame1.buffer(kVPlane),
- frame2.buffer(kVPlane),
- frame1.allocated_size(kVPlane)));
-}
-
void ExpectEqualFramesVector(const std::vector<VideoFrame>& frames1,
const std::vector<VideoFrame>& frames2) {
EXPECT_EQ(frames1.size(), frames2.size());
for (size_t i = 0; i < std::min(frames1.size(), frames2.size()); ++i)
- ExpectEqualFrames(frames1[i], frames2[i]);
+ // Compare frame buffers, since we don't care about differing timestamps.
+ EXPECT_TRUE(test::FrameBufsEqual(frames1[i].video_frame_buffer(),
+ frames2[i].video_frame_buffer()));
}
VideoFrame CreateVideoFrame(int width, int height, uint8_t data) {
@@ -1378,9 +1357,6 @@ TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) {
void OnVideoStreamsCreated(
VideoSendStream* send_stream,
const std::vector<VideoReceiveStream*>& receive_streams) override {
- // Encoder initialization should be done in stream construction before
- // starting.
- EXPECT_TRUE(IsReadyForEncode());
stream_ = send_stream;
}
@@ -1428,6 +1404,7 @@ TEST_F(VideoSendStreamTest, EncoderSetupPropagatesCommonEncoderConfigValues) {
VideoCodecConfigObserver()
: SendTest(kDefaultTimeoutMs),
FakeEncoder(Clock::GetRealTimeClock()),
+ init_encode_event_(false, false),
num_initializations_(0) {}
private:
@@ -1456,19 +1433,23 @@ TEST_F(VideoSendStreamTest, EncoderSetupPropagatesCommonEncoderConfigValues) {
EXPECT_EQ(kScreensharing, config->mode);
}
++num_initializations_;
+ init_encode_event_.Set();
return FakeEncoder::InitEncode(config, number_of_cores, max_payload_size);
}
void PerformTest() override {
+ EXPECT_TRUE(init_encode_event_.Wait(kDefaultTimeoutMs));
EXPECT_EQ(1u, num_initializations_) << "VideoEncoder not initialized.";
encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
stream_->ReconfigureVideoEncoder(encoder_config_);
+ EXPECT_TRUE(init_encode_event_.Wait(kDefaultTimeoutMs));
EXPECT_EQ(2u, num_initializations_)
<< "ReconfigureVideoEncoder did not reinitialize the encoder with "
"new encoder settings.";
}
+ rtc::Event init_encode_event_;
size_t num_initializations_;
VideoSendStream* stream_;
VideoEncoderConfig encoder_config_;
@@ -1488,6 +1469,7 @@ class VideoCodecConfigObserver : public test::SendTest,
FakeEncoder(Clock::GetRealTimeClock()),
video_codec_type_(video_codec_type),
codec_name_(codec_name),
+ init_encode_event_(false, false),
num_initializations_(0) {
memset(&encoder_settings_, 0, sizeof(encoder_settings_));
}
@@ -1521,16 +1503,21 @@ class VideoCodecConfigObserver : public test::SendTest,
EXPECT_EQ(video_codec_type_, config->codecType);
VerifyCodecSpecifics(*config);
++num_initializations_;
+ init_encode_event_.Set();
return FakeEncoder::InitEncode(config, number_of_cores, max_payload_size);
}
void VerifyCodecSpecifics(const VideoCodec& config) const;
void PerformTest() override {
- EXPECT_EQ(1u, num_initializations_) << "VideoEncoder not initialized.";
+ EXPECT_TRUE(
+ init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
+ ASSERT_EQ(1u, num_initializations_) << "VideoEncoder not initialized.";
encoder_settings_.frameDroppingOn = true;
stream_->ReconfigureVideoEncoder(encoder_config_);
+ ASSERT_TRUE(
+ init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
EXPECT_EQ(2u, num_initializations_)
<< "ReconfigureVideoEncoder did not reinitialize the encoder with "
"new encoder settings.";
@@ -1546,6 +1533,7 @@ class VideoCodecConfigObserver : public test::SendTest,
T encoder_settings_;
const VideoCodecType video_codec_type_;
const char* const codec_name_;
+ rtc::Event init_encode_event_;
size_t num_initializations_;
VideoSendStream* stream_;
VideoEncoderConfig encoder_config_;
@@ -1708,18 +1696,7 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) {
RunBaseTest(&test);
}
-// Disabled on LinuxAsan:
-// https://bugs.chromium.org/p/webrtc/issues/detail?id=5382
-#if defined(ADDRESS_SANITIZER) && defined(WEBRTC_LINUX)
-#define MAYBE_ReconfigureBitratesSetsEncoderBitratesCorrectly \
- DISABLED_ReconfigureBitratesSetsEncoderBitratesCorrectly
-#else
-#define MAYBE_ReconfigureBitratesSetsEncoderBitratesCorrectly \
- ReconfigureBitratesSetsEncoderBitratesCorrectly
-#endif
-
-TEST_F(VideoSendStreamTest,
- MAYBE_ReconfigureBitratesSetsEncoderBitratesCorrectly) {
+TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) {
// These are chosen to be "kind of odd" to not be accidentally checked against
// default values.
static const int kMinBitrateKbps = 137;
@@ -1734,6 +1711,7 @@ TEST_F(VideoSendStreamTest,
EncoderBitrateThresholdObserver()
: SendTest(kDefaultTimeoutMs),
FakeEncoder(Clock::GetRealTimeClock()),
+ init_encode_event_(false, false),
num_initializations_(0) {}
private:
@@ -1762,6 +1740,7 @@ TEST_F(VideoSendStreamTest,
codecSettings->startBitrate);
}
++num_initializations_;
+ init_encode_event_.Set();
return FakeEncoder::InitEncode(codecSettings, numberOfCores,
maxPayloadSize);
}
@@ -1797,6 +1776,9 @@ TEST_F(VideoSendStreamTest,
}
void PerformTest() override {
+ ASSERT_TRUE(
+ init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs))
+ << "Timed out while waiting encoder to be configured.";
Call::Config::BitrateConfig bitrate_config;
bitrate_config.start_bitrate_bps = kIncreasedStartBitrateKbps * 1000;
bitrate_config.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000;
@@ -1806,6 +1788,8 @@ TEST_F(VideoSendStreamTest,
encoder_config_.streams[0].min_bitrate_bps = 0;
encoder_config_.streams[0].max_bitrate_bps = kLowerMaxBitrateKbps * 1000;
send_stream_->ReconfigureVideoEncoder(encoder_config_);
+ ASSERT_TRUE(
+ init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
EXPECT_EQ(2, num_initializations_)
<< "Encoder should have been reconfigured with the new value.";
encoder_config_.streams[0].target_bitrate_bps =
@@ -1813,10 +1797,13 @@ TEST_F(VideoSendStreamTest,
encoder_config_.streams[0].max_bitrate_bps =
kIncreasedMaxBitrateKbps * 1000;
send_stream_->ReconfigureVideoEncoder(encoder_config_);
+ ASSERT_TRUE(
+ init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
EXPECT_EQ(3, num_initializations_)
<< "Encoder should have been reconfigured with the new value.";
}
+ rtc::Event init_encode_event_;
int num_initializations_;
webrtc::Call* call_;
webrtc::VideoSendStream* send_stream_;
@@ -1846,7 +1833,6 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
const CodecSpecificInfo* codecSpecificInfo,
const std::vector<FrameType>* frame_types) override {
CodecSpecificInfo specifics;
- memset(&specifics, 0, sizeof(specifics));
specifics.codecType = kVideoCodecGeneric;
uint8_t buffer[16] = {0};
@@ -1906,6 +1892,7 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) {
RunBaseTest(&test);
}
+#if !defined(RTC_DISABLE_VP9)
class Vp9HeaderObserver : public test::SendTest {
public:
Vp9HeaderObserver()
@@ -2289,9 +2276,6 @@ TEST_F(VideoSendStreamTest, Vp9NonFlexModeSmallResolution) {
RunBaseTest(&test);
}
-#if !defined(MEMORY_SANITIZER)
-// Fails under MemorySanitizer:
-// See https://code.google.com/p/webrtc/issues/detail?id=5402.
TEST_F(VideoSendStreamTest, Vp9FlexModeRefCount) {
class FlexibleMode : public Vp9HeaderObserver {
void ModifyVideoConfigsHook(
@@ -2316,6 +2300,6 @@ TEST_F(VideoSendStreamTest, Vp9FlexModeRefCount) {
RunBaseTest(&test);
}
-#endif
+#endif // !defined(RTC_DISABLE_VP9)
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/video_stream_decoder.cc b/chromium/third_party/webrtc/video/video_stream_decoder.cc
new file mode 100644
index 00000000000..5aab7b246c7
--- /dev/null
+++ b/chromium/third_party/webrtc/video/video_stream_decoder.cc
@@ -0,0 +1,139 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/video/video_stream_decoder.h"
+
+#include <algorithm>
+#include <map>
+#include <vector>
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/common_video/include/frame_callback.h"
+#include "webrtc/common_video/include/incoming_video_stream.h"
+#include "webrtc/modules/video_coding/video_coding_impl.h"
+#include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/system_wrappers/include/metrics.h"
+#include "webrtc/video/call_stats.h"
+#include "webrtc/video/payload_router.h"
+#include "webrtc/video/receive_statistics_proxy.h"
+
+namespace webrtc {
+
+VideoStreamDecoder::VideoStreamDecoder(
+ vcm::VideoReceiver* video_receiver,
+ VCMFrameTypeCallback* vcm_frame_type_callback,
+ VCMPacketRequestCallback* vcm_packet_request_callback,
+ bool enable_nack,
+ bool enable_fec, // TODO(philipel): Actually use this.
+ ReceiveStatisticsProxy* receive_statistics_proxy,
+ IncomingVideoStream* incoming_video_stream,
+ I420FrameCallback* pre_render_callback)
+ : video_receiver_(video_receiver),
+ receive_stats_callback_(receive_statistics_proxy),
+ incoming_video_stream_(incoming_video_stream),
+ pre_render_callback_(pre_render_callback),
+ last_rtt_ms_(0) {
+ RTC_DCHECK(video_receiver_);
+
+ static const int kMaxPacketAgeToNack = 450;
+ static const int kMaxNackListSize = 250;
+ video_receiver_->SetNackSettings(kMaxNackListSize,
+ kMaxPacketAgeToNack, 0);
+ video_receiver_->RegisterReceiveCallback(this);
+ video_receiver_->RegisterFrameTypeCallback(vcm_frame_type_callback);
+ video_receiver_->RegisterReceiveStatisticsCallback(this);
+ video_receiver_->RegisterDecoderTimingCallback(this);
+ static const int kDefaultRenderDelayMs = 10;
+ video_receiver_->SetRenderDelay(kDefaultRenderDelayMs);
+
+ VCMVideoProtection video_protection = kProtectionNone;
+ if (enable_nack) {
+ if (enable_fec)
+ video_protection = kProtectionNackFEC;
+ else
+ video_protection = kProtectionNack;
+ }
+
+ VCMDecodeErrorMode decode_error_mode = enable_nack ? kNoErrors : kWithErrors;
+ video_receiver_->SetVideoProtection(video_protection, true);
+ video_receiver_->SetDecodeErrorMode(decode_error_mode);
+ VCMPacketRequestCallback* packet_request_callback =
+ enable_nack ? vcm_packet_request_callback : nullptr;
+ video_receiver_->RegisterPacketRequestCallback(packet_request_callback);
+}
+
+VideoStreamDecoder::~VideoStreamDecoder() {}
+
+// Do not acquire the lock of |video_receiver_| in this function. Decode
+// callback won't necessarily be called from the decoding thread. The decoding
+// thread may have held the lock when calling VideoDecoder::Decode, Reset, or
+// Release. Acquiring the same lock in the path of decode callback can deadlock.
+int32_t VideoStreamDecoder::FrameToRender(VideoFrame& video_frame) { // NOLINT
+ if (pre_render_callback_)
+ pre_render_callback_->FrameCallback(&video_frame);
+
+ incoming_video_stream_->OnFrame(video_frame);
+ return 0;
+}
+
+int32_t VideoStreamDecoder::ReceivedDecodedReferenceFrame(
+ const uint64_t picture_id) {
+ RTC_NOTREACHED();
+ return 0;
+}
+
+void VideoStreamDecoder::OnIncomingPayloadType(int payload_type) {
+ receive_stats_callback_->OnIncomingPayloadType(payload_type);
+}
+
+void VideoStreamDecoder::OnDecoderImplementationName(
+ const char* implementation_name) {
+ receive_stats_callback_->OnDecoderImplementationName(implementation_name);
+}
+
+void VideoStreamDecoder::OnReceiveRatesUpdated(uint32_t bit_rate,
+ uint32_t frame_rate) {
+ receive_stats_callback_->OnIncomingRate(frame_rate, bit_rate);
+}
+
+void VideoStreamDecoder::OnDiscardedPacketsUpdated(int discarded_packets) {
+ receive_stats_callback_->OnDiscardedPacketsUpdated(discarded_packets);
+}
+
+void VideoStreamDecoder::OnFrameCountsUpdated(const FrameCounts& frame_counts) {
+ receive_stats_callback_->OnFrameCountsUpdated(frame_counts);
+}
+
+void VideoStreamDecoder::OnDecoderTiming(int decode_ms,
+ int max_decode_ms,
+ int current_delay_ms,
+ int target_delay_ms,
+ int jitter_buffer_ms,
+ int min_playout_delay_ms,
+ int render_delay_ms) {
+ int last_rtt = -1;
+ {
+ rtc::CritScope lock(&crit_);
+ last_rtt = last_rtt_ms_;
+ }
+
+ receive_stats_callback_->OnDecoderTiming(
+ decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
+ jitter_buffer_ms, min_playout_delay_ms, render_delay_ms, last_rtt);
+}
+
+void VideoStreamDecoder::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
+ video_receiver_->SetReceiveChannelParameters(max_rtt_ms);
+
+ rtc::CritScope lock(&crit_);
+ last_rtt_ms_ = avg_rtt_ms;
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/video_stream_decoder.h b/chromium/third_party/webrtc/video/video_stream_decoder.h
new file mode 100644
index 00000000000..24a0ea3449b
--- /dev/null
+++ b/chromium/third_party/webrtc/video/video_stream_decoder.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_VIDEO_STREAM_DECODER_H_
+#define WEBRTC_VIDEO_VIDEO_STREAM_DECODER_H_
+
+#include <list>
+#include <map>
+#include <memory>
+#include <vector>
+
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/typedefs.h"
+#include "webrtc/video/vie_sync_module.h"
+
+namespace webrtc {
+
+class CallStatsObserver;
+class ChannelStatsObserver;
+class Config;
+class EncodedImageCallback;
+class I420FrameCallback;
+class IncomingVideoStream;
+class ReceiveStatisticsProxy;
+class VideoRenderCallback;
+class VoEVideoSync;
+
+namespace vcm {
+class VideoReceiver;
+} // namespace vcm
+
+enum StreamType {
+ kViEStreamTypeNormal = 0, // Normal media stream
+ kViEStreamTypeRtx = 1 // Retransmission media stream
+};
+
+class VideoStreamDecoder : public VCMReceiveCallback,
+ public VCMReceiveStatisticsCallback,
+ public VCMDecoderTimingCallback,
+ public CallStatsObserver {
+ public:
+ friend class ChannelStatsObserver;
+
+ VideoStreamDecoder(vcm::VideoReceiver* video_receiver,
+ VCMFrameTypeCallback* vcm_frame_type_callback,
+ VCMPacketRequestCallback* vcm_packet_request_callback,
+ bool enable_nack,
+ bool enable_fec,
+ ReceiveStatisticsProxy* receive_statistics_proxy,
+ IncomingVideoStream* incoming_video_stream,
+ I420FrameCallback* pre_render_callback);
+ ~VideoStreamDecoder();
+
+ // Implements VCMReceiveCallback.
+ int32_t FrameToRender(VideoFrame& video_frame) override; // NOLINT
+ int32_t ReceivedDecodedReferenceFrame(const uint64_t picture_id) override;
+ void OnIncomingPayloadType(int payload_type) override;
+ void OnDecoderImplementationName(const char* implementation_name) override;
+
+ // Implements VCMReceiveStatisticsCallback.
+ void OnReceiveRatesUpdated(uint32_t bit_rate, uint32_t frame_rate) override;
+ void OnDiscardedPacketsUpdated(int discarded_packets) override;
+ void OnFrameCountsUpdated(const FrameCounts& frame_counts) override;
+
+ // Implements VCMDecoderTimingCallback.
+ void OnDecoderTiming(int decode_ms,
+ int max_decode_ms,
+ int current_delay_ms,
+ int target_delay_ms,
+ int jitter_buffer_ms,
+ int min_playout_delay_ms,
+ int render_delay_ms) override;
+
+ void RegisterReceiveStatisticsProxy(
+ ReceiveStatisticsProxy* receive_statistics_proxy);
+
+ // Implements StatsObserver.
+ void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override;
+
+ private:
+ // Assumed to be protected.
+ void StartDecodeThread();
+ void StopDecodeThread();
+
+ // Used for all registered callbacks except rendering.
+ rtc::CriticalSection crit_;
+
+ vcm::VideoReceiver* const video_receiver_;
+
+ ReceiveStatisticsProxy* const receive_stats_callback_;
+ IncomingVideoStream* const incoming_video_stream_;
+
+ I420FrameCallback* const pre_render_callback_;
+
+ int64_t last_rtt_ms_ GUARDED_BY(crit_);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VIDEO_VIDEO_STREAM_DECODER_H_
diff --git a/chromium/third_party/webrtc/video/vie_channel.cc b/chromium/third_party/webrtc/video/vie_channel.cc
deleted file mode 100644
index c0676bc83b1..00000000000
--- a/chromium/third_party/webrtc/video/vie_channel.cc
+++ /dev/null
@@ -1,515 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video/vie_channel.h"
-
-#include <algorithm>
-#include <map>
-#include <vector>
-
-#include "webrtc/base/checks.h"
-#include "webrtc/base/logging.h"
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/common_video/include/incoming_video_stream.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/frame_callback.h"
-#include "webrtc/modules/pacing/paced_sender.h"
-#include "webrtc/modules/pacing/packet_router.h"
-#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
-#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
-#include "webrtc/modules/utility/include/process_thread.h"
-#include "webrtc/modules/video_coding/include/video_coding.h"
-#include "webrtc/modules/video_processing/include/video_processing.h"
-#include "webrtc/modules/video_render/video_render_defines.h"
-#include "webrtc/system_wrappers/include/metrics.h"
-#include "webrtc/video/call_stats.h"
-#include "webrtc/video/payload_router.h"
-#include "webrtc/video/receive_statistics_proxy.h"
-
-namespace webrtc {
-
-static const int kMinSendSidePacketHistorySize = 600;
-static const int kMaxPacketAgeToNack = 450;
-static const int kMaxNackListSize = 250;
-
-// Helper class receiving statistics callbacks.
-class ChannelStatsObserver : public CallStatsObserver {
- public:
- explicit ChannelStatsObserver(ViEChannel* owner) : owner_(owner) {}
- virtual ~ChannelStatsObserver() {}
-
- // Implements StatsObserver.
- virtual void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
- owner_->OnRttUpdate(avg_rtt_ms, max_rtt_ms);
- }
-
- private:
- ViEChannel* const owner_;
-};
-
-class ViEChannelProtectionCallback : public VCMProtectionCallback {
- public:
- explicit ViEChannelProtectionCallback(ViEChannel* owner) : owner_(owner) {}
- ~ViEChannelProtectionCallback() {}
-
-
- int ProtectionRequest(
- const FecProtectionParams* delta_fec_params,
- const FecProtectionParams* key_fec_params,
- uint32_t* sent_video_rate_bps,
- uint32_t* sent_nack_rate_bps,
- uint32_t* sent_fec_rate_bps) override {
- return owner_->ProtectionRequest(delta_fec_params, key_fec_params,
- sent_video_rate_bps, sent_nack_rate_bps,
- sent_fec_rate_bps);
- }
- private:
- ViEChannel* owner_;
-};
-
-ViEChannel::ViEChannel(Transport* transport,
- ProcessThread* module_process_thread,
- PayloadRouter* send_payload_router,
- VideoCodingModule* vcm,
- RtcpIntraFrameObserver* intra_frame_observer,
- RtcpBandwidthObserver* bandwidth_observer,
- TransportFeedbackObserver* transport_feedback_observer,
- RemoteBitrateEstimator* remote_bitrate_estimator,
- RtcpRttStats* rtt_stats,
- PacedSender* paced_sender,
- PacketRouter* packet_router,
- size_t max_rtp_streams,
- bool sender)
- : sender_(sender),
- module_process_thread_(module_process_thread),
- send_payload_router_(send_payload_router),
- vcm_protection_callback_(new ViEChannelProtectionCallback(this)),
- vcm_(vcm),
- vie_receiver_(vcm_, remote_bitrate_estimator, this),
- stats_observer_(new ChannelStatsObserver(this)),
- receive_stats_callback_(nullptr),
- incoming_video_stream_(nullptr),
- intra_frame_observer_(intra_frame_observer),
- rtt_stats_(rtt_stats),
- paced_sender_(paced_sender),
- packet_router_(packet_router),
- bandwidth_observer_(bandwidth_observer),
- transport_feedback_observer_(transport_feedback_observer),
- max_nack_reordering_threshold_(kMaxPacketAgeToNack),
- pre_render_callback_(nullptr),
- last_rtt_ms_(0),
- rtp_rtcp_modules_(
- CreateRtpRtcpModules(!sender,
- vie_receiver_.GetReceiveStatistics(),
- transport,
- intra_frame_observer_,
- bandwidth_observer_.get(),
- transport_feedback_observer_,
- rtt_stats_,
- &rtcp_packet_type_counter_observer_,
- remote_bitrate_estimator,
- paced_sender_,
- packet_router_,
- &send_bitrate_observer_,
- &send_frame_count_observer_,
- &send_side_delay_observer_,
- max_rtp_streams)) {
- vie_receiver_.Init(rtp_rtcp_modules_);
- if (sender_) {
- RTC_DCHECK(send_payload_router_);
- RTC_DCHECK(!vcm_);
- } else {
- RTC_DCHECK(!send_payload_router_);
- RTC_DCHECK(vcm_);
- vcm_->SetNackSettings(kMaxNackListSize, max_nack_reordering_threshold_, 0);
- }
-}
-
-int32_t ViEChannel::Init() {
- static const int kDefaultRenderDelayMs = 10;
- module_process_thread_->RegisterModule(vie_receiver_.GetReceiveStatistics());
-
- // RTP/RTCP initialization.
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- module_process_thread_->RegisterModule(rtp_rtcp);
- packet_router_->AddRtpModule(rtp_rtcp);
- }
-
- rtp_rtcp_modules_[0]->SetKeyFrameRequestMethod(kKeyFrameReqPliRtcp);
- if (paced_sender_) {
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->SetStorePacketsStatus(true, kMinSendSidePacketHistorySize);
- }
- if (sender_) {
- send_payload_router_->SetSendingRtpModules(1);
- RTC_DCHECK(!send_payload_router_->active());
- } else {
- if (vcm_->RegisterReceiveCallback(this) != 0) {
- return -1;
- }
- vcm_->RegisterFrameTypeCallback(this);
- vcm_->RegisterReceiveStatisticsCallback(this);
- vcm_->RegisterDecoderTimingCallback(this);
- vcm_->SetRenderDelay(kDefaultRenderDelayMs);
- }
- return 0;
-}
-
-ViEChannel::~ViEChannel() {
- // Make sure we don't get more callbacks from the RTP module.
- module_process_thread_->DeRegisterModule(
- vie_receiver_.GetReceiveStatistics());
- if (sender_) {
- send_payload_router_->SetSendingRtpModules(0);
- }
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- packet_router_->RemoveRtpModule(rtp_rtcp);
- module_process_thread_->DeRegisterModule(rtp_rtcp);
- delete rtp_rtcp;
- }
-}
-
-void ViEChannel::SetProtectionMode(bool enable_nack,
- bool enable_fec,
- int payload_type_red,
- int payload_type_fec) {
- // Validate payload types. If either RED or FEC payload types are set then
- // both should be. If FEC is enabled then they both have to be set.
- if (enable_fec || payload_type_red != -1 || payload_type_fec != -1) {
- RTC_DCHECK_GE(payload_type_red, 0);
- RTC_DCHECK_GE(payload_type_fec, 0);
- RTC_DCHECK_LE(payload_type_red, 127);
- RTC_DCHECK_LE(payload_type_fec, 127);
- } else {
- // Payload types unset.
- RTC_DCHECK_EQ(payload_type_red, -1);
- RTC_DCHECK_EQ(payload_type_fec, -1);
- // Set to valid uint8_ts to be castable later without signed overflows.
- payload_type_red = 0;
- payload_type_fec = 0;
- }
-
- VCMVideoProtection protection_method;
- if (enable_nack) {
- protection_method = enable_fec ? kProtectionNackFEC : kProtectionNack;
- } else {
- protection_method = kProtectionNone;
- }
-
- if (!sender_)
- vcm_->SetVideoProtection(protection_method, true);
-
- // Set NACK.
- ProcessNACKRequest(enable_nack);
-
- // Set FEC.
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- rtp_rtcp->SetGenericFECStatus(enable_fec,
- static_cast<uint8_t>(payload_type_red),
- static_cast<uint8_t>(payload_type_fec));
- }
-}
-
-void ViEChannel::ProcessNACKRequest(const bool enable) {
- if (enable) {
- // Turn on NACK.
- if (rtp_rtcp_modules_[0]->RTCP() == RtcpMode::kOff)
- return;
- vie_receiver_.SetNackStatus(true, max_nack_reordering_threshold_);
-
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->SetStorePacketsStatus(true, kMinSendSidePacketHistorySize);
-
- if (!sender_) {
- vcm_->RegisterPacketRequestCallback(this);
- // Don't introduce errors when NACK is enabled.
- vcm_->SetDecodeErrorMode(kNoErrors);
- }
- } else {
- if (!sender_) {
- vcm_->RegisterPacketRequestCallback(nullptr);
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_)
- rtp_rtcp->SetStorePacketsStatus(false, 0);
- // When NACK is off, allow decoding with errors. Otherwise, the video
- // will freeze, and will only recover with a complete key frame.
- vcm_->SetDecodeErrorMode(kWithErrors);
- }
- vie_receiver_.SetNackStatus(false, max_nack_reordering_threshold_);
- }
-}
-
-int ViEChannel::GetRequiredNackListSize(int target_delay_ms) {
- // The max size of the nack list should be large enough to accommodate the
- // the number of packets (frames) resulting from the increased delay.
- // Roughly estimating for ~40 packets per frame @ 30fps.
- return target_delay_ms * 40 * 30 / 1000;
-}
-
-RtpState ViEChannel::GetRtpStateForSsrc(uint32_t ssrc) const {
- RTC_DCHECK(!rtp_rtcp_modules_[0]->Sending());
- RtpState rtp_state;
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- if (rtp_rtcp->GetRtpStateForSsrc(ssrc, &rtp_state))
- return rtp_state;
- }
- LOG(LS_ERROR) << "Couldn't get RTP state for ssrc: " << ssrc;
- return rtp_state;
-}
-
-void ViEChannel::RegisterRtcpPacketTypeCounterObserver(
- RtcpPacketTypeCounterObserver* observer) {
- rtcp_packet_type_counter_observer_.Set(observer);
-}
-
-void ViEChannel::GetSendStreamDataCounters(
- StreamDataCounters* rtp_counters,
- StreamDataCounters* rtx_counters) const {
- *rtp_counters = StreamDataCounters();
- *rtx_counters = StreamDataCounters();
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- StreamDataCounters rtp_data;
- StreamDataCounters rtx_data;
- rtp_rtcp->GetSendStreamDataCounters(&rtp_data, &rtx_data);
- rtp_counters->Add(rtp_data);
- rtx_counters->Add(rtx_data);
- }
-}
-
-void ViEChannel::RegisterSendSideDelayObserver(
- SendSideDelayObserver* observer) {
- send_side_delay_observer_.Set(observer);
-}
-
-void ViEChannel::RegisterSendBitrateObserver(
- BitrateStatisticsObserver* observer) {
- send_bitrate_observer_.Set(observer);
-}
-
-const std::vector<RtpRtcp*>& ViEChannel::rtp_rtcp() const {
- return rtp_rtcp_modules_;
-}
-
-ViEReceiver* ViEChannel::vie_receiver() {
- return &vie_receiver_;
-}
-
-VCMProtectionCallback* ViEChannel::vcm_protection_callback() {
- return vcm_protection_callback_.get();
-}
-
-CallStatsObserver* ViEChannel::GetStatsObserver() {
- return stats_observer_.get();
-}
-
-// Do not acquire the lock of |vcm_| in this function. Decode callback won't
-// necessarily be called from the decoding thread. The decoding thread may have
-// held the lock when calling VideoDecoder::Decode, Reset, or Release. Acquiring
-// the same lock in the path of decode callback can deadlock.
-int32_t ViEChannel::FrameToRender(VideoFrame& video_frame) { // NOLINT
- rtc::CritScope lock(&crit_);
-
- if (pre_render_callback_)
- pre_render_callback_->FrameCallback(&video_frame);
-
- // TODO(pbos): Remove stream id argument.
- incoming_video_stream_->RenderFrame(0xFFFFFFFF, video_frame);
- return 0;
-}
-
-int32_t ViEChannel::ReceivedDecodedReferenceFrame(
- const uint64_t picture_id) {
- return rtp_rtcp_modules_[0]->SendRTCPReferencePictureSelection(picture_id);
-}
-
-void ViEChannel::OnIncomingPayloadType(int payload_type) {
- rtc::CritScope lock(&crit_);
- if (receive_stats_callback_)
- receive_stats_callback_->OnIncomingPayloadType(payload_type);
-}
-
-void ViEChannel::OnDecoderImplementationName(const char* implementation_name) {
- rtc::CritScope lock(&crit_);
- if (receive_stats_callback_)
- receive_stats_callback_->OnDecoderImplementationName(implementation_name);
-}
-
-void ViEChannel::OnReceiveRatesUpdated(uint32_t bit_rate, uint32_t frame_rate) {
- rtc::CritScope lock(&crit_);
- if (receive_stats_callback_)
- receive_stats_callback_->OnIncomingRate(frame_rate, bit_rate);
-}
-
-void ViEChannel::OnDiscardedPacketsUpdated(int discarded_packets) {
- rtc::CritScope lock(&crit_);
- if (receive_stats_callback_)
- receive_stats_callback_->OnDiscardedPacketsUpdated(discarded_packets);
-}
-
-void ViEChannel::OnFrameCountsUpdated(const FrameCounts& frame_counts) {
- rtc::CritScope lock(&crit_);
- receive_frame_counts_ = frame_counts;
- if (receive_stats_callback_)
- receive_stats_callback_->OnFrameCountsUpdated(frame_counts);
-}
-
-void ViEChannel::OnDecoderTiming(int decode_ms,
- int max_decode_ms,
- int current_delay_ms,
- int target_delay_ms,
- int jitter_buffer_ms,
- int min_playout_delay_ms,
- int render_delay_ms) {
- rtc::CritScope lock(&crit_);
- if (!receive_stats_callback_)
- return;
- receive_stats_callback_->OnDecoderTiming(
- decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
- jitter_buffer_ms, min_playout_delay_ms, render_delay_ms, last_rtt_ms_);
-}
-
-int32_t ViEChannel::RequestKeyFrame() {
- return rtp_rtcp_modules_[0]->RequestKeyFrame();
-}
-
-int32_t ViEChannel::SliceLossIndicationRequest(
- const uint64_t picture_id) {
- return rtp_rtcp_modules_[0]->SendRTCPSliceLossIndication(
- static_cast<uint8_t>(picture_id));
-}
-
-int32_t ViEChannel::ResendPackets(const uint16_t* sequence_numbers,
- uint16_t length) {
- return rtp_rtcp_modules_[0]->SendNACK(sequence_numbers, length);
-}
-
-void ViEChannel::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
- if (!sender_)
- vcm_->SetReceiveChannelParameters(max_rtt_ms);
-
- rtc::CritScope lock(&crit_);
- last_rtt_ms_ = avg_rtt_ms;
-}
-
-int ViEChannel::ProtectionRequest(const FecProtectionParams* delta_fec_params,
- const FecProtectionParams* key_fec_params,
- uint32_t* video_rate_bps,
- uint32_t* nack_rate_bps,
- uint32_t* fec_rate_bps) {
- *video_rate_bps = 0;
- *nack_rate_bps = 0;
- *fec_rate_bps = 0;
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_modules_) {
- uint32_t not_used = 0;
- uint32_t module_video_rate = 0;
- uint32_t module_fec_rate = 0;
- uint32_t module_nack_rate = 0;
- rtp_rtcp->SetFecParameters(delta_fec_params, key_fec_params);
- rtp_rtcp->BitrateSent(&not_used, &module_video_rate, &module_fec_rate,
- &module_nack_rate);
- *video_rate_bps += module_video_rate;
- *nack_rate_bps += module_nack_rate;
- *fec_rate_bps += module_fec_rate;
- }
- return 0;
-}
-
-std::vector<RtpRtcp*> ViEChannel::CreateRtpRtcpModules(
- bool receiver_only,
- ReceiveStatistics* receive_statistics,
- Transport* outgoing_transport,
- RtcpIntraFrameObserver* intra_frame_callback,
- RtcpBandwidthObserver* bandwidth_callback,
- TransportFeedbackObserver* transport_feedback_callback,
- RtcpRttStats* rtt_stats,
- RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
- RemoteBitrateEstimator* remote_bitrate_estimator,
- RtpPacketSender* paced_sender,
- TransportSequenceNumberAllocator* transport_sequence_number_allocator,
- BitrateStatisticsObserver* send_bitrate_observer,
- FrameCountObserver* send_frame_count_observer,
- SendSideDelayObserver* send_side_delay_observer,
- size_t num_modules) {
- RTC_DCHECK_GT(num_modules, 0u);
- RtpRtcp::Configuration configuration;
- ReceiveStatistics* null_receive_statistics = configuration.receive_statistics;
- configuration.audio = false;
- configuration.receiver_only = receiver_only;
- configuration.receive_statistics = receive_statistics;
- configuration.outgoing_transport = outgoing_transport;
- configuration.intra_frame_callback = intra_frame_callback;
- configuration.rtt_stats = rtt_stats;
- configuration.rtcp_packet_type_counter_observer =
- rtcp_packet_type_counter_observer;
- configuration.paced_sender = paced_sender;
- configuration.transport_sequence_number_allocator =
- transport_sequence_number_allocator;
- configuration.send_bitrate_observer = send_bitrate_observer;
- configuration.send_frame_count_observer = send_frame_count_observer;
- configuration.send_side_delay_observer = send_side_delay_observer;
- configuration.bandwidth_callback = bandwidth_callback;
- configuration.transport_feedback_callback = transport_feedback_callback;
-
- std::vector<RtpRtcp*> modules;
- for (size_t i = 0; i < num_modules; ++i) {
- RtpRtcp* rtp_rtcp = RtpRtcp::CreateRtpRtcp(configuration);
- rtp_rtcp->SetSendingStatus(false);
- rtp_rtcp->SetSendingMediaStatus(false);
- rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
- modules.push_back(rtp_rtcp);
- // Receive statistics and remote bitrate estimator should only be set for
- // the primary (first) module.
- configuration.receive_statistics = null_receive_statistics;
- configuration.remote_bitrate_estimator = nullptr;
- }
- return modules;
-}
-
-void ViEChannel::RegisterPreRenderCallback(
- I420FrameCallback* pre_render_callback) {
- RTC_DCHECK(!sender_);
- rtc::CritScope lock(&crit_);
- pre_render_callback_ = pre_render_callback;
-}
-
-// TODO(pbos): Remove as soon as audio can handle a changing payload type
-// without this callback.
-int32_t ViEChannel::OnInitializeDecoder(
- const int8_t payload_type,
- const char payload_name[RTP_PAYLOAD_NAME_SIZE],
- const int frequency,
- const size_t channels,
- const uint32_t rate) {
- RTC_NOTREACHED();
- return 0;
-}
-
-void ViEChannel::OnIncomingSSRCChanged(const uint32_t ssrc) {
- rtp_rtcp_modules_[0]->SetRemoteSSRC(ssrc);
-}
-
-void ViEChannel::OnIncomingCSRCChanged(const uint32_t CSRC, const bool added) {}
-
-void ViEChannel::RegisterSendFrameCountObserver(
- FrameCountObserver* observer) {
- send_frame_count_observer_.Set(observer);
-}
-
-void ViEChannel::RegisterReceiveStatisticsProxy(
- ReceiveStatisticsProxy* receive_statistics_proxy) {
- rtc::CritScope lock(&crit_);
- receive_stats_callback_ = receive_statistics_proxy;
-}
-
-void ViEChannel::SetIncomingVideoStream(
- IncomingVideoStream* incoming_video_stream) {
- rtc::CritScope lock(&crit_);
- incoming_video_stream_ = incoming_video_stream;
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/vie_channel.h b/chromium/third_party/webrtc/video/vie_channel.h
deleted file mode 100644
index 8a9d70e58ed..00000000000
--- a/chromium/third_party/webrtc/video/vie_channel.h
+++ /dev/null
@@ -1,311 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_VIE_CHANNEL_H_
-#define WEBRTC_VIDEO_VIE_CHANNEL_H_
-
-#include <list>
-#include <map>
-#include <memory>
-#include <vector>
-
-#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/platform_thread.h"
-#include "webrtc/base/scoped_ref_ptr.h"
-#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
-#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
-#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "webrtc/modules/video_coding/include/video_coding_defines.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/typedefs.h"
-#include "webrtc/video/vie_receiver.h"
-#include "webrtc/video/vie_sync_module.h"
-
-namespace webrtc {
-
-class CallStatsObserver;
-class ChannelStatsObserver;
-class Config;
-class EncodedImageCallback;
-class I420FrameCallback;
-class IncomingVideoStream;
-class PacedSender;
-class PacketRouter;
-class PayloadRouter;
-class ProcessThread;
-class ReceiveStatisticsProxy;
-class RtcpRttStats;
-class ViEChannelProtectionCallback;
-class ViERTPObserver;
-class VideoCodingModule;
-class VideoRenderCallback;
-class VoEVideoSync;
-
-enum StreamType {
- kViEStreamTypeNormal = 0, // Normal media stream
- kViEStreamTypeRtx = 1 // Retransmission media stream
-};
-
-class ViEChannel : public VCMFrameTypeCallback,
- public VCMReceiveCallback,
- public VCMReceiveStatisticsCallback,
- public VCMDecoderTimingCallback,
- public VCMPacketRequestCallback,
- public RtpFeedback {
- public:
- friend class ChannelStatsObserver;
- friend class ViEChannelProtectionCallback;
-
- ViEChannel(Transport* transport,
- ProcessThread* module_process_thread,
- PayloadRouter* send_payload_router,
- VideoCodingModule* vcm,
- RtcpIntraFrameObserver* intra_frame_observer,
- RtcpBandwidthObserver* bandwidth_observer,
- TransportFeedbackObserver* transport_feedback_observer,
- RemoteBitrateEstimator* remote_bitrate_estimator,
- RtcpRttStats* rtt_stats,
- PacedSender* paced_sender,
- PacketRouter* packet_router,
- size_t max_rtp_streams,
- bool sender);
- ~ViEChannel();
-
- int32_t Init();
-
- void SetProtectionMode(bool enable_nack,
- bool enable_fec,
- int payload_type_red,
- int payload_type_fec);
-
- RtpState GetRtpStateForSsrc(uint32_t ssrc) const;
-
- // Gets send statistics for the rtp and rtx stream.
- void GetSendStreamDataCounters(StreamDataCounters* rtp_counters,
- StreamDataCounters* rtx_counters) const;
-
- void RegisterSendSideDelayObserver(SendSideDelayObserver* observer);
-
- // Called on any new send bitrate estimate.
- void RegisterSendBitrateObserver(BitrateStatisticsObserver* observer);
-
- // Implements RtpFeedback.
- int32_t OnInitializeDecoder(const int8_t payload_type,
- const char payload_name[RTP_PAYLOAD_NAME_SIZE],
- const int frequency,
- const size_t channels,
- const uint32_t rate) override;
- void OnIncomingSSRCChanged(const uint32_t ssrc) override;
- void OnIncomingCSRCChanged(const uint32_t CSRC, const bool added) override;
-
- // Gets the modules used by the channel.
- const std::vector<RtpRtcp*>& rtp_rtcp() const;
- ViEReceiver* vie_receiver();
- VCMProtectionCallback* vcm_protection_callback();
-
-
- CallStatsObserver* GetStatsObserver();
-
- // Implements VCMReceiveCallback.
- virtual int32_t FrameToRender(VideoFrame& video_frame); // NOLINT
-
- // Implements VCMReceiveCallback.
- virtual int32_t ReceivedDecodedReferenceFrame(
- const uint64_t picture_id);
-
- // Implements VCMReceiveCallback.
- void OnIncomingPayloadType(int payload_type) override;
- void OnDecoderImplementationName(const char* implementation_name) override;
-
- // Implements VCMReceiveStatisticsCallback.
- void OnReceiveRatesUpdated(uint32_t bit_rate, uint32_t frame_rate) override;
- void OnDiscardedPacketsUpdated(int discarded_packets) override;
- void OnFrameCountsUpdated(const FrameCounts& frame_counts) override;
-
- // Implements VCMDecoderTimingCallback.
- virtual void OnDecoderTiming(int decode_ms,
- int max_decode_ms,
- int current_delay_ms,
- int target_delay_ms,
- int jitter_buffer_ms,
- int min_playout_delay_ms,
- int render_delay_ms);
-
- // Implements FrameTypeCallback.
- virtual int32_t RequestKeyFrame();
-
- // Implements FrameTypeCallback.
- virtual int32_t SliceLossIndicationRequest(
- const uint64_t picture_id);
-
- // Implements VideoPacketRequestCallback.
- int32_t ResendPackets(const uint16_t* sequence_numbers,
- uint16_t length) override;
-
- void RegisterPreRenderCallback(I420FrameCallback* pre_render_callback);
-
- void RegisterSendFrameCountObserver(FrameCountObserver* observer);
- void RegisterRtcpPacketTypeCounterObserver(
- RtcpPacketTypeCounterObserver* observer);
- void RegisterReceiveStatisticsProxy(
- ReceiveStatisticsProxy* receive_statistics_proxy);
- void SetIncomingVideoStream(IncomingVideoStream* incoming_video_stream);
-
- protected:
- void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms);
-
- int ProtectionRequest(const FecProtectionParams* delta_fec_params,
- const FecProtectionParams* key_fec_params,
- uint32_t* sent_video_rate_bps,
- uint32_t* sent_nack_rate_bps,
- uint32_t* sent_fec_rate_bps);
-
- private:
- static std::vector<RtpRtcp*> CreateRtpRtcpModules(
- bool receiver_only,
- ReceiveStatistics* receive_statistics,
- Transport* outgoing_transport,
- RtcpIntraFrameObserver* intra_frame_callback,
- RtcpBandwidthObserver* bandwidth_callback,
- TransportFeedbackObserver* transport_feedback_callback,
- RtcpRttStats* rtt_stats,
- RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
- RemoteBitrateEstimator* remote_bitrate_estimator,
- RtpPacketSender* paced_sender,
- TransportSequenceNumberAllocator* transport_sequence_number_allocator,
- BitrateStatisticsObserver* send_bitrate_observer,
- FrameCountObserver* send_frame_count_observer,
- SendSideDelayObserver* send_side_delay_observer,
- size_t num_modules);
-
- // Assumed to be protected.
- void StartDecodeThread();
- void StopDecodeThread();
-
- void ProcessNACKRequest(const bool enable);
- // Compute NACK list parameters for the buffering mode.
- int GetRequiredNackListSize(int target_delay_ms);
-
- // ViEChannel exposes methods that allow to modify observers and callbacks
- // to be modified. Such an API-style is cumbersome to implement and maintain
- // at all the levels when comparing to only setting them at construction. As
- // so this class instantiates its children with a wrapper that can be modified
- // at a later time.
- template <class T>
- class RegisterableCallback : public T {
- public:
- RegisterableCallback() : callback_(nullptr) {}
-
- void Set(T* callback) {
- rtc::CritScope lock(&critsect_);
- callback_ = callback;
- }
-
- protected:
- // Note: this should be implemented with a RW-lock to allow simultaneous
- // calls into the callback. However that doesn't seem to be needed for the
- // current type of callbacks covered by this class.
- rtc::CriticalSection critsect_;
- T* callback_ GUARDED_BY(critsect_);
-
- private:
- RTC_DISALLOW_COPY_AND_ASSIGN(RegisterableCallback);
- };
-
- class RegisterableBitrateStatisticsObserver:
- public RegisterableCallback<BitrateStatisticsObserver> {
- virtual void Notify(const BitrateStatistics& total_stats,
- const BitrateStatistics& retransmit_stats,
- uint32_t ssrc) {
- rtc::CritScope lock(&critsect_);
- if (callback_)
- callback_->Notify(total_stats, retransmit_stats, ssrc);
- }
- } send_bitrate_observer_;
-
- class RegisterableFrameCountObserver
- : public RegisterableCallback<FrameCountObserver> {
- public:
- virtual void FrameCountUpdated(const FrameCounts& frame_counts,
- uint32_t ssrc) {
- rtc::CritScope lock(&critsect_);
- if (callback_)
- callback_->FrameCountUpdated(frame_counts, ssrc);
- }
-
- private:
- } send_frame_count_observer_;
-
- class RegisterableSendSideDelayObserver :
- public RegisterableCallback<SendSideDelayObserver> {
- void SendSideDelayUpdated(int avg_delay_ms,
- int max_delay_ms,
- uint32_t ssrc) override {
- rtc::CritScope lock(&critsect_);
- if (callback_)
- callback_->SendSideDelayUpdated(avg_delay_ms, max_delay_ms, ssrc);
- }
- } send_side_delay_observer_;
-
- class RegisterableRtcpPacketTypeCounterObserver
- : public RegisterableCallback<RtcpPacketTypeCounterObserver> {
- public:
- void RtcpPacketTypesCounterUpdated(
- uint32_t ssrc,
- const RtcpPacketTypeCounter& packet_counter) override {
- rtc::CritScope lock(&critsect_);
- if (callback_)
- callback_->RtcpPacketTypesCounterUpdated(ssrc, packet_counter);
- }
-
- private:
- } rtcp_packet_type_counter_observer_;
-
- const bool sender_;
-
- ProcessThread* const module_process_thread_;
- PayloadRouter* const send_payload_router_;
-
- // Used for all registered callbacks except rendering.
- rtc::CriticalSection crit_;
-
- // Owned modules/classes.
- std::unique_ptr<ViEChannelProtectionCallback> vcm_protection_callback_;
-
- VideoCodingModule* const vcm_;
- ViEReceiver vie_receiver_;
-
- // Helper to report call statistics.
- std::unique_ptr<ChannelStatsObserver> stats_observer_;
-
- // Not owned.
- ReceiveStatisticsProxy* receive_stats_callback_ GUARDED_BY(crit_);
- FrameCounts receive_frame_counts_ GUARDED_BY(crit_);
- IncomingVideoStream* incoming_video_stream_ GUARDED_BY(crit_);
- RtcpIntraFrameObserver* const intra_frame_observer_;
- RtcpRttStats* const rtt_stats_;
- PacedSender* const paced_sender_;
- PacketRouter* const packet_router_;
-
- const std::unique_ptr<RtcpBandwidthObserver> bandwidth_observer_;
- TransportFeedbackObserver* const transport_feedback_observer_;
-
- int max_nack_reordering_threshold_;
- I420FrameCallback* pre_render_callback_ GUARDED_BY(crit_);
-
- int64_t last_rtt_ms_ GUARDED_BY(crit_);
-
- // RtpRtcp modules, declared last as they use other members on construction.
- const std::vector<RtpRtcp*> rtp_rtcp_modules_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_VIE_CHANNEL_H_
diff --git a/chromium/third_party/webrtc/video/vie_encoder.cc b/chromium/third_party/webrtc/video/vie_encoder.cc
index 24b8ce3cd1c..2871c4045c9 100644
--- a/chromium/third_party/webrtc/video/vie_encoder.cc
+++ b/chromium/third_party/webrtc/video/vie_encoder.cc
@@ -17,132 +17,50 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
-#include "webrtc/common_video/include/video_image.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
-#include "webrtc/frame_callback.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/modules/pacing/paced_sender.h"
-#include "webrtc/modules/utility/include/process_thread.h"
-#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/include/video_coding.h"
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
-#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/metrics.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/video/overuse_frame_detector.h"
-#include "webrtc/video/payload_router.h"
#include "webrtc/video/send_statistics_proxy.h"
#include "webrtc/video_frame.h"
namespace webrtc {
static const float kStopPaddingThresholdMs = 2000;
-static const int kMinKeyFrameRequestIntervalMs = 300;
-
-std::vector<uint32_t> AllocateStreamBitrates(
- uint32_t total_bitrate,
- const SimulcastStream* stream_configs,
- size_t number_of_streams) {
- if (number_of_streams == 0) {
- std::vector<uint32_t> stream_bitrates(1, 0);
- stream_bitrates[0] = total_bitrate;
- return stream_bitrates;
- }
- std::vector<uint32_t> stream_bitrates(number_of_streams, 0);
- uint32_t bitrate_remainder = total_bitrate;
- for (size_t i = 0; i < stream_bitrates.size() && bitrate_remainder > 0; ++i) {
- if (stream_configs[i].maxBitrate * 1000 > bitrate_remainder) {
- stream_bitrates[i] = bitrate_remainder;
- } else {
- stream_bitrates[i] = stream_configs[i].maxBitrate * 1000;
- }
- bitrate_remainder -= stream_bitrates[i];
- }
- return stream_bitrates;
-}
-
-class QMVideoSettingsCallback : public VCMQMSettingsCallback {
- public:
- explicit QMVideoSettingsCallback(VideoProcessing* vpm);
-
- ~QMVideoSettingsCallback();
-
- // Update VPM with QM (quality modes: frame size & frame rate) settings.
- int32_t SetVideoQMSettings(const uint32_t frame_rate,
- const uint32_t width,
- const uint32_t height);
-
- // Update target frame rate.
- void SetTargetFramerate(int frame_rate);
-
- private:
- VideoProcessing* vp_;
-};
ViEEncoder::ViEEncoder(uint32_t number_of_cores,
- const std::vector<uint32_t>& ssrcs,
ProcessThread* module_process_thread,
SendStatisticsProxy* stats_proxy,
- I420FrameCallback* pre_encode_callback,
- OveruseFrameDetector* overuse_detector,
- PacedSender* pacer,
- PayloadRouter* payload_router)
+ OveruseFrameDetector* overuse_detector)
: number_of_cores_(number_of_cores),
- ssrcs_(ssrcs),
vp_(VideoProcessing::Create()),
- qm_callback_(new QMVideoSettingsCallback(vp_.get())),
- vcm_(VideoCodingModule::Create(Clock::GetRealTimeClock(),
- this,
- qm_callback_.get())),
+ video_sender_(Clock::GetRealTimeClock(), this, this, this),
stats_proxy_(stats_proxy),
- pre_encode_callback_(pre_encode_callback),
overuse_detector_(overuse_detector),
- pacer_(pacer),
- send_payload_router_(payload_router),
time_of_last_frame_activity_ms_(0),
encoder_config_(),
min_transmit_bitrate_bps_(0),
last_observed_bitrate_bps_(0),
- network_is_transmitting_(true),
- encoder_paused_(false),
+ encoder_paused_(true),
encoder_paused_and_dropped_frame_(false),
- time_last_intra_request_ms_(ssrcs.size(), -1),
module_process_thread_(module_process_thread),
has_received_sli_(false),
picture_id_sli_(0),
has_received_rpsi_(false),
picture_id_rpsi_(0),
video_suspended_(false) {
- module_process_thread_->RegisterModule(vcm_.get());
-}
-
-bool ViEEncoder::Init() {
+ module_process_thread_->RegisterModule(&video_sender_);
vp_->EnableTemporalDecimation(true);
-
- // Enable/disable content analysis: off by default for now.
- vp_->EnableContentAnalysis(false);
-
- if (vcm_->RegisterTransportCallback(this) != 0) {
- return false;
- }
- if (vcm_->RegisterSendStatisticsCallback(this) != 0) {
- return false;
- }
- return true;
}
-VideoCodingModule* ViEEncoder::vcm() const {
- return vcm_.get();
+vcm::VideoSender* ViEEncoder::video_sender() {
+ return &video_sender_;
}
ViEEncoder::~ViEEncoder() {
- module_process_thread_->DeRegisterModule(vcm_.get());
-}
-
-void ViEEncoder::SetNetworkTransmissionState(bool is_transmitting) {
- {
- rtc::CritScope lock(&data_cs_);
- network_is_transmitting_ = is_transmitting;
- }
+ module_process_thread_->DeRegisterModule(&video_sender_);
}
void ViEEncoder::Pause() {
@@ -150,7 +68,7 @@ void ViEEncoder::Pause() {
encoder_paused_ = true;
}
-void ViEEncoder::Restart() {
+void ViEEncoder::Start() {
rtc::CritScope lock(&data_cs_);
encoder_paused_ = false;
}
@@ -158,22 +76,18 @@ void ViEEncoder::Restart() {
int32_t ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
uint8_t pl_type,
bool internal_source) {
- if (vcm_->RegisterExternalEncoder(encoder, pl_type, internal_source) !=
- VCM_OK) {
- return -1;
- }
+ video_sender_.RegisterExternalEncoder(encoder, pl_type, internal_source);
return 0;
}
int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
- if (vcm_->RegisterExternalEncoder(nullptr, pl_type) != VCM_OK) {
- return -1;
- }
+ video_sender_.RegisterExternalEncoder(nullptr, pl_type, false);
return 0;
}
void ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec,
- int min_transmit_bitrate_bps) {
- RTC_DCHECK(send_payload_router_);
+ int min_transmit_bitrate_bps,
+ size_t max_data_payload_length,
+ EncodedImageCallback* sink) {
// Setting target width and height for VPM.
RTC_CHECK_EQ(VPM_OK,
vp_->SetTargetResolution(video_codec.width, video_codec.height,
@@ -184,12 +98,14 @@ void ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec,
{
rtc::CritScope lock(&data_cs_);
encoder_config_ = video_codec;
- encoder_paused_ = true;
min_transmit_bitrate_bps_ = min_transmit_bitrate_bps;
}
+ {
+ rtc::CritScope lock(&sink_cs_);
+ sink_ = sink;
+ }
- size_t max_data_payload_length = send_payload_router_->MaxPayloadLength();
- bool success = vcm_->RegisterSendCodec(
+ bool success = video_sender_.RegisterSendCodec(
&video_codec, number_of_cores_,
static_cast<uint32_t>(max_data_payload_length)) == VCM_OK;
if (!success) {
@@ -197,17 +113,7 @@ void ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec,
RTC_DCHECK(success);
}
- send_payload_router_->SetSendingRtpModules(
- video_codec.numberOfSimulcastStreams);
-
- // Restart the media flow
- Restart();
if (stats_proxy_) {
- // Clear stats for disabled layers.
- for (size_t i = video_codec.numberOfSimulcastStreams; i < ssrcs_.size();
- ++i) {
- stats_proxy_->OnInactiveSsrc(ssrcs_[i]);
- }
VideoEncoderConfig::ContentType content_type =
VideoEncoderConfig::ContentType::kRealtimeVideo;
switch (video_codec.mode) {
@@ -242,7 +148,7 @@ int ViEEncoder::GetPaddingNeededBps() const {
send_codec = encoder_config_;
}
- bool video_is_suspended = vcm_->VideoSuspended();
+ bool video_is_suspended = video_sender_.VideoSuspended();
// Find the max amount of padding we can allow ourselves to send at this
// point, based on which streams are currently active and what our current
@@ -267,7 +173,7 @@ int ViEEncoder::GetPaddingNeededBps() const {
// The amount of padding should decay to zero if no frames are being
// captured/encoded unless a min-transmit bitrate is used.
- int64_t now_ms = TickTime::MillisecondTimestamp();
+ int64_t now_ms = rtc::TimeMillis();
if (now_ms - time_of_last_frame_activity_ms > kStopPaddingThresholdMs)
pad_up_to_bitrate_bps = 0;
@@ -285,14 +191,9 @@ int ViEEncoder::GetPaddingNeededBps() const {
bool ViEEncoder::EncoderPaused() const {
// Pause video if paused by caller or as long as the network is down or the
// pacer queue has grown too large in buffered mode.
- if (encoder_paused_) {
- return true;
- }
- if (pacer_->ExpectedQueueTimeMs() > PacedSender::kMaxQueueLengthMs) {
- // Too much data in pacer queue, drop frame.
- return true;
- }
- return !network_is_transmitting_;
+ // If the pacer queue has grown to large or the network is down,
+ // last_observed_bitrate_bps_ will be 0.
+ return encoder_paused_ || video_suspended_ || last_observed_bitrate_bps_ == 0;
}
void ViEEncoder::TraceFrameDropStart() {
@@ -313,15 +214,10 @@ void ViEEncoder::TraceFrameDropEnd() {
}
void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) {
- if (!send_payload_router_->active()) {
- // We've paused or we have no channels attached, don't waste resources on
- // encoding.
- return;
- }
VideoCodecType codec_type;
{
rtc::CritScope lock(&data_cs_);
- time_of_last_frame_activity_ms_ = TickTime::MillisecondTimestamp();
+ time_of_last_frame_activity_ms_ = rtc::TimeMillis();
if (EncoderPaused()) {
TraceFrameDropStart();
return;
@@ -334,7 +230,7 @@ void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) {
"Encode");
const VideoFrame* frame_to_send = &video_frame;
// TODO(wuchengli): support texture frames.
- if (!video_frame.native_handle()) {
+ if (!video_frame.video_frame_buffer()->native_handle()) {
// Pass frame via preprocessor.
frame_to_send = vp_->PreprocessFrame(video_frame);
if (!frame_to_send) {
@@ -343,15 +239,6 @@ void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) {
}
}
- // If we haven't resampled the frame and we have a FrameCallback, we need to
- // make a deep copy of |video_frame|.
- VideoFrame copied_frame;
- if (pre_encode_callback_) {
- copied_frame.CopyFrame(*frame_to_send);
- pre_encode_callback_->FrameCallback(&copied_frame);
- frame_to_send = &copied_frame;
- }
-
if (codec_type == webrtc::kVideoCodecVP8) {
webrtc::CodecSpecificInfo codec_specific_info;
codec_specific_info.codecType = webrtc::kVideoCodecVP8;
@@ -369,26 +256,14 @@ void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) {
has_received_rpsi_ = false;
}
- vcm_->AddVideoFrame(*frame_to_send, vp_->GetContentMetrics(),
- &codec_specific_info);
+ video_sender_.AddVideoFrame(*frame_to_send, &codec_specific_info);
return;
}
- vcm_->AddVideoFrame(*frame_to_send);
+ video_sender_.AddVideoFrame(*frame_to_send, nullptr);
}
void ViEEncoder::SendKeyFrame() {
- vcm_->IntraFrameRequest(0);
-}
-
-uint32_t ViEEncoder::LastObservedBitrateBps() const {
- rtc::CritScope lock(&data_cs_);
- return last_observed_bitrate_bps_;
-}
-
-int ViEEncoder::CodecTargetBitrate(uint32_t* bitrate) const {
- if (vcm_->Bitrate(bitrate) != 0)
- return -1;
- return 0;
+ video_sender_.IntraFrameRequest(0);
}
void ViEEncoder::SetProtectionMethod(bool nack, bool fec) {
@@ -400,7 +275,7 @@ void ViEEncoder::SetProtectionMethod(bool nack, bool fec) {
} else {
protection_mode = nack ? kProtectionNack : kProtectionNone;
}
- vcm_->SetVideoProtection(protection_mode, true);
+ video_sender_.SetVideoProtection(protection_mode);
}
void ViEEncoder::OnSetRates(uint32_t bitrate_bps, int framerate) {
@@ -408,132 +283,76 @@ void ViEEncoder::OnSetRates(uint32_t bitrate_bps, int framerate) {
stats_proxy_->OnSetRates(bitrate_bps, framerate);
}
-int32_t ViEEncoder::SendData(const uint8_t payload_type,
- const EncodedImage& encoded_image,
- const RTPFragmentationHeader* fragmentation_header,
- const RTPVideoHeader* rtp_video_hdr) {
- RTC_DCHECK(send_payload_router_);
-
+int32_t ViEEncoder::Encoded(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const RTPFragmentationHeader* fragmentation) {
{
rtc::CritScope lock(&data_cs_);
- time_of_last_frame_activity_ms_ = TickTime::MillisecondTimestamp();
+ time_of_last_frame_activity_ms_ = rtc::TimeMillis();
+ }
+ if (stats_proxy_) {
+ stats_proxy_->OnSendEncodedImage(encoded_image, codec_specific_info);
}
- if (stats_proxy_)
- stats_proxy_->OnSendEncodedImage(encoded_image, rtp_video_hdr);
+ int success = 0;
+ {
+ rtc::CritScope lock(&sink_cs_);
+ success = sink_->Encoded(encoded_image, codec_specific_info, fragmentation);
+ }
- bool success = send_payload_router_->RoutePayload(
- encoded_image._frameType, payload_type, encoded_image._timeStamp,
- encoded_image.capture_time_ms_, encoded_image._buffer,
- encoded_image._length, fragmentation_header, rtp_video_hdr);
overuse_detector_->FrameSent(encoded_image._timeStamp);
- return success ? 0 : -1;
-}
-
-void ViEEncoder::OnEncoderImplementationName(
- const char* implementation_name) {
- if (stats_proxy_)
- stats_proxy_->OnEncoderImplementationName(implementation_name);
+ return success;
}
-int32_t ViEEncoder::SendStatistics(const uint32_t bit_rate,
- const uint32_t frame_rate) {
+void ViEEncoder::SendStatistics(uint32_t bit_rate,
+ uint32_t frame_rate,
+ const std::string& encoder_name) {
if (stats_proxy_)
- stats_proxy_->OnOutgoingRate(frame_rate, bit_rate);
- return 0;
+ stats_proxy_->OnEncoderStatsUpdate(frame_rate, bit_rate, encoder_name);
}
-void ViEEncoder::OnReceivedSLI(uint32_t /*ssrc*/,
- uint8_t picture_id) {
+void ViEEncoder::OnReceivedSLI(uint8_t picture_id) {
rtc::CritScope lock(&data_cs_);
picture_id_sli_ = picture_id;
has_received_sli_ = true;
}
-void ViEEncoder::OnReceivedRPSI(uint32_t /*ssrc*/,
- uint64_t picture_id) {
+void ViEEncoder::OnReceivedRPSI(uint64_t picture_id) {
rtc::CritScope lock(&data_cs_);
picture_id_rpsi_ = picture_id;
has_received_rpsi_ = true;
}
-void ViEEncoder::OnReceivedIntraFrameRequest(uint32_t ssrc) {
+void ViEEncoder::OnReceivedIntraFrameRequest(size_t stream_index) {
// Key frame request from remote side, signal to VCM.
TRACE_EVENT0("webrtc", "OnKeyFrameRequest");
-
- for (size_t i = 0; i < ssrcs_.size(); ++i) {
- if (ssrcs_[i] != ssrc)
- continue;
- int64_t now_ms = TickTime::MillisecondTimestamp();
- {
- rtc::CritScope lock(&data_cs_);
- if (time_last_intra_request_ms_[i] + kMinKeyFrameRequestIntervalMs >
- now_ms) {
- return;
- }
- time_last_intra_request_ms_[i] = now_ms;
- }
- vcm_->IntraFrameRequest(static_cast<int>(i));
- return;
- }
- RTC_NOTREACHED() << "Should not receive keyframe requests on unknown SSRCs.";
+ video_sender_.IntraFrameRequest(stream_index);
}
void ViEEncoder::OnBitrateUpdated(uint32_t bitrate_bps,
uint8_t fraction_lost,
int64_t round_trip_time_ms) {
- LOG(LS_VERBOSE) << "OnBitrateUpdated, bitrate" << bitrate_bps
+ LOG(LS_VERBOSE) << "OnBitrateUpdated, bitrate " << bitrate_bps
<< " packet loss " << static_cast<int>(fraction_lost)
<< " rtt " << round_trip_time_ms;
- RTC_DCHECK(send_payload_router_);
- vcm_->SetChannelParameters(bitrate_bps, fraction_lost, round_trip_time_ms);
- bool video_is_suspended = vcm_->VideoSuspended();
+ video_sender_.SetChannelParameters(bitrate_bps, fraction_lost,
+ round_trip_time_ms);
+ bool video_is_suspended = video_sender_.VideoSuspended();
bool video_suspension_changed;
- VideoCodec send_codec;
{
rtc::CritScope lock(&data_cs_);
last_observed_bitrate_bps_ = bitrate_bps;
video_suspension_changed = video_suspended_ != video_is_suspended;
video_suspended_ = video_is_suspended;
- send_codec = encoder_config_;
}
- SimulcastStream* stream_configs = send_codec.simulcastStream;
- // Allocate the bandwidth between the streams.
- std::vector<uint32_t> stream_bitrates = AllocateStreamBitrates(
- bitrate_bps, stream_configs, send_codec.numberOfSimulcastStreams);
- send_payload_router_->SetTargetSendBitrates(stream_bitrates);
-
if (!video_suspension_changed)
return;
// Video suspend-state changed, inform codec observer.
- LOG(LS_INFO) << "Video suspend state changed " << video_is_suspended
- << " for ssrc " << ssrcs_[0];
+ LOG(LS_INFO) << "Video suspend state changed " << video_is_suspended;
+
if (stats_proxy_)
stats_proxy_->OnSuspendChange(video_is_suspended);
}
-void ViEEncoder::RegisterPostEncodeImageCallback(
- EncodedImageCallback* post_encode_callback) {
- vcm_->RegisterPostEncodeImageCallback(post_encode_callback);
-}
-
-QMVideoSettingsCallback::QMVideoSettingsCallback(VideoProcessing* vpm)
- : vp_(vpm) {
-}
-
-QMVideoSettingsCallback::~QMVideoSettingsCallback() {
-}
-
-int32_t QMVideoSettingsCallback::SetVideoQMSettings(
- const uint32_t frame_rate,
- const uint32_t width,
- const uint32_t height) {
- return vp_->SetTargetResolution(width, height, frame_rate);
-}
-
-void QMVideoSettingsCallback::SetTargetFramerate(int frame_rate) {
- vp_->SetTargetFramerate(frame_rate);
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/vie_encoder.h b/chromium/third_party/webrtc/video/vie_encoder.h
index ce1e508eba9..556ed449955 100644
--- a/chromium/third_party/webrtc/video/vie_encoder.h
+++ b/chromium/third_party/webrtc/video/vie_encoder.h
@@ -12,15 +12,18 @@
#define WEBRTC_VIDEO_VIE_ENCODER_H_
#include <memory>
+#include <string>
#include <vector>
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_types.h"
-#include "webrtc/frame_callback.h"
+#include "webrtc/video_encoder.h"
+#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
+#include "webrtc/modules/video_coding/video_coding_impl.h"
#include "webrtc/modules/video_processing/include/video_processing.h"
#include "webrtc/typedefs.h"
@@ -30,84 +33,79 @@ class Config;
class EncodedImageCallback;
class OveruseFrameDetector;
class PacedSender;
-class PayloadRouter;
class ProcessThread;
-class QMVideoSettingsCallback;
class SendStatisticsProxy;
class ViEBitrateObserver;
class ViEEffectFilter;
-class VideoCodingModule;
class VideoEncoder;
+// VieEncoder represent a video encoder that accepts raw video frames as input
+// and produces an encoded bit stream.
+// Usage:
+// 1. Instantiate
+// 2. Call Init
+// 3. Call RegisterExternalEncoder if available.
+// 4. Call SetEncoder with the codec settings and the object that shall receive
+// the encoded bit stream.
+// 5. Call Start.
+// 6. For each available raw video frame call EncodeVideoFrame.
class ViEEncoder : public VideoEncoderRateObserver,
- public VCMPacketizationCallback,
+ public EncodedImageCallback,
public VCMSendStatisticsCallback {
public:
friend class ViEBitrateObserver;
ViEEncoder(uint32_t number_of_cores,
- const std::vector<uint32_t>& ssrcs,
ProcessThread* module_process_thread,
SendStatisticsProxy* stats_proxy,
- I420FrameCallback* pre_encode_callback,
- OveruseFrameDetector* overuse_detector,
- PacedSender* pacer,
- PayloadRouter* payload_router);
+ OveruseFrameDetector* overuse_detector);
~ViEEncoder();
- bool Init();
-
- VideoCodingModule* vcm() const;
-
- void SetNetworkTransmissionState(bool is_transmitting);
+ vcm::VideoSender* video_sender();
// Returns the id of the owning channel.
int Owner() const;
+ void Start();
// Drops incoming packets before they get to the encoder.
void Pause();
- void Restart();
// Codec settings.
int32_t RegisterExternalEncoder(VideoEncoder* encoder,
uint8_t pl_type,
bool internal_source);
int32_t DeRegisterExternalEncoder(uint8_t pl_type);
- void SetEncoder(const VideoCodec& video_codec, int min_transmit_bitrate_bps);
+ void SetEncoder(const VideoCodec& video_codec,
+ int min_transmit_bitrate_bps,
+ size_t max_data_payload_length,
+ EncodedImageCallback* sink);
void EncodeVideoFrame(const VideoFrame& video_frame);
void SendKeyFrame();
uint32_t LastObservedBitrateBps() const;
- int CodecTargetBitrate(uint32_t* bitrate) const;
// Loss protection. Must be called before SetEncoder() to have max packet size
// updated according to protection.
- // TODO(pbos): Set protection method on construction or extract vcm_ outside
- // this class and set it on construction there.
+ // TODO(pbos): Set protection method on construction.
void SetProtectionMethod(bool nack, bool fec);
// Implements VideoEncoderRateObserver.
void OnSetRates(uint32_t bitrate_bps, int framerate) override;
- // Implements VCMPacketizationCallback.
- int32_t SendData(uint8_t payload_type,
- const EncodedImage& encoded_image,
- const RTPFragmentationHeader* fragmentation_header,
- const RTPVideoHeader* rtp_video_hdr) override;
- void OnEncoderImplementationName(const char* implementation_name) override;
+ // Implements EncodedImageCallback.
+ int32_t Encoded(const EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const RTPFragmentationHeader* fragmentation) override;
// Implements VideoSendStatisticsCallback.
- int32_t SendStatistics(const uint32_t bit_rate,
- const uint32_t frame_rate) override;
+ void SendStatistics(uint32_t bit_rate,
+ uint32_t frame_rate,
+ const std::string& encoder_name) override;
// virtual to test EncoderStateFeedback with mocks.
- virtual void OnReceivedIntraFrameRequest(uint32_t ssrc);
- virtual void OnReceivedSLI(uint32_t ssrc, uint8_t picture_id);
- virtual void OnReceivedRPSI(uint32_t ssrc, uint64_t picture_id);
-
- // New-style callbacks, used by VideoSendStream.
- void RegisterPostEncodeImageCallback(
- EncodedImageCallback* post_encode_callback);
+ virtual void OnReceivedIntraFrameRequest(size_t stream_index);
+ virtual void OnReceivedSLI(uint8_t picture_id);
+ virtual void OnReceivedRPSI(uint64_t picture_id);
int GetPaddingNeededBps() const;
@@ -121,19 +119,14 @@ class ViEEncoder : public VideoEncoderRateObserver,
void TraceFrameDropEnd() EXCLUSIVE_LOCKS_REQUIRED(data_cs_);
const uint32_t number_of_cores_;
- const std::vector<uint32_t> ssrcs_;
const std::unique_ptr<VideoProcessing> vp_;
- const std::unique_ptr<QMVideoSettingsCallback> qm_callback_;
- const std::unique_ptr<VideoCodingModule> vcm_;
+ vcm::VideoSender video_sender_;
rtc::CriticalSection data_cs_;
SendStatisticsProxy* const stats_proxy_;
- I420FrameCallback* const pre_encode_callback_;
OveruseFrameDetector* const overuse_detector_;
- PacedSender* const pacer_;
- PayloadRouter* const send_payload_router_;
// The time we last received an input frame or encoded frame. This is used to
// track when video is stopped long enough that we also want to stop sending
@@ -142,10 +135,11 @@ class ViEEncoder : public VideoEncoderRateObserver,
VideoCodec encoder_config_ GUARDED_BY(data_cs_);
int min_transmit_bitrate_bps_ GUARDED_BY(data_cs_);
uint32_t last_observed_bitrate_bps_ GUARDED_BY(data_cs_);
- bool network_is_transmitting_ GUARDED_BY(data_cs_);
bool encoder_paused_ GUARDED_BY(data_cs_);
bool encoder_paused_and_dropped_frame_ GUARDED_BY(data_cs_);
- std::vector<int64_t> time_last_intra_request_ms_ GUARDED_BY(data_cs_);
+
+ rtc::CriticalSection sink_cs_;
+ EncodedImageCallback* sink_ GUARDED_BY(sink_cs_);
ProcessThread* module_process_thread_;
diff --git a/chromium/third_party/webrtc/video/vie_receiver.cc b/chromium/third_party/webrtc/video/vie_receiver.cc
deleted file mode 100644
index e5ec167fa7d..00000000000
--- a/chromium/third_party/webrtc/video/vie_receiver.cc
+++ /dev/null
@@ -1,389 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/video/vie_receiver.h"
-
-#include <vector>
-
-#include "webrtc/base/logging.h"
-#include "webrtc/config.h"
-#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
-#include "webrtc/modules/rtp_rtcp/include/fec_receiver.h"
-#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
-#include "webrtc/modules/rtp_rtcp/include/rtp_cvo.h"
-#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
-#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
-#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
-#include "webrtc/modules/video_coding/include/video_coding.h"
-#include "webrtc/system_wrappers/include/metrics.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
-#include "webrtc/system_wrappers/include/timestamp_extrapolator.h"
-#include "webrtc/system_wrappers/include/trace.h"
-
-namespace webrtc {
-
-static const int kPacketLogIntervalMs = 10000;
-
-ViEReceiver::ViEReceiver(VideoCodingModule* module_vcm,
- RemoteBitrateEstimator* remote_bitrate_estimator,
- RtpFeedback* rtp_feedback)
- : clock_(Clock::GetRealTimeClock()),
- vcm_(module_vcm),
- remote_bitrate_estimator_(remote_bitrate_estimator),
- ntp_estimator_(clock_),
- rtp_payload_registry_(RTPPayloadStrategy::CreateStrategy(false)),
- rtp_header_parser_(RtpHeaderParser::Create()),
- rtp_receiver_(RtpReceiver::CreateVideoReceiver(clock_,
- this,
- rtp_feedback,
- &rtp_payload_registry_)),
- rtp_receive_statistics_(ReceiveStatistics::Create(clock_)),
- fec_receiver_(FecReceiver::Create(this)),
- receiving_(false),
- restored_packet_in_use_(false),
- last_packet_log_ms_(-1) {}
-
-ViEReceiver::~ViEReceiver() {
- UpdateHistograms();
-}
-
-void ViEReceiver::UpdateHistograms() {
- FecPacketCounter counter = fec_receiver_->GetPacketCounter();
- if (counter.num_packets > 0) {
- RTC_LOGGED_HISTOGRAM_PERCENTAGE(
- "WebRTC.Video.ReceivedFecPacketsInPercent",
- static_cast<int>(counter.num_fec_packets * 100 / counter.num_packets));
- }
- if (counter.num_fec_packets > 0) {
- RTC_LOGGED_HISTOGRAM_PERCENTAGE(
- "WebRTC.Video.RecoveredMediaPacketsInPercentOfFec",
- static_cast<int>(counter.num_recovered_packets * 100 /
- counter.num_fec_packets));
- }
-}
-
-bool ViEReceiver::SetReceiveCodec(const VideoCodec& video_codec) {
- int8_t old_pltype = -1;
- if (rtp_payload_registry_.ReceivePayloadType(
- video_codec.plName, kVideoPayloadTypeFrequency, 0,
- video_codec.maxBitrate, &old_pltype) != -1) {
- rtp_payload_registry_.DeRegisterReceivePayload(old_pltype);
- }
-
- return rtp_receiver_->RegisterReceivePayload(
- video_codec.plName, video_codec.plType, kVideoPayloadTypeFrequency,
- 0, 0) == 0;
-}
-
-void ViEReceiver::SetNackStatus(bool enable,
- int max_nack_reordering_threshold) {
- if (!enable) {
- // Reset the threshold back to the lower default threshold when NACK is
- // disabled since we no longer will be receiving retransmissions.
- max_nack_reordering_threshold = kDefaultMaxReorderingThreshold;
- }
- rtp_receive_statistics_->SetMaxReorderingThreshold(
- max_nack_reordering_threshold);
- rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
-}
-
-void ViEReceiver::SetRtxPayloadType(int payload_type,
- int associated_payload_type) {
- rtp_payload_registry_.SetRtxPayloadType(payload_type,
- associated_payload_type);
-}
-
-void ViEReceiver::SetUseRtxPayloadMappingOnRestore(bool val) {
- rtp_payload_registry_.set_use_rtx_payload_mapping_on_restore(val);
-}
-
-void ViEReceiver::SetRtxSsrc(uint32_t ssrc) {
- rtp_payload_registry_.SetRtxSsrc(ssrc);
-}
-
-bool ViEReceiver::GetRtxSsrc(uint32_t* ssrc) const {
- return rtp_payload_registry_.GetRtxSsrc(ssrc);
-}
-
-bool ViEReceiver::IsFecEnabled() const {
- return rtp_payload_registry_.ulpfec_payload_type() > -1;
-}
-
-uint32_t ViEReceiver::GetRemoteSsrc() const {
- return rtp_receiver_->SSRC();
-}
-
-int ViEReceiver::GetCsrcs(uint32_t* csrcs) const {
- return rtp_receiver_->CSRCs(csrcs);
-}
-
-void ViEReceiver::Init(const std::vector<RtpRtcp*>& modules) {
- rtp_rtcp_ = modules;
-}
-
-RtpReceiver* ViEReceiver::GetRtpReceiver() const {
- return rtp_receiver_.get();
-}
-
-void ViEReceiver::EnableReceiveRtpHeaderExtension(const std::string& extension,
- int id) {
- RTC_DCHECK(RtpExtension::IsSupportedForVideo(extension));
- RTC_CHECK(rtp_header_parser_->RegisterRtpHeaderExtension(
- StringToRtpExtensionType(extension), id));
-}
-
-int32_t ViEReceiver::OnReceivedPayloadData(const uint8_t* payload_data,
- const size_t payload_size,
- const WebRtcRTPHeader* rtp_header) {
- RTC_DCHECK(vcm_);
- WebRtcRTPHeader rtp_header_with_ntp = *rtp_header;
- rtp_header_with_ntp.ntp_time_ms =
- ntp_estimator_.Estimate(rtp_header->header.timestamp);
- if (vcm_->IncomingPacket(payload_data,
- payload_size,
- rtp_header_with_ntp) != 0) {
- // Check this...
- return -1;
- }
- return 0;
-}
-
-bool ViEReceiver::OnRecoveredPacket(const uint8_t* rtp_packet,
- size_t rtp_packet_length) {
- RTPHeader header;
- if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
- return false;
- }
- header.payload_type_frequency = kVideoPayloadTypeFrequency;
- bool in_order = IsPacketInOrder(header);
- return ReceivePacket(rtp_packet, rtp_packet_length, header, in_order);
-}
-
-bool ViEReceiver::DeliverRtp(const uint8_t* rtp_packet,
- size_t rtp_packet_length,
- const PacketTime& packet_time) {
- RTC_DCHECK(remote_bitrate_estimator_);
- {
- rtc::CritScope lock(&receive_cs_);
- if (!receiving_) {
- return false;
- }
- }
-
- RTPHeader header;
- if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length,
- &header)) {
- return false;
- }
- size_t payload_length = rtp_packet_length - header.headerLength;
- int64_t arrival_time_ms;
- int64_t now_ms = clock_->TimeInMilliseconds();
- if (packet_time.timestamp != -1)
- arrival_time_ms = (packet_time.timestamp + 500) / 1000;
- else
- arrival_time_ms = now_ms;
-
- {
- // Periodically log the RTP header of incoming packets.
- rtc::CritScope lock(&receive_cs_);
- if (now_ms - last_packet_log_ms_ > kPacketLogIntervalMs) {
- std::stringstream ss;
- ss << "Packet received on SSRC: " << header.ssrc << " with payload type: "
- << static_cast<int>(header.payloadType) << ", timestamp: "
- << header.timestamp << ", sequence number: " << header.sequenceNumber
- << ", arrival time: " << arrival_time_ms;
- if (header.extension.hasTransmissionTimeOffset)
- ss << ", toffset: " << header.extension.transmissionTimeOffset;
- if (header.extension.hasAbsoluteSendTime)
- ss << ", abs send time: " << header.extension.absoluteSendTime;
- LOG(LS_INFO) << ss.str();
- last_packet_log_ms_ = now_ms;
- }
- }
-
- remote_bitrate_estimator_->IncomingPacket(arrival_time_ms, payload_length,
- header, true);
- header.payload_type_frequency = kVideoPayloadTypeFrequency;
-
- bool in_order = IsPacketInOrder(header);
- rtp_payload_registry_.SetIncomingPayloadType(header);
- bool ret = ReceivePacket(rtp_packet, rtp_packet_length, header, in_order);
- // Update receive statistics after ReceivePacket.
- // Receive statistics will be reset if the payload type changes (make sure
- // that the first packet is included in the stats).
- rtp_receive_statistics_->IncomingPacket(
- header, rtp_packet_length, IsPacketRetransmitted(header, in_order));
- return ret;
-}
-
-bool ViEReceiver::ReceivePacket(const uint8_t* packet,
- size_t packet_length,
- const RTPHeader& header,
- bool in_order) {
- if (rtp_payload_registry_.IsEncapsulated(header)) {
- return ParseAndHandleEncapsulatingHeader(packet, packet_length, header);
- }
- const uint8_t* payload = packet + header.headerLength;
- assert(packet_length >= header.headerLength);
- size_t payload_length = packet_length - header.headerLength;
- PayloadUnion payload_specific;
- if (!rtp_payload_registry_.GetPayloadSpecifics(header.payloadType,
- &payload_specific)) {
- return false;
- }
- return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
- payload_specific, in_order);
-}
-
-bool ViEReceiver::ParseAndHandleEncapsulatingHeader(const uint8_t* packet,
- size_t packet_length,
- const RTPHeader& header) {
- if (rtp_payload_registry_.IsRed(header)) {
- int8_t ulpfec_pt = rtp_payload_registry_.ulpfec_payload_type();
- if (packet[header.headerLength] == ulpfec_pt) {
- rtp_receive_statistics_->FecPacketReceived(header, packet_length);
- // Notify vcm about received FEC packets to avoid NACKing these packets.
- NotifyReceiverOfFecPacket(header);
- }
- if (fec_receiver_->AddReceivedRedPacket(
- header, packet, packet_length, ulpfec_pt) != 0) {
- return false;
- }
- return fec_receiver_->ProcessReceivedFec() == 0;
- } else if (rtp_payload_registry_.IsRtx(header)) {
- if (header.headerLength + header.paddingLength == packet_length) {
- // This is an empty packet and should be silently dropped before trying to
- // parse the RTX header.
- return true;
- }
- // Remove the RTX header and parse the original RTP header.
- if (packet_length < header.headerLength)
- return false;
- if (packet_length > sizeof(restored_packet_))
- return false;
- rtc::CritScope lock(&receive_cs_);
- if (restored_packet_in_use_) {
- LOG(LS_WARNING) << "Multiple RTX headers detected, dropping packet.";
- return false;
- }
- if (!rtp_payload_registry_.RestoreOriginalPacket(
- restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
- header)) {
- LOG(LS_WARNING) << "Incoming RTX packet: Invalid RTP header ssrc: "
- << header.ssrc << " payload type: "
- << static_cast<int>(header.payloadType);
- return false;
- }
- restored_packet_in_use_ = true;
- bool ret = OnRecoveredPacket(restored_packet_, packet_length);
- restored_packet_in_use_ = false;
- return ret;
- }
- return false;
-}
-
-void ViEReceiver::NotifyReceiverOfFecPacket(const RTPHeader& header) {
- int8_t last_media_payload_type =
- rtp_payload_registry_.last_received_media_payload_type();
- if (last_media_payload_type < 0) {
- LOG(LS_WARNING) << "Failed to get last media payload type.";
- return;
- }
- // Fake an empty media packet.
- WebRtcRTPHeader rtp_header = {};
- rtp_header.header = header;
- rtp_header.header.payloadType = last_media_payload_type;
- rtp_header.header.paddingLength = 0;
- PayloadUnion payload_specific;
- if (!rtp_payload_registry_.GetPayloadSpecifics(last_media_payload_type,
- &payload_specific)) {
- LOG(LS_WARNING) << "Failed to get payload specifics.";
- return;
- }
- rtp_header.type.Video.codec = payload_specific.Video.videoCodecType;
- rtp_header.type.Video.rotation = kVideoRotation_0;
- if (header.extension.hasVideoRotation) {
- rtp_header.type.Video.rotation =
- ConvertCVOByteToVideoRotation(header.extension.videoRotation);
- }
- OnReceivedPayloadData(nullptr, 0, &rtp_header);
-}
-
-bool ViEReceiver::DeliverRtcp(const uint8_t* rtcp_packet,
- size_t rtcp_packet_length) {
- // Should be set by owner at construction time.
- RTC_DCHECK(!rtp_rtcp_.empty());
- {
- rtc::CritScope lock(&receive_cs_);
- if (!receiving_) {
- return false;
- }
- }
-
- for (RtpRtcp* rtp_rtcp : rtp_rtcp_)
- rtp_rtcp->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);
-
- int64_t rtt = 0;
- rtp_rtcp_[0]->RTT(rtp_receiver_->SSRC(), &rtt, nullptr, nullptr, nullptr);
- if (rtt == 0) {
- // Waiting for valid rtt.
- return true;
- }
- uint32_t ntp_secs = 0;
- uint32_t ntp_frac = 0;
- uint32_t rtp_timestamp = 0;
- if (rtp_rtcp_[0]->RemoteNTP(&ntp_secs, &ntp_frac, nullptr, nullptr,
- &rtp_timestamp) != 0) {
- // Waiting for RTCP.
- return true;
- }
- ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
-
- return true;
-}
-
-void ViEReceiver::StartReceive() {
- rtc::CritScope lock(&receive_cs_);
- receiving_ = true;
-}
-
-void ViEReceiver::StopReceive() {
- rtc::CritScope lock(&receive_cs_);
- receiving_ = false;
-}
-
-ReceiveStatistics* ViEReceiver::GetReceiveStatistics() const {
- return rtp_receive_statistics_.get();
-}
-
-bool ViEReceiver::IsPacketInOrder(const RTPHeader& header) const {
- StreamStatistician* statistician =
- rtp_receive_statistics_->GetStatistician(header.ssrc);
- if (!statistician)
- return false;
- return statistician->IsPacketInOrder(header.sequenceNumber);
-}
-
-bool ViEReceiver::IsPacketRetransmitted(const RTPHeader& header,
- bool in_order) const {
- // Retransmissions are handled separately if RTX is enabled.
- if (rtp_payload_registry_.RtxEnabled())
- return false;
- StreamStatistician* statistician =
- rtp_receive_statistics_->GetStatistician(header.ssrc);
- if (!statistician)
- return false;
- // Check if this is a retransmission.
- int64_t min_rtt = 0;
- rtp_rtcp_[0]->RTT(rtp_receiver_->SSRC(), nullptr, nullptr, &min_rtt, nullptr);
- return !in_order &&
- statistician->IsRetransmitOfOldPacket(header, min_rtt);
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video/vie_remb.cc b/chromium/third_party/webrtc/video/vie_remb.cc
index b759f5b72c4..a5ccc52ba6d 100644
--- a/chromium/third_party/webrtc/video/vie_remb.cc
+++ b/chromium/third_party/webrtc/video/vie_remb.cc
@@ -16,7 +16,6 @@
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/utility/include/process_thread.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/video/vie_remb_unittest.cc b/chromium/third_party/webrtc/video/vie_remb_unittest.cc
index 5f72b967cba..2c33401b45c 100644
--- a/chromium/third_party/webrtc/video/vie_remb_unittest.cc
+++ b/chromium/third_party/webrtc/video/vie_remb_unittest.cc
@@ -8,9 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-
-// This file includes unit tests for ViERemb.
-
#include <memory>
#include <vector>
@@ -19,7 +16,6 @@
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
#include "webrtc/modules/utility/include/mock/mock_process_thread.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/video/vie_remb.h"
using ::testing::_;
diff --git a/chromium/third_party/webrtc/video/vie_sync_module.cc b/chromium/third_party/webrtc/video/vie_sync_module.cc
index af57ab4b76d..2e62ff81439 100644
--- a/chromium/third_party/webrtc/video/vie_sync_module.cc
+++ b/chromium/third_party/webrtc/video/vie_sync_module.cc
@@ -12,10 +12,11 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
-#include "webrtc/modules/video_coding/include/video_coding.h"
+#include "webrtc/modules/video_coding/video_coding_impl.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/video/stream_synchronization.h"
#include "webrtc/video_frame.h"
@@ -48,14 +49,14 @@ int UpdateMeasurements(StreamSynchronization::Measurements* stream,
}
} // namespace
-ViESyncModule::ViESyncModule(VideoCodingModule* vcm)
- : vcm_(vcm),
+ViESyncModule::ViESyncModule(vcm::VideoReceiver* video_receiver)
+ : video_receiver_(video_receiver),
clock_(Clock::GetRealTimeClock()),
- video_receiver_(nullptr),
+ rtp_receiver_(nullptr),
video_rtp_rtcp_(nullptr),
voe_channel_id_(-1),
voe_sync_interface_(nullptr),
- last_sync_time_(TickTime::Now()),
+ last_sync_time_(rtc::TimeNanos()),
sync_() {}
ViESyncModule::~ViESyncModule() {
@@ -64,20 +65,19 @@ ViESyncModule::~ViESyncModule() {
void ViESyncModule::ConfigureSync(int voe_channel_id,
VoEVideoSync* voe_sync_interface,
RtpRtcp* video_rtcp_module,
- RtpReceiver* video_receiver) {
+ RtpReceiver* rtp_receiver) {
if (voe_channel_id != -1)
RTC_DCHECK(voe_sync_interface);
rtc::CritScope lock(&data_cs_);
// Prevent expensive no-ops.
if (voe_channel_id_ == voe_channel_id &&
voe_sync_interface_ == voe_sync_interface &&
- video_receiver_ == video_receiver &&
- video_rtp_rtcp_ == video_rtcp_module) {
+ rtp_receiver_ == rtp_receiver && video_rtp_rtcp_ == video_rtcp_module) {
return;
}
voe_channel_id_ = voe_channel_id;
voe_sync_interface_ = voe_sync_interface;
- video_receiver_ = video_receiver;
+ rtp_receiver_ = rtp_receiver;
video_rtp_rtcp_ = video_rtcp_module;
sync_.reset(
new StreamSynchronization(video_rtp_rtcp_->SSRC(), voe_channel_id));
@@ -85,14 +85,15 @@ void ViESyncModule::ConfigureSync(int voe_channel_id,
int64_t ViESyncModule::TimeUntilNextProcess() {
const int64_t kSyncIntervalMs = 1000;
- return kSyncIntervalMs - (TickTime::Now() - last_sync_time_).Milliseconds();
+ return kSyncIntervalMs -
+ (rtc::TimeNanos() - last_sync_time_) / rtc::kNumNanosecsPerMillisec;
}
void ViESyncModule::Process() {
rtc::CritScope lock(&data_cs_);
- last_sync_time_ = TickTime::Now();
+ last_sync_time_ = rtc::TimeNanos();
- const int current_video_delay_ms = vcm_->Delay();
+ const int current_video_delay_ms = video_receiver_->Delay();
if (voe_channel_id_ == -1) {
return;
@@ -120,7 +121,7 @@ void ViESyncModule::Process() {
assert(voice_receiver);
if (UpdateMeasurements(&video_measurement_, *video_rtp_rtcp_,
- *video_receiver_) != 0) {
+ *rtp_receiver_) != 0) {
return;
}
@@ -154,7 +155,7 @@ void ViESyncModule::Process() {
voe_channel_id_, target_audio_delay_ms) == -1) {
LOG(LS_ERROR) << "Error setting voice delay.";
}
- vcm_->SetMinimumPlayoutDelay(target_video_delay_ms);
+ video_receiver_->SetMinimumPlayoutDelay(target_video_delay_ms);
}
bool ViESyncModule::GetStreamSyncOffsetInMs(const VideoFrame& frame,
diff --git a/chromium/third_party/webrtc/video/vie_sync_module.h b/chromium/third_party/webrtc/video/vie_sync_module.h
index 2b499ff4d38..18b6c5d0948 100644
--- a/chromium/third_party/webrtc/video/vie_sync_module.h
+++ b/chromium/third_party/webrtc/video/vie_sync_module.h
@@ -18,7 +18,6 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/include/module.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/video/stream_synchronization.h"
#include "webrtc/voice_engine/include/voe_video_sync.h"
@@ -26,20 +25,23 @@ namespace webrtc {
class Clock;
class RtpRtcp;
-class VideoCodingModule;
class VideoFrame;
class ViEChannel;
class VoEVideoSync;
+namespace vcm {
+class VideoReceiver;
+} // namespace vcm
+
class ViESyncModule : public Module {
public:
- explicit ViESyncModule(VideoCodingModule* vcm);
+ explicit ViESyncModule(vcm::VideoReceiver* vcm);
~ViESyncModule();
void ConfigureSync(int voe_channel_id,
VoEVideoSync* voe_sync_interface,
RtpRtcp* video_rtcp_module,
- RtpReceiver* video_receiver);
+ RtpReceiver* rtp_receiver);
// Implements Module.
int64_t TimeUntilNextProcess() override;
@@ -52,13 +54,13 @@ class ViESyncModule : public Module {
private:
rtc::CriticalSection data_cs_;
- VideoCodingModule* const vcm_;
+ vcm::VideoReceiver* const video_receiver_;
Clock* const clock_;
- RtpReceiver* video_receiver_;
+ RtpReceiver* rtp_receiver_;
RtpRtcp* video_rtp_rtcp_;
int voe_channel_id_;
VoEVideoSync* voe_sync_interface_;
- TickTime last_sync_time_;
+ int64_t last_sync_time_;
std::unique_ptr<StreamSynchronization> sync_;
StreamSynchronization::Measurements audio_measurement_;
StreamSynchronization::Measurements video_measurement_;
diff --git a/chromium/third_party/webrtc/video/webrtc_video.gypi b/chromium/third_party/webrtc/video/webrtc_video.gypi
index f11ce957270..3d6afdee071 100644
--- a/chromium/third_party/webrtc/video/webrtc_video.gypi
+++ b/chromium/third_party/webrtc/video/webrtc_video.gypi
@@ -17,7 +17,6 @@
'<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
'<(webrtc_root)/modules/modules.gyp:video_capture_module',
'<(webrtc_root)/modules/modules.gyp:video_processing',
- '<(webrtc_root)/modules/modules.gyp:video_render_module',
'<(webrtc_root)/modules/modules.gyp:webrtc_utility',
'<(webrtc_root)/modules/modules.gyp:webrtc_video_coding',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
@@ -39,6 +38,10 @@
'video/receive_statistics_proxy.h',
'video/report_block_stats.cc',
'video/report_block_stats.h',
+ 'video/rtp_stream_receiver.cc',
+ 'video/rtp_stream_receiver.h',
+ 'video/send_delay_stats.cc',
+ 'video/send_delay_stats.h',
'video/send_statistics_proxy.cc',
'video/send_statistics_proxy.h',
'video/stream_synchronization.cc',
@@ -51,12 +54,10 @@
'video/video_receive_stream.h',
'video/video_send_stream.cc',
'video/video_send_stream.h',
- 'video/vie_channel.cc',
- 'video/vie_channel.h',
+ 'video/video_stream_decoder.cc',
+ 'video/video_stream_decoder.h',
'video/vie_encoder.cc',
'video/vie_encoder.h',
- 'video/vie_receiver.cc',
- 'video/vie_receiver.h',
'video/vie_remb.cc',
'video/vie_remb.h',
'video/vie_sync_module.cc',
diff --git a/chromium/third_party/webrtc/video_encoder.h b/chromium/third_party/webrtc/video_encoder.h
index 89a6464b7c1..0100239e0a4 100644
--- a/chromium/third_party/webrtc/video_encoder.h
+++ b/chromium/third_party/webrtc/video_encoder.h
@@ -31,6 +31,7 @@ class EncodedImageCallback {
virtual ~EncodedImageCallback() {}
// Callback function which is called when an image has been encoded.
+ // TODO(perkj): Change this to return void.
virtual int32_t Encoded(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) = 0;
@@ -124,7 +125,6 @@ class VideoEncoder {
virtual int32_t SetPeriodicKeyFrames(bool enable) { return -1; }
virtual void OnDroppedFrame() {}
- virtual int GetTargetFramerate() { return -1; }
virtual bool SupportsNativeHandle() const { return false; }
virtual const char* ImplementationName() const { return "unknown"; }
};
@@ -152,7 +152,6 @@ class VideoEncoderSoftwareFallbackWrapper : public VideoEncoder {
int32_t SetRates(uint32_t bitrate, uint32_t framerate) override;
void OnDroppedFrame() override;
- int GetTargetFramerate() override;
bool SupportsNativeHandle() const override;
const char* ImplementationName() const override;
diff --git a/chromium/third_party/webrtc/video_engine_tests_apk.isolate b/chromium/third_party/webrtc/video_engine_tests_apk.isolate
new file mode 100644
index 00000000000..6f4c3d41cc2
--- /dev/null
+++ b/chromium/third_party/webrtc/video_engine_tests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../build/android/android.isolate',
+ 'video_engine_tests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_video_engine_tests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../build/config/',
+ '../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/video_engine_tests_apk/',
+ '<(PRODUCT_DIR)/bin/run_video_engine_tests',
+ 'video_engine_tests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/video_frame.h b/chromium/third_party/webrtc/video_frame.h
index 28a6b8716d3..4dc3411e98a 100644
--- a/chromium/third_party/webrtc/video_frame.h
+++ b/chromium/third_party/webrtc/video_frame.h
@@ -65,20 +65,9 @@ class VideoFrame {
// reference to the video buffer also retained by |videoFrame|.
void ShallowCopy(const VideoFrame& videoFrame);
- // Release frame buffer and reset time stamps.
- void Reset();
-
- // Get pointer to buffer per plane.
- uint8_t* buffer(PlaneType type);
- // Overloading with const.
- const uint8_t* buffer(PlaneType type) const;
-
// Get allocated size per plane.
int allocated_size(PlaneType type) const;
- // Get allocated stride per plane.
- int stride(PlaneType type) const;
-
// Get frame width.
int width() const;
@@ -125,13 +114,12 @@ class VideoFrame {
// Return true if underlying plane buffers are of zero size, false if not.
bool IsZeroSize() const;
- // Return the handle of the underlying video frame. This is used when the
- // frame is backed by a texture. The object should be destroyed when it is no
- // longer in use, so the underlying resource can be freed.
- void* native_handle() const;
-
- // Return the underlying buffer.
- rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer() const;
+ // Return the underlying buffer. Never nullptr for a properly
+ // initialized VideoFrame.
+ // Creating a new reference breaks the HasOneRef and IsMutable
+ // logic. So return a const ref to our reference.
+ const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& video_frame_buffer()
+ const;
// Set the underlying buffer.
void set_video_frame_buffer(
@@ -161,6 +149,7 @@ class EncodedImage {
static size_t GetBufferPaddingBytes(VideoCodecType codec_type);
EncodedImage() : EncodedImage(nullptr, 0, 0) {}
+
EncodedImage(uint8_t* buffer, size_t length, size_t size)
: _buffer(buffer), _length(length), _size(size) {}
@@ -186,6 +175,7 @@ class EncodedImage {
uint8_t* _buffer;
size_t _length;
size_t _size;
+ VideoRotation rotation_ = kVideoRotation_0;
bool _completeFrame = false;
AdaptReason adapt_reason_;
int qp_ = -1; // Quantizer value.
diff --git a/chromium/third_party/webrtc/video_receive_stream.h b/chromium/third_party/webrtc/video_receive_stream.h
index 109c2aa15b8..167df47dff4 100644
--- a/chromium/third_party/webrtc/video_receive_stream.h
+++ b/chromium/third_party/webrtc/video_receive_stream.h
@@ -17,17 +17,16 @@
#include <vector>
#include "webrtc/common_types.h"
+#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/config.h"
-#include "webrtc/frame_callback.h"
-#include "webrtc/stream.h"
-#include "webrtc/transport.h"
#include "webrtc/media/base/videosinkinterface.h"
+#include "webrtc/transport.h"
namespace webrtc {
class VideoDecoder;
-class VideoReceiveStream : public ReceiveStream {
+class VideoReceiveStream {
public:
// TODO(mflodman) Move all these settings to VideoDecoder and move the
// declaration to common_types.h.
@@ -176,8 +175,18 @@ class VideoReceiveStream : public ReceiveStream {
int target_delay_ms = 0;
};
+ // Starts stream activity.
+ // When a stream is active, it can receive, process and deliver packets.
+ virtual void Start() = 0;
+ // Stops stream activity.
+ // When a stream is stopped, it can't receive, process or deliver packets.
+ virtual void Stop() = 0;
+
// TODO(pbos): Add info on currently-received codec to Stats.
virtual Stats GetStats() const = 0;
+
+ protected:
+ virtual ~VideoReceiveStream() {}
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/video_send_stream.h b/chromium/third_party/webrtc/video_send_stream.h
index 7815acfc806..886367f0a50 100644
--- a/chromium/third_party/webrtc/video_send_stream.h
+++ b/chromium/third_party/webrtc/video_send_stream.h
@@ -15,9 +15,9 @@
#include <string>
#include "webrtc/common_types.h"
+#include "webrtc/common_video/include/frame_callback.h"
#include "webrtc/config.h"
-#include "webrtc/frame_callback.h"
-#include "webrtc/stream.h"
+#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/transport.h"
#include "webrtc/media/base/videosinkinterface.h"
@@ -38,7 +38,7 @@ class VideoCaptureInput {
virtual ~VideoCaptureInput() {}
};
-class VideoSendStream : public SendStream {
+class VideoSendStream {
public:
struct StreamStats {
FrameCounts frame_counts;
@@ -139,7 +139,7 @@ class VideoSendStream : public SendStream {
// Called for each I420 frame before encoding the frame. Can be used for
// effects, snapshots etc. 'nullptr' disables the callback.
- I420FrameCallback* pre_encode_callback = nullptr;
+ rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback = nullptr;
// Called for each encoded frame, e.g. used for file storage. 'nullptr'
// disables the callback. Also measures timing and passes the time
@@ -166,6 +166,13 @@ class VideoSendStream : public SendStream {
bool suspend_below_min_bitrate = false;
};
+ // Starts stream activity.
+ // When a stream is active, it can receive, process and deliver packets.
+ virtual void Start() = 0;
+ // Stops stream activity.
+ // When a stream is stopped, it can't receive, process or deliver packets.
+ virtual void Stop() = 0;
+
// Gets interface used to insert captured frames. Valid as long as the
// VideoSendStream is valid.
virtual VideoCaptureInput* Input() = 0;
@@ -176,6 +183,9 @@ class VideoSendStream : public SendStream {
virtual void ReconfigureVideoEncoder(const VideoEncoderConfig& config) = 0;
virtual Stats GetStats() = 0;
+
+ protected:
+ virtual ~VideoSendStream() {}
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/voice_engine/channel.cc b/chromium/third_party/webrtc/voice_engine/channel.cc
index b9adde7e3ec..d9184c828ed 100644
--- a/chromium/third_party/webrtc/voice_engine/channel.cc
+++ b/chromium/third_party/webrtc/voice_engine/channel.cc
@@ -72,13 +72,11 @@ class TransportFeedbackProxy : public TransportFeedbackObserver {
}
// Implements TransportFeedbackObserver.
- void AddPacket(uint16_t sequence_number,
- size_t length,
- bool was_paced) override {
+ void AddPacket(uint16_t sequence_number, size_t length) override {
RTC_DCHECK(pacer_thread_.CalledOnValidThread());
rtc::CritScope lock(&crit_);
if (feedback_observer_)
- feedback_observer_->AddPacket(sequence_number, length, was_paced);
+ feedback_observer_->AddPacket(sequence_number, length);
}
void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
RTC_DCHECK(network_thread_.CalledOnValidThread());
@@ -476,22 +474,32 @@ bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
}
-int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame) {
+MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
+ int32_t id,
+ AudioFrame* audioFrame) {
if (event_log_) {
unsigned int ssrc;
RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
event_log_->LogAudioPlayout(ssrc);
}
// Get 10ms raw PCM data from the ACM (mixer limits output frequency)
- if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame) ==
- -1) {
+ bool muted;
+ if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame,
+ &muted) == -1) {
WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::GetAudioFrame() PlayoutData10Ms() failed!");
// In all likelihood, the audio in this frame is garbage. We return an
// error so that the audio mixer module doesn't add it to the mix. As
// a result, it won't be played out and the actions skipped here are
// irrelevant.
- return -1;
+ return MixerParticipant::AudioFrameInfo::kError;
+ }
+
+ if (muted) {
+ // TODO(henrik.lundin): We should be able to do better than this. But we
+ // will have to go through all the cases below where the audio samples may
+ // be used, and handle the muted case in some way.
+ audioFrame->Mute();
}
if (_RxVadDetection) {
@@ -563,6 +571,7 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame) {
// Mix decoded PCM output with file if file mixing is enabled
if (state.output_file_playing) {
MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
+ muted = false; // We may have added non-zero samples.
}
// External media
@@ -587,6 +596,7 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame) {
}
// Measure audio level (0-9)
+ // TODO(henrik.lundin) Use the |muted| information here too.
_outputAudioLevel.ComputeLevel(*audioFrame);
if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
@@ -619,7 +629,8 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame) {
}
}
- return 0;
+ return muted ? MixerParticipant::AudioFrameInfo::kMuted
+ : MixerParticipant::AudioFrameInfo::kNormal;
}
int32_t Channel::NeededFrequency(int32_t id) const {
@@ -811,6 +822,7 @@ Channel::Channel(int32_t channelId,
}
acm_config.neteq_config.enable_fast_accelerate =
config.Get<NetEqFastAccelerate>().enabled;
+ acm_config.neteq_config.enable_muted_state = true;
audio_coding_.reset(AudioCodingModule::Create(acm_config));
_outputAudioLevel.Clear();
@@ -1449,12 +1461,11 @@ int Channel::SetOpusDtx(bool enable_dtx) {
return 0;
}
-int32_t Channel::RegisterExternalTransport(Transport& transport) {
+int32_t Channel::RegisterExternalTransport(Transport* transport) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::RegisterExternalTransport()");
rtc::CritScope cs(&_callbackCritSect);
-
if (_externalTransport) {
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceError,
@@ -1462,7 +1473,7 @@ int32_t Channel::RegisterExternalTransport(Transport& transport) {
return -1;
}
_externalTransport = true;
- _transportPtr = &transport;
+ _transportPtr = transport;
return 0;
}
@@ -1471,22 +1482,21 @@ int32_t Channel::DeRegisterExternalTransport() {
"Channel::DeRegisterExternalTransport()");
rtc::CritScope cs(&_callbackCritSect);
-
- if (!_transportPtr) {
+ if (_transportPtr) {
+ WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+ "DeRegisterExternalTransport() all transport is disabled");
+ } else {
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceWarning,
"DeRegisterExternalTransport() external transport already "
"disabled");
- return 0;
}
_externalTransport = false;
_transportPtr = NULL;
- WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
- "DeRegisterExternalTransport() all transport is disabled");
return 0;
}
-int32_t Channel::ReceivedRTPPacket(const int8_t* data,
+int32_t Channel::ReceivedRTPPacket(const uint8_t* received_packet,
size_t length,
const PacketTime& packet_time) {
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
@@ -1495,7 +1505,6 @@ int32_t Channel::ReceivedRTPPacket(const int8_t* data,
// Store playout timestamp for the received RTP packet
UpdatePlayoutTimestamp(false);
- const uint8_t* received_packet = reinterpret_cast<const uint8_t*>(data);
RTPHeader header;
if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
@@ -1585,14 +1594,14 @@ bool Channel::IsPacketRetransmitted(const RTPHeader& header,
return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
}
-int32_t Channel::ReceivedRTCPPacket(const int8_t* data, size_t length) {
+int32_t Channel::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::ReceivedRTCPPacket()");
// Store playout timestamp for the received RTCP packet
UpdatePlayoutTimestamp(true);
// Deliver RTCP packet to RTP/RTCP module for parsing
- if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data, length) == -1) {
+ if (_rtpRtcpModule->IncomingRtcpPacket(data, length) == -1) {
_engineStatisticsPtr->SetLastError(
VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
"Channel::IncomingRTPPacket() RTCP packet is invalid");
@@ -2917,7 +2926,6 @@ void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
if (!pacing_enabled_)
_rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
- rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
if (enable)
audio_coding_->EnableNack(maxNumberOfPackets);
else
diff --git a/chromium/third_party/webrtc/voice_engine/channel.h b/chromium/third_party/webrtc/voice_engine/channel.h
index d22da74ae5f..cd31e7702dd 100644
--- a/chromium/third_party/webrtc/voice_engine/channel.h
+++ b/chromium/third_party/webrtc/voice_engine/channel.h
@@ -218,12 +218,12 @@ class Channel
int SetOpusDtx(bool enable_dtx);
// VoENetwork
- int32_t RegisterExternalTransport(Transport& transport);
+ int32_t RegisterExternalTransport(Transport* transport);
int32_t DeRegisterExternalTransport();
- int32_t ReceivedRTPPacket(const int8_t* data,
+ int32_t ReceivedRTPPacket(const uint8_t* received_packet,
size_t length,
const PacketTime& packet_time);
- int32_t ReceivedRTCPPacket(const int8_t* data, size_t length);
+ int32_t ReceivedRTCPPacket(const uint8_t* data, size_t length);
// VoEFile
int StartPlayingFileLocally(const char* fileName,
@@ -394,7 +394,9 @@ class Channel
bool SendRtcp(const uint8_t* data, size_t len) override;
// From MixerParticipant
- int32_t GetAudioFrame(int32_t id, AudioFrame* audioFrame) override;
+ MixerParticipant::AudioFrameInfo GetAudioFrameWithMuted(
+ int32_t id,
+ AudioFrame* audioFrame) override;
int32_t NeededFrequency(int32_t id) const override;
// From FileCallback
diff --git a/chromium/third_party/webrtc/voice_engine/channel_manager.cc b/chromium/third_party/webrtc/voice_engine/channel_manager.cc
index eac2e50919d..6071f19548e 100644
--- a/chromium/third_party/webrtc/voice_engine/channel_manager.cc
+++ b/chromium/third_party/webrtc/voice_engine/channel_manager.cc
@@ -49,7 +49,7 @@ ChannelManager::ChannelManager(uint32_t instance_id, const Config& config)
: instance_id_(instance_id),
last_channel_id_(-1),
config_(config),
- event_log_(RtcEventLog::Create()) {}
+ event_log_(RtcEventLog::Create(Clock::GetRealTimeClock())) {}
ChannelOwner ChannelManager::CreateChannel() {
return CreateChannelInternal(config_);
diff --git a/chromium/third_party/webrtc/voice_engine/channel_proxy.cc b/chromium/third_party/webrtc/voice_engine/channel_proxy.cc
index 10c88212024..4cc7f5cbbcd 100644
--- a/chromium/third_party/webrtc/voice_engine/channel_proxy.cc
+++ b/chromium/third_party/webrtc/voice_engine/channel_proxy.cc
@@ -158,6 +158,29 @@ void ChannelProxy::SetSink(std::unique_ptr<AudioSinkInterface> sink) {
channel()->SetSink(std::move(sink));
}
+void ChannelProxy::RegisterExternalTransport(Transport* transport) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ int error = channel()->RegisterExternalTransport(transport);
+ RTC_DCHECK_EQ(0, error);
+}
+
+void ChannelProxy::DeRegisterExternalTransport() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ channel()->DeRegisterExternalTransport();
+}
+
+bool ChannelProxy::ReceivedRTPPacket(const uint8_t* packet,
+ size_t length,
+ const PacketTime& packet_time) {
+ // May be called on either worker thread or network thread.
+ return channel()->ReceivedRTPPacket(packet, length, packet_time) == 0;
+}
+
+bool ChannelProxy::ReceivedRTCPPacket(const uint8_t* packet, size_t length) {
+ // May be called on either worker thread or network thread.
+ return channel()->ReceivedRTCPPacket(packet, length) == 0;
+}
+
Channel* ChannelProxy::channel() const {
RTC_DCHECK(channel_owner_.channel());
return channel_owner_.channel();
diff --git a/chromium/third_party/webrtc/voice_engine/channel_proxy.h b/chromium/third_party/webrtc/voice_engine/channel_proxy.h
index 344769e1a91..df0c3f22ef6 100644
--- a/chromium/third_party/webrtc/voice_engine/channel_proxy.h
+++ b/chromium/third_party/webrtc/voice_engine/channel_proxy.h
@@ -25,6 +25,7 @@ namespace webrtc {
class AudioSinkInterface;
class PacketRouter;
class RtpPacketSender;
+class Transport;
class TransportFeedbackObserver;
namespace voe {
@@ -70,9 +71,15 @@ class ChannelProxy {
virtual bool SetSendTelephoneEventPayloadType(int payload_type);
virtual bool SendTelephoneEventOutband(int event, int duration_ms);
-
virtual void SetSink(std::unique_ptr<AudioSinkInterface> sink);
+ virtual void RegisterExternalTransport(Transport* transport);
+ virtual void DeRegisterExternalTransport();
+ virtual bool ReceivedRTPPacket(const uint8_t* packet,
+ size_t length,
+ const PacketTime& packet_time);
+ virtual bool ReceivedRTCPPacket(const uint8_t* packet, size_t length);
+
private:
Channel* channel() const;
diff --git a/chromium/third_party/webrtc/voice_engine/monitor_module.cc b/chromium/third_party/webrtc/voice_engine/monitor_module.cc
index fb3c2b49201..8a1865a95b4 100644
--- a/chromium/third_party/webrtc/voice_engine/monitor_module.cc
+++ b/chromium/third_party/webrtc/voice_engine/monitor_module.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/base/timeutils.h"
#include "webrtc/voice_engine/monitor_module.h"
namespace webrtc {
@@ -17,7 +17,7 @@ namespace voe {
MonitorModule::MonitorModule() :
_observerPtr(NULL),
- _lastProcessTime(TickTime::MillisecondTimestamp())
+ _lastProcessTime(rtc::TimeMillis())
{
}
@@ -52,7 +52,7 @@ MonitorModule::DeRegisterObserver()
int64_t
MonitorModule::TimeUntilNextProcess()
{
- int64_t now = TickTime::MillisecondTimestamp();
+ int64_t now = rtc::TimeMillis();
const int64_t kAverageProcessUpdateTimeMs = 1000;
return kAverageProcessUpdateTimeMs - (now - _lastProcessTime);
}
@@ -60,7 +60,7 @@ MonitorModule::TimeUntilNextProcess()
void
MonitorModule::Process()
{
- _lastProcessTime = TickTime::MillisecondTimestamp();
+ _lastProcessTime = rtc::TimeMillis();
rtc::CritScope lock(&_callbackCritSect);
if (_observerPtr)
{
diff --git a/chromium/third_party/webrtc/voice_engine/shared_data.cc b/chromium/third_party/webrtc/voice_engine/shared_data.cc
index 997f51b4396..7a67561d1ee 100644
--- a/chromium/third_party/webrtc/voice_engine/shared_data.cc
+++ b/chromium/third_party/webrtc/voice_engine/shared_data.cc
@@ -28,7 +28,7 @@ SharedData::SharedData(const Config& config)
_engineStatistics(_gInstanceCounter),
_audioDevicePtr(NULL),
_moduleProcessThreadPtr(
- rtc::ScopedToUnique(ProcessThread::Create("VoiceProcessThread"))) {
+ ProcessThread::Create("VoiceProcessThread")) {
Trace::CreateTrace();
if (OutputMixer::Create(_outputMixerPtr, _gInstanceCounter) == 0)
{
diff --git a/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/conference_transport.cc b/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/conference_transport.cc
index 086eeab7b1b..b15d72fecbb 100644
--- a/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/conference_transport.cc
+++ b/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/conference_transport.cc
@@ -131,7 +131,7 @@ void ConferenceTransport::StorePacket(Packet::Type type,
size_t len) {
{
rtc::CritScope lock(&pq_crit_);
- packet_queue_.push_back(Packet(type, data, len, rtc::Time()));
+ packet_queue_.push_back(Packet(type, data, len, rtc::TimeMillis()));
}
packet_event_->Set();
}
diff --git a/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/conference_transport.h b/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/conference_transport.h
index 8fd74577112..bbdf5015d4a 100644
--- a/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/conference_transport.h
+++ b/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/conference_transport.h
@@ -108,7 +108,7 @@ class ConferenceTransport: public webrtc::Transport {
enum Type { Rtp, Rtcp, } type_;
Packet() : len_(0) {}
- Packet(Type type, const void* data, size_t len, uint32_t time_ms)
+ Packet(Type type, const void* data, size_t len, int64_t time_ms)
: type_(type), len_(len), send_time_ms_(time_ms) {
EXPECT_LE(len_, kMaxPacketSizeByte);
memcpy(data_, data, len_);
@@ -116,7 +116,7 @@ class ConferenceTransport: public webrtc::Transport {
uint8_t data_[kMaxPacketSizeByte];
size_t len_;
- uint32_t send_time_ms_;
+ int64_t send_time_ms_;
};
static bool Run(void* transport) {
diff --git a/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/loudest_filter.cc b/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/loudest_filter.cc
index d4438a4e158..1787915356a 100644
--- a/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/loudest_filter.cc
+++ b/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/loudest_filter.cc
@@ -14,11 +14,10 @@
namespace voetest {
-void LoudestFilter::RemoveTimeoutStreams(uint32_t time_ms) {
+void LoudestFilter::RemoveTimeoutStreams(int64_t time_ms) {
auto it = stream_levels_.begin();
while (it != stream_levels_.end()) {
- if (rtc::TimeDiff(time_ms, it->second.last_time_ms) >
- kStreamTimeOutMs) {
+ if (rtc::TimeDiff(time_ms, it->second.last_time_ms) > kStreamTimeOutMs) {
stream_levels_.erase(it++);
} else {
++it;
@@ -41,7 +40,7 @@ unsigned int LoudestFilter::FindQuietestStream() {
}
bool LoudestFilter::ForwardThisPacket(const webrtc::RTPHeader& rtp_header) {
- uint32_t time_now_ms = rtc::Time();
+ int64_t time_now_ms = rtc::TimeMillis();
RemoveTimeoutStreams(time_now_ms);
int source_ssrc = rtp_header.ssrc;
diff --git a/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/loudest_filter.h b/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/loudest_filter.h
index 73b801cc989..f862c818e72 100644
--- a/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/loudest_filter.h
+++ b/chromium/third_party/webrtc/voice_engine/test/auto_test/fakes/loudest_filter.h
@@ -29,15 +29,15 @@ class LoudestFilter {
private:
struct Status {
- void Set(int audio_level, uint32_t last_time_ms) {
+ void Set(int audio_level, int64_t last_time_ms) {
this->audio_level = audio_level;
this->last_time_ms = last_time_ms;
}
int audio_level;
- uint32_t last_time_ms;
+ int64_t last_time_ms;
};
- void RemoveTimeoutStreams(uint32_t time_ms);
+ void RemoveTimeoutStreams(int64_t time_ms);
unsigned int FindQuietestStream();
// Keeps the streams being forwarded in pair<SSRC, Status>.
diff --git a/chromium/third_party/webrtc/voice_engine/test/auto_test/voe_conference_test.cc b/chromium/third_party/webrtc/voice_engine/test/auto_test/voe_conference_test.cc
index c70d92a9462..946eb4e525f 100644
--- a/chromium/third_party/webrtc/voice_engine/test/auto_test/voe_conference_test.cc
+++ b/chromium/third_party/webrtc/voice_engine/test/auto_test/voe_conference_test.cc
@@ -72,15 +72,15 @@ TEST(VoeConferenceTest, RttAndStartNtpTime) {
const int kStatsRequestIntervalMs = 1000;
const int kStatsBufferSize = 3;
- uint32_t deadline = rtc::TimeAfter(kMaxRunTimeMs);
+ int64_t deadline = rtc::TimeAfter(kMaxRunTimeMs);
// Run the following up to |kMaxRunTimeMs| milliseconds.
int successive_pass = 0;
webrtc::CallStatistics stats_1;
webrtc::CallStatistics stats_2;
std::queue<Stats> stats_buffer;
- while (rtc::TimeIsLater(rtc::Time(), deadline) &&
- successive_pass < kNeedSuccessivePass) {
+ while (rtc::TimeMillis() < deadline &&
+ successive_pass < kNeedSuccessivePass) {
webrtc::SleepMs(kStatsRequestIntervalMs);
EXPECT_TRUE(trans.GetReceiverStatistics(id_1, &stats_1));
diff --git a/chromium/third_party/webrtc/voice_engine/test/auto_test/voe_output_test.cc b/chromium/third_party/webrtc/voice_engine/test/auto_test/voe_output_test.cc
index d1bcf968b00..795dac5c0a9 100644
--- a/chromium/third_party/webrtc/voice_engine/test/auto_test/voe_output_test.cc
+++ b/chromium/third_party/webrtc/voice_engine/test/auto_test/voe_output_test.cc
@@ -183,7 +183,7 @@ TEST(OutputTest, DISABLED_OpusDtxHasNoNoisePump) {
OutputTest test(-kDtxBoundForSilence, kDtxBoundForSilence);
Random random(1234ull);
- uint32_t start_time = rtc::Time();
+ int64_t start_time = rtc::TimeMillis();
test.Start();
while (rtc::TimeSince(start_time) < kRuntimeMs) {
webrtc::SleepMs(random.Rand(kUnmuteTimeMs - kUnmuteTimeMs / 10,
diff --git a/chromium/third_party/webrtc/voice_engine/voe_base_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_base_impl.cc
index e3dee0f1a97..8e93778f7ed 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_base_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_base_impl.cc
@@ -229,7 +229,7 @@ int VoEBaseImpl::Init(AudioDeviceModule* external_adm,
return -1;
#else
// Create the internal ADM implementation.
- shared_->set_audio_device(AudioDeviceModuleImpl::Create(
+ shared_->set_audio_device(AudioDeviceModule::Create(
VoEId(shared_->instance_id(), -1), shared_->audio_device_layer()));
if (shared_->audio_device() == nullptr) {
@@ -620,11 +620,14 @@ int32_t VoEBaseImpl::StopPlayout() {
}
int32_t VoEBaseImpl::StartSend() {
- if (!shared_->audio_device()->Recording()) {
+ if (!shared_->audio_device()->RecordingIsInitialized() &&
+ !shared_->audio_device()->Recording()) {
if (shared_->audio_device()->InitRecording() != 0) {
LOG_F(LS_ERROR) << "Failed to initialize recording";
return -1;
}
+ }
+ if (!shared_->audio_device()->Recording()) {
if (shared_->audio_device()->StartRecording() != 0) {
LOG_F(LS_ERROR) << "Failed to start recording";
return -1;
diff --git a/chromium/third_party/webrtc/voice_engine/voe_external_media_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_external_media_impl.cc
index 7b2b6a00f1e..cae0715ced2 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_external_media_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_external_media_impl.cc
@@ -153,7 +153,11 @@ int VoEExternalMediaImpl::GetAudioFrame(int channel, int desired_sample_rate_hz,
}
frame->sample_rate_hz_ =
desired_sample_rate_hz == 0 ? -1 : desired_sample_rate_hz;
- return channelPtr->GetAudioFrame(channel, frame);
+ auto ret = channelPtr->GetAudioFrameWithMuted(channel, frame);
+ if (ret == MixerParticipant::AudioFrameInfo::kMuted) {
+ frame->Mute();
+ }
+ return ret == MixerParticipant::AudioFrameInfo::kError ? -1 : 0;
}
int VoEExternalMediaImpl::SetExternalMixing(int channel, bool enable) {
diff --git a/chromium/third_party/webrtc/voice_engine/voe_network_impl.cc b/chromium/third_party/webrtc/voice_engine/voe_network_impl.cc
index 55620482951..6941629d79d 100644
--- a/chromium/third_party/webrtc/voice_engine/voe_network_impl.cc
+++ b/chromium/third_party/webrtc/voice_engine/voe_network_impl.cc
@@ -43,7 +43,7 @@ int VoENetworkImpl::RegisterExternalTransport(int channel,
LOG_F(LS_ERROR) << "Failed to locate channel: " << channel;
return -1;
}
- return channelPtr->RegisterExternalTransport(transport);
+ return channelPtr->RegisterExternalTransport(&transport);
}
int VoENetworkImpl::DeRegisterExternalTransport(int channel) {
@@ -84,8 +84,8 @@ int VoENetworkImpl::ReceivedRTPPacket(int channel,
LOG_F(LS_ERROR) << "No external transport for channel: " << channel;
return -1;
}
- return channelPtr->ReceivedRTPPacket((const int8_t*)data, length,
- packet_time);
+ return channelPtr->ReceivedRTPPacket(static_cast<const uint8_t*>(data),
+ length, packet_time);
}
int VoENetworkImpl::ReceivedRTCPPacket(int channel,
@@ -107,7 +107,8 @@ int VoENetworkImpl::ReceivedRTCPPacket(int channel,
LOG_F(LS_ERROR) << "No external transport for channel: " << channel;
return -1;
}
- return channelPtr->ReceivedRTCPPacket((const int8_t*)data, length);
+ return channelPtr->ReceivedRTCPPacket(static_cast<const uint8_t*>(data),
+ length);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/voice_engine/voice_engine.gyp b/chromium/third_party/webrtc/voice_engine/voice_engine.gyp
index b93c903d711..a728b1af3a6 100644
--- a/chromium/third_party/webrtc/voice_engine/voice_engine.gyp
+++ b/chromium/third_party/webrtc/voice_engine/voice_engine.gyp
@@ -255,6 +255,27 @@
],
},
],
+ 'conditions': [
+ ['test_isolation_mode != "noop"',
+ {
+ 'targets': [
+ {
+ 'target_name': 'voice_engine_unittests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):voice_engine_unittests_apk',
+ ],
+ 'includes': [
+ '../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'voice_engine_unittests_apk.isolate',
+ ],
+ },
+ ],
+ },
+ ],
+ ],
}],
['test_isolation_mode != "noop"', {
'targets': [
diff --git a/chromium/third_party/webrtc/voice_engine/voice_engine_unittests_apk.isolate b/chromium/third_party/webrtc/voice_engine/voice_engine_unittests_apk.isolate
new file mode 100644
index 00000000000..6f0244c526d
--- /dev/null
+++ b/chromium/third_party/webrtc/voice_engine/voice_engine_unittests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../../build/android/android.isolate',
+ 'voice_engine_unittests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_voice_engine_unittests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../../build/config/',
+ '../../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/voice_engine_unittests_apk/',
+ '<(PRODUCT_DIR)/bin/run_voice_engine_unittests',
+ 'voice_engine_unittests.isolate',
+ ]
+ }
+}
diff --git a/chromium/third_party/webrtc/webrtc.gyp b/chromium/third_party/webrtc/webrtc.gyp
index ad4b3534d45..793bf96335e 100644
--- a/chromium/third_party/webrtc/webrtc.gyp
+++ b/chromium/third_party/webrtc/webrtc.gyp
@@ -35,6 +35,18 @@
],
},
}],
+ ['build_with_chromium==0 and'
+ '(OS=="ios" or (OS=="mac" and mac_deployment_target=="10.7"))', {
+ # TODO(kjellander): Move this to webrtc_all_dependencies once all of talk/
+ # has been moved to webrtc/. It can't be processed by Chromium since the
+ # reference to buid/java.gypi is using an absolute path (and includes
+ # entries cannot contain variables).
+ 'variables': {
+ 'webrtc_all_dependencies': [
+ 'sdk/sdk.gyp:*',
+ ],
+ },
+ }],
['include_tests==1', {
'includes': [
'webrtc_tests.gypi',
@@ -55,6 +67,24 @@
},
],
}],
+ ['enable_protobuf==1', {
+ 'targets': [
+ {
+ 'target_name': 'rtc_event_log_parser',
+ 'type': 'static_library',
+ 'sources': [
+ 'call/rtc_event_log_parser.cc',
+ 'call/rtc_event_log_parser.h',
+ ],
+ 'dependencies': [
+ 'rtc_event_log_proto',
+ ],
+ 'export_dependent_settings': [
+ 'rtc_event_log_proto',
+ ],
+ },
+ ],
+ }],
['include_tests==1 and enable_protobuf==1', {
'targets': [
{
@@ -63,7 +93,7 @@
'sources': ['call/rtc_event_log2rtp_dump.cc',],
'dependencies': [
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- 'rtc_event_log',
+ 'rtc_event_log_parser',
'rtc_event_log_proto',
'test/test.gyp:rtp_test_utils'
],
@@ -97,6 +127,12 @@
'webrtc_tests',
],
}],
+ ['include_tests==1 and'
+ '(OS=="ios" or (OS=="mac" and mac_deployment_target=="10.7"))', {
+ 'dependencies': [
+ 'sdk/sdk_tests.gyp:*',
+ ],
+ }],
],
},
{
@@ -108,8 +144,6 @@
'audio_state.h',
'call.h',
'config.h',
- 'frame_callback.h',
- 'stream.h',
'transport.h',
'video_receive_stream.h',
'video_send_stream.h',
@@ -131,7 +165,6 @@
['build_with_chromium==1', {
'dependencies': [
'<(webrtc_root)/modules/modules.gyp:video_capture',
- '<(webrtc_root)/modules/modules.gyp:video_render',
],
}],
],
@@ -142,6 +175,8 @@
'sources': [
'call/rtc_event_log.cc',
'call/rtc_event_log.h',
+ 'call/rtc_event_log_helper_thread.cc',
+ 'call/rtc_event_log_helper_thread.h',
],
'conditions': [
# If enable_protobuf is defined, we want to compile the protobuf
diff --git a/chromium/third_party/webrtc/webrtc_examples.gyp b/chromium/third_party/webrtc/webrtc_examples.gyp
index d5d8adefd9e..6f07b4d7fe3 100755
--- a/chromium/third_party/webrtc/webrtc_examples.gyp
+++ b/chromium/third_party/webrtc/webrtc_examples.gyp
@@ -154,7 +154,7 @@
'target_name': 'apprtc_common',
'type': 'static_library',
'dependencies': [
- '<(webrtc_root)/base/base.gyp:rtc_base_objc',
+ '<(webrtc_root)/sdk/sdk.gyp:rtc_sdk_common_objc',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
],
'sources': [
@@ -201,8 +201,7 @@
'target_name': 'apprtc_signaling',
'type': 'static_library',
'dependencies': [
- '<(webrtc_root)/api/api.gyp:rtc_api_objc',
- '<(webrtc_root)/base/base.gyp:rtc_base_objc',
+ '<(webrtc_root)/sdk/sdk.gyp:rtc_sdk_peerconnection_objc',
'apprtc_common',
'socketrocket',
],
@@ -251,7 +250,7 @@
],
},
'export_dependent_settings': [
- '<(webrtc_root)/api/api.gyp:rtc_api_objc',
+ '<(webrtc_root)/sdk/sdk.gyp:rtc_sdk_peerconnection_objc',
],
'conditions': [
['OS=="ios"', {
@@ -409,6 +408,7 @@
'type': 'none',
'dependencies': [
'api/api.gyp:libjingle_peerconnection_java',
+ '<(DEPTH)/third_party/android_tools/android_tools.gyp:android_support_design_javalib'
],
'variables': {
'apk_name': 'AppRTCDemo',
@@ -419,9 +419,11 @@
'R_package_relpath': 'org/appspot/apprtc',
'input_jars_paths': [
'examples/androidapp/third_party/autobanh/autobanh.jar',
+ '<(DEPTH)/third_party/android_tools/sdk/extras/android/support/v4/android-support-v4.jar',
],
'library_dexed_jars_paths': [
'examples/androidapp/third_party/autobanh/autobanh.jar',
+ '<(DEPTH)/third_party/android_tools/sdk/extras/android/support/v4/android-support-v4.jar',
],
'native_lib_target': 'libjingle_peerconnection_so',
'add_to_dependents_classpaths':1,
@@ -454,8 +456,36 @@
'apk_name': 'AppRTCDemoTest',
'java_in_dir': 'examples/androidtests',
'is_test_apk': 1,
+ 'test_type': 'instrumentation',
+ 'test_runner_path': '<(DEPTH)/webrtc/build/android/test_runner.py',
},
- 'includes': [ '../build/java_apk.gypi' ],
+ 'includes': [
+ '../build/java_apk.gypi',
+ '../build/android/test_runner.gypi',
+ ],
+ },
+
+ {
+ 'target_name': 'AppRTCDemoJUnitTest',
+ 'type': 'none',
+ 'dependencies': [
+ 'AppRTCDemo_apk',
+ '<(DEPTH)/base/base.gyp:base_java',
+ '<(DEPTH)/base/base.gyp:base_java_test_support',
+ '<(DEPTH)/base/base.gyp:base_junit_test_support',
+ ],
+ 'variables': {
+ 'main_class': 'org.chromium.testing.local.JunitTestMain',
+ 'src_paths': [
+ 'examples/androidjunit/',
+ ],
+ 'test_type': 'junit',
+ 'wrapper_script_name': 'helper/<(_target_name)',
+ },
+ 'includes': [
+ '../build/host_jar.gypi',
+ '../build/android/test_runner.gypi',
+ ],
},
], # targets
}], # OS=="android"
diff --git a/chromium/third_party/webrtc/webrtc_nonparallel_tests_apk.isolate b/chromium/third_party/webrtc/webrtc_nonparallel_tests_apk.isolate
new file mode 100644
index 00000000000..ee83d108143
--- /dev/null
+++ b/chromium/third_party/webrtc/webrtc_nonparallel_tests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../build/android/android.isolate',
+ 'webrtc_nonparallel_tests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_webrtc_nonparallel_tests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../build/config/',
+ '../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/webrtc_nonparallel_tests_apk/',
+ '<(PRODUCT_DIR)/bin/run_webrtc_nonparallel_tests',
+ 'webrtc_nonparallel_tests.isolate',
+ ]
+ },
+}
diff --git a/chromium/third_party/webrtc/webrtc_perf_tests_apk.isolate b/chromium/third_party/webrtc/webrtc_perf_tests_apk.isolate
new file mode 100644
index 00000000000..d1be691a0b1
--- /dev/null
+++ b/chromium/third_party/webrtc/webrtc_perf_tests_apk.isolate
@@ -0,0 +1,26 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': [
+ '../build/android/android.isolate',
+ 'webrtc_perf_tests.isolate',
+ ],
+ 'variables': {
+ 'command': [
+ '<(PRODUCT_DIR)/bin/run_webrtc_perf_tests',
+ '--logcat-output-dir', '${ISOLATED_OUTDIR}/logcats',
+ ],
+ 'files': [
+ '../build/config/',
+ '../third_party/instrumented_libraries/instrumented_libraries.isolate',
+ '<(PRODUCT_DIR)/webrtc_perf_tests_apk/',
+ '<(PRODUCT_DIR)/bin/run_webrtc_perf_tests',
+ 'webrtc_perf_tests.isolate',
+ ]
+ },
+}
diff --git a/chromium/third_party/webrtc/webrtc_tests.gypi b/chromium/third_party/webrtc/webrtc_tests.gypi
index 7840e80b5ed..ae31c6c33cd 100644
--- a/chromium/third_party/webrtc/webrtc_tests.gypi
+++ b/chromium/third_party/webrtc/webrtc_tests.gypi
@@ -30,11 +30,11 @@
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
}],
- ['OS=="ios"', {
+ ['OS=="ios" or (OS=="mac" and mac_deployment_target=="10.7")', {
'dependencies': [
- 'api/api_tests.gyp:rtc_api_objc_tests',
- ]
- }]
+ 'sdk/sdk_tests.gyp:rtc_sdk_peerconnection_objc_tests',
+ ],
+ }],
],
},
{
@@ -57,7 +57,6 @@
],
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
- '<(webrtc_root)/modules/modules.gyp:video_render',
'<(webrtc_root)/modules/modules.gyp:video_capture_module_internal_impl',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
'webrtc',
@@ -144,7 +143,6 @@
'test/test.gyp:test_common',
'test/test.gyp:test_renderer',
'<(webrtc_root)/modules/modules.gyp:video_capture',
- '<(webrtc_root)/modules/modules.gyp:video_render',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers_default',
'webrtc',
],
@@ -161,6 +159,7 @@
'call/bitrate_estimator_tests.cc',
'call/call_unittest.cc',
'call/packet_injection_tests.cc',
+ 'call/ringbuffer_unittest.cc',
'test/common_unittest.cc',
'test/testsupport/metrics/video_metrics_unittest.cc',
'video/call_stats_unittest.cc',
@@ -169,6 +168,7 @@
'video/overuse_frame_detector_unittest.cc',
'video/payload_router_unittest.cc',
'video/report_block_stats_unittest.cc',
+ 'video/send_delay_stats_unittest.cc',
'video/send_statistics_proxy_unittest.cc',
'video/stream_synchronization_unittest.cc',
'video/video_capture_input_unittest.cc',
@@ -183,7 +183,6 @@
'<(webrtc_root)/common.gyp:webrtc_common',
'<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
'<(webrtc_root)/modules/modules.gyp:video_capture',
- '<(webrtc_root)/modules/modules.gyp:video_render',
'<(webrtc_root)/test/test.gyp:channel_transport',
'<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
'test/metrics.gyp:metrics',
@@ -214,10 +213,12 @@
],
'dependencies': [
'webrtc.gyp:rtc_event_log',
+ 'webrtc.gyp:rtc_event_log_parser',
'webrtc.gyp:rtc_event_log_proto',
],
'sources': [
'call/rtc_event_log_unittest.cc',
+ 'call/rtc_event_log_unittest_helper.cc'
],
}],
],
@@ -343,6 +344,66 @@
],
},
],
+ 'conditions': [
+ ['test_isolation_mode != "noop"',
+ {
+ 'targets': [
+ {
+ 'target_name': 'rtc_unittests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):rtc_unittests_apk',
+ ],
+ 'includes': [
+ 'build/isolate.gypi',
+ ],
+ 'sources': [
+ 'rtc_unittests_apk.isolate',
+ ],
+ },
+ {
+ 'target_name': 'video_engine_tests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):video_engine_tests_apk',
+ ],
+ 'includes': [
+ 'build/isolate.gypi',
+ ],
+ 'sources': [
+ 'video_engine_tests_apk.isolate',
+ ],
+ },
+ {
+ 'target_name': 'webrtc_perf_tests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):webrtc_perf_tests_apk',
+ ],
+ 'includes': [
+ 'build/isolate.gypi',
+ ],
+ 'sources': [
+ 'webrtc_perf_tests_apk.isolate',
+ ],
+ },
+ {
+ 'target_name': 'webrtc_nonparallel_tests_apk_run',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):webrtc_nonparallel_tests_apk',
+ ],
+ 'includes': [
+ 'build/isolate.gypi',
+ ],
+ 'sources': [
+ 'webrtc_nonparallel_tests_apk.isolate',
+ ],
+ },
+ ],
+ },
+ ],
+ ],
}],
['test_isolation_mode != "noop"', {
'targets': [