author     Allan Sandfeld Jensen <allan.jensen@theqtcompany.com>  2015-08-14 11:38:45 +0200
committer  Allan Sandfeld Jensen <allan.jensen@theqtcompany.com>  2015-08-14 17:16:47 +0000
commit     3a97ca8dd9b96b599ae2d33e40df0dd2f7ea5859 (patch)
tree       43cc572ba067417c7341db81f71ae7cc6e0fcc3e /chromium/third_party/libjingle
parent     f61ab1ac7f855cd281809255c0aedbb1895e1823 (diff)
download   qtwebengine-chromium-3a97ca8dd9b96b599ae2d33e40df0dd2f7ea5859.tar.gz
BASELINE: Update chromium to 45.0.2454.40
Change-Id: Id2121d9f11a8fc633677236c65a3e41feef589e4
Reviewed-by: Andras Becsi <andras.becsi@theqtcompany.com>
Diffstat (limited to 'chromium/third_party/libjingle')
-rw-r--r--  chromium/third_party/libjingle/BUILD.gn | 19
-rw-r--r--  chromium/third_party/libjingle/README.chromium | 2
-rw-r--r--  chromium/third_party/libjingle/libjingle.gyp | 13
-rw-r--r--  chromium/third_party/libjingle/libjingle_nacl.gyp | 2
-rw-r--r--  chromium/third_party/libjingle/overrides/allocator_shim/allocator_proxy.cc | 27
-rw-r--r--  chromium/third_party/libjingle/overrides/allocator_shim/allocator_stub.cc | 17
-rw-r--r--  chromium/third_party/libjingle/overrides/allocator_shim/allocator_stub.h | 24
-rw-r--r--  chromium/third_party/libjingle/overrides/init_webrtc.cc | 122
-rw-r--r--  chromium/third_party/libjingle/overrides/init_webrtc.h | 79
-rw-r--r--  chromium/third_party/libjingle/overrides/initialize_module.cc | 147
-rw-r--r--  chromium/third_party/libjingle/overrides/talk/media/webrtc/webrtcexport.h | 21
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/androidtests/jni/Android.mk | 2
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/androidvideocapturer.cc | 35
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/androidvideocapturer.h | 3
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.cc | 162
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.h | 15
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/datachannel_unittest.cc | 68
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/datachannelinterface.h | 2
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore.cc | 27
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore.h | 4
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore_unittest.cc | 1
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/fakemetricsobserver.cc | 81
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/fakemetricsobserver.h | 68
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/androidmediacodeccommon.h | 12
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/classreferenceholder.cc | 9
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/native_handle_impl.h | 20
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/jsepsessiondescription.cc | 3
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/localaudiosource.cc | 60
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/localaudiosource_unittest.cc | 13
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.cc | 4
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h | 7
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/objc/.clang-format | 10
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCDataChannel.mm | 12
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCEnumConverter.h | 32
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCEnumConverter.mm | 92
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnection+Internal.h | 11
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnection.mm | 15
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm | 14
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnectionInterface+Internal.h | 37
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnectionInterface.mm | 87
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/objc/avfoundationvideocapturer.mm | 6
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCDataChannel.h | 6
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCPeerConnectionFactory.h | 8
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h | 73
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.m | 6
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection.cc | 204
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection.h | 11
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection_unittest.cc | 143
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectionfactory.cc | 2
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectionfactory_unittest.cc | 32
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectioninterface.h | 29
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/statscollector.cc | 36
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/statscollector.h | 7
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/statscollector_unittest.cc | 16
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/statstypes.cc | 4
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/statstypes.h | 2
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule.cc | 10
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule.h | 4
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule_unittest.cc | 38
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/test/fakedatachannelprovider.h | 14
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/test/mockpeerconnectionobservers.h | 12
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/umametrics.h | 6
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/videotrack.cc | 2
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp.cc | 118
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp_unittest.cc | 81
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.cc | 106
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.h | 30
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession_unittest.cc | 233
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.cc | 38
-rw-r--r--  chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.h | 15
-rw-r--r--  chromium/third_party/libjingle/source/talk/build/common.gypi | 11
-rw-r--r--  chromium/third_party/libjingle/source/talk/build/isolate.gypi | 3
-rw-r--r--  chromium/third_party/libjingle/source/talk/codereview.settings | 7
-rw-r--r--  chromium/third_party/libjingle/source/talk/examples/android/jni/Android.mk | 2
-rw-r--r--  chromium/third_party/libjingle/source/talk/examples/android/res/values/strings.xml | 5
-rw-r--r--  chromium/third_party/libjingle/source/talk/examples/android/res/xml/preferences.xml | 6
-rw-r--r--  chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ARDAppClient.m | 25
-rw-r--r--  chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ARDSDPUtils.h | 41
-rw-r--r--  chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ARDSDPUtils.m | 108
-rw-r--r--  chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m | 6
-rw-r--r--  chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/Info.plist | 155
-rw-r--r--  chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/Default-568h.png | bin 11733 -> 0 bytes
-rw-r--r--  chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/iPhone5@2x.png | bin 0 -> 3640 bytes
-rw-r--r--  chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/iPhone6@2x.png | bin 0 -> 4856 bytes
-rw-r--r--  chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/iPhone6p@3x.png | bin 0 -> 11152 bytes
-rwxr-xr-x  chromium/third_party/libjingle/source/talk/libjingle.gyp | 26
-rwxr-xr-x  chromium/third_party/libjingle/source/talk/libjingle_examples.gyp | 9
-rwxr-xr-x  chromium/third_party/libjingle/source/talk/libjingle_tests.gyp | 5
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/base/capturemanager.cc | 6
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/base/constants.cc | 2
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/base/constants.h | 2
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/base/fakemediaengine.h | 17
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/base/filemediaengine.cc | 376
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/base/filemediaengine.h | 330
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/base/filemediaengine_unittest.cc | 459
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/base/mediachannel.h | 41
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/base/mediaengine.h | 26
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/base/videoengine_unittest.h | 3
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/base/videoframe.cc | 24
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/base/videoframe.h | 11
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/other/linphonemediaengine.cc | 276
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/other/linphonemediaengine.h | 171
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtccall.cc | 18
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtccall.h | 16
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvideocapturemodule.h | 2
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvideoengine.h | 9
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvoiceengine.h | 88
-rwxr-xr-x  chromium/third_party/libjingle/source/talk/media/webrtc/simulcast.cc | 2
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcexport.h | 83
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcmediaengine.cc | 16
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcmediaengine.h | 146
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender.cc | 2
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender.h | 4
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender_unittest.cc | 4
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideocapturer.cc | 129
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideocapturer.h | 15
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine.cc | 1
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2.cc | 59
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2.h | 12
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2_unittest.cc | 91
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe.h | 4
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe_unittest.cc | 19
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoframefactory_unittest.cc | 125
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.cc | 246
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.h | 69
-rw-r--r--  chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine_unittest.cc | 125
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/channel.cc | 59
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/channel.h | 79
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/channel_unittest.cc | 343
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/channelmanager.cc | 88
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/channelmanager.h | 28
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/channelmanager_unittest.cc | 45
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/mediarecorder.cc | 224
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/mediarecorder.h | 119
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/mediarecorder_unittest.cc | 356
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/mediasession.cc | 22
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/mediasession_unittest.cc | 41
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter.cc | 44
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter.h | 3
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter_unittest.cc | 41
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/soundclip.cc | 83
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/soundclip.h | 70
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/srtpfilter.cc | 22
-rw-r--r--  chromium/third_party/libjingle/source/talk/session/media/srtpfilter.h | 2
144 files changed, 2906 insertions, 4872 deletions
diff --git a/chromium/third_party/libjingle/BUILD.gn b/chromium/third_party/libjingle/BUILD.gn
index ade2f66f00e..1d500d04eea 100644
--- a/chromium/third_party/libjingle/BUILD.gn
+++ b/chromium/third_party/libjingle/BUILD.gn
@@ -417,8 +417,6 @@ if (enable_webrtc) {
"source/talk/media/base/constants.cc",
"source/talk/media/base/constants.h",
"source/talk/media/base/cryptoparams.h",
- "source/talk/media/base/filemediaengine.cc",
- "source/talk/media/base/filemediaengine.h",
"source/talk/media/base/hybriddataengine.h",
"source/talk/media/base/mediachannel.h",
"source/talk/media/base/mediaengine.cc",
@@ -448,13 +446,10 @@ if (enable_webrtc) {
"source/talk/media/webrtc/webrtccommon.h",
"source/talk/media/webrtc/webrtcpassthroughrender.cc",
"source/talk/media/webrtc/webrtcpassthroughrender.h",
- "source/talk/media/webrtc/webrtcvideocapturer.cc",
- "source/talk/media/webrtc/webrtcvideocapturer.h",
"source/talk/media/webrtc/webrtcvideoframe.cc",
"source/talk/media/webrtc/webrtcvideoframe.h",
"source/talk/media/webrtc/webrtcvideoframefactory.cc",
"source/talk/media/webrtc/webrtcvideoframefactory.h",
- "source/talk/media/webrtc/webrtcvie.h",
"source/talk/media/webrtc/webrtcvoe.h",
"source/talk/session/media/audiomonitor.cc",
"source/talk/session/media/audiomonitor.h",
@@ -517,8 +512,6 @@ if (enable_webrtc) {
"source/talk/media/webrtc/simulcast.h",
"source/talk/media/webrtc/webrtcmediaengine.cc",
"source/talk/media/webrtc/webrtcmediaengine.h",
- "source/talk/media/webrtc/webrtcvideoengine.cc",
- "source/talk/media/webrtc/webrtcvideoengine.h",
"source/talk/media/webrtc/webrtcvideoengine2.cc",
"source/talk/media/webrtc/webrtcvideoengine2.h",
"source/talk/media/webrtc/webrtcvoiceengine.cc",
@@ -539,5 +532,17 @@ if (enable_webrtc) {
"//third_party/webrtc/voice_engine",
]
}
+
+ source_set("libstunprober") {
+ p2p_dir = "../webrtc/p2p"
+ sources = [
+ "$p2p_dir/stunprober/stunprober.cc",
+ ]
+
+ deps = [
+ "//third_party/webrtc/base:rtc_base",
+ ":libjingle_webrtc_common",
+ ]
+ }
} # enable_webrtc
# TODO(GYP): Port libjingle.gyp's enable_webrtc condition block.
diff --git a/chromium/third_party/libjingle/README.chromium b/chromium/third_party/libjingle/README.chromium
index 6028000d3df..2078ca7792b 100644
--- a/chromium/third_party/libjingle/README.chromium
+++ b/chromium/third_party/libjingle/README.chromium
@@ -1,7 +1,7 @@
Name: libjingle
URL: http://code.google.com/p/webrtc/
Version: unknown
-Revision: 9186
+Revision: 9564
License: BSD
License File: source/talk/COPYING
Security Critical: yes
diff --git a/chromium/third_party/libjingle/libjingle.gyp b/chromium/third_party/libjingle/libjingle.gyp
index 7cda19660a0..aec101989c4 100644
--- a/chromium/third_party/libjingle/libjingle.gyp
+++ b/chromium/third_party/libjingle/libjingle.gyp
@@ -26,7 +26,6 @@
'HAVE_SRTP',
'HAVE_WEBRTC_VIDEO',
'HAVE_WEBRTC_VOICE',
- 'LIBPEERCONNECTION_LIB=1',
'LOGGING_INSIDE_WEBRTC',
'NO_MAIN_THREAD_WRAPPING',
'NO_SOUND_SYSTEM',
@@ -325,12 +324,7 @@
# GN version: //third_party/libjingle:libjingle_webrtc_common
'target_name': 'libjingle_webrtc_common',
'type': 'static_library',
- 'all_dependent_settings': {
- 'defines': [ 'LIBPEERCONNECTION_LIB=1' ],
- },
'sources': [
- 'overrides/talk/media/webrtc/webrtcexport.h',
-
'<(libjingle_source)/talk/app/webrtc/audiotrack.cc',
'<(libjingle_source)/talk/app/webrtc/audiotrack.h',
'<(libjingle_source)/talk/app/webrtc/audiotrackrenderer.cc',
@@ -407,8 +401,6 @@
'<(libjingle_source)/talk/media/base/constants.cc',
'<(libjingle_source)/talk/media/base/constants.h',
'<(libjingle_source)/talk/media/base/cryptoparams.h',
- '<(libjingle_source)/talk/media/base/filemediaengine.cc',
- '<(libjingle_source)/talk/media/base/filemediaengine.h',
'<(libjingle_source)/talk/media/base/hybriddataengine.h',
'<(libjingle_source)/talk/media/base/mediachannel.h',
'<(libjingle_source)/talk/media/base/mediaengine.cc',
@@ -438,13 +430,10 @@
'<(libjingle_source)/talk/media/webrtc/webrtccommon.h',
'<(libjingle_source)/talk/media/webrtc/webrtcpassthroughrender.cc',
'<(libjingle_source)/talk/media/webrtc/webrtcpassthroughrender.h',
- '<(libjingle_source)/talk/media/webrtc/webrtcvideocapturer.cc',
- '<(libjingle_source)/talk/media/webrtc/webrtcvideocapturer.h',
'<(libjingle_source)/talk/media/webrtc/webrtcvideoframe.cc',
'<(libjingle_source)/talk/media/webrtc/webrtcvideoframe.h',
'<(libjingle_source)/talk/media/webrtc/webrtcvideoframefactory.cc',
'<(libjingle_source)/talk/media/webrtc/webrtcvideoframefactory.h',
- '<(libjingle_source)/talk/media/webrtc/webrtcvie.h',
'<(libjingle_source)/talk/media/webrtc/webrtcvoe.h',
'<(libjingle_source)/talk/session/media/audiomonitor.cc',
'<(libjingle_source)/talk/session/media/audiomonitor.h',
@@ -564,8 +553,6 @@
'<(libjingle_source)/talk/media/webrtc/simulcast.h',
'<(libjingle_source)/talk/media/webrtc/webrtcmediaengine.cc',
'<(libjingle_source)/talk/media/webrtc/webrtcmediaengine.h',
- '<(libjingle_source)/talk/media/webrtc/webrtcvideoengine.cc',
- '<(libjingle_source)/talk/media/webrtc/webrtcvideoengine.h',
'<(libjingle_source)/talk/media/webrtc/webrtcvideoengine2.cc',
'<(libjingle_source)/talk/media/webrtc/webrtcvideoengine2.h',
'<(libjingle_source)/talk/media/webrtc/webrtcvoiceengine.cc',
diff --git a/chromium/third_party/libjingle/libjingle_nacl.gyp b/chromium/third_party/libjingle/libjingle_nacl.gyp
index edcbdd29223..1ca1e810d0c 100644
--- a/chromium/third_party/libjingle/libjingle_nacl.gyp
+++ b/chromium/third_party/libjingle/libjingle_nacl.gyp
@@ -156,6 +156,8 @@
'<(webrtc_base)/pathutils.h',
'<(webrtc_base)/physicalsocketserver.cc',
'<(webrtc_base)/physicalsocketserver.h',
+ '<(webrtc_base)/platform_thread.cc',
+ '<(webrtc_base)/platform_thread.h',
'<(webrtc_base)/proxydetect.cc',
'<(webrtc_base)/proxydetect.h',
'<(webrtc_base)/proxyinfo.cc',
diff --git a/chromium/third_party/libjingle/overrides/allocator_shim/allocator_proxy.cc b/chromium/third_party/libjingle/overrides/allocator_shim/allocator_proxy.cc
deleted file mode 100644
index 3a6772cc9df..00000000000
--- a/chromium/third_party/libjingle/overrides/allocator_shim/allocator_proxy.cc
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "third_party/libjingle/overrides/allocator_shim/allocator_stub.h"
-
-#if !defined(LIBPEERCONNECTION_IMPLEMENTATION) || defined(LIBPEERCONNECTION_LIB)
-#error "Only compile the allocator proxy with the shared_library implementation"
-#endif
-
-#if defined(OS_MACOSX) || defined(OS_ANDROID)
-#error "The allocator proxy isn't supported (or needed) on mac or android."
-#endif
-
-extern AllocateFunction g_alloc;
-extern DellocateFunction g_dealloc;
-
-// Override the global new/delete routines and proxy them over to the allocator
-// routines handed to us via InitializeModule.
-
-void* operator new(std::size_t n) throw() {
- return g_alloc(n);
-}
-
-void operator delete(void* p) throw() {
- g_dealloc(p);
-}
diff --git a/chromium/third_party/libjingle/overrides/allocator_shim/allocator_stub.cc b/chromium/third_party/libjingle/overrides/allocator_shim/allocator_stub.cc
deleted file mode 100644
index 4664ed7cd09..00000000000
--- a/chromium/third_party/libjingle/overrides/allocator_shim/allocator_stub.cc
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "third_party/libjingle/overrides/allocator_shim/allocator_stub.h"
-
-#if defined(OS_MACOSX) || defined(OS_ANDROID)
-#error "The allocator stub isn't supported (or needed) on mac or android."
-#endif
-
-void* Allocate(std::size_t n) {
- return operator new(n);
-}
-
-void Dellocate(void* p) {
- return operator delete(p);
-}
diff --git a/chromium/third_party/libjingle/overrides/allocator_shim/allocator_stub.h b/chromium/third_party/libjingle/overrides/allocator_shim/allocator_stub.h
deleted file mode 100644
index 20b9c7d42d6..00000000000
--- a/chromium/third_party/libjingle/overrides/allocator_shim/allocator_stub.h
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef THIRD_PARTY_LIBJINGLE_OVERRIDES_ALLOCATOR_SHIM_ALLOCATOR_STUB_H_
-#define THIRD_PARTY_LIBJINGLE_OVERRIDES_ALLOCATOR_SHIM_ALLOCATOR_STUB_H_
-
-#include <new>
-
-#include "base/basictypes.h"
-
-#if !defined(OS_MACOSX) && !defined(OS_ANDROID)
-
-typedef void* (*AllocateFunction)(std::size_t);
-typedef void (*DellocateFunction)(void*);
-
-// The stub implementations that forward new / delete calls to the allocator
-// in the current binary (i.e. tcmalloc).
-void* Allocate(std::size_t n);
-void Dellocate(void* p);
-
-#endif // OS_MACOSX && OS_ANDROID
-
-#endif // THIRD_PARTY_LIBJINGLE_OVERRIDES_ALLOCATOR_SHIM_ALLOCATOR_STUB_H_
diff --git a/chromium/third_party/libjingle/overrides/init_webrtc.cc b/chromium/third_party/libjingle/overrides/init_webrtc.cc
index 3b2e4066a75..6ba7f04091b 100644
--- a/chromium/third_party/libjingle/overrides/init_webrtc.cc
+++ b/chromium/third_party/libjingle/overrides/init_webrtc.cc
@@ -7,6 +7,7 @@
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
+#include "base/logging.h"
#include "base/metrics/field_trial.h"
#include "base/metrics/histogram.h"
#include "base/native_library.h"
@@ -14,6 +15,7 @@
#include "base/trace_event/trace_event.h"
#include "third_party/webrtc/overrides/webrtc/base/basictypes.h"
#include "third_party/webrtc/overrides/webrtc/base/logging.h"
+#include "third_party/webrtc/system_wrappers/interface/event_tracer.h"
const unsigned char* GetCategoryGroupEnabled(const char* category_group) {
return TRACE_EVENT_API_GET_CATEGORY_GROUP_ENABLED(category_group);
@@ -44,6 +46,12 @@ std::string FindFullName(const std::string& trial_name) {
// Define webrtc::metrics functions to provide webrtc with implementations.
namespace metrics {
+
+// This class doesn't actually exist, so don't go looking for it :)
+// This type is just fwd declared here in order to use it as an opaque type
+// between the Histogram functions in this file.
+class Histogram;
+
Histogram* HistogramFactoryGetCounts(
const std::string& name, int min, int max, int bucket_count) {
return reinterpret_cast<Histogram*>(
@@ -69,8 +77,6 @@ void HistogramAdd(
} // namespace metrics
} // namespace webrtc
-#if defined(LIBPEERCONNECTION_LIB)
-
// libpeerconnection is being compiled as a static lib. In this case
// we don't need to do any initializing but to keep things simple we
// provide an empty intialization routine so that this #ifdef doesn't
@@ -79,115 +85,3 @@ bool InitializeWebRtcModule() {
webrtc::SetupEventTracer(&GetCategoryGroupEnabled, &AddTraceEvent);
return true;
}
-
-#else // !LIBPEERCONNECTION_LIB
-
-// When being compiled as a shared library, we need to bridge the gap between
-// the current module and the libpeerconnection module, so things get a tad
-// more complicated.
-
-// Global function pointers to the factory functions in the shared library.
-CreateWebRtcMediaEngineFunction g_create_webrtc_media_engine = NULL;
-DestroyWebRtcMediaEngineFunction g_destroy_webrtc_media_engine = NULL;
-
-// Returns the full or relative path to the libpeerconnection module depending
-// on what platform we're on.
-static base::FilePath GetLibPeerConnectionPath() {
- base::FilePath path;
- CHECK(PathService::Get(base::DIR_MODULE, &path));
-#if defined(OS_WIN)
- path = path.Append(FILE_PATH_LITERAL("libpeerconnection.dll"));
-#elif defined(OS_MACOSX)
- // Simulate '@loader_path/Libraries'.
- path = path.Append(FILE_PATH_LITERAL("Libraries"))
- .Append(FILE_PATH_LITERAL("libpeerconnection.so"));
-#elif defined(OS_ANDROID)
- path = path.Append(FILE_PATH_LITERAL("libpeerconnection.so"));
-#else
- path = path.Append(FILE_PATH_LITERAL("lib"))
- .Append(FILE_PATH_LITERAL("libpeerconnection.so"));
-#endif
- return path;
-}
-
-bool InitializeWebRtcModule() {
- TRACE_EVENT0("webrtc", "InitializeWebRtcModule");
-
- if (g_create_webrtc_media_engine)
- return true; // InitializeWebRtcModule has already been called.
-
- base::FilePath path(GetLibPeerConnectionPath());
- DVLOG(1) << "Loading WebRTC module: " << path.value();
-
- base::NativeLibraryLoadError error;
- static base::NativeLibrary lib = base::LoadNativeLibrary(path, &error);
-#if defined(OS_WIN)
- // We've been seeing problems on Windows with loading the DLL and we're
- // not sure exactly why. It could be that AV programs are quarantining the
- // file or disallowing loading the DLL. To get a better picture of the errors
- // we're checking these specific error codes.
- if (error.code == ERROR_MOD_NOT_FOUND) {
- // It's possible that we get this error due to failure to load other
- // dependencies, so check first that libpeerconnection actually exists.
- CHECK(base::PathExists(path)); // libpeerconnection itself is missing.
- CHECK(lib); // If we hit this, a dependency is missing.
- } else if (error.code == ERROR_ACCESS_DENIED) {
- CHECK(lib); // AV blocking access?
- }
-#endif
-
- // Catch-all error handler for all other sorts of errors.
- CHECK(lib) << error.ToString();
-
- InitializeModuleFunction initialize_module =
- reinterpret_cast<InitializeModuleFunction>(
- base::GetFunctionPointerFromNativeLibrary(
- lib, "InitializeModule"));
-
- // Initialize the proxy by supplying it with a pointer to our
- // allocator/deallocator routines.
- // On mac we use malloc zones, which are global, so we provide NULLs for
- // the alloc/dealloc functions.
- // PS: This function is actually implemented in allocator_proxy.cc with the
- // new/delete overrides.
- InitDiagnosticLoggingDelegateFunctionFunction init_diagnostic_logging = NULL;
- bool init_ok = initialize_module(*base::CommandLine::ForCurrentProcess(),
-#if !defined(OS_MACOSX) && !defined(OS_ANDROID)
- &Allocate,
- &Dellocate,
-#endif
- &webrtc::field_trial::FindFullName,
- &webrtc::metrics::HistogramFactoryGetCounts,
- &webrtc::metrics::HistogramFactoryGetEnumeration,
- &webrtc::metrics::HistogramAdd,
- logging::GetLogMessageHandler(),
- &GetCategoryGroupEnabled,
- &AddTraceEvent,
- &g_create_webrtc_media_engine,
- &g_destroy_webrtc_media_engine,
- &init_diagnostic_logging);
-
- if (init_ok)
- rtc::SetExtraLoggingInit(init_diagnostic_logging);
- return init_ok;
-}
-
-cricket::MediaEngineInterface* CreateWebRtcMediaEngine(
- webrtc::AudioDeviceModule* adm,
- webrtc::AudioDeviceModule* adm_sc,
- cricket::WebRtcVideoEncoderFactory* encoder_factory,
- cricket::WebRtcVideoDecoderFactory* decoder_factory) {
- // For convenience of tests etc, we call InitializeWebRtcModule here.
- // For Chrome however, InitializeWebRtcModule must be called
- // explicitly before the sandbox is initialized. In that case, this call is
- // effectively a noop.
- InitializeWebRtcModule();
- return g_create_webrtc_media_engine(adm, adm_sc, encoder_factory,
- decoder_factory);
-}
-
-void DestroyWebRtcMediaEngine(cricket::MediaEngineInterface* media_engine) {
- g_destroy_webrtc_media_engine(media_engine);
-}
-
-#endif // LIBPEERCONNECTION_LIB
diff --git a/chromium/third_party/libjingle/overrides/init_webrtc.h b/chromium/third_party/libjingle/overrides/init_webrtc.h
index 23a7f8a5533..4a7db383a15 100644
--- a/chromium/third_party/libjingle/overrides/init_webrtc.h
+++ b/chromium/third_party/libjingle/overrides/init_webrtc.h
@@ -5,82 +5,9 @@
#ifndef THIRD_PARTY_LIBJINGLE_OVERRIDES_INIT_WEBRTC_H_
#define THIRD_PARTY_LIBJINGLE_OVERRIDES_INIT_WEBRTC_H_
-#include <string>
-
-#include "third_party/libjingle/overrides/allocator_shim/allocator_stub.h"
-#include "base/logging.h"
-#include "third_party/webrtc/system_wrappers/interface/event_tracer.h"
-
-namespace base {
-class CommandLine;
-}
-
-namespace cricket {
-class MediaEngineInterface;
-class WebRtcVideoDecoderFactory;
-class WebRtcVideoEncoderFactory;
-} // namespace cricket
-
-namespace webrtc {
-class AudioDeviceModule;
-namespace metrics {
-class Histogram;
-} // namespace metrics
-} // namespace webrtc
-
-typedef std::string (*FieldTrialFindFullName)(const std::string& trial_name);
-
-typedef webrtc::metrics::Histogram* (*RtcHistogramFactoryGetCounts)(
- const std::string& name, int min, int max, int bucket_count);
-typedef webrtc::metrics::Histogram* (*RtcHistogramFactoryGetEnumeration)(
- const std::string& name, int boundary);
-typedef void (*RtcHistogramAdd)(
- webrtc::metrics::Histogram* histogram_pointer,
- const std::string& name,
- int sample);
-
-typedef cricket::MediaEngineInterface* (*CreateWebRtcMediaEngineFunction)(
- webrtc::AudioDeviceModule* adm,
- webrtc::AudioDeviceModule* adm_sc,
- cricket::WebRtcVideoEncoderFactory* encoder_factory,
- cricket::WebRtcVideoDecoderFactory* decoder_factory);
-
-typedef void (*DestroyWebRtcMediaEngineFunction)(
- cricket::MediaEngineInterface* media_engine);
-
-typedef void (*InitDiagnosticLoggingDelegateFunctionFunction)(
- void (*DelegateFunction)(const std::string&));
-
-// A typedef for the main initialize function in libpeerconnection.
-// This will initialize logging in the module with the proper arguments
-// as well as provide pointers back to a couple webrtc factory functions.
-// The reason we get pointers to these functions this way is to avoid having
-// to go through GetProcAddress et al and rely on specific name mangling.
-// TODO(tommi): The number of functions is growing. Use a struct.
-typedef bool (*InitializeModuleFunction)(
- const base::CommandLine& command_line,
-#if !defined(OS_MACOSX) && !defined(OS_ANDROID)
- AllocateFunction alloc,
- DellocateFunction dealloc,
-#endif
- FieldTrialFindFullName field_trial_find,
- RtcHistogramFactoryGetCounts factory_get_counts,
- RtcHistogramFactoryGetEnumeration factory_get_enumeration,
- RtcHistogramAdd histogram_add,
- logging::LogMessageHandlerFunction log_handler,
- webrtc::GetCategoryEnabledPtr trace_get_category_enabled,
- webrtc::AddTraceEventPtr trace_add_trace_event,
- CreateWebRtcMediaEngineFunction* create_media_engine,
- DestroyWebRtcMediaEngineFunction* destroy_media_engine,
- InitDiagnosticLoggingDelegateFunctionFunction* init_diagnostic_logging);
-
-#if !defined(LIBPEERCONNECTION_IMPLEMENTATION)
-// Load and initialize the shared WebRTC module (libpeerconnection).
-// Call this explicitly to load and initialize the WebRTC module (e.g. before
-// initializing the sandbox in Chrome).
-// If not called explicitly, this function will still be called from the main
-// CreateWebRtcMediaEngine factory function the first time it is called.
+// Initialize WebRTC. Call this explicitly to initialize WebRTC module
+// (before initializing the sandbox in Chrome) and hook up Chrome+WebRTC
+// integration such as common logging and tracing.
bool InitializeWebRtcModule();
-#endif
#endif // THIRD_PARTY_LIBJINGLE_OVERRIDES_INIT_WEBRTC_H_
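Note: after this change the header exposes a single entry point and no longer deals with loading libpeerconnection. A minimal usage sketch follows; the wrapper function name is hypothetical and only InitializeWebRtcModule() comes from the header above.

#include "third_party/libjingle/overrides/init_webrtc.h"

// Hypothetical call site: in Chrome this should run before the sandbox is
// initialized so WebRTC's tracing hooks are wired up in time.
void PreSandboxWebRtcSetup() {
  // With libpeerconnection now linked statically, this call only registers
  // Chromium's trace-event callbacks with WebRTC and returns true.
  bool ok = InitializeWebRtcModule();
  (void)ok;
}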
diff --git a/chromium/third_party/libjingle/overrides/initialize_module.cc b/chromium/third_party/libjingle/overrides/initialize_module.cc
deleted file mode 100644
index 89671c5252d..00000000000
--- a/chromium/third_party/libjingle/overrides/initialize_module.cc
+++ /dev/null
@@ -1,147 +0,0 @@
-// Copyright 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#if defined(OS_WIN) && defined(ARCH_CPU_X86_64)
-#include <math.h> // needed for _set_FMA3_enable
-#endif // WIN && ARCH_CPU_X86_64
-
-#include "base/command_line.h"
-#include "base/files/file_path.h"
-#include "base/logging.h"
-#include "third_party/libjingle/source/talk/media/webrtc/webrtcmediaengine.h"
-#include "third_party/libjingle/allocator_shim/allocator_stub.h"
-#include "third_party/libjingle/overrides/init_webrtc.h"
-#include "third_party/webrtc/base/basictypes.h"
-#include "third_party/webrtc/base/logging.h"
-
-#if !defined(LIBPEERCONNECTION_IMPLEMENTATION) || defined(LIBPEERCONNECTION_LIB)
-#error "Only compile the allocator proxy with the shared_library implementation"
-#endif
-
-#if defined(OS_WIN)
-#define ALLOC_EXPORT __declspec(dllexport)
-#else
-#define ALLOC_EXPORT __attribute__((visibility("default")))
-#endif
-
-#if !defined(OS_MACOSX) && !defined(OS_ANDROID)
-// These are used by our new/delete overrides in
-// allocator_shim/allocator_proxy.cc
-AllocateFunction g_alloc = NULL;
-DellocateFunction g_dealloc = NULL;
-#endif
-
-// Forward declare of the libjingle internal factory and destroy methods for the
-// WebRTC media engine.
-cricket::MediaEngineInterface* CreateWebRtcMediaEngine(
- webrtc::AudioDeviceModule* adm,
- webrtc::AudioDeviceModule* adm_sc,
- cricket::WebRtcVideoEncoderFactory* encoder_factory,
- cricket::WebRtcVideoDecoderFactory* decoder_factory);
-
-void DestroyWebRtcMediaEngine(cricket::MediaEngineInterface* media_engine);
-
-namespace {
-// Provide webrtc with a field trial and metrics implementations.
-// The implementations are provided by the loader via the InitializeModule.
-
-// Defines webrtc::field_trial::FindFullName.
-FieldTrialFindFullName g_field_trial_find_ = NULL;
-// Defines webrtc::metrics::RtcFactoryGetCounts.
-RtcHistogramFactoryGetCounts g_factory_get_counts = NULL;
-// Defines webrtc::metrics::RtcFactoryGetEnumeration.
-RtcHistogramFactoryGetEnumeration g_factory_get_enumeration = NULL;
-// Defines webrtc::metrics::RtcAdd.
-RtcHistogramAdd g_histogram_add = NULL;
-}
-
-namespace webrtc {
-namespace field_trial {
-std::string FindFullName(const std::string& trial_name) {
- return g_field_trial_find_(trial_name);
-}
-} // namespace field_trial
-
-namespace metrics {
-Histogram* HistogramFactoryGetCounts(
- const std::string& name, int min, int max, int bucket_count) {
- return g_factory_get_counts(name, min, max, bucket_count);
-}
-
-Histogram* HistogramFactoryGetEnumeration(
- const std::string& name, int boundary) {
- return g_factory_get_enumeration(name, boundary);
-}
-
-void HistogramAdd(
- Histogram* histogram_pointer, const std::string& name, int sample) {
- g_histogram_add(histogram_pointer, name, sample);
-}
-} // namespace metrics
-} // namespace webrtc
-
-extern "C" {
-
-// Initialize logging, set the forward allocator functions (not on mac), and
-// return pointers to libjingle's WebRTC factory methods.
-// Called from init_webrtc.cc.
-ALLOC_EXPORT
-bool InitializeModule(const base::CommandLine& command_line,
-#if !defined(OS_MACOSX) && !defined(OS_ANDROID)
- AllocateFunction alloc,
- DellocateFunction dealloc,
-#endif
- FieldTrialFindFullName field_trial_find,
- RtcHistogramFactoryGetCounts factory_get_counts,
- RtcHistogramFactoryGetEnumeration factory_get_enumeration,
- RtcHistogramAdd histogram_add,
- logging::LogMessageHandlerFunction log_handler,
- webrtc::GetCategoryEnabledPtr trace_get_category_enabled,
- webrtc::AddTraceEventPtr trace_add_trace_event,
- CreateWebRtcMediaEngineFunction* create_media_engine,
- DestroyWebRtcMediaEngineFunction* destroy_media_engine,
- InitDiagnosticLoggingDelegateFunctionFunction*
- init_diagnostic_logging) {
-#if !defined(OS_MACOSX) && !defined(OS_ANDROID)
- g_alloc = alloc;
- g_dealloc = dealloc;
-#endif
-
- g_field_trial_find_ = field_trial_find;
- g_factory_get_counts = factory_get_counts;
- g_factory_get_enumeration = factory_get_enumeration;
- g_histogram_add = histogram_add;
-
- *create_media_engine = &CreateWebRtcMediaEngine;
- *destroy_media_engine = &DestroyWebRtcMediaEngine;
- *init_diagnostic_logging = &rtc::InitDiagnosticLoggingDelegateFunction;
-
-#if defined(OS_WIN) && defined(ARCH_CPU_X86_64)
- // VS2013 only checks the existence of FMA3 instructions, not the enabled-ness
- // of them at the OS level (this is fixed in VS2015). We force off usage of
- // FMA3 instructions in the CRT to avoid using that path and hitting illegal
- // instructions when running on CPUs that support FMA3, but OSs that don't.
- // See http://crbug.com/436603 and http://crbug.com/446983.
- _set_FMA3_enable(0);
-#endif // WIN && ARCH_CPU_X86_64
-
- if (base::CommandLine::Init(0, NULL)) {
-#if !defined(OS_WIN)
- // This is not needed on Windows since CommandLine::Init has already
- // done the equivalent thing via the GetCommandLine() API.
- base::CommandLine::ForCurrentProcess()->AppendArguments(command_line, true);
-#endif
- logging::LoggingSettings settings;
- settings.logging_dest = logging::LOG_TO_SYSTEM_DEBUG_LOG;
- logging::InitLogging(settings);
-
- // Override the log message handler to forward logs to chrome's handler.
- logging::SetLogMessageHandler(log_handler);
- webrtc::SetupEventTracer(trace_get_category_enabled,
- trace_add_trace_event);
- }
-
- return true;
-}
-} // extern "C"
diff --git a/chromium/third_party/libjingle/overrides/talk/media/webrtc/webrtcexport.h b/chromium/third_party/libjingle/overrides/talk/media/webrtc/webrtcexport.h
deleted file mode 100644
index ffa60bb860c..00000000000
--- a/chromium/third_party/libjingle/overrides/talk/media/webrtc/webrtcexport.h
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This file is overridden to disable exports and imports in libjingle
-// between the libpeerconnection and libjingle_webrtc targets.
-// TODO(tommi): Remove when a version of libjingle has been rolled in that
-// either removes this header file or offers an easy way to turn this off.
-
-#ifndef TALK_MEDIA_WEBRTC_WEBRTCEXPORT_H_
-#define TALK_MEDIA_WEBRTC_WEBRTCEXPORT_H_
-
-#ifndef NON_EXPORTED_BASE
-#define NON_EXPORTED_BASE(code) code
-#endif // NON_EXPORTED_BASE
-
-#ifndef WRME_EXPORT
-#define WRME_EXPORT
-#endif
-
-#endif // TALK_MEDIA_WEBRTC_WEBRTCEXPORT_H_
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/androidtests/jni/Android.mk b/chromium/third_party/libjingle/source/talk/app/webrtc/androidtests/jni/Android.mk
deleted file mode 100644
index 8e80160039b..00000000000
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/androidtests/jni/Android.mk
+++ /dev/null
@@ -1,2 +0,0 @@
-# This space intentionally left blank (required for Android build system).
-
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/androidvideocapturer.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/androidvideocapturer.cc
index 89ab4869699..65f883ef9d0 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/androidvideocapturer.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/androidvideocapturer.cc
@@ -89,10 +89,10 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
// |captured_frame_.data| is only guaranteed to be valid during the scope
// of |AndroidVideoCapturer::OnIncomingFrame_w|.
// Check that captured_frame is actually our frame.
- DCHECK(captured_frame == &captured_frame_);
+ CHECK(captured_frame == &captured_frame_);
if (!apply_rotation_ || captured_frame->rotation == kVideoRotation_0) {
- DCHECK(captured_frame->fourcc == cricket::FOURCC_YV12);
+ CHECK(captured_frame->fourcc == cricket::FOURCC_YV12);
const uint8_t* y_plane = static_cast<uint8_t*>(captured_frame_.data);
// Android guarantees that the stride is a multiple of 16.
@@ -160,7 +160,7 @@ AndroidVideoCapturer::AndroidVideoCapturer(
std::vector<cricket::VideoFormat> formats;
for (Json::ArrayIndex i = 0; i < json_values.size(); ++i) {
const Json::Value& json_value = json_values[i];
- DCHECK(!json_value["width"].isNull() && !json_value["height"].isNull() &&
+ CHECK(!json_value["width"].isNull() && !json_value["height"].isNull() &&
!json_value["framerate"].isNull());
cricket::VideoFormat format(
json_value["width"].asInt(),
@@ -173,16 +173,16 @@ AndroidVideoCapturer::AndroidVideoCapturer(
}
AndroidVideoCapturer::~AndroidVideoCapturer() {
- DCHECK(!running_);
+ CHECK(!running_);
}
cricket::CaptureState AndroidVideoCapturer::Start(
const cricket::VideoFormat& capture_format) {
- DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK(!running_);
-
LOG(LS_INFO) << " AndroidVideoCapturer::Start w = " << capture_format.width
<< " h = " << capture_format.height;
+ CHECK(thread_checker_.CalledOnValidThread());
+ CHECK(!running_);
+
frame_factory_ = new AndroidVideoCapturer::FrameFactory(
capture_format.width, capture_format.height, delegate_.get());
set_frame_factory(frame_factory_);
@@ -197,9 +197,9 @@ cricket::CaptureState AndroidVideoCapturer::Start(
}
void AndroidVideoCapturer::Stop() {
- DCHECK(thread_checker_.CalledOnValidThread());
LOG(LS_INFO) << " AndroidVideoCapturer::Stop ";
- DCHECK(running_);
+ CHECK(thread_checker_.CalledOnValidThread());
+ CHECK(running_);
running_ = false;
SetCaptureFormat(NULL);
@@ -209,18 +209,18 @@ void AndroidVideoCapturer::Stop() {
}
bool AndroidVideoCapturer::IsRunning() {
- DCHECK(thread_checker_.CalledOnValidThread());
+ CHECK(thread_checker_.CalledOnValidThread());
return running_;
}
bool AndroidVideoCapturer::GetPreferredFourccs(std::vector<uint32>* fourccs) {
- DCHECK(thread_checker_.CalledOnValidThread());
+ CHECK(thread_checker_.CalledOnValidThread());
fourccs->push_back(cricket::FOURCC_YV12);
return true;
}
void AndroidVideoCapturer::OnCapturerStarted(bool success) {
- DCHECK(thread_checker_.CalledOnValidThread());
+ CHECK(thread_checker_.CalledOnValidThread());
cricket::CaptureState new_state =
success ? cricket::CS_RUNNING : cricket::CS_FAILED;
if (new_state == current_state_)
@@ -237,9 +237,18 @@ void AndroidVideoCapturer::OnIncomingFrame(void* frame_data,
int length,
int rotation,
int64 time_stamp) {
- DCHECK(thread_checker_.CalledOnValidThread());
+ CHECK(thread_checker_.CalledOnValidThread());
frame_factory_->UpdateCapturedFrame(frame_data, length, rotation, time_stamp);
SignalFrameCaptured(this, frame_factory_->GetCapturedFrame());
}
+void AndroidVideoCapturer::OnOutputFormatRequest(
+ int width, int height, int fps) {
+ CHECK(thread_checker_.CalledOnValidThread());
+ const cricket::VideoFormat& current = video_adapter()->output_format();
+ cricket::VideoFormat format(
+ width, height, cricket::VideoFormat::FpsToInterval(fps), current.fourcc);
+ video_adapter()->OnOutputFormatRequest(format);
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/androidvideocapturer.h b/chromium/third_party/libjingle/source/talk/app/webrtc/androidvideocapturer.h
index 2dffb9c1414..2cfbdd808a1 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/androidvideocapturer.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/androidvideocapturer.h
@@ -74,6 +74,9 @@ class AndroidVideoCapturer : public cricket::VideoCapturer {
int rotation,
int64 time_stamp);
+ // Called from JNI to request a new video format.
+ void OnOutputFormatRequest(int width, int height, int fps);
+
AndroidVideoCapturerDelegate* delegate() { return delegate_.get(); }
private:
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.cc
index 1897b73fc88..690ee65d3b7 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.cc
@@ -109,24 +109,26 @@ DataChannel::DataChannel(
state_(kConnecting),
data_channel_type_(dct),
provider_(provider),
- waiting_for_open_ack_(false),
- was_ever_writable_(false),
+ handshake_state_(kHandshakeInit),
connected_to_provider_(false),
send_ssrc_set_(false),
receive_ssrc_set_(false),
+ writable_(false),
send_ssrc_(0),
receive_ssrc_(0) {
}
bool DataChannel::Init(const InternalDataChannelInit& config) {
- if (data_channel_type_ == cricket::DCT_RTP &&
- (config.reliable ||
- config.id != -1 ||
- config.maxRetransmits != -1 ||
- config.maxRetransmitTime != -1)) {
- LOG(LS_ERROR) << "Failed to initialize the RTP data channel due to "
- << "invalid DataChannelInit.";
- return false;
+ if (data_channel_type_ == cricket::DCT_RTP) {
+ if (config.reliable ||
+ config.id != -1 ||
+ config.maxRetransmits != -1 ||
+ config.maxRetransmitTime != -1) {
+ LOG(LS_ERROR) << "Failed to initialize the RTP data channel due to "
+ << "invalid DataChannelInit.";
+ return false;
+ }
+ handshake_state_ = kHandshakeReady;
} else if (data_channel_type_ == cricket::DCT_SCTP) {
if (config.id < -1 ||
config.maxRetransmits < -1 ||
@@ -142,6 +144,18 @@ bool DataChannel::Init(const InternalDataChannelInit& config) {
}
config_ = config;
+ switch (config_.open_handshake_role) {
+ case webrtc::InternalDataChannelInit::kNone: // pre-negotiated
+ handshake_state_ = kHandshakeReady;
+ break;
+ case webrtc::InternalDataChannelInit::kOpener:
+ handshake_state_ = kHandshakeShouldSendOpen;
+ break;
+ case webrtc::InternalDataChannelInit::kAcker:
+ handshake_state_ = kHandshakeShouldSendAck;
+ break;
+ };
+
// Try to connect to the transport in case the transport channel already
// exists.
OnTransportChannelCreated();
@@ -298,7 +312,7 @@ void DataChannel::OnDataReceived(cricket::DataChannel* channel,
if (params.type == cricket::DMT_CONTROL) {
ASSERT(data_channel_type_ == cricket::DCT_SCTP);
- if (!waiting_for_open_ack_) {
+ if (handshake_state_ != kHandshakeWaitingForAck) {
// Ignore it if we are not expecting an ACK message.
LOG(LS_WARNING) << "DataChannel received unexpected CONTROL message, "
<< "sid = " << params.ssrc;
@@ -306,7 +320,7 @@ void DataChannel::OnDataReceived(cricket::DataChannel* channel,
}
if (ParseDataChannelOpenAckMessage(payload)) {
// We can send unordered as soon as we receive the ACK message.
- waiting_for_open_ack_ = false;
+ handshake_state_ = kHandshakeReady;
LOG(LS_INFO) << "DataChannel received OPEN_ACK message, sid = "
<< params.ssrc;
} else {
@@ -323,11 +337,13 @@ void DataChannel::OnDataReceived(cricket::DataChannel* channel,
// We can send unordered as soon as we receive any DATA message since the
// remote side must have received the OPEN (and old clients do not send
// OPEN_ACK).
- waiting_for_open_ack_ = false;
+ if (handshake_state_ == kHandshakeWaitingForAck) {
+ handshake_state_ = kHandshakeReady;
+ }
bool binary = (params.type == cricket::DMT_BINARY);
rtc::scoped_ptr<DataBuffer> buffer(new DataBuffer(payload, binary));
- if (was_ever_writable_ && observer_) {
+ if (state_ == kOpen && observer_) {
observer_->OnMessage(*buffer.get());
} else {
if (queued_received_data_.byte_count() + payload.size() >
@@ -346,38 +362,14 @@ void DataChannel::OnDataReceived(cricket::DataChannel* channel,
}
void DataChannel::OnChannelReady(bool writable) {
+ writable_ = writable;
if (!writable) {
return;
}
- // Update the readyState and send the queued control message if the channel
- // is writable for the first time; otherwise it means the channel was blocked
- // for sending and now unblocked, so send the queued data now.
- if (!was_ever_writable_) {
- was_ever_writable_ = true;
-
- if (data_channel_type_ == cricket::DCT_SCTP) {
- rtc::Buffer payload;
-
- if (config_.open_handshake_role == InternalDataChannelInit::kOpener) {
- WriteDataChannelOpenMessage(label_, config_, &payload);
- SendControlMessage(payload);
- } else if (config_.open_handshake_role ==
- InternalDataChannelInit::kAcker) {
- WriteDataChannelOpenAckMessage(&payload);
- SendControlMessage(payload);
- }
- }
-
- UpdateState();
- ASSERT(queued_send_data_.Empty());
- } else if (state_ == kOpen) {
- // TODO(jiayl): Sending OPEN message here contradicts with the pre-condition
- // that the readyState is open. According to the standard, the channel
- // should not become open before the OPEN message is sent.
- SendQueuedControlMessages();
- SendQueuedDataMessages();
- }
+ SendQueuedControlMessages();
+ SendQueuedDataMessages();
+ UpdateState();
}
void DataChannel::DoClose() {
@@ -391,20 +383,34 @@ void DataChannel::DoClose() {
}
void DataChannel::UpdateState() {
+ // UpdateState determines what to do from a few state variables. Include
+ // all conditions required for each state transition here for
+ // clarity. OnChannelReady(true) will send any queued data and then invoke
+ // UpdateState().
switch (state_) {
case kConnecting: {
if (send_ssrc_set_ == receive_ssrc_set_) {
if (data_channel_type_ == cricket::DCT_RTP && !connected_to_provider_) {
connected_to_provider_ = provider_->ConnectDataChannel(this);
}
- if (was_ever_writable_) {
- // TODO(jiayl): Do not transition to kOpen if we failed to send the
- // OPEN message.
- SendQueuedControlMessages();
- SetState(kOpen);
- // If we have received buffers before the channel got writable.
- // Deliver them now.
- DeliverQueuedReceivedData();
+ if (connected_to_provider_) {
+ if (handshake_state_ == kHandshakeShouldSendOpen) {
+ rtc::Buffer payload;
+ WriteDataChannelOpenMessage(label_, config_, &payload);
+ SendControlMessage(payload);
+ } else if (handshake_state_ == kHandshakeShouldSendAck) {
+ rtc::Buffer payload;
+ WriteDataChannelOpenAckMessage(&payload);
+ SendControlMessage(payload);
+ }
+ if (writable_ &&
+ (handshake_state_ == kHandshakeReady ||
+ handshake_state_ == kHandshakeWaitingForAck)) {
+ SetState(kOpen);
+ // If we have received buffers before the channel got writable.
+ // Deliver them now.
+ DeliverQueuedReceivedData();
+ }
}
}
break;
@@ -413,10 +419,14 @@ void DataChannel::UpdateState() {
break;
}
case kClosing: {
- DisconnectFromTransport();
+ if (queued_send_data_.Empty() && queued_control_data_.Empty()) {
+ if (connected_to_provider_) {
+ DisconnectFromProvider();
+ }
- if (!send_ssrc_set_ && !receive_ssrc_set_) {
- SetState(kClosed);
+ if (!connected_to_provider_ && !send_ssrc_set_ && !receive_ssrc_set_) {
+ SetState(kClosed);
+ }
}
break;
}
@@ -435,7 +445,7 @@ void DataChannel::SetState(DataState state) {
}
}
-void DataChannel::DisconnectFromTransport() {
+void DataChannel::DisconnectFromProvider() {
if (!connected_to_provider_)
return;
@@ -448,7 +458,7 @@ void DataChannel::DisconnectFromTransport() {
}
void DataChannel::DeliverQueuedReceivedData() {
- if (!was_ever_writable_ || !observer_) {
+ if (!observer_) {
return;
}
@@ -460,8 +470,13 @@ void DataChannel::DeliverQueuedReceivedData() {
}
void DataChannel::SendQueuedDataMessages() {
- ASSERT(was_ever_writable_ && state_ == kOpen);
+ if (queued_send_data_.Empty()) {
+ return;
+ }
+ ASSERT(state_ == kOpen || state_ == kClosing);
+
+ uint64 start_buffered_amount = buffered_amount();
while (!queued_send_data_.Empty()) {
DataBuffer* buffer = queued_send_data_.Front();
if (!SendDataMessage(*buffer, false)) {
@@ -471,6 +486,10 @@ void DataChannel::SendQueuedDataMessages() {
queued_send_data_.Pop();
delete buffer;
}
+
+ if (observer_ && buffered_amount() < start_buffered_amount) {
+ observer_->OnBufferedAmountChange(start_buffered_amount);
+ }
}
bool DataChannel::SendDataMessage(const DataBuffer& buffer,
@@ -479,8 +498,8 @@ bool DataChannel::SendDataMessage(const DataBuffer& buffer,
if (data_channel_type_ == cricket::DCT_SCTP) {
send_params.ordered = config_.ordered;
- // Send as ordered if it is waiting for the OPEN_ACK message.
- if (waiting_for_open_ack_ && !config_.ordered) {
+ // Send as ordered if it is still going through OPEN/ACK signaling.
+ if (handshake_state_ != kHandshakeReady && !config_.ordered) {
send_params.ordered = true;
LOG(LS_VERBOSE) << "Sending data as ordered for unordered DataChannel "
<< "because the OPEN_ACK message has not been received.";
@@ -520,17 +539,21 @@ bool DataChannel::SendDataMessage(const DataBuffer& buffer,
}
bool DataChannel::QueueSendDataMessage(const DataBuffer& buffer) {
- if (queued_send_data_.byte_count() >= kMaxQueuedSendDataBytes) {
+ size_t start_buffered_amount = buffered_amount();
+ if (start_buffered_amount >= kMaxQueuedSendDataBytes) {
LOG(LS_ERROR) << "Can't buffer any more data for the data channel.";
return false;
}
queued_send_data_.Push(new DataBuffer(buffer));
+
+ // The buffer can have length zero, in which case there is no change.
+ if (observer_ && buffered_amount() > start_buffered_amount) {
+ observer_->OnBufferedAmountChange(start_buffered_amount);
+ }
return true;
}
void DataChannel::SendQueuedControlMessages() {
- ASSERT(was_ever_writable_);
-
PacketQueue control_packets;
control_packets.Swap(&queued_control_data_);
@@ -546,16 +569,18 @@ void DataChannel::QueueControlMessage(const rtc::Buffer& buffer) {
}
bool DataChannel::SendControlMessage(const rtc::Buffer& buffer) {
- bool is_open_message =
- (config_.open_handshake_role == InternalDataChannelInit::kOpener);
+ bool is_open_message = handshake_state_ == kHandshakeShouldSendOpen;
ASSERT(data_channel_type_ == cricket::DCT_SCTP &&
- was_ever_writable_ &&
+ writable_ &&
config_.id >= 0 &&
(!is_open_message || !config_.negotiated));
cricket::SendDataParams send_params;
send_params.ssrc = config_.id;
+ // Send data as ordered before we receive any message from the remote peer to
+ // make sure the remote peer will not receive any data before it receives the
+ // OPEN message.
send_params.ordered = config_.ordered || is_open_message;
send_params.type = cricket::DMT_CONTROL;
@@ -564,11 +589,10 @@ bool DataChannel::SendControlMessage(const rtc::Buffer& buffer) {
if (retval) {
LOG(LS_INFO) << "Sent CONTROL message on channel " << config_.id;
- if (is_open_message) {
- // Send data as ordered before we receive any message from the remote peer
- // to make sure the remote peer will not receive any data before it
- // receives the OPEN message.
- waiting_for_open_ack_ = true;
+ if (handshake_state_ == kHandshakeShouldSendAck) {
+ handshake_state_ = kHandshakeReady;
+ } else if (handshake_state_ == kHandshakeShouldSendOpen) {
+ handshake_state_ = kHandshakeWaitingForAck;
}
} else if (send_result == cricket::SDR_BLOCK) {
QueueControlMessage(buffer);
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.h b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.h
index fe8fac1c67f..8e58d0664b2 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel.h
@@ -204,11 +204,20 @@ class DataChannel : public DataChannelInterface,
size_t byte_count_;
};
+ // The OPEN(_ACK) signaling state.
+ enum HandshakeState {
+ kHandshakeInit,
+ kHandshakeShouldSendOpen,
+ kHandshakeShouldSendAck,
+ kHandshakeWaitingForAck,
+ kHandshakeReady
+ };
+
bool Init(const InternalDataChannelInit& config);
void DoClose();
void UpdateState();
void SetState(DataState state);
- void DisconnectFromTransport();
+ void DisconnectFromProvider();
void DeliverQueuedReceivedData();
@@ -226,11 +235,11 @@ class DataChannel : public DataChannelInterface,
DataState state_;
cricket::DataChannelType data_channel_type_;
DataChannelProviderInterface* provider_;
- bool waiting_for_open_ack_;
- bool was_ever_writable_;
+ HandshakeState handshake_state_;
bool connected_to_provider_;
bool send_ssrc_set_;
bool receive_ssrc_set_;
+ bool writable_;
uint32 send_ssrc_;
uint32 receive_ssrc_;
// Control messages that always have to get sent out before any queued
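The datachannel.h change replaces the waiting_for_open_ack_/was_ever_writable_ booleans with an explicit OPEN/OPEN_ACK handshake state machine. A self-contained sketch of its key transitions, mirroring the enum above and the logic in datachannel.cc (illustrative only, not part of the patch; the standalone enums simply copy the names from the diff):

// Roles from InternalDataChannelInit and states from DataChannel, copied
// from the diff above into standalone enums for illustration.
enum OpenHandshakeRole { kNone, kOpener, kAcker };
enum HandshakeState {
  kHandshakeInit,
  kHandshakeShouldSendOpen,
  kHandshakeShouldSendAck,
  kHandshakeWaitingForAck,
  kHandshakeReady
};

// Initial state chosen in DataChannel::Init().
HandshakeState InitialState(OpenHandshakeRole role) {
  switch (role) {
    case kNone:   return kHandshakeReady;           // pre-negotiated channel
    case kOpener: return kHandshakeShouldSendOpen;  // this side sends OPEN
    case kAcker:  return kHandshakeShouldSendAck;   // this side replies with OPEN_ACK
  }
  return kHandshakeInit;
}

// Transition applied after a control message is successfully sent
// (see DataChannel::SendControlMessage() in datachannel.cc above).
HandshakeState AfterControlSent(HandshakeState state) {
  if (state == kHandshakeShouldSendOpen) return kHandshakeWaitingForAck;
  if (state == kHandshakeShouldSendAck)  return kHandshakeReady;
  return state;
}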
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel_unittest.cc
index ab5dbe9a1b5..e3c290bd9b5 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannel_unittest.cc
@@ -35,12 +35,18 @@ using webrtc::DataChannel;
class FakeDataChannelObserver : public webrtc::DataChannelObserver {
public:
FakeDataChannelObserver()
- : messages_received_(0), on_state_change_count_(0) {}
+ : messages_received_(0),
+ on_state_change_count_(0),
+ on_buffered_amount_change_count_(0) {}
void OnStateChange() {
++on_state_change_count_;
}
+ void OnBufferedAmountChange(uint64 previous_amount) {
+ ++on_buffered_amount_change_count_;
+ }
+
void OnMessage(const webrtc::DataBuffer& buffer) {
++messages_received_;
}
@@ -53,13 +59,22 @@ class FakeDataChannelObserver : public webrtc::DataChannelObserver {
on_state_change_count_ = 0;
}
+ void ResetOnBufferedAmountChangeCount() {
+ on_buffered_amount_change_count_ = 0;
+ }
+
size_t on_state_change_count() const {
return on_state_change_count_;
}
+ size_t on_buffered_amount_change_count() const {
+ return on_buffered_amount_change_count_;
+ }
+
private:
size_t messages_received_;
size_t on_state_change_count_;
+ size_t on_buffered_amount_change_count_;
};
class SctpDataChannelTest : public testing::Test {
@@ -133,11 +148,13 @@ TEST_F(SctpDataChannelTest, StateTransition) {
// Tests that DataChannel::buffered_amount() is correct after the channel is
// blocked.
TEST_F(SctpDataChannelTest, BufferedAmountWhenBlocked) {
+ AddObserver();
SetChannelReady();
webrtc::DataBuffer buffer("abcd");
EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
EXPECT_EQ(0U, webrtc_data_channel_->buffered_amount());
+ EXPECT_EQ(0U, observer_->on_buffered_amount_change_count());
provider_.set_send_blocked(true);
@@ -147,37 +164,46 @@ TEST_F(SctpDataChannelTest, BufferedAmountWhenBlocked) {
}
EXPECT_EQ(buffer.data.size() * number_of_packets,
webrtc_data_channel_->buffered_amount());
+ EXPECT_EQ(number_of_packets, observer_->on_buffered_amount_change_count());
}
// Tests that the queued data are sent when the channel transitions from blocked
// to unblocked.
TEST_F(SctpDataChannelTest, QueuedDataSentWhenUnblocked) {
+ AddObserver();
SetChannelReady();
webrtc::DataBuffer buffer("abcd");
provider_.set_send_blocked(true);
EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+ EXPECT_EQ(1U, observer_->on_buffered_amount_change_count());
+
provider_.set_send_blocked(false);
SetChannelReady();
EXPECT_EQ(0U, webrtc_data_channel_->buffered_amount());
+ EXPECT_EQ(2U, observer_->on_buffered_amount_change_count());
}
// Tests that there is no crash when the channel is blocked right away while
// trying to send queued data.
TEST_F(SctpDataChannelTest, BlockedWhenSendQueuedDataNoCrash) {
+ AddObserver();
SetChannelReady();
webrtc::DataBuffer buffer("abcd");
provider_.set_send_blocked(true);
EXPECT_TRUE(webrtc_data_channel_->Send(buffer));
+ EXPECT_EQ(1U, observer_->on_buffered_amount_change_count());
// Set channel ready while it is still blocked.
SetChannelReady();
EXPECT_EQ(buffer.size(), webrtc_data_channel_->buffered_amount());
+ EXPECT_EQ(1U, observer_->on_buffered_amount_change_count());
// Unblock the channel to send queued data again, there should be no crash.
provider_.set_send_blocked(false);
SetChannelReady();
EXPECT_EQ(0U, webrtc_data_channel_->buffered_amount());
+ EXPECT_EQ(2U, observer_->on_buffered_amount_change_count());
}
// Tests that the queued control message is sent when channel is ready.
@@ -269,6 +295,41 @@ TEST_F(SctpDataChannelTest, SendUnorderedAfterReceiveData) {
EXPECT_FALSE(provider_.last_send_data_params().ordered);
}
+// Tests that the channel can't open until it's successfully sent the OPEN
+// message.
+TEST_F(SctpDataChannelTest, OpenWaitsForOpenMesssage) {
+ webrtc::DataBuffer buffer("foo");
+
+ provider_.set_send_blocked(true);
+ SetChannelReady();
+ EXPECT_EQ(webrtc::DataChannelInterface::kConnecting,
+ webrtc_data_channel_->state());
+ provider_.set_send_blocked(false);
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen,
+ webrtc_data_channel_->state(), 1000);
+ EXPECT_EQ(cricket::DMT_CONTROL, provider_.last_send_data_params().type);
+}
+
+// Tests that Close() first makes sure all queued data gets sent.
+TEST_F(SctpDataChannelTest, QueuedCloseFlushes) {
+ webrtc::DataBuffer buffer("foo");
+
+ provider_.set_send_blocked(true);
+ SetChannelReady();
+ EXPECT_EQ(webrtc::DataChannelInterface::kConnecting,
+ webrtc_data_channel_->state());
+ provider_.set_send_blocked(false);
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kOpen,
+ webrtc_data_channel_->state(), 1000);
+ provider_.set_send_blocked(true);
+ webrtc_data_channel_->Send(buffer);
+ webrtc_data_channel_->Close();
+ provider_.set_send_blocked(false);
+ EXPECT_EQ_WAIT(webrtc::DataChannelInterface::kClosed,
+ webrtc_data_channel_->state(), 1000);
+ EXPECT_EQ(cricket::DMT_TEXT, provider_.last_send_data_params().type);
+}
+
// Tests that messages are sent with the right ssrc.
TEST_F(SctpDataChannelTest, SendDataSsrc) {
webrtc_data_channel_->SetSctpSid(1);
@@ -369,8 +430,9 @@ TEST_F(SctpDataChannelTest, ClosedWhenSendBufferFull) {
EXPECT_TRUE(webrtc_data_channel_->Send(packet));
}
- EXPECT_EQ(webrtc::DataChannelInterface::kClosed,
- webrtc_data_channel_->state());
+ EXPECT_TRUE(
+ webrtc::DataChannelInterface::kClosed == webrtc_data_channel_->state() ||
+ webrtc::DataChannelInterface::kClosing == webrtc_data_channel_->state());
}
// Tests that the DataChannel is closed on transport errors.
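
The new tests above rely on EXPECT_EQ_WAIT(expected, actual, timeout_ms) to poll for an asynchronous state change. A simplified standalone helper with the same shape, shown only to illustrate the idea; this is not the libjingle test macro.

    #include <chrono>
    #include <functional>
    #include <thread>

    // Polls |condition| until it is true or |timeout_ms| elapses.
    bool WaitUntil(const std::function<bool()>& condition, int timeout_ms) {
      const auto deadline =
          std::chrono::steady_clock::now() + std::chrono::milliseconds(timeout_ms);
      while (std::chrono::steady_clock::now() < deadline) {
        if (condition())
          return true;
        std::this_thread::sleep_for(std::chrono::milliseconds(1));
      }
      return condition();
    }
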
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannelinterface.h b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannelinterface.h
index 63122629f56..90573ebbf3e 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/datachannelinterface.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/datachannelinterface.h
@@ -91,6 +91,8 @@ class DataChannelObserver {
virtual void OnStateChange() = 0;
// A data buffer was successfully received.
virtual void OnMessage(const DataBuffer& buffer) = 0;
+ // The data channel's buffered_amount has changed.
+ virtual void OnBufferedAmountChange(uint64 previous_amount){};
protected:
virtual ~DataChannelObserver() {}
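
The new callback above is declared non-pure with an empty body, so observers written before this change keep compiling. A standalone mirror of that shape, not the real header; the real interface lives in datachannelinterface.h.

    #include <cstdint>
    #include <iostream>
    #include <string>

    class DataChannelObserverSketch {
     public:
      virtual ~DataChannelObserverSketch() = default;
      virtual void OnStateChange() = 0;
      virtual void OnMessage(const std::string& buffer) = 0;
      // New and optional: an empty default body keeps old subclasses building.
      virtual void OnBufferedAmountChange(uint64_t previous_amount) {}
    };

    class LoggingObserver : public DataChannelObserverSketch {
     public:
      void OnStateChange() override { std::cout << "state changed\n"; }
      void OnMessage(const std::string& buffer) override {
        std::cout << "message: " << buffer << "\n";
      }
      void OnBufferedAmountChange(uint64_t previous_amount) override {
        std::cout << "buffered amount was " << previous_amount << "\n";
      }
    };
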
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore.cc
index dd9bc5d68b5..554a84a06f1 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore.cc
@@ -31,7 +31,6 @@
#include "webrtc/base/logging.h"
using webrtc::DTLSIdentityRequestObserver;
-using webrtc::WebRtcSessionDescriptionFactory;
namespace webrtc {
@@ -57,13 +56,14 @@ class DtlsIdentityStore::WorkerTask : public sigslot::has_slots<>,
explicit WorkerTask(DtlsIdentityStore* store)
: signaling_thread_(rtc::Thread::Current()), store_(store) {
store_->SignalDestroyed.connect(this, &WorkerTask::OnStoreDestroyed);
- };
+ }
virtual ~WorkerTask() { DCHECK(rtc::Thread::Current() == signaling_thread_); }
- void GenerateIdentity() {
+ private:
+ void GenerateIdentity_w() {
rtc::scoped_ptr<rtc::SSLIdentity> identity(
- rtc::SSLIdentity::Generate(DtlsIdentityStore::kIdentityName));
+ rtc::SSLIdentity::Generate(DtlsIdentityStore::kIdentityName));
{
rtc::CritScope cs(&cs_);
@@ -76,27 +76,29 @@ class DtlsIdentityStore::WorkerTask : public sigslot::has_slots<>,
void OnMessage(rtc::Message* msg) override {
switch (msg->message_id) {
case MSG_GENERATE_IDENTITY:
- GenerateIdentity();
+ // This message always runs on the worker thread.
+ GenerateIdentity_w();
// Must delete |this|, owned by msg->pdata, on the signaling thread to
// avoid races on disconnecting the signal.
signaling_thread_->Post(this, MSG_DESTROY, msg->pdata);
break;
case MSG_DESTROY:
+ DCHECK(rtc::Thread::Current() == signaling_thread_);
delete msg->pdata;
+ // |this| has now been deleted. Don't touch member variables.
break;
default:
CHECK(false) << "Unexpected message type";
}
}
- private:
void OnStoreDestroyed() {
rtc::CritScope cs(&cs_);
store_ = NULL;
}
- rtc::Thread* signaling_thread_;
+ rtc::Thread* const signaling_thread_;
rtc::CriticalSection cs_;
DtlsIdentityStore* store_;
};
@@ -116,6 +118,7 @@ DtlsIdentityStore::~DtlsIdentityStore() {
}
void DtlsIdentityStore::Initialize() {
+ DCHECK(rtc::Thread::Current() == signaling_thread_);
// Do not aggressively generate the free identity if the worker thread and the
// signaling thread are the same.
if (worker_thread_ != signaling_thread_) {
@@ -139,6 +142,7 @@ void DtlsIdentityStore::RequestIdentity(DTLSIdentityRequestObserver* observer) {
}
void DtlsIdentityStore::OnMessage(rtc::Message* msg) {
+ DCHECK(rtc::Thread::Current() == signaling_thread_);
switch (msg->message_id) {
case MSG_GENERATE_IDENTITY_RESULT: {
rtc::scoped_ptr<IdentityResultMessageData> pdata(
@@ -156,10 +160,12 @@ void DtlsIdentityStore::OnMessage(rtc::Message* msg) {
}
bool DtlsIdentityStore::HasFreeIdentityForTesting() const {
- return free_identity_.get();
+ DCHECK(rtc::Thread::Current() == signaling_thread_);
+ return free_identity_.get() != nullptr;
}
void DtlsIdentityStore::GenerateIdentity() {
+ DCHECK(rtc::Thread::Current() == signaling_thread_);
pending_jobs_++;
LOG(LS_VERBOSE) << "New DTLS identity generation is posted, "
<< "pending_identities=" << pending_jobs_;
@@ -191,6 +197,7 @@ void DtlsIdentityStore::OnIdentityGenerated(
void DtlsIdentityStore::ReturnIdentity(
rtc::scoped_ptr<rtc::SSLIdentity> identity) {
+ DCHECK(rtc::Thread::Current() == signaling_thread_);
DCHECK(!free_identity_.get());
DCHECK(!pending_observers_.empty());
@@ -211,8 +218,8 @@ void DtlsIdentityStore::ReturnIdentity(
if (worker_thread_ != signaling_thread_ &&
pending_observers_.empty() &&
pending_jobs_ == 0) {
- // Generate a free identity in the background.
- GenerateIdentity();
+ // Generate a free identity in the background.
+ GenerateIdentity();
}
}
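
Most of the lines added in this file are thread-affinity assertions of the form DCHECK(rtc::Thread::Current() == signaling_thread_). The same idea expressed with only the standard library, as a minimal sketch; the class names are illustrative and unrelated to rtc::Thread.

    #include <cassert>
    #include <thread>

    // Records the owning thread once and checks it on every entry point.
    class ThreadCheckerSketch {
     public:
      ThreadCheckerSketch() : owner_(std::this_thread::get_id()) {}
      bool CalledOnValidThread() const {
        return std::this_thread::get_id() == owner_;
      }
     private:
      const std::thread::id owner_;
    };

    class IdentityStoreSketch {
     public:
      void RequestIdentity() {
        // Equivalent of DCHECK(rtc::Thread::Current() == signaling_thread_).
        assert(checker_.CalledOnValidThread());
        ++pending_jobs_;
      }
     private:
      ThreadCheckerSketch checker_;
      int pending_jobs_ = 0;
    };
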
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore.h b/chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore.h
index 1ceaa825885..b2a797462fb 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore.h
@@ -79,8 +79,8 @@ class DtlsIdentityStore : public rtc::MessageHandler {
void PostGenerateIdentityResult_w(rtc::scoped_ptr<rtc::SSLIdentity> identity);
- rtc::Thread* signaling_thread_;
- rtc::Thread* worker_thread_;
+ rtc::Thread* const signaling_thread_;
+ rtc::Thread* const worker_thread_;
// These members should be accessed on the signaling thread only.
int pending_jobs_;
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore_unittest.cc
index c0b204a85f9..12f58feea56 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/dtlsidentitystore_unittest.cc
@@ -33,7 +33,6 @@
#include "webrtc/base/ssladapter.h"
using webrtc::DtlsIdentityStore;
-using webrtc::WebRtcSessionDescriptionFactory;
static const int kTimeoutMs = 10000;
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/fakemetricsobserver.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/fakemetricsobserver.cc
new file mode 100644
index 00000000000..66e1c51f2a4
--- /dev/null
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/fakemetricsobserver.cc
@@ -0,0 +1,81 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/fakemetricsobserver.h"
+#include "webrtc/base/checks.h"
+
+namespace webrtc {
+
+FakeMetricsObserver::FakeMetricsObserver() {
+ Reset();
+}
+
+void FakeMetricsObserver::Reset() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ memset(counters_, 0, sizeof(counters_));
+ memset(int_histogram_samples_, 0, sizeof(int_histogram_samples_));
+ for (std::string& type : string_histogram_samples_) {
+ type.clear();
+ }
+}
+
+void FakeMetricsObserver::IncrementCounter(PeerConnectionMetricsCounter type) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ ++counters_[type];
+}
+
+void FakeMetricsObserver::AddHistogramSample(PeerConnectionMetricsName type,
+ int value) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_EQ(int_histogram_samples_[type], 0);
+ int_histogram_samples_[type] = value;
+}
+
+void FakeMetricsObserver::AddHistogramSample(PeerConnectionMetricsName type,
+ const std::string& value) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ string_histogram_samples_[type].assign(value);
+}
+
+int FakeMetricsObserver::GetCounter(PeerConnectionMetricsCounter type) const {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ return counters_[type];
+}
+
+int FakeMetricsObserver::GetIntHistogramSample(
+ PeerConnectionMetricsName type) const {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ return int_histogram_samples_[type];
+}
+
+const std::string& FakeMetricsObserver::GetStringHistogramSample(
+ PeerConnectionMetricsName type) const {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ return string_histogram_samples_[type];
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/fakemetricsobserver.h b/chromium/third_party/libjingle/source/talk/app/webrtc/fakemetricsobserver.h
new file mode 100644
index 00000000000..e9e49749bf1
--- /dev/null
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/fakemetricsobserver.h
@@ -0,0 +1,68 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_FAKEMETRICSOBSERVER_H_
+#define TALK_APP_WEBRTC_FAKEMETRICSOBSERVER_H_
+
+#include <map>
+#include <string>
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+#include "webrtc/base/thread_checker.h"
+
+namespace webrtc {
+
+class FakeMetricsObserver : public MetricsObserverInterface {
+ public:
+ FakeMetricsObserver();
+ void Reset();
+
+ void IncrementCounter(PeerConnectionMetricsCounter type) override;
+ void AddHistogramSample(PeerConnectionMetricsName type,
+ int value) override;
+ void AddHistogramSample(PeerConnectionMetricsName type,
+ const std::string& value) override;
+
+ // Accessors to be used by the tests.
+ int GetCounter(PeerConnectionMetricsCounter type) const;
+ int GetIntHistogramSample(PeerConnectionMetricsName type) const;
+ const std::string& GetStringHistogramSample(
+ PeerConnectionMetricsName type) const;
+
+ protected:
+ ~FakeMetricsObserver() {}
+
+ private:
+ rtc::ThreadChecker thread_checker_;
+ int counters_[kPeerConnectionMetricsCounter_Max];
+ int int_histogram_samples_[kPeerConnectionMetricsCounter_Max];
+ std::string string_histogram_samples_[kPeerConnectionMetricsName_Max];
+};
+
+} // namespace webrtc
+
+#endif // TALK_APP_WEBRTC_FAKEMETRICSOBSERVER_H_
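
A fake like the one above is normally driven by exercising the code under test and then asserting on what the fake recorded. A tiny standalone sketch of that pattern; the enum values below are placeholders, not the real PeerConnectionMetrics* enumerators.

    #include <cassert>

    enum FakeMetric { kMetricA, kMetricB, kMetricMax };

    class TinyMetricsFake {
     public:
      void IncrementCounter(FakeMetric m) { ++counters_[m]; }
      int GetCounter(FakeMetric m) const { return counters_[m]; }
     private:
      int counters_[kMetricMax] = {0};
    };

    void ExampleTest() {
      TinyMetricsFake fake;
      fake.IncrementCounter(kMetricA);  // Stand-in for the code under test.
      assert(fake.GetCounter(kMetricA) == 1);
      assert(fake.GetCounter(kMetricB) == 0);
    }
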
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/androidmediacodeccommon.h b/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/androidmediacodeccommon.h
index 23f6c527574..d9a3ebef988 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/androidmediacodeccommon.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/androidmediacodeccommon.h
@@ -92,6 +92,18 @@ static inline jobject JavaEnumFromIndex(
state_class, index);
}
+// Checks for any Java exception, prints stack backtrace and clears
+// currently thrown exception.
+static inline bool CheckException(JNIEnv* jni) {
+ if (jni->ExceptionCheck()) {
+ ALOGE("Java JNI exception.");
+ jni->ExceptionDescribe();
+ jni->ExceptionClear();
+ return true;
+ }
+ return false;
+}
+
} // namespace webrtc_jni
#endif // TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
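
A hypothetical call site for the CheckException() helper above: invoke a Java method, then bail out if it threw. This sketch assumes <jni.h> and the header shown in this hunk are on the include path and that j_class/j_method were resolved earlier; CallStaticVoidMethod is standard JNI.

    #include <jni.h>
    #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"

    void CallStaticJavaMethodSafely(JNIEnv* jni, jclass j_class, jmethodID j_method) {
      jni->CallStaticVoidMethod(j_class, j_method);
      if (webrtc_jni::CheckException(jni)) {
        // The Java call threw; the helper already logged and cleared it, so
        // native code can recover here instead of crashing on the next JNI call.
        return;
      }
      // Safe to keep using |jni| from this point on.
    }
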
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/classreferenceholder.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/classreferenceholder.cc
index 7ff48b972c5..2c4f1e6358b 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/classreferenceholder.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/classreferenceholder.cc
@@ -73,17 +73,18 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
LoadClass(jni, "android/graphics/SurfaceTexture");
LoadClass(jni, "org/webrtc/VideoCapturerAndroid");
LoadClass(jni, "org/webrtc/VideoCapturerAndroid$NativeObserver");
+ LoadClass(jni, "org/webrtc/EglBase");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
- jclass j_decoder_class = GetClass("org/webrtc/MediaCodecVideoDecoder");
+ jclass j_egl_base_class = GetClass("org/webrtc/EglBase");
jmethodID j_is_egl14_supported_method = jni->GetStaticMethodID(
- j_decoder_class, "isEGL14Supported", "()Z");
+ j_egl_base_class, "isEGL14Supported", "()Z");
bool is_egl14_supported = jni->CallStaticBooleanMethod(
- j_decoder_class, j_is_egl14_supported_method);
+ j_egl_base_class, j_is_egl14_supported_method);
CHECK_EXCEPTION(jni);
if (is_egl14_supported) {
LoadClass(jni, "android/opengl/EGLContext");
@@ -93,6 +94,7 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
LoadClass(jni, "org/webrtc/MediaStream");
LoadClass(jni, "org/webrtc/MediaStreamTrack$State");
LoadClass(jni, "org/webrtc/PeerConnection$BundlePolicy");
+ LoadClass(jni, "org/webrtc/PeerConnection$RtcpMuxPolicy");
LoadClass(jni, "org/webrtc/PeerConnection$IceConnectionState");
LoadClass(jni, "org/webrtc/PeerConnection$IceGatheringState");
LoadClass(jni, "org/webrtc/PeerConnection$IceTransportsType");
@@ -143,4 +145,3 @@ jclass FindClass(JNIEnv* jni, const char* name) {
}
} // namespace webrtc_jni
-
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/native_handle_impl.h b/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/native_handle_impl.h
index 8c876961187..cdb72ff4d59 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/native_handle_impl.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/java/jni/native_handle_impl.h
@@ -29,6 +29,9 @@
#ifndef TALK_APP_WEBRTC_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
#define TALK_APP_WEBRTC_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
+#include "webrtc/base/checks.h"
+#include "webrtc/common_video/interface/video_frame_buffer.h"
+
namespace webrtc_jni {
// Wrapper for texture object.
@@ -52,6 +55,23 @@ class NativeHandleImpl {
int32_t texture_id_;
};
+class JniNativeHandleBuffer : public webrtc::NativeHandleBuffer {
+ public:
+ JniNativeHandleBuffer(void* native_handle, int width, int height)
+ : NativeHandleBuffer(native_handle, width, height) {}
+
+ // TODO(pbos): Override destructor to release native handle, at the moment the
+ // native handle is not released based on refcount.
+
+ private:
+ rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override {
+ // TODO(pbos): Implement before using this in the encoder pipeline (or
+ // remove the CHECK() in VideoCapture).
+ RTC_NOTREACHED();
+ return nullptr;
+ }
+};
+
} // namespace webrtc_jni
#endif // TALK_APP_WEBRTC_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/jsepsessiondescription.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/jsepsessiondescription.cc
index 60467ba620a..697f332c27c 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/jsepsessiondescription.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/jsepsessiondescription.cc
@@ -62,10 +62,11 @@ const int JsepSessionDescription::kDefaultVideoCodecId = 100;
const int JsepSessionDescription::kDefaultVideoCodecFramerate = 60;
const char JsepSessionDescription::kDefaultVideoCodecName[] = "VP8";
// Used as default max video codec size before we have it in signaling.
-#if defined(ANDROID)
+#if defined(ANDROID) || defined(WEBRTC_IOS)
// Limit default max video codec size for Android to avoid
// HW VP8 codec initialization failure for resolutions higher
// than 1280x720 or 720x1280.
+// Same patch for iOS to support 720P in portrait mode.
const int JsepSessionDescription::kMaxVideoCodecWidth = 1280;
const int JsepSessionDescription::kMaxVideoCodecHeight = 1280;
#else
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/localaudiosource.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/localaudiosource.cc
index b37d1e3e41f..63c6f13a3d7 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/localaudiosource.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/localaudiosource.cc
@@ -43,43 +43,43 @@ namespace {
// invalid.
void FromConstraints(const MediaConstraintsInterface::Constraints& constraints,
cricket::AudioOptions* options) {
- MediaConstraintsInterface::Constraints::const_iterator iter;
-
// This design relies on the fact that all the audio constraints are actually
// "options", i.e. boolean-valued and always satisfiable. If the constraints
// are extended to include non-boolean values or actual format constraints,
// a different algorithm will be required.
- for (iter = constraints.begin(); iter != constraints.end(); ++iter) {
+ struct {
+ const char* name;
+ cricket::Settable<bool>& value;
+ } key_to_value[] = {
+ {MediaConstraintsInterface::kGoogEchoCancellation,
+ options->echo_cancellation},
+ {MediaConstraintsInterface::kExtendedFilterEchoCancellation,
+ options->extended_filter_aec},
+ {MediaConstraintsInterface::kDAEchoCancellation,
+ options->delay_agnostic_aec},
+ {MediaConstraintsInterface::kAutoGainControl, options->auto_gain_control},
+ {MediaConstraintsInterface::kExperimentalAutoGainControl,
+ options->experimental_agc},
+ {MediaConstraintsInterface::kNoiseSuppression,
+ options->noise_suppression},
+ {MediaConstraintsInterface::kExperimentalNoiseSuppression,
+ options->experimental_ns},
+ {MediaConstraintsInterface::kHighpassFilter, options->highpass_filter},
+ {MediaConstraintsInterface::kTypingNoiseDetection,
+ options->typing_detection},
+ {MediaConstraintsInterface::kAudioMirroring, options->stereo_swapping},
+ {MediaConstraintsInterface::kAecDump, options->aec_dump}
+ };
+
+ for (const auto& constraint : constraints) {
bool value = false;
-
- if (!rtc::FromString(iter->value, &value))
+ if (!rtc::FromString(constraint.value, &value))
continue;
- if (iter->key == MediaConstraintsInterface::kEchoCancellation)
- options->echo_cancellation.Set(value);
- else if (iter->key ==
- MediaConstraintsInterface::kExperimentalEchoCancellation)
- options->experimental_aec.Set(value);
- else if (iter->key == MediaConstraintsInterface::kDAEchoCancellation)
- options->delay_agnostic_aec.Set(value);
- else if (iter->key == MediaConstraintsInterface::kAutoGainControl)
- options->auto_gain_control.Set(value);
- else if (iter->key ==
- MediaConstraintsInterface::kExperimentalAutoGainControl)
- options->experimental_agc.Set(value);
- else if (iter->key == MediaConstraintsInterface::kNoiseSuppression)
- options->noise_suppression.Set(value);
- else if (iter->key ==
- MediaConstraintsInterface::kExperimentalNoiseSuppression)
- options->experimental_ns.Set(value);
- else if (iter->key == MediaConstraintsInterface::kHighpassFilter)
- options->highpass_filter.Set(value);
- else if (iter->key == MediaConstraintsInterface::kTypingNoiseDetection)
- options->typing_detection.Set(value);
- else if (iter->key == MediaConstraintsInterface::kAudioMirroring)
- options->stereo_swapping.Set(value);
- else if (iter->key == MediaConstraintsInterface::kAecDump)
- options->aec_dump.Set(value);
+ for (auto& entry : key_to_value) {
+ if (constraint.key.compare(entry.name) == 0)
+ entry.value.Set(value);
+ }
}
}
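
The refactor above replaces a long if/else-if chain with a table that pairs each constraint key with a reference to the option it sets. A simplified standalone sketch of the same technique; the Options fields and the parsing below are stand-ins for cricket::AudioOptions and rtc::FromString.

    #include <string>
    #include <vector>

    struct Options {
      bool echo_cancellation = false;
      bool noise_suppression = false;
    };

    struct Constraint {
      std::string key;
      std::string value;
    };

    void FromConstraintsSketch(const std::vector<Constraint>& constraints,
                               Options* options) {
      // Each entry maps a constraint key to the option it controls.
      struct {
        const char* name;
        bool& value;
      } key_to_value[] = {
          {"googEchoCancellation", options->echo_cancellation},
          {"googNoiseSuppression", options->noise_suppression},
      };
      for (const Constraint& constraint : constraints) {
        const bool value = (constraint.value == "true");  // crude FromString()
        for (auto& entry : key_to_value) {
          if (constraint.key == entry.name) entry.value = value;
        }
      }
    }
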
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/localaudiosource_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/localaudiosource_unittest.cc
index 569f814a52c..62620259b60 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/localaudiosource_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/localaudiosource_unittest.cc
@@ -43,9 +43,10 @@ using webrtc::PeerConnectionFactoryInterface;
TEST(LocalAudioSourceTest, SetValidOptions) {
webrtc::FakeConstraints constraints;
- constraints.AddMandatory(MediaConstraintsInterface::kEchoCancellation, false);
+ constraints.AddMandatory(
+ MediaConstraintsInterface::kGoogEchoCancellation, false);
constraints.AddOptional(
- MediaConstraintsInterface::kExperimentalEchoCancellation, true);
+ MediaConstraintsInterface::kExtendedFilterEchoCancellation, true);
constraints.AddOptional(MediaConstraintsInterface::kDAEchoCancellation, true);
constraints.AddOptional(MediaConstraintsInterface::kAutoGainControl, true);
constraints.AddOptional(
@@ -61,7 +62,7 @@ TEST(LocalAudioSourceTest, SetValidOptions) {
bool value;
EXPECT_TRUE(source->options().echo_cancellation.Get(&value));
EXPECT_FALSE(value);
- EXPECT_TRUE(source->options().experimental_aec.Get(&value));
+ EXPECT_TRUE(source->options().extended_filter_aec.Get(&value));
EXPECT_TRUE(value);
EXPECT_TRUE(source->options().delay_agnostic_aec.Get(&value));
EXPECT_TRUE(value);
@@ -88,8 +89,10 @@ TEST(LocalAudioSourceTest, OptionNotSet) {
TEST(LocalAudioSourceTest, MandatoryOverridesOptional) {
webrtc::FakeConstraints constraints;
- constraints.AddMandatory(MediaConstraintsInterface::kEchoCancellation, false);
- constraints.AddOptional(MediaConstraintsInterface::kEchoCancellation, true);
+ constraints.AddMandatory(
+ MediaConstraintsInterface::kGoogEchoCancellation, false);
+ constraints.AddOptional(
+ MediaConstraintsInterface::kGoogEchoCancellation, true);
rtc::scoped_refptr<LocalAudioSource> source =
LocalAudioSource::Create(PeerConnectionFactoryInterface::Options(),
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.cc
index a84dde27b38..66a2ad2cba3 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.cc
@@ -47,8 +47,10 @@ const char MediaConstraintsInterface::kMinFrameRate[] = "minFrameRate";
// Audio constraints.
const char MediaConstraintsInterface::kEchoCancellation[] =
+ "echoCancellation";
+const char MediaConstraintsInterface::kGoogEchoCancellation[] =
"googEchoCancellation";
-const char MediaConstraintsInterface::kExperimentalEchoCancellation[] =
+const char MediaConstraintsInterface::kExtendedFilterEchoCancellation[] =
"googEchoCancellation2";
const char MediaConstraintsInterface::kDAEchoCancellation[] =
"googDAEchoCancellation";
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h b/chromium/third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h
index cc682097f76..777c3cc5526 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h
@@ -72,9 +72,12 @@ class MediaConstraintsInterface {
static const char kMinFrameRate[]; // minFrameRate
// Constraint keys used by a local audio source.
+ static const char kEchoCancellation[]; // echoCancellation
+
// These keys are google specific.
- static const char kEchoCancellation[]; // googEchoCancellation
- static const char kExperimentalEchoCancellation[]; // googEchoCancellation2
+ static const char kGoogEchoCancellation[]; // googEchoCancellation
+
+ static const char kExtendedFilterEchoCancellation[]; // googEchoCancellation2
static const char kDAEchoCancellation[]; // googDAEchoCancellation
static const char kAutoGainControl[]; // googAutoGainControl
static const char kExperimentalAutoGainControl[]; // googAutoGainControl2
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/.clang-format b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/.clang-format
new file mode 100644
index 00000000000..34694e57dcf
--- /dev/null
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/.clang-format
@@ -0,0 +1,10 @@
+BasedOnStyle: Chromium
+ColumnLimit: 100
+BinPackParameters: false
+AllowAllParametersOfDeclarationOnNextLine: true
+DerivePointerAlignment: false
+PointerAlignment: Right
+SpacesBeforeTrailingComments: 1
+ObjCBlockIndentWidth: 2
+ObjCSpaceAfterProperty: false
+ObjCSpaceBeforeProtocolList: true
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCDataChannel.mm b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCDataChannel.mm
index 94d22d25952..8a9b6b6095a 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCDataChannel.mm
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCDataChannel.mm
@@ -43,6 +43,15 @@ class RTCDataChannelObserver : public DataChannelObserver {
[_channel.delegate channelDidChangeState:_channel];
}
+ void OnBufferedAmountChange(uint64 previousAmount) override {
+ RTCDataChannel* channel = _channel;
+ id<RTCDataChannelDelegate> delegate = channel.delegate;
+ if ([delegate
+ respondsToSelector:@selector(channel:didChangeBufferedAmount:)]) {
+ [delegate channel:channel didChangeBufferedAmount:previousAmount];
+ }
+ }
+
void OnMessage(const DataBuffer& buffer) override {
if (!_channel.delegate) {
return;
@@ -57,7 +66,8 @@ class RTCDataChannelObserver : public DataChannelObserver {
};
}
-// TODO(tkchin): move to shared location
+// TODO(henrika): move to shared location.
+// See https://code.google.com/p/webrtc/issues/detail?id=4773 for details.
NSString* NSStringFromStdString(const std::string& stdString) {
// std::string may contain null termination character so we construct
// using length.
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCEnumConverter.h b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCEnumConverter.h
index 42ea45bdcfa..eb06c186453 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCEnumConverter.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCEnumConverter.h
@@ -25,14 +25,16 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#import <Foundation/Foundation.h>
-
+// TODO(tkchin): remove this in favor of having objc headers mirror their C++ counterparts.
+// TODO(tkchin): see if we can move C++ enums into their own file so we can avoid all this
+// conversion code.
#import "RTCTypes.h"
-#include "talk/app/webrtc/peerconnectioninterface.h"
+#import "talk/app/webrtc/objc/RTCPeerConnectionInterface+Internal.h"
@interface RTCEnumConverter : NSObject
+// TODO(tkchin): rename these.
+ (RTCICEConnectionState)convertIceConnectionStateToObjC:
(webrtc::PeerConnectionInterface::IceConnectionState)nativeState;
@@ -54,4 +56,28 @@
+ (RTCTrackState)convertTrackStateToObjC:
(webrtc::MediaStreamTrackInterface::TrackState)nativeState;
++ (RTCIceTransportsType)iceTransportsTypeForNativeEnum:
+ (webrtc::PeerConnectionInterface::IceTransportsType)nativeEnum;
+
++ (webrtc::PeerConnectionInterface::IceTransportsType)nativeEnumForIceTransportsType:
+ (RTCIceTransportsType)iceTransportsType;
+
++ (RTCBundlePolicy)bundlePolicyForNativeEnum:
+ (webrtc::PeerConnectionInterface::BundlePolicy)nativeEnum;
+
++ (webrtc::PeerConnectionInterface::BundlePolicy)nativeEnumForBundlePolicy:
+ (RTCBundlePolicy)bundlePolicy;
+
++ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativeEnum:
+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeEnum;
+
++ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeEnumForRtcpMuxPolicy:
+ (RTCRtcpMuxPolicy)rtcpMuxPolicy;
+
++ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativeEnum:
+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeEnum;
+
++ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeEnumForTcpCandidatePolicy:
+ (RTCTcpCandidatePolicy)tcpCandidatePolicy;
+
@end
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCEnumConverter.mm b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCEnumConverter.mm
index f893fc871f1..6254b95e807 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCEnumConverter.mm
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCEnumConverter.mm
@@ -133,4 +133,96 @@
}
}
++ (RTCIceTransportsType)iceTransportsTypeForNativeEnum:
+ (webrtc::PeerConnectionInterface::IceTransportsType)nativeEnum {
+ switch (nativeEnum) {
+ case webrtc::PeerConnectionInterface::kNone:
+ return kRTCIceTransportsTypeNone;
+ case webrtc::PeerConnectionInterface::kRelay:
+ return kRTCIceTransportsTypeRelay;
+ case webrtc::PeerConnectionInterface::kNoHost:
+ return kRTCIceTransportsTypeNoHost;
+ case webrtc::PeerConnectionInterface::kAll:
+ return kRTCIceTransportsTypeAll;
+ }
+}
+
++ (webrtc::PeerConnectionInterface::IceTransportsType)nativeEnumForIceTransportsType:
+ (RTCIceTransportsType)iceTransportsType {
+ switch (iceTransportsType) {
+ case kRTCIceTransportsTypeNone:
+ return webrtc::PeerConnectionInterface::kNone;
+ case kRTCIceTransportsTypeRelay:
+ return webrtc::PeerConnectionInterface::kRelay;
+ case kRTCIceTransportsTypeNoHost:
+ return webrtc::PeerConnectionInterface::kNoHost;
+ case kRTCIceTransportsTypeAll:
+ return webrtc::PeerConnectionInterface::kAll;
+ }
+}
+
++ (RTCBundlePolicy)bundlePolicyForNativeEnum:
+ (webrtc::PeerConnectionInterface::BundlePolicy)nativeEnum {
+ switch (nativeEnum) {
+ case webrtc::PeerConnectionInterface::kBundlePolicyBalanced:
+ return kRTCBundlePolicyBalanced;
+ case webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle:
+ return kRTCBundlePolicyMaxBundle;
+ case webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat:
+ return kRTCBundlePolicyMaxCompat;
+ }
+}
+
++ (webrtc::PeerConnectionInterface::BundlePolicy)nativeEnumForBundlePolicy:
+ (RTCBundlePolicy)bundlePolicy {
+ switch (bundlePolicy) {
+ case kRTCBundlePolicyBalanced:
+ return webrtc::PeerConnectionInterface::kBundlePolicyBalanced;
+ case kRTCBundlePolicyMaxBundle:
+ return webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle;
+ case kRTCBundlePolicyMaxCompat:
+ return webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat;
+ }
+}
+
++ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativeEnum:
+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeEnum {
+ switch (nativeEnum) {
+ case webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate:
+ return kRTCRtcpMuxPolicyNegotiate;
+ case webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire:
+ return kRTCRtcpMuxPolicyRequire;
+ }
+}
+
++ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeEnumForRtcpMuxPolicy:
+ (RTCRtcpMuxPolicy)rtcpMuxPolicy {
+ switch (rtcpMuxPolicy) {
+ case kRTCRtcpMuxPolicyNegotiate:
+ return webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+ case kRTCRtcpMuxPolicyRequire:
+ return webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire;
+ }
+}
+
++ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativeEnum:
+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeEnum {
+ switch (nativeEnum) {
+ case webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled:
+ return kRTCTcpCandidatePolicyEnabled;
+ case webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled:
+ return kRTCTcpCandidatePolicyDisabled;
+ }
+}
+
++ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeEnumForTcpCandidatePolicy:
+ (RTCTcpCandidatePolicy)tcpCandidatePolicy {
+ switch (tcpCandidatePolicy) {
+ case kRTCTcpCandidatePolicyEnabled:
+ return webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+ case kRTCTcpCandidatePolicyDisabled:
+ return webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+ }
+}
+
@end
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnection+Internal.h b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnection+Internal.h
index 8d7201f811f..96d63ab4121 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnection+Internal.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnection+Internal.h
@@ -36,8 +36,13 @@
@property(nonatomic, assign, readonly)
rtc::scoped_refptr<webrtc::PeerConnectionInterface> peerConnection;
-- (instancetype)initWithFactory:(webrtc::PeerConnectionFactoryInterface*)factory
- iceServers:(const webrtc::PeerConnectionInterface::IceServers&)iceServers
- constraints:(const webrtc::MediaConstraintsInterface*)constraints;
+- (instancetype)initWithFactory:(webrtc::PeerConnectionFactoryInterface *)factory
+ iceServers:(const webrtc::PeerConnectionInterface::IceServers &)iceServers
+ constraints:(const webrtc::MediaConstraintsInterface *)constraints;
+
+- (instancetype)initWithFactory:(webrtc::PeerConnectionFactoryInterface *)factory
+ config:(const webrtc::PeerConnectionInterface::RTCConfiguration &)config
+ constraints:(const webrtc::MediaConstraintsInterface *)constraints
+ delegate:(id<RTCPeerConnectionDelegate>)delegate;
@end
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnection.mm b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnection.mm
index 769219e1eda..0d30acc0eb2 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnection.mm
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnection.mm
@@ -285,6 +285,21 @@ class RTCStatsObserver : public StatsObserver {
return self;
}
+- (instancetype)initWithFactory:(webrtc::PeerConnectionFactoryInterface *)factory
+ config:(const webrtc::PeerConnectionInterface::RTCConfiguration &)config
+ constraints:(const webrtc::MediaConstraintsInterface *)constraints
+ delegate:(id<RTCPeerConnectionDelegate>)delegate {
+ NSParameterAssert(factory);
+ if (self = [super init]) {
+ _observer.reset(new webrtc::RTCPeerConnectionObserver(self));
+ _peerConnection =
+ factory->CreatePeerConnection(config, constraints, nullptr, nullptr, _observer.get());
+ _localStreams = [[NSMutableArray alloc] init];
+ _delegate = delegate;
+ }
+ return self;
+}
+
- (rtc::scoped_refptr<webrtc::PeerConnectionInterface>)peerConnection {
return _peerConnection;
}
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm
index a4a12addd9f..b7b89662394 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnectionFactory.mm
@@ -41,6 +41,7 @@
#import "RTCMediaStreamTrack+Internal.h"
#import "RTCPeerConnection+Internal.h"
#import "RTCPeerConnectionDelegate.h"
+#import "RTCPeerConnectionInterface+Internal.h"
#import "RTCVideoCapturer+Internal.h"
#import "RTCVideoSource+Internal.h"
#import "RTCVideoTrack+Internal.h"
@@ -53,7 +54,6 @@
#include "webrtc/base/logging.h"
#include "webrtc/base/ssladapter.h"
-
@implementation RTCPeerConnectionFactory {
rtc::scoped_ptr<rtc::Thread> _signalingThread;
rtc::scoped_ptr<rtc::Thread> _workerThread;
@@ -79,8 +79,9 @@
_workerThread.reset(new rtc::Thread());
result = _workerThread->Start();
NSAssert(result, @"Failed to start worker thread.");
+
_nativeFactory = webrtc::CreatePeerConnectionFactory(
- _signalingThread.get(), _workerThread.get(), NULL, NULL, NULL);
+ _signalingThread.get(), _workerThread.get(), nullptr, nullptr, nullptr);
NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
// Uncomment to get sensitive logs emitted (to stderr or logcat).
// rtc::LogMessage::LogToDebug(rtc::LS_SENSITIVE);
@@ -88,6 +89,15 @@
return self;
}
+- (RTCPeerConnection *)peerConnectionWithConfiguration:(RTCConfiguration *)configuration
+ constraints:(RTCMediaConstraints *)constraints
+ delegate:(id<RTCPeerConnectionDelegate>)delegate {
+ return [[RTCPeerConnection alloc] initWithFactory:self.nativeFactory.get()
+ config:configuration.nativeConfiguration
+ constraints:constraints.constraints
+ delegate:delegate];
+}
+
- (RTCPeerConnection*)
peerConnectionWithICEServers:(NSArray*)servers
constraints:(RTCMediaConstraints*)constraints
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnectionInterface+Internal.h b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnectionInterface+Internal.h
new file mode 100644
index 00000000000..5e8dbbf604e
--- /dev/null
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnectionInterface+Internal.h
@@ -0,0 +1,37 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h"
+
+#include "talk/app/webrtc/peerconnectioninterface.h"
+
+@interface RTCConfiguration ()
+
+@property(nonatomic, readonly)
+ webrtc::PeerConnectionInterface::RTCConfiguration nativeConfiguration;
+
+@end
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnectionInterface.mm b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnectionInterface.mm
new file mode 100644
index 00000000000..32298c8d725
--- /dev/null
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/RTCPeerConnectionInterface.mm
@@ -0,0 +1,87 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "talk/app/webrtc/objc/RTCPeerConnectionInterface+Internal.h"
+
+#import "talk/app/webrtc/objc/RTCEnumConverter.h"
+#import "talk/app/webrtc/objc/RTCICEServer+Internal.h"
+
+@implementation RTCConfiguration
+
+@synthesize iceTransportsType = _iceTransportsType;
+@synthesize iceServers = _iceServers;
+@synthesize bundlePolicy = _bundlePolicy;
+@synthesize rtcpMuxPolicy = _rtcpMuxPolicy;
+@synthesize tcpCandidatePolicy = _tcpCandidatePolicy;
+@synthesize audioJitterBufferMaxPackets = _audioJitterBufferMaxPackets;
+
+- (instancetype)init {
+ if (self = [super init]) {
+ // Copy defaults.
+ webrtc::PeerConnectionInterface::RTCConfiguration config;
+ _iceTransportsType = [RTCEnumConverter iceTransportsTypeForNativeEnum:config.type];
+ _bundlePolicy = [RTCEnumConverter bundlePolicyForNativeEnum:config.bundle_policy];
+ _rtcpMuxPolicy = [RTCEnumConverter rtcpMuxPolicyForNativeEnum:config.rtcp_mux_policy];
+ _tcpCandidatePolicy =
+ [RTCEnumConverter tcpCandidatePolicyForNativeEnum:config.tcp_candidate_policy];
+ _audioJitterBufferMaxPackets = config.audio_jitter_buffer_max_packets;
+ }
+ return self;
+}
+
+- (instancetype)initWithIceTransportsType:(RTCIceTransportsType)iceTransportsType
+ bundlePolicy:(RTCBundlePolicy)bundlePolicy
+ rtcpMuxPolicy:(RTCRtcpMuxPolicy)rtcpMuxPolicy
+ tcpCandidatePolicy:(RTCTcpCandidatePolicy)tcpCandidatePolicy
+ audioJitterBufferMaxPackets:(int)audioJitterBufferMaxPackets {
+ if (self = [super init]) {
+ _iceTransportsType = iceTransportsType;
+ _bundlePolicy = bundlePolicy;
+ _rtcpMuxPolicy = rtcpMuxPolicy;
+ _tcpCandidatePolicy = tcpCandidatePolicy;
+ _audioJitterBufferMaxPackets = audioJitterBufferMaxPackets;
+ }
+ return self;
+}
+
+#pragma mark - Private
+
+- (webrtc::PeerConnectionInterface::RTCConfiguration)nativeConfiguration {
+ webrtc::PeerConnectionInterface::RTCConfiguration nativeConfig;
+ nativeConfig.type = [RTCEnumConverter nativeEnumForIceTransportsType:_iceTransportsType];
+ for (RTCICEServer *iceServer : _iceServers) {
+ nativeConfig.servers.push_back(iceServer.iceServer);
+ }
+ nativeConfig.bundle_policy = [RTCEnumConverter nativeEnumForBundlePolicy:_bundlePolicy];
+ nativeConfig.rtcp_mux_policy = [RTCEnumConverter nativeEnumForRtcpMuxPolicy:_rtcpMuxPolicy];
+ nativeConfig.tcp_candidate_policy =
+ [RTCEnumConverter nativeEnumForTcpCandidatePolicy:_tcpCandidatePolicy];
+ nativeConfig.audio_jitter_buffer_max_packets = _audioJitterBufferMaxPackets;
+ return nativeConfig;
+}
+
+@end
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/avfoundationvideocapturer.mm b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/avfoundationvideocapturer.mm
index 9f5361a60b7..d68fdff79a0 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/avfoundationvideocapturer.mm
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/avfoundationvideocapturer.mm
@@ -142,6 +142,7 @@ static dispatch_queue_t kBackgroundQueue = nil;
if (!_isRunning) {
return;
}
+ [_videoOutput setSampleBufferDelegate:nil queue:nullptr];
AVCaptureSession* session = _captureSession;
dispatch_async(kBackgroundQueue, ^{
[session stopRunning];
@@ -262,10 +263,11 @@ static dispatch_queue_t kBackgroundQueue = nil;
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
switch ([UIDevice currentDevice].orientation) {
case UIDeviceOrientationPortrait:
- orientation = AVCaptureVideoOrientationPortraitUpsideDown;
- case UIDeviceOrientationPortraitUpsideDown:
orientation = AVCaptureVideoOrientationPortrait;
break;
+ case UIDeviceOrientationPortraitUpsideDown:
+ orientation = AVCaptureVideoOrientationPortraitUpsideDown;
+ break;
case UIDeviceOrientationLandscapeLeft:
orientation = AVCaptureVideoOrientationLandscapeRight;
break;
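
The hunk above fixes an accidental switch fall-through: the Portrait case assigned the upside-down orientation and then, lacking a break, fell into the next case. A minimal illustration of the corrected shape in plain C++; the enum names are stand-ins for the UIKit/AVFoundation constants.

    enum DeviceOrientation { kPortrait, kPortraitUpsideDown, kLandscapeLeft };
    enum CaptureOrientation { kCapPortrait, kCapPortraitUpsideDown, kCapLandscapeRight };

    CaptureOrientation MapOrientation(DeviceOrientation o) {
      switch (o) {
        case kPortrait:
          return kCapPortrait;            // previously fell through to the next case
        case kPortraitUpsideDown:
          return kCapPortraitUpsideDown;  // each case now terminates explicitly
        case kLandscapeLeft:
          return kCapLandscapeRight;
      }
      return kCapPortrait;
    }
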
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCDataChannel.h b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCDataChannel.h
index 7c225802455..24a46f655c8 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCDataChannel.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCDataChannel.h
@@ -82,6 +82,12 @@ typedef enum {
- (void)channel:(RTCDataChannel*)channel
didReceiveMessageWithBuffer:(RTCDataBuffer*)buffer;
+@optional
+
+// Called when the buffered amount has changed.
+- (void)channel:(RTCDataChannel*)channel
+ didChangeBufferedAmount:(NSUInteger)amount;
+
@end
// ObjectiveC wrapper for a DataChannel object.
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCPeerConnectionFactory.h b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCPeerConnectionFactory.h
index f0b2e3ad991..e1e69b4b7f1 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCPeerConnectionFactory.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCPeerConnectionFactory.h
@@ -28,6 +28,7 @@
#import <Foundation/Foundation.h>
@class RTCAudioTrack;
+@class RTCConfiguration;
@class RTCMediaConstraints;
@class RTCMediaStream;
@class RTCPeerConnection;
@@ -44,7 +45,7 @@
+ (void)initializeSSL;
+ (void)deinitializeSSL;
-// Create an RTCPeerConnection object. RTCPeerConnectionFactory will create
+// Create an RTCPeerConnection object. RTCPeerConnectionFactory will create
// required libjingle threads, socket and network manager factory classes for
// networking.
- (RTCPeerConnection *)
@@ -52,6 +53,11 @@
constraints:(RTCMediaConstraints *)constraints
delegate:(id<RTCPeerConnectionDelegate>)delegate;
+// Creates a peer connection using the default port allocator factory and identity service.
+- (RTCPeerConnection *)peerConnectionWithConfiguration:(RTCConfiguration *)configuration
+ constraints:(RTCMediaConstraints *)constraints
+ delegate:(id<RTCPeerConnectionDelegate>)delegate;
+
// Create an RTCMediaStream named |label|.
- (RTCMediaStream *)mediaStreamWithLabel:(NSString *)label;
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h
new file mode 100644
index 00000000000..fd7af620d9d
--- /dev/null
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objc/public/RTCPeerConnectionInterface.h
@@ -0,0 +1,73 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// See talk/app/webrtc/peerconnectioninterface.h.
+
+#import <Foundation/Foundation.h>
+
+typedef NS_ENUM(NSInteger, RTCIceTransportsType) {
+ kRTCIceTransportsTypeNone,
+ kRTCIceTransportsTypeRelay,
+ kRTCIceTransportsTypeNoHost,
+ kRTCIceTransportsTypeAll,
+};
+
+// https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-08#section-4.1.1
+typedef NS_ENUM(NSInteger, RTCBundlePolicy) {
+ kRTCBundlePolicyBalanced,
+ kRTCBundlePolicyMaxBundle,
+ kRTCBundlePolicyMaxCompat,
+};
+
+// https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-09#section-4.1.1
+typedef NS_ENUM(NSInteger, RTCRtcpMuxPolicy) {
+ kRTCRtcpMuxPolicyNegotiate,
+ kRTCRtcpMuxPolicyRequire,
+};
+
+typedef NS_ENUM(NSInteger, RTCTcpCandidatePolicy) {
+ kRTCTcpCandidatePolicyEnabled,
+ kRTCTcpCandidatePolicyDisabled,
+};
+
+// Configuration object used for creating a peer connection.
+@interface RTCConfiguration : NSObject
+
+@property(nonatomic, assign) RTCIceTransportsType iceTransportsType;
+@property(nonatomic, copy) NSArray *iceServers;
+@property(nonatomic, assign) RTCBundlePolicy bundlePolicy;
+@property(nonatomic, assign) RTCRtcpMuxPolicy rtcpMuxPolicy;
+@property(nonatomic, assign) RTCTcpCandidatePolicy tcpCandidatePolicy;
+@property(nonatomic, assign) int audioJitterBufferMaxPackets;
+
+- (instancetype)initWithIceTransportsType:(RTCIceTransportsType)iceTransportsType
+ bundlePolicy:(RTCBundlePolicy)bundlePolicy
+ rtcpMuxPolicy:(RTCRtcpMuxPolicy)rtcpMuxPolicy
+ tcpCandidatePolicy:(RTCTcpCandidatePolicy)tcpCandidatePolicy
+ audioJitterBufferMaxPackets:(int)audioJitterBufferMaxPackets;
+
+@end
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.m b/chromium/third_party/libjingle/source/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.m
index 5c766724142..5070b789b89 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.m
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/objctests/RTCPeerConnectionSyncObserver.m
@@ -231,6 +231,12 @@
}
- (void)channel:(RTCDataChannel*)channel
+ didChangeBufferedAmount:(NSUInteger)previousAmount {
+ NSAssert(channel.bufferedAmount != previousAmount,
+ @"Invalid bufferedAmount change");
+}
+
+- (void)channel:(RTCDataChannel*)channel
didReceiveMessageWithBuffer:(RTCDataBuffer*)buffer {
NSAssert([_expectedMessages count] > 0,
@"Unexpected message received");
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection.cc
index 52260413f7c..0a243077bcf 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection.cc
@@ -166,9 +166,10 @@ typedef webrtc::PortAllocatorFactoryInterface::StunConfiguration
typedef webrtc::PortAllocatorFactoryInterface::TurnConfiguration
TurnConfiguration;
-bool ParseIceServers(const PeerConnectionInterface::IceServers& configuration,
- std::vector<StunConfiguration>* stun_config,
- std::vector<TurnConfiguration>* turn_config) {
+bool ParseIceServerUrl(const PeerConnectionInterface::IceServer& server,
+ const std::string& url,
+ std::vector<StunConfiguration>* stun_config,
+ std::vector<TurnConfiguration>* turn_config) {
// draft-nandakumar-rtcweb-stun-uri-01
// stunURI = scheme ":" stun-host [ ":" stun-port ]
// scheme = "stun" / "stuns"
@@ -183,103 +184,124 @@ bool ParseIceServers(const PeerConnectionInterface::IceServers& configuration,
// transport-ext = 1*unreserved
// turn-host = IP-literal / IPv4address / reg-name
// turn-port = *DIGIT
- for (size_t i = 0; i < configuration.size(); ++i) {
- webrtc::PeerConnectionInterface::IceServer server = configuration[i];
- if (server.uri.empty()) {
- LOG(WARNING) << "Empty uri.";
- continue;
- }
- std::vector<std::string> tokens;
- std::string turn_transport_type = kUdpTransportType;
- rtc::tokenize(server.uri, '?', &tokens);
- std::string uri_without_transport = tokens[0];
- // Let's look into transport= param, if it exists.
- if (tokens.size() == kTurnTransportTokensNum) { // ?transport= is present.
- std::string uri_transport_param = tokens[1];
- rtc::tokenize(uri_transport_param, '=', &tokens);
- if (tokens[0] == kTransport) {
- // As per above grammar transport param will be consist of lower case
- // letters.
- if (tokens[1] != kUdpTransportType && tokens[1] != kTcpTransportType) {
- LOG(LS_WARNING) << "Transport param should always be udp or tcp.";
- continue;
- }
- turn_transport_type = tokens[1];
+ std::vector<std::string> tokens;
+ std::string turn_transport_type = kUdpTransportType;
+ ASSERT(!url.empty());
+ rtc::tokenize(url, '?', &tokens);
+ std::string uri_without_transport = tokens[0];
+ // Let's look into transport= param, if it exists.
+ if (tokens.size() == kTurnTransportTokensNum) { // ?transport= is present.
+ std::string uri_transport_param = tokens[1];
+ rtc::tokenize(uri_transport_param, '=', &tokens);
+ if (tokens[0] == kTransport) {
+ // As per the above grammar, the transport param will consist of lower case
+ // letters.
+ if (tokens[1] != kUdpTransportType && tokens[1] != kTcpTransportType) {
+ LOG(LS_WARNING) << "Transport param should always be udp or tcp.";
+ return true;
}
+ turn_transport_type = tokens[1];
}
+ }
- std::string hoststring;
- ServiceType service_type = INVALID;
- if (!GetServiceTypeAndHostnameFromUri(uri_without_transport,
- &service_type,
- &hoststring)) {
- LOG(LS_WARNING) << "Invalid transport parameter in ICE URI: "
- << uri_without_transport;
- continue;
- }
-
- ASSERT(!hoststring.empty());
-
- // Let's break hostname.
- tokens.clear();
- rtc::tokenize(hoststring, '@', &tokens);
- ASSERT(!tokens.empty());
- // TODO(pthatcher): What's the right thing to do if tokens.size() is >2?
- // E.g. a string like "foo@bar@bat".
- if (tokens.size() >= kTurnHostTokensNum) {
- server.username = rtc::s_url_decode(tokens[0]);
- hoststring = tokens[1];
- } else {
- hoststring = tokens[0];
- }
+ std::string hoststring;
+ ServiceType service_type = INVALID;
+ if (!GetServiceTypeAndHostnameFromUri(uri_without_transport,
+ &service_type,
+ &hoststring)) {
+ LOG(LS_WARNING) << "Invalid transport parameter in ICE URI: "
+ << uri_without_transport;
+ return true;
+ }
+
+ ASSERT(!hoststring.empty());
+
+ // Let's break up the hostname.
+ tokens.clear();
+ rtc::tokenize(hoststring, '@', &tokens);
+ ASSERT(!tokens.empty());
+ std::string username(server.username);
+ // TODO(pthatcher): What's the right thing to do if tokens.size() is >2?
+ // E.g. a string like "foo@bar@bat".
+ if (tokens.size() >= kTurnHostTokensNum) {
+ username.assign(rtc::s_url_decode(tokens[0]));
+ hoststring = tokens[1];
+ } else {
+ hoststring = tokens[0];
+ }
- int port = kDefaultStunPort;
- if (service_type == TURNS) {
- port = kDefaultStunTlsPort;
- turn_transport_type = kTcpTransportType;
- }
+ int port = kDefaultStunPort;
+ if (service_type == TURNS) {
+ port = kDefaultStunTlsPort;
+ turn_transport_type = kTcpTransportType;
+ }
- std::string address;
- if (!ParseHostnameAndPortFromString(hoststring, &address, &port)) {
- LOG(WARNING) << "Invalid Hostname format: " << uri_without_transport;
- continue;
- }
+ std::string address;
+ if (!ParseHostnameAndPortFromString(hoststring, &address, &port)) {
+ LOG(WARNING) << "Invalid Hostname format: " << uri_without_transport;
+ return true;
+ }
- if (port <= 0 || port > 0xffff) {
- LOG(WARNING) << "Invalid port: " << port;
- continue;
- }
+ if (port <= 0 || port > 0xffff) {
+ LOG(WARNING) << "Invalid port: " << port;
+ return true;
+ }
- switch (service_type) {
- case STUN:
- case STUNS:
- stun_config->push_back(StunConfiguration(address, port));
- break;
- case TURN:
- case TURNS: {
- if (server.username.empty()) {
- // Turn url example from the spec |url:"turn:user@turn.example.org"|.
- std::vector<std::string> turn_tokens;
- rtc::tokenize(address, '@', &turn_tokens);
- if (turn_tokens.size() == kTurnHostTokensNum) {
- server.username = rtc::s_url_decode(turn_tokens[0]);
- address = turn_tokens[1];
- }
+ switch (service_type) {
+ case STUN:
+ case STUNS:
+ stun_config->push_back(StunConfiguration(address, port));
+ break;
+ case TURN:
+ case TURNS: {
+ if (username.empty()) {
+ // Turn url example from the spec |url:"turn:user@turn.example.org"|.
+ std::vector<std::string> turn_tokens;
+ rtc::tokenize(address, '@', &turn_tokens);
+ if (turn_tokens.size() == kTurnHostTokensNum) {
+ username.assign(rtc::s_url_decode(turn_tokens[0]));
+ address = turn_tokens[1];
}
+ }
- bool secure = (service_type == TURNS);
+ bool secure = (service_type == TURNS);
- turn_config->push_back(TurnConfiguration(address, port,
- server.username,
- server.password,
- turn_transport_type,
- secure));
- break;
+ turn_config->push_back(TurnConfiguration(address, port,
+ username,
+ server.password,
+ turn_transport_type,
+ secure));
+ break;
+ }
+ case INVALID:
+ default:
+ LOG(WARNING) << "Configuration not supported: " << url;
+ return false;
+ }
+ return true;
+}
+
+bool ParseIceServers(const PeerConnectionInterface::IceServers& servers,
+ std::vector<StunConfiguration>* stun_config,
+ std::vector<TurnConfiguration>* turn_config) {
+ for (const webrtc::PeerConnectionInterface::IceServer& server : servers) {
+ if (!server.urls.empty()) {
+ for (const std::string& url : server.urls) {
+ if (url.empty()) {
+ LOG(WARNING) << "Empty uri.";
+ continue;
+ }
+ if (!ParseIceServerUrl(server, url, stun_config, turn_config)) {
+ return false;
+ }
}
- case INVALID:
- default:
- LOG(WARNING) << "Configuration not supported: " << server.uri;
+ } else if (!server.uri.empty()) {
+ // Fall back to the old .uri if the new .urls isn't present.
+ if (!ParseIceServerUrl(server, server.uri, stun_config, turn_config)) {
return false;
+ }
+ } else {
+ LOG(WARNING) << "Empty uri.";
}
}
return true;
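The refactor above pulls the per-URL work out into ParseIceServerUrl so a single IceServer can carry several entries in the new urls field, with the legacy uri field used only as a fallback. A minimal sketch of that iteration order, with a hypothetical ParseOneUrl standing in for ParseIceServerUrl (which, as in the real code, returns false only for unsupported schemes):

#include <iostream>
#include <string>
#include <vector>

// Simplified stand-ins for the real webrtc types (assumption for illustration).
struct IceServer {
  std::string uri;                 // legacy single-URL field
  std::vector<std::string> urls;   // new multi-URL field
  std::string username;
  std::string password;
};

// Hypothetical stand-in for ParseIceServerUrl: returns false only for
// unsupported schemes, true otherwise (including recoverable warnings).
bool ParseOneUrl(const IceServer& server, const std::string& url) {
  if (url.compare(0, 5, "stun:") == 0 || url.compare(0, 5, "turn:") == 0) {
    std::cout << "parsed " << url << " (user=" << server.username << ")\n";
    return true;
  }
  std::cout << "unsupported scheme: " << url << "\n";
  return false;
}

bool ParseIceServersSketch(const std::vector<IceServer>& servers) {
  for (const IceServer& server : servers) {
    if (!server.urls.empty()) {
      for (const std::string& url : server.urls) {
        if (url.empty())
          continue;                      // empty entries are skipped, not fatal
        if (!ParseOneUrl(server, url))
          return false;                  // unsupported scheme aborts parsing
      }
    } else if (!server.uri.empty()) {
      if (!ParseOneUrl(server, server.uri))  // legacy fallback
        return false;
    }
    // A server with neither urls nor uri is only warned about upstream.
  }
  return true;
}

int main() {
  IceServer s;
  s.urls = {"", "stun:stun.l.google.com:19302", "turn:user@turn.example.org"};
  IceServer legacy;
  legacy.uri = "stun:stun.example.net";
  std::cout << ParseIceServersSketch({s, legacy}) << "\n";
}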
@@ -316,6 +338,7 @@ PeerConnection::PeerConnection(PeerConnectionFactory* factory)
}
PeerConnection::~PeerConnection() {
+ ASSERT(signaling_thread()->IsCurrent());
if (mediastream_signaling_)
mediastream_signaling_->TearDown();
if (stream_handler_container_)
@@ -864,6 +887,11 @@ void PeerConnection::OnIceComplete() {
observer_->OnIceComplete();
}
+void PeerConnection::OnIceConnectionReceivingChange(bool receiving) {
+ ASSERT(signaling_thread()->IsCurrent());
+ observer_->OnIceConnectionReceivingChange(receiving);
+}
+
void PeerConnection::ChangeSignalingState(
PeerConnectionInterface::SignalingState signaling_state) {
signaling_state_ = signaling_state;
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection.h b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection.h
index bea9c2f61b6..1f6b59f832e 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection.h
@@ -142,13 +142,14 @@ class PeerConnection : public PeerConnectionInterface,
uint32 ssrc) override;
void OnRemoveLocalVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track) override;
- virtual void OnRemoveLocalStream(MediaStreamInterface* stream);
+ void OnRemoveLocalStream(MediaStreamInterface* stream) override;
// Implements IceObserver
- virtual void OnIceConnectionChange(IceConnectionState new_state);
- virtual void OnIceGatheringChange(IceGatheringState new_state);
- virtual void OnIceCandidate(const IceCandidateInterface* candidate);
- virtual void OnIceComplete();
+ void OnIceConnectionChange(IceConnectionState new_state) override;
+ void OnIceGatheringChange(IceGatheringState new_state) override;
+ void OnIceCandidate(const IceCandidateInterface* candidate) override;
+ void OnIceComplete() override;
+ void OnIceConnectionReceivingChange(bool receiving) override;
// Signals from WebRtcSession.
void OnSessionStateChange(cricket::BaseSession* session,
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection_unittest.cc
index 93793cfc3e3..fd58ecdff6f 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnection_unittest.cc
@@ -33,6 +33,7 @@
#include <vector>
#include "talk/app/webrtc/dtmfsender.h"
+#include "talk/app/webrtc/fakemetricsobserver.h"
#include "talk/app/webrtc/fakeportallocatorfactory.h"
#include "talk/app/webrtc/localaudiosource.h"
#include "talk/app/webrtc/mediastreaminterface.h"
@@ -82,6 +83,7 @@ using webrtc::MockDataChannelObserver;
using webrtc::MockSetSessionDescriptionObserver;
using webrtc::MockStatsObserver;
using webrtc::PeerConnectionInterface;
+using webrtc::PeerConnectionFactory;
using webrtc::SessionDescriptionInterface;
using webrtc::StreamCollectionInterface;
@@ -349,6 +351,7 @@ class PeerConnectionTestClientBase
EXPECT_TRUE(peer_connection_->GetStats(
observer, track, PeerConnectionInterface::kStatsOutputLevelStandard));
EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ EXPECT_NE(0, observer->timestamp());
return observer->AudioOutputLevel();
}
@@ -358,6 +361,7 @@ class PeerConnectionTestClientBase
EXPECT_TRUE(peer_connection_->GetStats(
observer, NULL, PeerConnectionInterface::kStatsOutputLevelStandard));
EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ EXPECT_NE(0, observer->timestamp());
return observer->AudioInputLevel();
}
@@ -367,6 +371,7 @@ class PeerConnectionTestClientBase
EXPECT_TRUE(peer_connection_->GetStats(
observer, track, PeerConnectionInterface::kStatsOutputLevelStandard));
EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ EXPECT_NE(0, observer->timestamp());
return observer->BytesReceived();
}
@@ -376,6 +381,7 @@ class PeerConnectionTestClientBase
EXPECT_TRUE(peer_connection_->GetStats(
observer, track, PeerConnectionInterface::kStatsOutputLevelStandard));
EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ EXPECT_NE(0, observer->timestamp());
return observer->BytesSent();
}
@@ -385,6 +391,7 @@ class PeerConnectionTestClientBase
EXPECT_TRUE(peer_connection_->GetStats(
observer, NULL, PeerConnectionInterface::kStatsOutputLevelStandard));
EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ EXPECT_NE(0, observer->timestamp());
int bw = observer->AvailableReceiveBandwidth();
return bw;
}
@@ -395,6 +402,7 @@ class PeerConnectionTestClientBase
EXPECT_TRUE(peer_connection_->GetStats(
observer, NULL, PeerConnectionInterface::kStatsOutputLevelStandard));
EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ EXPECT_NE(0, observer->timestamp());
return observer->DtlsCipher();
}
@@ -404,6 +412,7 @@ class PeerConnectionTestClientBase
EXPECT_TRUE(peer_connection_->GetStats(
observer, NULL, PeerConnectionInterface::kStatsOutputLevelStandard));
EXPECT_TRUE_WAIT(observer->called(), kMaxWaitMs);
+ EXPECT_NE(0, observer->timestamp());
return observer->SrtpCipher();
}
@@ -501,7 +510,8 @@ class PeerConnectionTestClientBase
video_decoder_factory_enabled_(false),
signaling_message_receiver_(NULL) {
}
- bool Init(const MediaConstraintsInterface* constraints) {
+ bool Init(const MediaConstraintsInterface* constraints,
+ const PeerConnectionFactory::Options* options) {
EXPECT_TRUE(!peer_connection_);
EXPECT_TRUE(!peer_connection_factory_);
allocator_factory_ = webrtc::FakePortAllocatorFactory::Create();
@@ -523,6 +533,9 @@ class PeerConnectionTestClientBase
if (!peer_connection_factory_) {
return false;
}
+ if (options) {
+ peer_connection_factory_->SetOptions(*options);
+ }
peer_connection_ = CreatePeerConnection(allocator_factory_.get(),
constraints);
return peer_connection_.get() != NULL;
@@ -619,9 +632,10 @@ class JsepTestClient
public:
static JsepTestClient* CreateClient(
const std::string& id,
- const MediaConstraintsInterface* constraints) {
+ const MediaConstraintsInterface* constraints,
+ const PeerConnectionFactory::Options* options) {
JsepTestClient* client(new JsepTestClient(id));
- if (!client->Init(constraints)) {
+ if (!client->Init(constraints, options)) {
delete client;
return NULL;
}
@@ -967,10 +981,19 @@ class P2PTestConductor : public testing::Test {
bool CreateTestClients(MediaConstraintsInterface* init_constraints,
MediaConstraintsInterface* recv_constraints) {
+ return CreateTestClients(init_constraints, NULL, recv_constraints, NULL);
+ }
+
+ bool CreateTestClients(MediaConstraintsInterface* init_constraints,
+ PeerConnectionFactory::Options* init_options,
+ MediaConstraintsInterface* recv_constraints,
+ PeerConnectionFactory::Options* recv_options) {
initiating_client_.reset(SignalingClass::CreateClient("Caller: ",
- init_constraints));
+ init_constraints,
+ init_options));
receiving_client_.reset(SignalingClass::CreateClient("Callee: ",
- recv_constraints));
+ recv_constraints,
+ recv_options));
if (!initiating_client_ || !receiving_client_) {
return false;
}
@@ -1307,20 +1330,122 @@ TEST_F(JsepPeerConnectionP2PTestClient, GetBytesSentStats) {
kMaxWaitForStatsMs);
}
-// Test that we can get negotiated ciphers.
-TEST_F(JsepPeerConnectionP2PTestClient, GetNegotiatedCiphersStats) {
- ASSERT_TRUE(CreateTestClients());
+// Test that DTLS 1.0 is used if both sides only support DTLS 1.0.
+TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12None) {
+ PeerConnectionFactory::Options init_options;
+ init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+ PeerConnectionFactory::Options recv_options;
+ recv_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+ ASSERT_TRUE(CreateTestClients(NULL, &init_options, NULL, &recv_options));
+ rtc::scoped_refptr<webrtc::FakeMetricsObserver>
+ init_observer = new rtc::RefCountedObject<webrtc::FakeMetricsObserver>();
+ initializing_client()->pc()->RegisterUMAObserver(init_observer);
LocalP2PTest();
EXPECT_EQ_WAIT(
- rtc::SSLStreamAdapter::GetDefaultSslCipher(),
+ rtc::SSLStreamAdapter::GetDefaultSslCipher(rtc::SSL_PROTOCOL_DTLS_10),
initializing_client()->GetDtlsCipherStats(),
kMaxWaitForStatsMs);
+ EXPECT_EQ(
+ rtc::SSLStreamAdapter::GetDefaultSslCipher(rtc::SSL_PROTOCOL_DTLS_10),
+ init_observer->GetStringHistogramSample(webrtc::kAudioSslCipher));
EXPECT_EQ_WAIT(
kDefaultSrtpCipher,
initializing_client()->GetSrtpCipherStats(),
kMaxWaitForStatsMs);
+ EXPECT_EQ(
+ kDefaultSrtpCipher,
+ init_observer->GetStringHistogramSample(webrtc::kAudioSrtpCipher));
+}
+
+// Test that DTLS 1.2 is used if both ends support it.
+TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Both) {
+ PeerConnectionFactory::Options init_options;
+ init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+ PeerConnectionFactory::Options recv_options;
+ recv_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+ ASSERT_TRUE(CreateTestClients(NULL, &init_options, NULL, &recv_options));
+ rtc::scoped_refptr<webrtc::FakeMetricsObserver>
+ init_observer = new rtc::RefCountedObject<webrtc::FakeMetricsObserver>();
+ initializing_client()->pc()->RegisterUMAObserver(init_observer);
+ LocalP2PTest();
+
+ EXPECT_EQ_WAIT(
+ rtc::SSLStreamAdapter::GetDefaultSslCipher(rtc::SSL_PROTOCOL_DTLS_12),
+ initializing_client()->GetDtlsCipherStats(),
+ kMaxWaitForStatsMs);
+ EXPECT_EQ(
+ rtc::SSLStreamAdapter::GetDefaultSslCipher(rtc::SSL_PROTOCOL_DTLS_12),
+ init_observer->GetStringHistogramSample(webrtc::kAudioSslCipher));
+
+ EXPECT_EQ_WAIT(
+ kDefaultSrtpCipher,
+ initializing_client()->GetSrtpCipherStats(),
+ kMaxWaitForStatsMs);
+ EXPECT_EQ(
+ kDefaultSrtpCipher,
+ init_observer->GetStringHistogramSample(webrtc::kAudioSrtpCipher));
+}
+
+// Test that DTLS 1.0 is used if the initiator supports DTLS 1.2 and the
+// receiver supports 1.0.
+TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Init) {
+ PeerConnectionFactory::Options init_options;
+ init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+ PeerConnectionFactory::Options recv_options;
+ recv_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+ ASSERT_TRUE(CreateTestClients(NULL, &init_options, NULL, &recv_options));
+ rtc::scoped_refptr<webrtc::FakeMetricsObserver>
+ init_observer = new rtc::RefCountedObject<webrtc::FakeMetricsObserver>();
+ initializing_client()->pc()->RegisterUMAObserver(init_observer);
+ LocalP2PTest();
+
+ EXPECT_EQ_WAIT(
+ rtc::SSLStreamAdapter::GetDefaultSslCipher(rtc::SSL_PROTOCOL_DTLS_10),
+ initializing_client()->GetDtlsCipherStats(),
+ kMaxWaitForStatsMs);
+ EXPECT_EQ(
+ rtc::SSLStreamAdapter::GetDefaultSslCipher(rtc::SSL_PROTOCOL_DTLS_10),
+ init_observer->GetStringHistogramSample(webrtc::kAudioSslCipher));
+
+ EXPECT_EQ_WAIT(
+ kDefaultSrtpCipher,
+ initializing_client()->GetSrtpCipherStats(),
+ kMaxWaitForStatsMs);
+ EXPECT_EQ(
+ kDefaultSrtpCipher,
+ init_observer->GetStringHistogramSample(webrtc::kAudioSrtpCipher));
+}
+
+// Test that DTLS 1.0 is used if the initiator supports DTLS 1.0 and the
+// receiver supports 1.2.
+TEST_F(JsepPeerConnectionP2PTestClient, GetDtls12Recv) {
+ PeerConnectionFactory::Options init_options;
+ init_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10;
+ PeerConnectionFactory::Options recv_options;
+ recv_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
+ ASSERT_TRUE(CreateTestClients(NULL, &init_options, NULL, &recv_options));
+ rtc::scoped_refptr<webrtc::FakeMetricsObserver>
+ init_observer = new rtc::RefCountedObject<webrtc::FakeMetricsObserver>();
+ initializing_client()->pc()->RegisterUMAObserver(init_observer);
+ LocalP2PTest();
+
+ EXPECT_EQ_WAIT(
+ rtc::SSLStreamAdapter::GetDefaultSslCipher(rtc::SSL_PROTOCOL_DTLS_10),
+ initializing_client()->GetDtlsCipherStats(),
+ kMaxWaitForStatsMs);
+ EXPECT_EQ(
+ rtc::SSLStreamAdapter::GetDefaultSslCipher(rtc::SSL_PROTOCOL_DTLS_10),
+ init_observer->GetStringHistogramSample(webrtc::kAudioSslCipher));
+
+ EXPECT_EQ_WAIT(
+ kDefaultSrtpCipher,
+ initializing_client()->GetSrtpCipherStats(),
+ kMaxWaitForStatsMs);
+ EXPECT_EQ(
+ kDefaultSrtpCipher,
+ init_observer->GetStringHistogramSample(webrtc::kAudioSrtpCipher));
}
// This test sets up a call between two parties with audio, video and data.
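All four GetDtls12* tests above exercise one rule, spelled out later in peerconnectioninterface.h: the connection uses the highest DTLS version supported by both ends, i.e. the lower of the two ssl_max_version settings. A minimal sketch of that rule with a local enum standing in for rtc::SSLProtocolVersion (assumed here to order older versions below newer ones):

#include <algorithm>
#include <cassert>

// Stand-in for rtc::SSLProtocolVersion (assumed ordering: higher value means
// a newer protocol, which is how a max-version cap behaves).
enum SslVersion { DTLS_10 = 1, DTLS_12 = 2 };

// The negotiated version is the highest one both sides allow, i.e. the
// minimum of the two ssl_max_version settings.
SslVersion NegotiatedDtlsVersion(SslVersion initiator_max,
                                 SslVersion receiver_max) {
  return std::min(initiator_max, receiver_max);
}

int main() {
  assert(NegotiatedDtlsVersion(DTLS_10, DTLS_10) == DTLS_10);  // GetDtls12None
  assert(NegotiatedDtlsVersion(DTLS_12, DTLS_12) == DTLS_12);  // GetDtls12Both
  assert(NegotiatedDtlsVersion(DTLS_12, DTLS_10) == DTLS_10);  // GetDtls12Init
  assert(NegotiatedDtlsVersion(DTLS_10, DTLS_12) == DTLS_10);  // GetDtls12Recv
}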
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectionfactory.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectionfactory.cc
index 1c933764ac7..3524af7932a 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectionfactory.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectionfactory.cc
@@ -276,7 +276,7 @@ rtc::Thread* PeerConnectionFactory::worker_thread() {
cricket::MediaEngineInterface* PeerConnectionFactory::CreateMediaEngine_w() {
ASSERT(worker_thread_ == rtc::Thread::Current());
return cricket::WebRtcMediaEngineFactory::Create(
- default_adm_.get(), NULL, video_encoder_factory_.get(),
+ default_adm_.get(), video_encoder_factory_.get(),
video_decoder_factory_.get());
}
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectionfactory_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectionfactory_unittest.cc
index 67a20331dac..80885888522 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectionfactory_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectionfactory_unittest.cc
@@ -199,6 +199,38 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServers) {
}
// This test verifies creation of PeerConnection with valid STUN and TURN
+// configuration. Also verifies that the list of URLs is parsed correctly as expected.
+TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServersUrls) {
+ PeerConnectionInterface::RTCConfiguration config;
+ webrtc::PeerConnectionInterface::IceServer ice_server;
+ ice_server.urls.push_back(""); // Empty URLs should be ignored.
+ ice_server.urls.push_back(kStunIceServer);
+ ice_server.urls.push_back(kTurnIceServer);
+ ice_server.urls.push_back(kTurnIceServerWithTransport);
+ ice_server.password = kTurnPassword;
+ config.servers.push_back(ice_server);
+ rtc::scoped_refptr<PeerConnectionInterface> pc(
+ factory_->CreatePeerConnection(config, NULL,
+ allocator_factory_.get(),
+ new FakeIdentityService(),
+ &observer_));
+ EXPECT_TRUE(pc.get() != NULL);
+ StunConfigurations stun_configs;
+ webrtc::PortAllocatorFactoryInterface::StunConfiguration stun1(
+ "stun.l.google.com", 19302);
+ stun_configs.push_back(stun1);
+ VerifyStunConfigurations(stun_configs);
+ TurnConfigurations turn_configs;
+ webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn1(
+ "test.com", 1234, "test@hello.com", kTurnPassword, "udp", false);
+ turn_configs.push_back(turn1);
+ webrtc::PortAllocatorFactoryInterface::TurnConfiguration turn2(
+ "hello.com", kDefaultStunPort, "test", kTurnPassword, "tcp", false);
+ turn_configs.push_back(turn2);
+ VerifyTurnConfigurations(turn_configs);
+}
+
+// This test verifies creation of PeerConnection with valid STUN and TURN
// configuration. Also verifies the URL's parsed correctly as expected.
// This version doesn't use RTCConfiguration.
// TODO(mallinath) - Remove this method after clients start using RTCConfig.
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectioninterface.h b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectioninterface.h
index e32676e259f..521dad26956 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectioninterface.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/peerconnectioninterface.h
@@ -79,6 +79,7 @@
#include "talk/app/webrtc/umametrics.h"
#include "webrtc/base/fileutils.h"
#include "webrtc/base/network.h"
+#include "webrtc/base/sslstreamadapter.h"
#include "webrtc/base/socketaddress.h"
namespace rtc {
@@ -126,6 +127,9 @@ class MetricsObserverInterface : public rtc::RefCountInterface {
virtual void IncrementCounter(PeerConnectionMetricsCounter type) = 0;
virtual void AddHistogramSample(PeerConnectionMetricsName type,
int value) = 0;
+ // TODO(jbauch): Make method abstract when it is implemented by Chromium.
+ virtual void AddHistogramSample(PeerConnectionMetricsName type,
+ const std::string& value) {}
protected:
virtual ~MetricsObserverInterface() {}
@@ -175,7 +179,9 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
};
struct IceServer {
+ // TODO(jbauch): Remove uri when all code using it has switched to urls.
std::string uri;
+ std::vector<std::string> urls;
std::string username;
std::string password;
};
@@ -197,6 +203,12 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
kBundlePolicyMaxCompat
};
+ // https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-09#section-4.1.1
+ enum RtcpMuxPolicy {
+ kRtcpMuxPolicyNegotiate,
+ kRtcpMuxPolicyRequire,
+ };
+
enum TcpCandidatePolicy {
kTcpCandidatePolicyEnabled,
kTcpCandidatePolicyDisabled
@@ -210,14 +222,18 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
// at the same time.
IceServers servers;
BundlePolicy bundle_policy;
+ RtcpMuxPolicy rtcp_mux_policy;
TcpCandidatePolicy tcp_candidate_policy;
int audio_jitter_buffer_max_packets;
+ bool audio_jitter_buffer_fast_accelerate;
RTCConfiguration()
: type(kAll),
bundle_policy(kBundlePolicyBalanced),
+ rtcp_mux_policy(kRtcpMuxPolicyNegotiate),
tcp_candidate_policy(kTcpCandidatePolicyEnabled),
- audio_jitter_buffer_max_packets(50) {}
+ audio_jitter_buffer_max_packets(50),
+ audio_jitter_buffer_fast_accelerate(false) {}
};
struct RTCOfferAnswerOptions {
@@ -395,6 +411,9 @@ class PeerConnectionObserver {
// All Ice candidates have been found.
virtual void OnIceComplete() {}
+ // Called when the ICE connection receiving status changes.
+ virtual void OnIceConnectionReceivingChange(bool receiving) {}
+
protected:
// Dtor protected as objects shouldn't be deleted via this interface.
~PeerConnectionObserver() {}
@@ -508,7 +527,8 @@ class PeerConnectionFactoryInterface : public rtc::RefCountInterface {
Options() :
disable_encryption(false),
disable_sctp_data_channels(false),
- network_ignore_mask(rtc::kDefaultNetworkIgnoreMask) {
+ network_ignore_mask(rtc::kDefaultNetworkIgnoreMask),
+ ssl_max_version(rtc::SSL_PROTOCOL_DTLS_10) {
}
bool disable_encryption;
bool disable_sctp_data_channels;
@@ -517,6 +537,11 @@ class PeerConnectionFactoryInterface : public rtc::RefCountInterface {
// ADAPTER_TYPE_ETHERNET | ADAPTER_TYPE_LOOPBACK will ignore Ethernet and
// loopback interfaces.
int network_ignore_mask;
+
+ // Sets the maximum supported protocol version. The highest version
+ // supported by both ends will be used for the connection, i.e. if one
+ // party supports DTLS 1.0 and the other DTLS 1.2, DTLS 1.0 will be used.
+ rtc::SSLProtocolVersion ssl_max_version;
};
virtual void SetOptions(const Options& options) = 0;
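Taken together, the peerconnectioninterface.h changes let callers list several URLs per IceServer and add two new RTCConfiguration knobs with conservative defaults. The sketch below mirrors only the fields touched in this hunk in a local struct (the real RTCConfiguration sets its defaults in a constructor initializer list, not with in-class initializers):

#include <cassert>
#include <string>
#include <vector>

// Local mirror of the RTCConfiguration additions shown above; the real
// struct lives in talk/app/webrtc/peerconnectioninterface.h.
struct IceServer {
  std::string uri;                // deprecated, kept for older callers
  std::vector<std::string> urls;  // preferred: one server, many URLs
  std::string username;
  std::string password;
};

enum RtcpMuxPolicy { kRtcpMuxPolicyNegotiate, kRtcpMuxPolicyRequire };

struct RTCConfigurationMirror {
  std::vector<IceServer> servers;
  RtcpMuxPolicy rtcp_mux_policy = kRtcpMuxPolicyNegotiate;   // new field
  int audio_jitter_buffer_max_packets = 50;
  bool audio_jitter_buffer_fast_accelerate = false;          // new field
};

int main() {
  RTCConfigurationMirror config;                 // defaults from the diff
  assert(config.rtcp_mux_policy == kRtcpMuxPolicyNegotiate);
  assert(config.audio_jitter_buffer_max_packets == 50);
  assert(!config.audio_jitter_buffer_fast_accelerate);

  IceServer server;
  server.urls.push_back("stun:stun.l.google.com:19302");
  server.urls.push_back("turn:user@turn.example.org?transport=tcp");
  server.password = "secret";
  config.servers.push_back(server);
  return 0;
}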
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector.cc
index fb1595236b1..ad64639eb93 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector.cc
@@ -80,18 +80,25 @@ StatsReport::Id GetTransportIdFromProxy(const cricket::ProxyTransportMap& map,
found->second, cricket::ICE_CANDIDATE_COMPONENT_RTP);
}
-void AddTrackReport(StatsCollection* reports, const std::string& track_id) {
+StatsReport* AddTrackReport(StatsCollection* reports,
+ const std::string& track_id) {
// Adds an empty track report.
StatsReport::Id id(
StatsReport::NewTypedId(StatsReport::kStatsReportTypeTrack, track_id));
StatsReport* report = reports->ReplaceOrAddNew(id);
report->AddString(StatsReport::kStatsValueNameTrackId, track_id);
+ return report;
}
template <class TrackVector>
-void CreateTrackReports(const TrackVector& tracks, StatsCollection* reports) {
- for (const auto& track : tracks)
- AddTrackReport(reports, track->id());
+void CreateTrackReports(const TrackVector& tracks, StatsCollection* reports,
+ TrackIdMap& track_ids) {
+ for (const auto& track : tracks) {
+ const std::string& track_id = track->id();
+ StatsReport* report = AddTrackReport(reports, track_id);
+ DCHECK(report != nullptr);
+ track_ids[track_id] = report;
+ }
}
void ExtractCommonSendProperties(const cricket::MediaSenderInfo& info,
@@ -129,6 +136,9 @@ void ExtractStats(const cricket::VoiceReceiverInfo& info, StatsReport* report) {
{ StatsReport::kStatsValueNameSecondaryDecodedRate,
info.secondary_decoded_rate },
{ StatsReport::kStatsValueNameSpeechExpandRate, info.speech_expand_rate },
+ { StatsReport::kStatsValueNameAccelerateRate, info.accelerate_rate },
+ { StatsReport::kStatsValueNamePreemptiveExpandRate,
+ info.preemptive_expand_rate },
};
const IntForAdd ints[] = {
@@ -362,9 +372,9 @@ void StatsCollector::AddStream(MediaStreamInterface* stream) {
DCHECK(stream != NULL);
CreateTrackReports<AudioTrackVector>(stream->GetAudioTracks(),
- &reports_);
+ &reports_, track_ids_);
CreateTrackReports<VideoTrackVector>(stream->GetVideoTracks(),
- &reports_);
+ &reports_, track_ids_);
}
void StatsCollector::AddLocalAudioTrack(AudioTrackInterface* audio_track,
@@ -463,6 +473,7 @@ StatsCollector::UpdateStats(PeerConnectionInterface::StatsOutputLevel level) {
ExtractVoiceInfo();
ExtractVideoInfo(level);
ExtractDataInfo();
+ UpdateTrackReports();
}
}
@@ -866,6 +877,7 @@ void StatsCollector::UpdateStatsFromExistingLocalAudioTracks() {
if (!v || v->string_val() != track->id())
continue;
+ report->set_timestamp(stats_gathering_started_);
UpdateReportFromAudioTrack(track, report);
}
}
@@ -913,6 +925,18 @@ bool StatsCollector::GetTrackIdBySsrc(uint32 ssrc, std::string* track_id,
return true;
}
+void StatsCollector::UpdateTrackReports() {
+ DCHECK(session_->signaling_thread()->IsCurrent());
+
+ rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls;
+
+ for (const auto& entry : track_ids_) {
+ StatsReport* report = entry.second;
+ report->set_timestamp(stats_gathering_started_);
+ }
+
+}
+
void StatsCollector::ClearUpdateStatsCacheForTest() {
stats_gathering_started_ = 0;
}
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector.h b/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector.h
index 3c0aaf9b8ec..99130a3f013 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector.h
@@ -52,6 +52,9 @@ const char* IceCandidateTypeToStatsType(const std::string& candidate_type);
// only used by stats collector.
const char* AdapterTypeToStatsType(rtc::AdapterType type);
+// A mapping between track ids and their StatsReport.
+typedef std::map<std::string, StatsReport*> TrackIdMap;
+
class StatsCollector {
public:
// The caller is responsible for ensuring that the session outlives the
@@ -139,8 +142,12 @@ class StatsCollector {
bool GetTrackIdBySsrc(uint32 ssrc, std::string* track_id,
StatsReport::Direction direction);
+ // Helper method to update the timestamp of track records.
+ void UpdateTrackReports();
+
// A collection for all of our stats reports.
StatsCollection reports_;
+ TrackIdMap track_ids_;
// Raw pointer to the session the statistics are gathered from.
WebRtcSession* const session_;
double stats_gathering_started_;
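The new TrackIdMap exists so UpdateStats() can refresh the timestamp on every track-level report without rescanning the whole StatsCollection: entries are added when track reports are created and walked in UpdateTrackReports(). A trimmed-down sketch of that bookkeeping, using a plain struct in place of the real StatsReport:

#include <iostream>
#include <map>
#include <string>
#include <vector>

// Trimmed-down stand-in for StatsReport: only the bits the pattern needs.
struct Report {
  std::string track_id;
  double timestamp;
};

class TrackStats {
 public:
  // Mirrors AddTrackReport(): create the report and remember it by track id.
  Report* AddTrackReport(const std::string& track_id) {
    reports_.push_back(new Report{track_id, 0.0});
    track_ids_[track_id] = reports_.back();
    return reports_.back();
  }

  // Mirrors UpdateTrackReports(): stamp every known track report with the
  // time stats gathering started, without searching reports_ by type.
  void UpdateTrackReports(double stats_gathering_started) {
    for (const auto& entry : track_ids_)
      entry.second->timestamp = stats_gathering_started;
  }

  ~TrackStats() {
    for (Report* r : reports_) delete r;
  }

 private:
  std::vector<Report*> reports_;              // owns the reports
  std::map<std::string, Report*> track_ids_;  // TrackIdMap equivalent
};

int main() {
  TrackStats stats;
  stats.AddTrackReport("audio_track_1");
  stats.AddTrackReport("video_track_1");
  stats.UpdateTrackReports(42.0);  // e.g. stats_gathering_started_
  std::cout << "updated\n";
}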
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector_unittest.cc
index 4e2bbedf7c9..315366ceca3 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/statscollector_unittest.cc
@@ -313,6 +313,12 @@ void VerifyVoiceReceiverInfoReport(
EXPECT_TRUE(GetValue(
report, StatsReport::kStatsValueNameSpeechExpandRate, &value_in_report));
EXPECT_EQ(rtc::ToString<float>(info.speech_expand_rate), value_in_report);
+ EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAccelerateRate,
+ &value_in_report));
+ EXPECT_EQ(rtc::ToString<float>(info.accelerate_rate), value_in_report);
+ EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNamePreemptiveExpandRate,
+ &value_in_report));
+ EXPECT_EQ(rtc::ToString<float>(info.preemptive_expand_rate), value_in_report);
EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameSecondaryDecodedRate,
&value_in_report));
EXPECT_EQ(rtc::ToString<float>(info.secondary_decoded_rate), value_in_report);
@@ -451,6 +457,8 @@ void InitVoiceReceiverInfo(cricket::VoiceReceiverInfo* voice_receiver_info) {
voice_receiver_info->expand_rate = 121;
voice_receiver_info->speech_expand_rate = 122;
voice_receiver_info->secondary_decoded_rate = 123;
+ voice_receiver_info->accelerate_rate = 124;
+ voice_receiver_info->preemptive_expand_rate = 125;
}
class StatsCollectorForTest : public webrtc::StatsCollector {
@@ -589,6 +597,7 @@ class StatsCollectorTest : public testing::Test {
const StatsReport* report = FindNthReportByType(
*reports, StatsReport::kStatsReportTypeSsrc, 1);
EXPECT_FALSE(report == NULL);
+ EXPECT_EQ(stats->GetTimeNow(), report->timestamp());
std::string track_id = ExtractSsrcStatsValue(
*reports, StatsReport::kStatsValueNameTrackId);
EXPECT_EQ(audio_track->id(), track_id);
@@ -611,6 +620,7 @@ class StatsCollectorTest : public testing::Test {
const StatsReport* track_report = FindNthReportByType(
track_reports, StatsReport::kStatsReportTypeSsrc, 1);
EXPECT_TRUE(track_report);
+ EXPECT_EQ(stats->GetTimeNow(), track_report->timestamp());
track_id = ExtractSsrcStatsValue(track_reports,
StatsReport::kStatsValueNameTrackId);
EXPECT_EQ(audio_track->id(), track_id);
@@ -921,6 +931,7 @@ TEST_F(StatsCollectorTest, TrackObjectExistsWithoutUpdateStats) {
stats.GetStats(NULL, &reports);
EXPECT_EQ((size_t)1, reports.size());
EXPECT_EQ(StatsReport::kStatsReportTypeTrack, reports[0]->type());
+ EXPECT_EQ(0, reports[0]->timestamp());
std::string trackValue =
ExtractStatsValue(StatsReport::kStatsReportTypeTrack,
@@ -983,6 +994,7 @@ TEST_F(StatsCollectorTest, TrackAndSsrcObjectExistAfterUpdateSsrcStats) {
track_report = FindNthReportByType(
reports, StatsReport::kStatsReportTypeTrack, 1);
EXPECT_TRUE(track_report);
+ EXPECT_EQ(stats.GetTimeNow(), track_report->timestamp());
std::string ssrc_id = ExtractSsrcStatsValue(
reports, StatsReport::kStatsValueNameSsrc);
@@ -1171,6 +1183,7 @@ TEST_F(StatsCollectorTest, ReportsFromRemoteTrack) {
const StatsReport* track_report = FindNthReportByType(
reports, StatsReport::kStatsReportTypeTrack, 1);
EXPECT_TRUE(track_report);
+ EXPECT_EQ(stats.GetTimeNow(), track_report->timestamp());
std::string ssrc_id = ExtractSsrcStatsValue(
reports, StatsReport::kStatsValueNameSsrc);
@@ -1545,6 +1558,7 @@ TEST_F(StatsCollectorTest, GetStatsAfterRemoveAudioStream) {
const StatsReport* report = FindNthReportByType(
reports, StatsReport::kStatsReportTypeSsrc, 1);
EXPECT_FALSE(report == NULL);
+ EXPECT_EQ(stats.GetTimeNow(), report->timestamp());
std::string track_id = ExtractSsrcStatsValue(
reports, StatsReport::kStatsValueNameTrackId);
EXPECT_EQ(kLocalTrackId, track_id);
@@ -1622,6 +1636,7 @@ TEST_F(StatsCollectorTest, LocalAndRemoteTracksWithSameSsrc) {
const StatsReport* track_report = FindNthReportByType(
reports, StatsReport::kStatsReportTypeSsrc, 1);
EXPECT_TRUE(track_report);
+ EXPECT_EQ(stats.GetTimeNow(), track_report->timestamp());
std::string track_id = ExtractSsrcStatsValue(
reports, StatsReport::kStatsValueNameTrackId);
EXPECT_EQ(kLocalTrackId, track_id);
@@ -1633,6 +1648,7 @@ TEST_F(StatsCollectorTest, LocalAndRemoteTracksWithSameSsrc) {
track_report = FindNthReportByType(reports,
StatsReport::kStatsReportTypeSsrc, 1);
EXPECT_TRUE(track_report);
+ EXPECT_EQ(stats.GetTimeNow(), track_report->timestamp());
track_id = ExtractSsrcStatsValue(reports,
StatsReport::kStatsValueNameTrackId);
EXPECT_EQ(kRemoteTrackId, track_id);
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/statstypes.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/statstypes.cc
index 582d2033612..a902210478c 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/statstypes.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/statstypes.cc
@@ -408,6 +408,8 @@ const char* StatsReport::Value::display_name() const {
return "datachannelid";
// 'goog' prefixed constants.
+ case kStatsValueNameAccelerateRate:
+ return "googAccelerateRate";
case kStatsValueNameActiveConnection:
return "googActiveConnection";
case kStatsValueNameActualEncBitrate:
@@ -544,6 +546,8 @@ const char* StatsReport::Value::display_name() const {
return "googNacksReceived";
case kStatsValueNameNacksSent:
return "googNacksSent";
+ case kStatsValueNamePreemptiveExpandRate:
+ return "googPreemptiveExpandRate";
case kStatsValueNamePlisReceived:
return "googPlisReceived";
case kStatsValueNamePlisSent:
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/statstypes.h b/chromium/third_party/libjingle/source/talk/app/webrtc/statstypes.h
index 9df1de795e6..c58f39058b1 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/statstypes.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/statstypes.h
@@ -131,6 +131,7 @@ class StatsReport {
kStatsValueNameTransportId,
// Internal StatsValue names.
+ kStatsValueNameAccelerateRate,
kStatsValueNameActualEncBitrate,
kStatsValueNameAdaptationChanges,
kStatsValueNameAvailableReceiveBandwidth,
@@ -197,6 +198,7 @@ class StatsReport {
kStatsValueNameNacksSent,
kStatsValueNamePlisReceived,
kStatsValueNamePlisSent,
+ kStatsValueNamePreemptiveExpandRate,
kStatsValueNamePreferredJitterBufferMs,
kStatsValueNameRemoteAddress,
kStatsValueNameRemoteCandidateId,
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule.cc
index c6339d3c3f4..47f17a1739a 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule.cc
@@ -45,7 +45,7 @@ static const uint32 kAdmMaxIdleTimeProcess = 1000;
// Constants here are derived by running VoE using a real ADM.
// The constants correspond to 10ms of mono audio at 44kHz.
static const int kTimePerFrameMs = 10;
-static const int kNumberOfChannels = 1;
+static const uint8_t kNumberOfChannels = 1;
static const int kSamplesPerSecond = 44000;
static const int kTotalDelayMs = 0;
static const int kClockDriftMs = 0;
@@ -623,8 +623,8 @@ bool FakeAudioCaptureModule::Initialize() {
void FakeAudioCaptureModule::SetSendBuffer(int value) {
Sample* buffer_ptr = reinterpret_cast<Sample*>(send_buffer_);
- const int buffer_size_in_samples = sizeof(send_buffer_) /
- kNumberBytesPerSample;
+ const int buffer_size_in_samples =
+ sizeof(send_buffer_) / kNumberBytesPerSample;
for (int i = 0; i < buffer_size_in_samples; ++i) {
buffer_ptr[i] = value;
}
@@ -636,8 +636,8 @@ void FakeAudioCaptureModule::ResetRecBuffer() {
bool FakeAudioCaptureModule::CheckRecBuffer(int value) {
const Sample* buffer_ptr = reinterpret_cast<const Sample*>(rec_buffer_);
- const int buffer_size_in_samples = sizeof(rec_buffer_) /
- kNumberBytesPerSample;
+ const int buffer_size_in_samples =
+ sizeof(rec_buffer_) / kNumberBytesPerSample;
for (int i = 0; i < buffer_size_in_samples; ++i) {
if (buffer_ptr[i] >= value) return true;
}
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule.h b/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule.h
index 57c1e58aafd..8ff4aa19e75 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule.h
@@ -58,8 +58,8 @@ class FakeAudioCaptureModule
// The value for the following constants have been derived by running VoE
// using a real ADM. The constants correspond to 10ms of mono audio at 44kHz.
- enum{kNumberSamples = 440};
- enum{kNumberBytesPerSample = sizeof(Sample)};
+ static const int kNumberSamples = 440;
+ static const int kNumberBytesPerSample = sizeof(Sample);
// Creates a FakeAudioCaptureModule or returns NULL on failure.
// |process_thread| is used to push and pull audio frames to and from the
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule_unittest.cc
index 8fcbfd70123..56f1d070edd 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakeaudiocapturemodule_unittest.cc
@@ -55,18 +55,18 @@ class FakeAdmTest : public testing::Test,
// Callbacks inherited from webrtc::AudioTransport.
// ADM is pushing data.
- virtual int32_t RecordedDataIsAvailable(const void* audioSamples,
- const uint32_t nSamples,
- const uint8_t nBytesPerSample,
- const uint8_t nChannels,
- const uint32_t samplesPerSec,
- const uint32_t totalDelayMS,
- const int32_t clockDrift,
- const uint32_t currentMicLevel,
- const bool keyPressed,
- uint32_t& newMicLevel) {
+ int32_t RecordedDataIsAvailable(const void* audioSamples,
+ const uint32_t nSamples,
+ const uint8_t nBytesPerSample,
+ const uint8_t nChannels,
+ const uint32_t samplesPerSec,
+ const uint32_t totalDelayMS,
+ const int32_t clockDrift,
+ const uint32_t currentMicLevel,
+ const bool keyPressed,
+ uint32_t& newMicLevel) override {
rec_buffer_bytes_ = nSamples * nBytesPerSample;
- if ((rec_buffer_bytes_ <= 0) ||
+ if ((rec_buffer_bytes_ == 0) ||
(rec_buffer_bytes_ > FakeAudioCaptureModule::kNumberSamples *
FakeAudioCaptureModule::kNumberBytesPerSample)) {
ADD_FAILURE();
@@ -79,14 +79,14 @@ class FakeAdmTest : public testing::Test,
}
// ADM is pulling data.
- virtual int32_t NeedMorePlayData(const uint32_t nSamples,
- const uint8_t nBytesPerSample,
- const uint8_t nChannels,
- const uint32_t samplesPerSec,
- void* audioSamples,
- uint32_t& nSamplesOut,
- int64_t* elapsed_time_ms,
- int64_t* ntp_time_ms) {
+ int32_t NeedMorePlayData(const uint32_t nSamples,
+ const uint8_t nBytesPerSample,
+ const uint8_t nChannels,
+ const uint32_t samplesPerSec,
+ void* audioSamples,
+ uint32_t& nSamplesOut,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) override {
++pull_iterations_;
const uint32_t audio_buffer_size = nSamples * nBytesPerSample;
const uint32_t bytes_out = RecordedDataReceived() ?
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakedatachannelprovider.h b/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakedatachannelprovider.h
index bf64a94e45d..eb86873c904 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakedatachannelprovider.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/test/fakedatachannelprovider.h
@@ -91,11 +91,15 @@ class FakeDataChannelProvider : public webrtc::DataChannelProviderInterface {
void set_send_blocked(bool blocked) {
send_blocked_ = blocked;
if (!blocked) {
- std::set<webrtc::DataChannel*>::iterator it;
- for (it = connected_channels_.begin();
- it != connected_channels_.end();
- ++it) {
- (*it)->OnChannelReady(true);
+ // Take a snapshot of the connected channels and check to see whether
+ // each value is still in connected_channels_ before calling
+ // OnChannelReady(). This avoids problems where the set gets modified
+ // in response to OnChannelReady().
+ for (webrtc::DataChannel *ch : std::set<webrtc::DataChannel*>(
+ connected_channels_.begin(), connected_channels_.end())) {
+ if (connected_channels_.count(ch)) {
+ ch->OnChannelReady(true);
+ }
}
}
}
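The set_send_blocked() change iterates over a snapshot of connected_channels_ and re-checks membership before each callback, so OnChannelReady() can safely add or remove channels mid-loop. The same pattern in isolation, with a plain std::set and a callback standing in for the DataChannel objects:

#include <functional>
#include <iostream>
#include <set>

// Sketch of the "iterate over a snapshot, re-check membership" pattern used
// in FakeDataChannelProvider::set_send_blocked(). The callback may erase
// elements from (or insert into) the live set without invalidating the loop.
void NotifyAll(std::set<int>& live,
               const std::function<void(int, std::set<int>&)>& on_ready) {
  // Take a snapshot so the loop is unaffected by modifications.
  std::set<int> snapshot(live.begin(), live.end());
  for (int ch : snapshot) {
    if (live.count(ch)) {     // skip channels removed by an earlier callback
      on_ready(ch, live);
    }
  }
}

int main() {
  std::set<int> channels = {1, 2, 3};
  NotifyAll(channels, [](int ch, std::set<int>& live) {
    std::cout << "channel " << ch << " ready\n";
    if (ch == 1) live.erase(2);  // callback mutates the live set
  });
  // Channel 2 was erased before its turn, so only 1 and 3 are notified.
}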
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/test/mockpeerconnectionobservers.h b/chromium/third_party/libjingle/source/talk/app/webrtc/test/mockpeerconnectionobservers.h
index f31b16c7443..d2697b4364e 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/test/mockpeerconnectionobservers.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/test/mockpeerconnectionobservers.h
@@ -98,8 +98,10 @@ class MockDataChannelObserver : public webrtc::DataChannelObserver {
channel_->UnregisterObserver();
}
- virtual void OnStateChange() { state_ = channel_->state(); }
- virtual void OnMessage(const DataBuffer& buffer) {
+ void OnBufferedAmountChange(uint64 previous_amount) override {}
+
+ void OnStateChange() override { state_ = channel_->state(); }
+ void OnMessage(const DataBuffer& buffer) override {
last_message_.assign(buffer.data.data<char>(), buffer.data.size());
++received_message_count_;
}
@@ -127,6 +129,7 @@ class MockStatsObserver : public webrtc::StatsObserver {
stats_.number_of_reports = reports.size();
for (const auto* r : reports) {
if (r->type() == StatsReport::kStatsReportTypeSsrc) {
+ stats_.timestamp = r->timestamp();
GetIntValue(r, StatsReport::kStatsValueNameAudioOutputLevel,
&stats_.audio_output_level);
GetIntValue(r, StatsReport::kStatsValueNameAudioInputLevel,
@@ -136,9 +139,11 @@ class MockStatsObserver : public webrtc::StatsObserver {
GetIntValue(r, StatsReport::kStatsValueNameBytesSent,
&stats_.bytes_sent);
} else if (r->type() == StatsReport::kStatsReportTypeBwe) {
+ stats_.timestamp = r->timestamp();
GetIntValue(r, StatsReport::kStatsValueNameAvailableReceiveBandwidth,
&stats_.available_receive_bandwidth);
} else if (r->type() == StatsReport::kStatsReportTypeComponent) {
+ stats_.timestamp = r->timestamp();
GetStringValue(r, StatsReport::kStatsValueNameDtlsCipher,
&stats_.dtls_cipher);
GetStringValue(r, StatsReport::kStatsValueNameSrtpCipher,
@@ -149,6 +154,7 @@ class MockStatsObserver : public webrtc::StatsObserver {
bool called() const { return called_; }
size_t number_of_reports() const { return stats_.number_of_reports; }
+ double timestamp() const { return stats_.timestamp; }
int AudioOutputLevel() const {
ASSERT(called_);
@@ -210,6 +216,7 @@ class MockStatsObserver : public webrtc::StatsObserver {
struct {
void Clear() {
number_of_reports = 0;
+ timestamp = 0;
audio_output_level = 0;
audio_input_level = 0;
bytes_received = 0;
@@ -220,6 +227,7 @@ class MockStatsObserver : public webrtc::StatsObserver {
}
size_t number_of_reports;
+ double timestamp;
int audio_output_level;
int audio_input_level;
int bytes_received;
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/umametrics.h b/chromium/third_party/libjingle/source/talk/app/webrtc/umametrics.h
index d029a698bdb..bb9e05230d3 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/umametrics.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/umametrics.h
@@ -65,6 +65,12 @@ enum PeerConnectionMetricsName {
kTimeToConnect, // In milliseconds.
kLocalCandidates_IPv4, // Number of IPv4 local candidates.
kLocalCandidates_IPv6, // Number of IPv6 local candidates.
+ kAudioSrtpCipher, // Name of SRTP cipher used in audio channel.
+ kAudioSslCipher, // Name of SSL cipher used in audio channel.
+ kVideoSrtpCipher, // Name of SRTP cipher used in video channel.
+ kVideoSslCipher, // Name of SSL cipher used in video channel.
+ kDataSrtpCipher, // Name of SRTP cipher used in data channel.
+ kDataSslCipher, // Name of SSL cipher used in data channel.
kPeerConnectionMetricsName_Max
};
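These enum values feed the new string overload of AddHistogramSample() declared in peerconnectioninterface.h, which is what the GetDtls12* tests read back through FakeMetricsObserver. A minimal recording observer in the same spirit; the enum, class and cipher string below are local illustration stubs, not the real webrtc types:

#include <cassert>
#include <map>
#include <string>

// Local stubs mirroring the UMA plumbing in this diff: the enum names come
// from umametrics.h and the string-valued AddHistogramSample() hook from
// MetricsObserverInterface; this is not the real webrtc class hierarchy.
enum MetricsName { kAudioSrtpCipher, kAudioSslCipher, kMetricsName_Max };

class RecordingMetricsObserver {
 public:
  // Mirrors the new string-valued histogram hook.
  void AddHistogramSample(MetricsName type, const std::string& value) {
    string_samples_[type] = value;
  }
  // Mirrors FakeMetricsObserver::GetStringHistogramSample() as used by the
  // GetDtls12* tests.
  std::string GetStringHistogramSample(MetricsName type) const {
    auto it = string_samples_.find(type);
    return it == string_samples_.end() ? std::string() : it->second;
  }

 private:
  std::map<MetricsName, std::string> string_samples_;
};

int main() {
  RecordingMetricsObserver observer;
  observer.AddHistogramSample(kAudioSslCipher, "TLS_ECDHE_EXAMPLE_CIPHER");
  assert(observer.GetStringHistogramSample(kAudioSslCipher) ==
         "TLS_ECDHE_EXAMPLE_CIPHER");
  assert(observer.GetStringHistogramSample(kAudioSrtpCipher).empty());
}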
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack.cc
index 30aedcad194..7c78aea91fa 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/videotrack.cc
@@ -29,8 +29,6 @@
#include <string>
-#include "talk/media/webrtc/webrtcvideocapturer.h"
-
namespace webrtc {
static const char kVideoTrackKind[] = "video";
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp.cc
index a93977d6d98..b6f23ca9d00 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp.cc
@@ -539,27 +539,11 @@ static bool AddSsrcLine(uint32 ssrc_id, const std::string& attribute,
return AddLine(os.str(), message);
}
-// Split the message into two parts by the first delimiter.
-static bool SplitByDelimiter(const std::string& message,
- const char delimiter,
- std::string* field1,
- std::string* field2) {
- // Find the first delimiter
- size_t pos = message.find(delimiter);
- if (pos == std::string::npos) {
- return false;
- }
- *field1 = message.substr(0, pos);
- // The rest is the value.
- *field2 = message.substr(pos + 1);
- return true;
-}
-
// Get value only from <attribute>:<value>.
static bool GetValue(const std::string& message, const std::string& attribute,
std::string* value, SdpParseError* error) {
std::string leftpart;
- if (!SplitByDelimiter(message, kSdpDelimiterColon, &leftpart, value)) {
+ if (!rtc::tokenize_first(message, kSdpDelimiterColon, &leftpart, value)) {
return ParseFailedGetValue(message, attribute, error);
}
// The left part should end with the expected attribute.
@@ -972,7 +956,8 @@ bool ParseCandidate(const std::string& message, Candidate* candidate,
// Makes sure |message| contains only one line.
if (message.size() > first_line.size()) {
std::string left, right;
- if (SplitByDelimiter(message, kNewLine, &left, &right) && !right.empty()) {
+ if (rtc::tokenize_first(message, kNewLine, &left, &right) &&
+ !right.empty()) {
return ParseFailed(message, 0, "Expect one line only", error);
}
}
@@ -989,8 +974,8 @@ bool ParseCandidate(const std::string& message, Candidate* candidate,
std::string candidate_value;
// |first_line| must be in the form of "candidate:<value>".
- if (!SplitByDelimiter(first_line, kSdpDelimiterColon,
- &attribute_candidate, &candidate_value) ||
+ if (!rtc::tokenize_first(first_line, kSdpDelimiterColon, &attribute_candidate,
+ &candidate_value) ||
attribute_candidate != kAttributeCandidate) {
if (is_raw) {
std::ostringstream description;
@@ -1289,16 +1274,7 @@ void BuildMediaDescription(const ContentInfo* content_info,
// RFC 4566
// b=AS:<bandwidth>
- // We should always use the default bandwidth for RTP-based data
- // channels. Don't allow SDP to set the bandwidth, because that
- // would give JS the opportunity to "break the Internet".
- // TODO(pthatcher): But we need to temporarily allow the SDP to control
- // this for backwards-compatibility. Once we don't need that any
- // more, remove this.
- bool support_dc_sdp_bandwidth_temporarily = true;
- if (media_desc->bandwidth() >= 1000 &&
- (media_type != cricket::MEDIA_TYPE_DATA ||
- support_dc_sdp_bandwidth_temporarily)) {
+ if (media_desc->bandwidth() >= 1000) {
InitLine(kLineTypeSessionBandwidth, kApplicationSpecificMaximum, &os);
os << kSdpDelimiterColon << (media_desc->bandwidth() / 1000);
AddLine(os.str(), message);
@@ -2216,7 +2192,7 @@ bool ParseMediaDescription(const std::string& message,
for (size_t j = 3 ; j < fields.size(); ++j) {
// TODO(wu): Remove when below bug is fixed.
// https://bugzilla.mozilla.org/show_bug.cgi?id=996329
- if (fields[j] == "" && j == fields.size() - 1) {
+ if (fields[j].empty() && j == fields.size() - 1) {
continue;
}
@@ -2264,17 +2240,6 @@ bool ParseMediaDescription(const std::string& message,
if (!AddSctpDataCodec(data_desc, p))
return false;
}
-
- // We should always use the default bandwidth for RTP-based data
- // channels. Don't allow SDP to set the bandwidth, because that
- // would give JS the opportunity to "break the Internet".
- // TODO(pthatcher): But we need to temporarily allow the SDP to control
- // this for backwards-compatibility. Once we don't need that any
- // more, remove this.
- bool support_dc_sdp_bandwidth_temporarily = true;
- if (content.get() && !support_dc_sdp_bandwidth_temporarily) {
- content->set_bandwidth(cricket::kAutoBandwidth);
- }
} else {
LOG(LS_WARNING) << "Unsupported media type: " << line;
continue;
@@ -2532,6 +2497,17 @@ bool ParseContent(const std::string& message,
if (!GetValueFromString(line, bandwidth, &b, error)) {
return false;
}
+ // We should never use more than the default bandwidth for RTP-based
+ // data channels. Don't allow SDP to set the bandwidth, because
+ // that would give JS the opportunity to "break the Internet".
+ // See: https://code.google.com/p/chromium/issues/detail?id=280726
+ if (media_type == cricket::MEDIA_TYPE_DATA && IsRtp(protocol) &&
+ b > cricket::kDataMaxBandwidth / 1000) {
+ std::ostringstream description;
+ description << "RTP-based data channels may not send more than "
+ << cricket::kDataMaxBandwidth / 1000 << "kbps.";
+ return ParseFailed(line, description.str(), error);
+ }
media_desc->set_bandwidth(b * 1000);
}
}
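With this change the b=AS: cap on RTP data sections is enforced at parse time instead of being stripped at serialization time. A compact sketch of that gate; kDataMaxBandwidthBps is only assumed to be 30720 bps here, the real limit is whatever cricket::kDataMaxBandwidth is in this tree:

#include <iostream>
#include <sstream>
#include <string>

// Sketch of the parse-time gate added to ParseContent(): a b=AS:<kbps> line
// on an RTP data section must not exceed the data-channel cap.
// kDataMaxBandwidthBps stands in for cricket::kDataMaxBandwidth (assumed
// value); use the tree's real constant in practice.
static const int kDataMaxBandwidthBps = 30720;

bool ParseDataBandwidthLine(const std::string& line, bool is_rtp_data,
                            int* bandwidth_bps, std::string* error) {
  // Expect "b=AS:<kbps>".
  const std::string prefix = "b=AS:";
  if (line.compare(0, prefix.size(), prefix) != 0) {
    *error = "not a b=AS line";
    return false;
  }
  std::istringstream is(line.substr(prefix.size()));
  int kbps = 0;
  if (!(is >> kbps)) {
    *error = "bad bandwidth value";
    return false;
  }
  if (is_rtp_data && kbps > kDataMaxBandwidthBps / 1000) {
    std::ostringstream description;
    description << "RTP-based data channels may not send more than "
                << kDataMaxBandwidthBps / 1000 << " kbps.";
    *error = description.str();
    return false;
  }
  *bandwidth_bps = kbps * 1000;  // stored in bps, as set_bandwidth() expects
  return true;
}

int main() {
  int bw = 0;
  std::string err;
  std::cout << ParseDataBandwidthLine("b=AS:100", true, &bw, &err)   // rejected
            << " " << err << "\n";
  std::cout << ParseDataBandwidthLine("b=AS:100", false, &bw, &err)  // audio ok
            << " bw=" << bw << "\n";
}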
@@ -2749,10 +2725,8 @@ bool ParseSsrcAttribute(const std::string& line, SsrcInfoVec* ssrc_infos,
// a=ssrc:<ssrc-id> <attribute>
// a=ssrc:<ssrc-id> <attribute>:<value>
std::string field1, field2;
- if (!SplitByDelimiter(line.substr(kLinePrefixLength),
- kSdpDelimiterSpace,
- &field1,
- &field2)) {
+ if (!rtc::tokenize_first(line.substr(kLinePrefixLength), kSdpDelimiterSpace,
+ &field1, &field2)) {
const size_t expected_fields = 2;
return ParseFailedExpectFieldNum(line, expected_fields, error);
}
@@ -2769,8 +2743,7 @@ bool ParseSsrcAttribute(const std::string& line, SsrcInfoVec* ssrc_infos,
std::string attribute;
std::string value;
- if (!SplitByDelimiter(field2, kSdpDelimiterColon,
- &attribute, &value)) {
+ if (!rtc::tokenize_first(field2, kSdpDelimiterColon, &attribute, &value)) {
std::ostringstream description;
description << "Failed to get the ssrc attribute value from " << field2
<< ". Expected format <attribute>:<value>.";
@@ -3016,22 +2989,13 @@ bool ParseRtpmapAttribute(const std::string& line,
return true;
}
-void PruneRight(const char delimiter, std::string* message) {
- size_t trailing = message->find(delimiter);
- if (trailing != std::string::npos) {
- *message = message->substr(0, trailing);
- }
-}
-
bool ParseFmtpParam(const std::string& line, std::string* parameter,
std::string* value, SdpParseError* error) {
- if (!SplitByDelimiter(line, kSdpDelimiterEqual, parameter, value)) {
+ if (!rtc::tokenize_first(line, kSdpDelimiterEqual, parameter, value)) {
ParseFailed(line, "Unable to parse fmtp parameter. \'=\' missing.", error);
return false;
}
// a=fmtp:<payload_type> <param1>=<value1>; <param2>=<value2>; ...
- // When parsing the values the trailing ";" gets picked up. Remove them.
- PruneRight(kSdpDelimiterSemicolon, value);
return true;
}
@@ -3042,44 +3006,52 @@ bool ParseFmtpAttributes(const std::string& line, const MediaType media_type,
media_type != cricket::MEDIA_TYPE_VIDEO) {
return true;
}
- std::vector<std::string> fields;
- rtc::split(line.substr(kLinePrefixLength),
- kSdpDelimiterSpace, &fields);
+
+ std::string line_payload;
+ std::string line_params;
// RFC 5576
// a=fmtp:<format> <format specific parameters>
// At least two fields, whereas the second one is any of the optional
// parameters.
- if (fields.size() < 2) {
+ if (!rtc::tokenize_first(line.substr(kLinePrefixLength), kSdpDelimiterSpace,
+ &line_payload, &line_params)) {
ParseFailedExpectMinFieldNum(line, 2, error);
return false;
}
+ // Parse out the payload information.
std::string payload_type_str;
- if (!GetValue(fields[0], kAttributeFmtp, &payload_type_str, error)) {
+ if (!GetValue(line_payload, kAttributeFmtp, &payload_type_str, error)) {
return false;
}
+ int payload_type = 0;
+ if (!GetPayloadTypeFromString(line_payload, payload_type_str, &payload_type,
+ error)) {
+ return false;
+ }
+
+ // Parse out format specific parameters.
+ std::vector<std::string> fields;
+ rtc::split(line_params, kSdpDelimiterSemicolon, &fields);
+
cricket::CodecParameterMap codec_params;
- for (std::vector<std::string>::const_iterator iter = fields.begin() + 1;
- iter != fields.end(); ++iter) {
- std::string name;
- std::string value;
- if (iter->find(kSdpDelimiterEqual) == std::string::npos) {
+ for (auto& iter : fields) {
+ if (iter.find(kSdpDelimiterEqual) == std::string::npos) {
// Only fmtps with equals are currently supported. Other fmtp types
// should be ignored. Unknown fmtps do not constitute an error.
continue;
}
- if (!ParseFmtpParam(*iter, &name, &value, error)) {
+
+ std::string name;
+ std::string value;
+ if (!ParseFmtpParam(rtc::string_trim(iter), &name, &value, error)) {
return false;
}
codec_params[name] = value;
}
- int payload_type = 0;
- if (!GetPayloadTypeFromString(line, payload_type_str, &payload_type, error)) {
- return false;
- }
if (media_type == cricket::MEDIA_TYPE_AUDIO) {
UpdateCodec<AudioContentDescription, cricket::AudioCodec>(
media_desc, payload_type, codec_params);
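ParseFmtpAttributes now splits the a=fmtp line once at the first space into payload and parameter parts, then splits the parameters on ';', keeping only trimmed key=value entries. A self-contained sketch of that flow, with local helpers standing in for rtc::tokenize_first and rtc::string_trim:

#include <iostream>
#include <map>
#include <sstream>
#include <string>

// Local stand-ins for the rtc:: string helpers used by the new code path.
static bool TokenizeFirst(const std::string& s, char delim,
                          std::string* first, std::string* rest) {
  size_t pos = s.find(delim);
  if (pos == std::string::npos) return false;
  *first = s.substr(0, pos);
  *rest = s.substr(pos + 1);
  return true;
}

static std::string Trim(const std::string& s) {
  size_t b = s.find_first_not_of(" \t");
  size_t e = s.find_last_not_of(" \t");
  return (b == std::string::npos) ? "" : s.substr(b, e - b + 1);
}

// Sketch of the reworked a=fmtp parsing: "a=fmtp:<payload> k1=v1; k2=v2; ..."
bool ParseFmtpSketch(const std::string& line, int* payload_type,
                     std::map<std::string, std::string>* params) {
  std::string payload_part, param_part;
  if (!TokenizeFirst(line, ' ', &payload_part, &param_part))
    return false;  // need at least "<payload> <params>"
  // payload_part is "a=fmtp:<payload>".
  size_t colon = payload_part.find(':');
  if (colon == std::string::npos) return false;
  std::istringstream pt_stream(payload_part.substr(colon + 1));
  pt_stream >> *payload_type;

  // Split the remainder on ';', trim, and keep only key=value entries;
  // entries without '=' are ignored rather than treated as errors.
  std::stringstream ss(param_part);
  std::string field;
  while (std::getline(ss, field, ';')) {
    field = Trim(field);
    std::string name, value;
    if (!TokenizeFirst(field, '=', &name, &value)) continue;
    (*params)[name] = value;
  }
  return true;
}

int main() {
  int pt = 0;
  std::map<std::string, std::string> params;
  ParseFmtpSketch("a=fmtp:111 minptime=10; useinbandfec=1", &pt, &params);
  std::cout << pt << " minptime=" << params["minptime"]
            << " useinbandfec=" << params["useinbandfec"] << "\n";
}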
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp_unittest.cc
index 862e93c539b..b6577682041 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsdp_unittest.cc
@@ -1207,11 +1207,10 @@ class WebRtcSdpTest : public testing::Test {
"a=fmtp:111 0-15,66,70\r\n"
"a=fmtp:111 ";
std::ostringstream os;
- os << "minptime=" << params.min_ptime
- << "; stereo=" << params.stereo
+ os << "minptime=" << params.min_ptime << "; stereo=" << params.stereo
<< "; sprop-stereo=" << params.sprop_stereo
<< "; useinbandfec=" << params.useinband
- << " maxaveragebitrate=" << params.maxaveragebitrate << "\r\n"
+ << "; maxaveragebitrate=" << params.maxaveragebitrate << "\r\n"
<< "a=ptime:" << params.ptime << "\r\n"
<< "a=maxptime:" << params.max_ptime << "\r\n";
sdp += os.str();
@@ -1222,7 +1221,7 @@ class WebRtcSdpTest : public testing::Test {
os << "m=video 9 RTP/SAVPF 99 95\r\n"
<< "a=rtpmap:99 VP8/90000\r\n"
<< "a=rtpmap:95 RTX/90000\r\n"
- << "a=fmtp:95 apt=99;rtx-time=1000\r\n";
+ << "a=fmtp:95 apt=99;\r\n";
sdp += os.str();
// Deserialize
@@ -1699,12 +1698,7 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithDataChannelAndBandwidth) {
std::string expected_sdp = kSdpString;
expected_sdp.append(kSdpRtpDataChannelString);
- // We want to test that serializing data content ignores bandwidth
- // settings (it should always be the default). Thus, we don't do
- // the following:
- // TODO(pthatcher): We need to temporarily allow the SDP to control
- // this for backwards-compatibility. Once we don't need that any
- // more, remove this.
+ // Serializing data content shouldn't ignore bandwidth settings.
InjectAfter("m=application 9 RTP/SAVPF 101\r\nc=IN IP4 0.0.0.0\r\n",
"b=AS:100\r\n",
&expected_sdp);
@@ -2260,28 +2254,39 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelAndNewPort) {
}
TEST_F(WebRtcSdpTest, DeserializeSdpWithRtpDataChannelsAndBandwidth) {
- AddRtpDataChannel();
+ // We want to test that deserializing data content limits bandwidth
+ // settings (it should never be greater than the default).
+ // This should prevent someone from using unlimited data bandwidth through
+ // JS and "breaking the Internet".
+ // See: https://code.google.com/p/chromium/issues/detail?id=280726
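+ // The injected "b=AS:100" line asks for 100 kbps of application-specific
+ // bandwidth (RFC 4566); deserialization is expected to reject it here.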
+ std::string sdp_with_bandwidth = kSdpString;
+ sdp_with_bandwidth.append(kSdpRtpDataChannelString);
+ InjectAfter("a=mid:data_content_name\r\n",
+ "b=AS:100\r\n",
+ &sdp_with_bandwidth);
+ JsepSessionDescription jdesc_with_bandwidth(kDummyString);
+
+ EXPECT_FALSE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsAndBandwidth) {
+ AddSctpDataChannel();
JsepSessionDescription jdesc(kDummyString);
- // We want to test that deserializing data content ignores bandwidth
- // settings (it should always be the default). Thus, we don't do
- // the following:
- // TODO(pthatcher): We need to temporarily allow the SDP to control
- // this for backwards-compatibility. Once we don't need that any
- // more, remove this.
DataContentDescription* dcd = static_cast<DataContentDescription*>(
GetFirstDataContent(&desc_)->description);
dcd->set_bandwidth(100 * 1000);
ASSERT_TRUE(jdesc.Initialize(desc_.Copy(), kSessionId, kSessionVersion));
std::string sdp_with_bandwidth = kSdpString;
- sdp_with_bandwidth.append(kSdpRtpDataChannelString);
+ sdp_with_bandwidth.append(kSdpSctpDataChannelString);
InjectAfter("a=mid:data_content_name\r\n",
"b=AS:100\r\n",
&sdp_with_bandwidth);
JsepSessionDescription jdesc_with_bandwidth(kDummyString);
- EXPECT_TRUE(
- SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth));
+ // SCTP has congestion control, so we shouldn't limit the bandwidth
+ // as we do for RTP.
+ EXPECT_TRUE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth));
EXPECT_TRUE(CompareSessionDescription(jdesc, jdesc_with_bandwidth));
}
@@ -2504,7 +2509,41 @@ TEST_F(WebRtcSdpTest, DeserializeVideoFmtp) {
"t=0 0\r\n"
"m=video 3457 RTP/SAVPF 120\r\n"
"a=rtpmap:120 VP8/90000\r\n"
- "a=fmtp:120 x-google-min-bitrate=10; x-google-max-quantization=40\r\n";
+ "a=fmtp:120 x-google-min-bitrate=10;x-google-max-quantization=40\r\n";
+
+ // Deserialize
+ SdpParseError error;
+ EXPECT_TRUE(
+ webrtc::SdpDeserialize(kSdpWithFmtpString, &jdesc_output, &error));
+
+ const ContentInfo* vc = GetFirstVideoContent(jdesc_output.description());
+ ASSERT_TRUE(vc != NULL);
+ const VideoContentDescription* vcd =
+ static_cast<const VideoContentDescription*>(vc->description);
+ ASSERT_FALSE(vcd->codecs().empty());
+ cricket::VideoCodec vp8 = vcd->codecs()[0];
+ EXPECT_EQ("VP8", vp8.name);
+ EXPECT_EQ(120, vp8.id);
+ cricket::CodecParameterMap::iterator found =
+ vp8.params.find("x-google-min-bitrate");
+ ASSERT_TRUE(found != vp8.params.end());
+ EXPECT_EQ(found->second, "10");
+ found = vp8.params.find("x-google-max-quantization");
+ ASSERT_TRUE(found != vp8.params.end());
+ EXPECT_EQ(found->second, "40");
+}
+
+TEST_F(WebRtcSdpTest, DeserializeVideoFmtpWithSpace) {
+ JsepSessionDescription jdesc_output(kDummyString);
+
+ const char kSdpWithFmtpString[] =
+ "v=0\r\n"
+ "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=video 3457 RTP/SAVPF 120\r\n"
+ "a=rtpmap:120 VP8/90000\r\n"
+ "a=fmtp:120 x-google-min-bitrate=10; x-google-max-quantization=40\r\n";
// Deserialize
SdpParseError error;
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.cc
index e2a9d60f2c8..25333289689 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.cc
@@ -44,6 +44,7 @@
#include "talk/session/media/channelmanager.h"
#include "talk/session/media/mediasession.h"
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/checks.h"
#include "webrtc/base/helpers.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/stringencode.h"
@@ -489,6 +490,7 @@ WebRtcSession::WebRtcSession(
mediastream_signaling_(mediastream_signaling),
ice_observer_(NULL),
ice_connection_state_(PeerConnectionInterface::kIceConnectionNew),
+ ice_connection_receiving_(true),
older_version_remote_peer_(false),
dtls_enabled_(false),
data_channel_type_(cricket::DCT_NONE),
@@ -497,6 +499,7 @@ WebRtcSession::WebRtcSession(
}
WebRtcSession::~WebRtcSession() {
+ ASSERT(signaling_thread()->IsCurrent());
// Destroy video_channel_ first since it may have a pointer to the
// voice_channel_.
if (video_channel_) {
@@ -523,6 +526,8 @@ bool WebRtcSession::Initialize(
DTLSIdentityServiceInterface* dtls_identity_service,
const PeerConnectionInterface::RTCConfiguration& rtc_configuration) {
bundle_policy_ = rtc_configuration.bundle_policy;
+ rtcp_mux_policy_ = rtc_configuration.rtcp_mux_policy;
+ SetSslMaxProtocolVersion(options.ssl_max_version);
// TODO(perkj): Take |constraints| into consideration. Return false if not all
// mandatory constraints can be fulfilled. Note that |constraints|
@@ -642,6 +647,9 @@ bool WebRtcSession::Initialize(
audio_options_.audio_jitter_buffer_max_packets.Set(
rtc_configuration.audio_jitter_buffer_max_packets);
+ audio_options_.audio_jitter_buffer_fast_accelerate.Set(
+ rtc_configuration.audio_jitter_buffer_fast_accelerate);
+
const cricket::VideoCodec default_codec(
JsepSessionDescription::kDefaultVideoCodecId,
JsepSessionDescription::kDefaultVideoCodecName,
@@ -1387,7 +1395,11 @@ void WebRtcSession::OnTransportCompleted(cricket::Transport* transport) {
SetIceConnectionState(PeerConnectionInterface::kIceConnectionCompleted);
// Only report once when Ice connection is completed.
if (old_state != PeerConnectionInterface::kIceConnectionCompleted) {
- ReportBestConnectionState(transport);
+ cricket::TransportStats stats;
+ if (metrics_observer_ && transport->GetStats(&stats)) {
+ ReportBestConnectionState(stats);
+ ReportNegotiatedCiphers(stats);
+ }
}
}
@@ -1396,6 +1408,31 @@ void WebRtcSession::OnTransportFailed(cricket::Transport* transport) {
SetIceConnectionState(PeerConnectionInterface::kIceConnectionFailed);
}
+void WebRtcSession::OnTransportReceiving(cricket::Transport* transport) {
+ ASSERT(signaling_thread()->IsCurrent());
+ // The ICE connection is considered to be receiving if at least one
+ // transport is receiving on any of its channels.
+ bool receiving = false;
+ for (const auto& kv : transport_proxies()) {
+ cricket::Transport* transport = kv.second->impl();
+ if (transport && transport->any_channel_receiving()) {
+ receiving = true;
+ break;
+ }
+ }
+ SetIceConnectionReceiving(receiving);
+}
+
+void WebRtcSession::SetIceConnectionReceiving(bool receiving) {
+ if (ice_connection_receiving_ == receiving) {
+ return;
+ }
+ ice_connection_receiving_ = receiving;
+ if (ice_observer_) {
+ ice_observer_->OnIceConnectionReceivingChange(receiving);
+ }
+}
+
void WebRtcSession::OnTransportProxyCandidatesReady(
cricket::TransportProxy* proxy, const cricket::Candidates& candidates) {
ASSERT(signaling_thread()->IsCurrent());
@@ -1600,6 +1637,18 @@ bool WebRtcSession::CreateChannels(const SessionDescription* desc) {
}
}
+ if (rtcp_mux_policy_ == PeerConnectionInterface::kRtcpMuxPolicyRequire) {
+ if (voice_channel()) {
+ voice_channel()->ActivateRtcpMux();
+ }
+ if (video_channel()) {
+ video_channel()->ActivateRtcpMux();
+ }
+ if (data_channel()) {
+ data_channel()->ActivateRtcpMux();
+ }
+ }
+
// Enable bundle before when kMaxBundle policy is in effect.
if (bundle_policy_ == PeerConnectionInterface::kBundlePolicyMaxBundle) {
const cricket::ContentGroup* bundle_group = desc->GetGroupByName(
@@ -1619,12 +1668,11 @@ bool WebRtcSession::CreateChannels(const SessionDescription* desc) {
bool WebRtcSession::CreateVoiceChannel(const cricket::ContentInfo* content) {
voice_channel_.reset(channel_manager_->CreateVoiceChannel(
- this, content->name, true));
+ this, content->name, true, audio_options_));
if (!voice_channel_) {
return false;
}
- voice_channel_->SetChannelOptions(audio_options_);
voice_channel_->SignalDtlsSetupFailure.connect(
this, &WebRtcSession::OnDtlsSetupFailure);
return true;
@@ -1856,16 +1904,9 @@ bool WebRtcSession::ReadyToUseRemoteCandidate(
// Walk through the ConnectionInfos to gather best connection usage
// for IPv4 and IPv6.
-void WebRtcSession::ReportBestConnectionState(cricket::Transport* transport) {
- if (!metrics_observer_) {
- return;
- }
-
- cricket::TransportStats stats;
- if (!transport->GetStats(&stats)) {
- return;
- }
-
+void WebRtcSession::ReportBestConnectionState(
+ const cricket::TransportStats& stats) {
+ DCHECK(metrics_observer_ != NULL);
for (cricket::TransportChannelStatsList::const_iterator it =
stats.channel_stats.begin();
it != stats.channel_stats.end(); ++it) {
@@ -1881,11 +1922,48 @@ void WebRtcSession::ReportBestConnectionState(cricket::Transport* transport) {
AF_INET6) {
metrics_observer_->IncrementCounter(kBestConnections_IPv6);
} else {
- ASSERT(false);
+ RTC_NOTREACHED();
}
return;
}
}
}
+void WebRtcSession::ReportNegotiatedCiphers(
+ const cricket::TransportStats& stats) {
+ DCHECK(metrics_observer_ != NULL);
+ if (!dtls_enabled_ || stats.channel_stats.empty()) {
+ return;
+ }
+
+ const std::string& srtp_cipher = stats.channel_stats[0].srtp_cipher;
+ const std::string& ssl_cipher = stats.channel_stats[0].ssl_cipher;
+ if (srtp_cipher.empty() && ssl_cipher.empty()) {
+ return;
+ }
+
+ PeerConnectionMetricsName srtp_name;
+ PeerConnectionMetricsName ssl_name;
+ if (stats.content_name == cricket::CN_AUDIO) {
+ srtp_name = kAudioSrtpCipher;
+ ssl_name = kAudioSslCipher;
+ } else if (stats.content_name == cricket::CN_VIDEO) {
+ srtp_name = kVideoSrtpCipher;
+ ssl_name = kVideoSslCipher;
+ } else if (stats.content_name == cricket::CN_DATA) {
+ srtp_name = kDataSrtpCipher;
+ ssl_name = kDataSslCipher;
+ } else {
+ RTC_NOTREACHED();
+ return;
+ }
+
+ if (!srtp_cipher.empty()) {
+ metrics_observer_->AddHistogramSample(srtp_name, srtp_cipher);
+ }
+ if (!ssl_cipher.empty()) {
+ metrics_observer_->AddHistogramSample(ssl_name, ssl_cipher);
+ }
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.h b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.h
index aa1deb523c2..30ebc1e49ee 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession.h
@@ -84,6 +84,8 @@ class IceObserver {
public:
IceObserver() {}
// Called any time the IceConnectionState changes
+ // TODO(honghaiz): Change the name to OnIceConnectionStateChange so as to
+ // conform to the W3C standard.
virtual void OnIceConnectionChange(
PeerConnectionInterface::IceConnectionState new_state) {}
// Called any time the IceGatheringState changes
@@ -96,6 +98,9 @@ class IceObserver {
// (via PeerConnectionObserver)
virtual void OnIceComplete() {}
+ // Called whenever the state changes between receiving and not receiving.
+ virtual void OnIceConnectionReceivingChange(bool receiving) {}
+
protected:
~IceObserver() {}
@@ -253,19 +258,6 @@ class WebRtcSession : public cricket::BaseSession,
metrics_observer_ = metrics_observer;
}
- protected:
- // Don't fire a new description. The only thing it's used for is to
- // push new media descriptions to the BaseChannels. But in
- // WebRtcSession, we just push to the BaseChannels directly, so we
- // don't need this (and it would cause the descriptions to be pushed
- // down twice).
- // TODO(pthatcher): Remove this method and signal completely from
- // BaseSession once all the subclasses of BaseSession push to
- // BaseChannels directly rather than relying on the signal, or once
- // BaseChannel no longer listens to the event and requires
- // descriptions to be pushed down.
- virtual void SignalNewDescription() override {}
-
private:
// Indicates the type of SessionDescription in a call to SetLocalDescription
// and SetRemoteDescription.
@@ -298,9 +290,8 @@ class WebRtcSession : public cricket::BaseSession,
cricket::TransportProxy* proxy,
const cricket::Candidates& candidates);
virtual void OnCandidatesAllocationDone();
+ void OnTransportReceiving(cricket::Transport* transport) override;
- // Creates local session description with audio and video contents.
- bool CreateDefaultLocalDescription();
// Enables media channels to allow sending of media.
void EnableChannels();
// Creates a JsepIceCandidate and adds it to the local session description
@@ -344,6 +335,7 @@ class WebRtcSession : public cricket::BaseSession,
std::string BadStateErrMsg(State state);
void SetIceConnectionState(PeerConnectionInterface::IceConnectionState state);
+ void SetIceConnectionReceiving(bool receiving);
bool ValidateBundleSettings(const cricket::SessionDescription* desc);
bool HasRtcpMuxEnabled(const cricket::ContentInfo* content);
@@ -372,7 +364,9 @@ class WebRtcSession : public cricket::BaseSession,
// Invoked when OnTransportCompleted is signaled to gather the usage
// of IPv4/IPv6 as best connection.
- void ReportBestConnectionState(cricket::Transport* transport);
+ void ReportBestConnectionState(const cricket::TransportStats& stats);
+
+ void ReportNegotiatedCiphers(const cricket::TransportStats& stats);
rtc::scoped_ptr<cricket::VoiceChannel> voice_channel_;
rtc::scoped_ptr<cricket::VideoChannel> video_channel_;
@@ -381,6 +375,7 @@ class WebRtcSession : public cricket::BaseSession,
MediaStreamSignaling* mediastream_signaling_;
IceObserver* ice_observer_;
PeerConnectionInterface::IceConnectionState ice_connection_state_;
+ bool ice_connection_receiving_;
rtc::scoped_ptr<SessionDescriptionInterface> local_desc_;
rtc::scoped_ptr<SessionDescriptionInterface> remote_desc_;
// Candidates that arrived before the remote description was set.
@@ -413,6 +408,9 @@ class WebRtcSession : public cricket::BaseSession,
// Declares the bundle policy for the WebRTCSession.
PeerConnectionInterface::BundlePolicy bundle_policy_;
+ // Declares the RTCP mux policy for the WebRTCSession.
+ PeerConnectionInterface::RtcpMuxPolicy rtcp_mux_policy_;
+
DISALLOW_COPY_AND_ASSIGN(WebRtcSession);
};
} // namespace webrtc
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession_unittest.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession_unittest.cc
index e4f39f822b0..0111f53cee3 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsession_unittest.cc
@@ -26,6 +26,7 @@
*/
#include "talk/app/webrtc/audiotrack.h"
+#include "talk/app/webrtc/fakemetricsobserver.h"
#include "talk/app/webrtc/jsepicecandidate.h"
#include "talk/app/webrtc/jsepsessiondescription.h"
#include "talk/app/webrtc/mediastreamsignaling.h"
@@ -81,6 +82,7 @@ using webrtc::CreateSessionDescriptionRequest;
using webrtc::DTLSIdentityRequestObserver;
using webrtc::DTLSIdentityServiceInterface;
using webrtc::FakeConstraints;
+using webrtc::FakeMetricsObserver;
using webrtc::IceCandidateCollection;
using webrtc::JsepIceCandidate;
using webrtc::JsepSessionDescription;
@@ -156,8 +158,6 @@ static const char kSdpWithRtx[] =
"a=rtpmap:96 rtx/90000\r\n"
"a=fmtp:96 apt=0\r\n";
-static const int kAudioJitterBufferMaxPackets = 50;
-
// Add some extra |newlines| to the |message| after |line|.
static void InjectAfter(const std::string& line,
const std::string& newlines,
@@ -167,33 +167,6 @@ static void InjectAfter(const std::string& line,
tmp.c_str(), tmp.length(), message);
}
-class FakeMetricsObserver : public webrtc::MetricsObserverInterface {
- public:
- FakeMetricsObserver() { Reset(); }
- void Reset() {
- memset(peer_connection_metrics_counters_, 0,
- sizeof(peer_connection_metrics_counters_));
- memset(peer_connection_metrics_name_, 0,
- sizeof(peer_connection_metrics_name_));
- }
-
- void IncrementCounter(webrtc::PeerConnectionMetricsCounter type) override {
- peer_connection_metrics_counters_[type]++;
- }
- void AddHistogramSample(webrtc::PeerConnectionMetricsName type,
- int value) override {
- ASSERT(peer_connection_metrics_name_[type] == 0);
- peer_connection_metrics_name_[type] = value;
- }
-
- int peer_connection_metrics_counters_
- [webrtc::kPeerConnectionMetricsCounter_Max];
- int peer_connection_metrics_name_[webrtc::kPeerConnectionMetricsCounter_Max];
-
- int AddRef() override { return 1; }
- int Release() override { return 1; }
-};
-
class MockIceObserver : public webrtc::IceObserver {
public:
MockIceObserver()
@@ -362,7 +335,8 @@ class WebRtcSessionTest : public testing::Test {
stun_server_(cricket::TestStunServer::Create(Thread::Current(),
stun_socket_addr_)),
turn_server_(Thread::Current(), kTurnUdpIntAddr, kTurnUdpExtAddr),
- mediastream_signaling_(channel_manager_.get()) {
+ mediastream_signaling_(channel_manager_.get()),
+ metrics_observer_(new rtc::RefCountedObject<FakeMetricsObserver>()) {
tdesc_factory_->set_protocol(cricket::ICEPROTO_HYBRID);
cricket::ServerAddresses stun_servers;
@@ -400,16 +374,11 @@ class WebRtcSessionTest : public testing::Test {
EXPECT_TRUE(session_->Initialize(options_, constraints_.get(),
identity_service, rtc_configuration));
- session_->set_metrics_observer(&metrics_observer_);
+ session_->set_metrics_observer(metrics_observer_);
}
void Init() {
PeerConnectionInterface::RTCConfiguration configuration;
- configuration.type = PeerConnectionInterface::kAll;
- configuration.bundle_policy =
- PeerConnectionInterface::kBundlePolicyBalanced;
- configuration.audio_jitter_buffer_max_packets =
- kAudioJitterBufferMaxPackets;
Init(NULL, configuration);
}
@@ -417,20 +386,20 @@ class WebRtcSessionTest : public testing::Test {
PeerConnectionInterface::IceTransportsType ice_transport_type) {
PeerConnectionInterface::RTCConfiguration configuration;
configuration.type = ice_transport_type;
- configuration.bundle_policy =
- PeerConnectionInterface::kBundlePolicyBalanced;
- configuration.audio_jitter_buffer_max_packets =
- kAudioJitterBufferMaxPackets;
Init(NULL, configuration);
}
void InitWithBundlePolicy(
PeerConnectionInterface::BundlePolicy bundle_policy) {
PeerConnectionInterface::RTCConfiguration configuration;
- configuration.type = PeerConnectionInterface::kAll;
configuration.bundle_policy = bundle_policy;
- configuration.audio_jitter_buffer_max_packets =
- kAudioJitterBufferMaxPackets;
+ Init(NULL, configuration);
+ }
+
+ void InitWithRtcpMuxPolicy(
+ PeerConnectionInterface::RtcpMuxPolicy rtcp_mux_policy) {
+ PeerConnectionInterface::RTCConfiguration configuration;
+ configuration.rtcp_mux_policy = rtcp_mux_policy;
Init(NULL, configuration);
}
@@ -438,11 +407,6 @@ class WebRtcSessionTest : public testing::Test {
FakeIdentityService* identity_service = new FakeIdentityService();
identity_service->set_should_fail(identity_request_should_fail);
PeerConnectionInterface::RTCConfiguration configuration;
- configuration.type = PeerConnectionInterface::kAll;
- configuration.bundle_policy =
- PeerConnectionInterface::kBundlePolicyBalanced;
- configuration.audio_jitter_buffer_max_packets =
- kAudioJitterBufferMaxPackets;
Init(identity_service, configuration);
}
@@ -1034,20 +998,18 @@ class WebRtcSessionTest : public testing::Test {
ExpectedBestConnection best_connection_after_initial_ice_converged_;
void VerifyBestConnectionAfterIceConverge(
- const FakeMetricsObserver& metrics_observer) const {
+ const rtc::scoped_refptr<FakeMetricsObserver> metrics_observer) const {
Verify(metrics_observer, best_connection_after_initial_ice_converged_);
}
private:
- void Verify(const FakeMetricsObserver& metrics_observer,
+ void Verify(const rtc::scoped_refptr<FakeMetricsObserver> metrics_observer,
const ExpectedBestConnection& expected) const {
EXPECT_EQ(
- metrics_observer
- .peer_connection_metrics_counters_[webrtc::kBestConnections_IPv4],
+ metrics_observer->GetCounter(webrtc::kBestConnections_IPv4),
expected.ipv4_count_);
EXPECT_EQ(
- metrics_observer
- .peer_connection_metrics_counters_[webrtc::kBestConnections_IPv6],
+ metrics_observer->GetCounter(webrtc::kBestConnections_IPv6),
expected.ipv6_count_);
}
};
@@ -1147,7 +1109,7 @@ class WebRtcSessionTest : public testing::Test {
observer_.ice_connection_state_,
kIceCandidatesTimeout);
- metrics_observer_.Reset();
+ metrics_observer_->Reset();
// Clearing the rules, session should move back to completed state.
loopback_network_manager.ClearRules(fss_.get());
@@ -1292,7 +1254,7 @@ class WebRtcSessionTest : public testing::Test {
MockIceObserver observer_;
cricket::FakeVideoMediaChannel* video_channel_;
cricket::FakeVoiceMediaChannel* voice_channel_;
- FakeMetricsObserver metrics_observer_;
+ rtc::scoped_refptr<FakeMetricsObserver> metrics_observer_;
};
TEST_F(WebRtcSessionTest, TestInitializeWithDtls) {
@@ -2614,6 +2576,89 @@ TEST_F(WebRtcSessionTest, TestSetRemoteDescriptionInvalidIceCredentials) {
EXPECT_FALSE(session_->SetRemoteDescription(modified_offer, &error));
}
+// Test that candidates sent to the "video" transport do not get pushed down to
+// the "audio" transport channel when bundling using TransportProxy.
+TEST_F(WebRtcSessionTest, TestIgnoreCandidatesForUnusedTransportWhenBundling) {
+ AddInterface(rtc::SocketAddress(kClientAddrHost1, kClientAddrPort));
+
+ InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyBalanced);
+ mediastream_signaling_.SendAudioVideoStream1();
+
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.use_rtp_mux = true;
+
+ SessionDescriptionInterface* offer = CreateRemoteOffer();
+ SetRemoteDescriptionWithoutError(offer);
+
+ SessionDescriptionInterface* answer = CreateAnswer(NULL);
+ SetLocalDescriptionWithoutError(answer);
+
+ EXPECT_EQ(session_->GetTransportProxy("audio")->impl(),
+ session_->GetTransportProxy("video")->impl());
+
+ cricket::Transport* t = session_->GetTransport("audio");
+
+ // Checks if one of the transport channels contains a connection using a given
+ // port.
+ auto connection_with_remote_port = [t](int port) {
+ cricket::TransportStats stats;
+ t->GetStats(&stats);
+ for (auto& chan_stat : stats.channel_stats) {
+ for (auto& conn_info : chan_stat.connection_infos) {
+ if (conn_info.remote_candidate.address().port() == port) {
+ return true;
+ }
+ }
+ }
+ return false;
+ };
+
+ EXPECT_FALSE(connection_with_remote_port(5000));
+ EXPECT_FALSE(connection_with_remote_port(5001));
+ EXPECT_FALSE(connection_with_remote_port(6000));
+
+ // The way the *_WAIT checks work is that they only wait if the condition
+ // fails, which does not help when the state is not changing. This is
+ // problematic in this test since we want to verify that adding a video
+ // candidate does _not_ change state. So we interleave candidates and assume
+ // that messages are executed in the order they were posted.
+
+ // First audio candidate.
+ cricket::Candidate candidate0;
+ candidate0.set_address(rtc::SocketAddress("1.1.1.1", 5000));
+ candidate0.set_component(1);
+ candidate0.set_protocol("udp");
+ JsepIceCandidate ice_candidate0(kMediaContentName0, kMediaContentIndex0,
+ candidate0);
+ EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate0));
+
+ // Video candidate.
+ cricket::Candidate candidate1;
+ candidate1.set_address(rtc::SocketAddress("1.1.1.1", 6000));
+ candidate1.set_component(1);
+ candidate1.set_protocol("udp");
+ JsepIceCandidate ice_candidate1(kMediaContentName1, kMediaContentIndex1,
+ candidate1);
+ EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate1));
+
+ // Second audio candidate.
+ cricket::Candidate candidate2;
+ candidate2.set_address(rtc::SocketAddress("1.1.1.1", 5001));
+ candidate2.set_component(1);
+ candidate2.set_protocol("udp");
+ JsepIceCandidate ice_candidate2(kMediaContentName0, kMediaContentIndex0,
+ candidate2);
+ EXPECT_TRUE(session_->ProcessIceMessage(&ice_candidate2));
+
+ EXPECT_TRUE_WAIT(connection_with_remote_port(5000), 1000);
+ EXPECT_TRUE_WAIT(connection_with_remote_port(5001), 1000);
+
+ // No _WAIT check is needed here since we are checking that state hasn't
+ // changed: if this is false we would be waiting for nothing, and if it is
+ // true then no further messages will be processed anyway.
+ EXPECT_FALSE(connection_with_remote_port(6000));
+}
+
// kBundlePolicyBalanced bundle policy and answer contains BUNDLE.
TEST_F(WebRtcSessionTest, TestBalancedBundleInAnswer) {
InitWithBundlePolicy(PeerConnectionInterface::kBundlePolicyBalanced);
@@ -2789,6 +2834,46 @@ TEST_F(WebRtcSessionTest, TestMaxBundleWithSetRemoteDescriptionFirst) {
session_->GetTransportProxy("video")->impl());
}
+TEST_F(WebRtcSessionTest, TestRequireRtcpMux) {
+ InitWithRtcpMuxPolicy(PeerConnectionInterface::kRtcpMuxPolicyRequire);
+ mediastream_signaling_.SendAudioVideoStream1();
+
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ SessionDescriptionInterface* offer = CreateOffer(options);
+ SetLocalDescriptionWithoutError(offer);
+
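+ // Component 2 is the RTCP transport channel; with rtcp-mux required it
+ // should never be created.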
+ EXPECT_FALSE(session_->GetTransportProxy("audio")->impl()->HasChannel(2));
+ EXPECT_FALSE(session_->GetTransportProxy("video")->impl()->HasChannel(2));
+
+ mediastream_signaling_.SendAudioVideoStream2();
+ SessionDescriptionInterface* answer =
+ CreateRemoteAnswer(session_->local_description());
+ SetRemoteDescriptionWithoutError(answer);
+
+ EXPECT_FALSE(session_->GetTransportProxy("audio")->impl()->HasChannel(2));
+ EXPECT_FALSE(session_->GetTransportProxy("video")->impl()->HasChannel(2));
+}
+
+TEST_F(WebRtcSessionTest, TestNegotiateRtcpMux) {
+ InitWithRtcpMuxPolicy(PeerConnectionInterface::kRtcpMuxPolicyNegotiate);
+ mediastream_signaling_.SendAudioVideoStream1();
+
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ SessionDescriptionInterface* offer = CreateOffer(options);
+ SetLocalDescriptionWithoutError(offer);
+
+ EXPECT_TRUE(session_->GetTransportProxy("audio")->impl()->HasChannel(2));
+ EXPECT_TRUE(session_->GetTransportProxy("video")->impl()->HasChannel(2));
+
+ mediastream_signaling_.SendAudioVideoStream2();
+ SessionDescriptionInterface* answer =
+ CreateRemoteAnswer(session_->local_description());
+ SetRemoteDescriptionWithoutError(answer);
+
+ EXPECT_FALSE(session_->GetTransportProxy("audio")->impl()->HasChannel(2));
+ EXPECT_FALSE(session_->GetTransportProxy("video")->impl()->HasChannel(2));
+}
+
// This test verifies that SetLocalDescription and SetRemoteDescription fails
// if BUNDLE is enabled but rtcp-mux is disabled in m-lines.
TEST_F(WebRtcSessionTest, TestDisabledRtcpMuxWithBundleEnabled) {
@@ -3431,7 +3516,7 @@ TEST_F(WebRtcSessionTest, TestSctpDataChannelSendPortParsing) {
ASSERT_TRUE(ch != NULL);
ASSERT_EQ(1UL, ch->send_codecs().size());
EXPECT_EQ(cricket::kGoogleSctpDataCodecId, ch->send_codecs()[0].id);
- EXPECT_TRUE(!strcmp(cricket::kGoogleSctpDataCodecName,
+ EXPECT_EQ(0, strcmp(cricket::kGoogleSctpDataCodecName,
ch->send_codecs()[0].name.c_str()));
EXPECT_TRUE(ch->send_codecs()[0].GetParam(cricket::kCodecParamPort,
&portnum));
@@ -3439,7 +3524,7 @@ TEST_F(WebRtcSessionTest, TestSctpDataChannelSendPortParsing) {
ASSERT_EQ(1UL, ch->recv_codecs().size());
EXPECT_EQ(cricket::kGoogleSctpDataCodecId, ch->recv_codecs()[0].id);
- EXPECT_TRUE(!strcmp(cricket::kGoogleSctpDataCodecName,
+ EXPECT_EQ(0, strcmp(cricket::kGoogleSctpDataCodecName,
ch->recv_codecs()[0].name.c_str()));
EXPECT_TRUE(ch->recv_codecs()[0].GetParam(cricket::kCodecParamPort,
&portnum));
@@ -3767,6 +3852,38 @@ TEST_F(WebRtcSessionTest, TestSetSocketOptionBeforeBundle) {
EXPECT_EQ(8000, option_val);
}
+// Test that if we create a session, request multiple offers and then destroy
+// the session, we still get success/failure callbacks for all of the requests.
+// Background: crbug.com/507307
+TEST_F(WebRtcSessionTest, CreateOffersAndShutdown) {
+ Init();
+
+ rtc::scoped_refptr<WebRtcSessionCreateSDPObserverForTest> observers[100];
+ PeerConnectionInterface::RTCOfferAnswerOptions options;
+ options.offer_to_receive_audio =
+ RTCOfferAnswerOptions::kOfferToReceiveMediaTrue;
+
+ for (auto& o : observers) {
+ o = new WebRtcSessionCreateSDPObserverForTest();
+ session_->CreateOffer(o, options);
+ }
+
+ session_.reset();
+
+ // Make sure we process pending messages on the current (signaling) thread
+ // before checking that we got our callbacks. Quit() will do this and then
+ // immediately exit. We won't need the queue after this point anyway.
+ rtc::Thread::Current()->Quit();
+
+ for (auto& o : observers) {
+ // We expect to have received a notification now even if the session was
+ // terminated. The offer creation may or may not have succeeded, but we
+ // must have received a notification, so the only invalid state
+ // is kInit.
+ EXPECT_NE(WebRtcSessionCreateSDPObserverForTest::kInit, o->state());
+ }
+}
+
// TODO(bemasc): Add a TestIceStatesBundle with BUNDLE enabled. That test
// currently fails because upon disconnection and reconnection OnIceComplete is
// called more than once without returning to IceGatheringGathering.
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.cc b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.cc
index aab24cf0654..1909b0ed78d 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.cc
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.cc
@@ -40,6 +40,8 @@ namespace webrtc {
namespace {
static const char kFailedDueToIdentityFailed[] =
" failed because DTLS identity request failed";
+static const char kFailedDueToSessionShutdown[] =
+ " failed because the session was shut down";
static const uint64 kInitSessionVersion = 2;
@@ -183,6 +185,18 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory(
}
WebRtcSessionDescriptionFactory::~WebRtcSessionDescriptionFactory() {
+ ASSERT(signaling_thread_->IsCurrent());
+
+ // Fail any requests that were made before identity generation completed.
+ FailPendingRequests(kFailedDueToSessionShutdown);
+
+ // Process all pending notifications in the message queue. If we don't do
+ // this, requests linger without ever learning whether they succeeded or failed.
+ rtc::MessageList list;
+ signaling_thread_->Clear(this, rtc::MQID_ANY, &list);
+ for (auto& msg : list)
+ OnMessage(&msg);
+
transport_desc_factory_.set_identity(NULL);
}
@@ -400,6 +414,19 @@ void WebRtcSessionDescriptionFactory::InternalCreateAnswer(
PostCreateSessionDescriptionSucceeded(request.observer, answer);
}
+void WebRtcSessionDescriptionFactory::FailPendingRequests(
+ const std::string& reason) {
+ ASSERT(signaling_thread_->IsCurrent());
+ while (!create_session_description_requests_.empty()) {
+ const CreateSessionDescriptionRequest& request =
+ create_session_description_requests_.front();
+ PostCreateSessionDescriptionFailed(request.observer,
+ ((request.type == CreateSessionDescriptionRequest::kOffer) ?
+ "CreateOffer" : "CreateAnswer") + reason);
+ create_session_description_requests_.pop();
+ }
+}
+
void WebRtcSessionDescriptionFactory::PostCreateSessionDescriptionFailed(
CreateSessionDescriptionObserver* observer, const std::string& error) {
CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(observer);
@@ -422,16 +449,7 @@ void WebRtcSessionDescriptionFactory::OnIdentityRequestFailed(int error) {
LOG(LS_ERROR) << "Async identity request failed: error = " << error;
identity_request_state_ = IDENTITY_FAILED;
- std::string msg = kFailedDueToIdentityFailed;
- while (!create_session_description_requests_.empty()) {
- const CreateSessionDescriptionRequest& request =
- create_session_description_requests_.front();
- PostCreateSessionDescriptionFailed(
- request.observer,
- ((request.type == CreateSessionDescriptionRequest::kOffer) ?
- "CreateOffer" : "CreateAnswer") + msg);
- create_session_description_requests_.pop();
- }
+ FailPendingRequests(kFailedDueToIdentityFailed);
}
void WebRtcSessionDescriptionFactory::SetIdentity(
diff --git a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.h b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.h
index 41798a485fb..860532dec93 100644
--- a/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.h
+++ b/chromium/third_party/libjingle/source/talk/app/webrtc/webrtcsessiondescriptionfactory.h
@@ -133,6 +133,8 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler,
void InternalCreateOffer(CreateSessionDescriptionRequest request);
void InternalCreateAnswer(CreateSessionDescriptionRequest request);
+ // Posts failure notifications for all pending session description requests.
+ void FailPendingRequests(const std::string& reason);
void PostCreateSessionDescriptionFailed(
CreateSessionDescriptionObserver* observer,
const std::string& error);
@@ -145,17 +147,16 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler,
std::queue<CreateSessionDescriptionRequest>
create_session_description_requests_;
- rtc::Thread* signaling_thread_;
- MediaStreamSignaling* mediastream_signaling_;
+ rtc::Thread* const signaling_thread_;
+ MediaStreamSignaling* const mediastream_signaling_;
cricket::TransportDescriptionFactory transport_desc_factory_;
cricket::MediaSessionDescriptionFactory session_desc_factory_;
uint64 session_version_;
rtc::scoped_ptr<DTLSIdentityServiceInterface> identity_service_;
- rtc::scoped_refptr<WebRtcIdentityRequestObserver>
- identity_request_observer_;
- WebRtcSession* session_;
- std::string session_id_;
- cricket::DataChannelType data_channel_type_;
+ rtc::scoped_refptr<WebRtcIdentityRequestObserver> identity_request_observer_;
+ WebRtcSession* const session_;
+ const std::string session_id_;
+ const cricket::DataChannelType data_channel_type_;
IdentityRequestState identity_request_state_;
DISALLOW_COPY_AND_ASSIGN(WebRtcSessionDescriptionFactory);
diff --git a/chromium/third_party/libjingle/source/talk/build/common.gypi b/chromium/third_party/libjingle/source/talk/build/common.gypi
index 4735bc83e2c..36a96c5c558 100644
--- a/chromium/third_party/libjingle/source/talk/build/common.gypi
+++ b/chromium/third_party/libjingle/source/talk/build/common.gypi
@@ -34,7 +34,6 @@
# TODO(ronghuawu): For now, disable the Chrome plugins, which causes a
# flood of chromium-style warnings.
'clang_use_chrome_plugins%': 0,
- 'libpeer_target_type%': 'static_library',
'conditions': [
['OS=="android" or OS=="linux"', {
'java_home%': '<!(python -c "import os; dir=os.getenv(\'JAVA_HOME\', \'/usr/lib/jvm/java-7-openjdk-amd64\'); assert os.path.exists(os.path.join(dir, \'include/jni.h\')), \'Point \\$JAVA_HOME or the java_home gyp variable to a directory containing include/jni.h!\'; print dir")',
@@ -79,20 +78,22 @@
'HAVE_WEBRTC_VOICE',
],
'conditions': [
- # TODO(ronghuawu): Support dynamic library build.
- ['"<(libpeer_target_type)"=="static_library"', {
- 'defines': [ 'LIBPEERCONNECTION_LIB=1' ],
- }],
['OS=="linux"', {
'defines': [
'LINUX',
'WEBRTC_LINUX',
],
+ # Remove Chromium's disabling of the -Wformat warning.
+ 'cflags!': [
+ '-Wno-format',
+ ],
'conditions': [
['clang==1', {
'cflags': [
'-Wall',
'-Wextra',
+ '-Wformat',
+ '-Wformat-security',
'-Wimplicit-fallthrough',
'-Wmissing-braces',
'-Wreorder',
diff --git a/chromium/third_party/libjingle/source/talk/build/isolate.gypi b/chromium/third_party/libjingle/source/talk/build/isolate.gypi
index 00c38324a06..13f3d50aff0 100644
--- a/chromium/third_party/libjingle/source/talk/build/isolate.gypi
+++ b/chromium/third_party/libjingle/source/talk/build/isolate.gypi
@@ -123,9 +123,6 @@
'--extra-variable', 'mac_product_name', '<(mac_product_name)',
],
}],
- ["test_isolation_outdir!=''", {
- 'action': [ '--isolate-server', '<(test_isolation_outdir)' ],
- }],
],
},
],
diff --git a/chromium/third_party/libjingle/source/talk/codereview.settings b/chromium/third_party/libjingle/source/talk/codereview.settings
index db2535b99da..c138ebb7a79 100644
--- a/chromium/third_party/libjingle/source/talk/codereview.settings
+++ b/chromium/third_party/libjingle/source/talk/codereview.settings
@@ -1,9 +1,10 @@
# This file is used by gcl to get repository specific information.
-CODE_REVIEW_SERVER: webrtc-codereview.appspot.com
+CODE_REVIEW_SERVER: codereview.webrtc.org
CC_LIST: webrtc-reviews@webrtc.org
-VIEW_VC: http://code.google.com/p/webrtc/source/detail?r=
+VIEW_VC: https://chromium.googlesource.com/external/webrtc/+/
TRY_ON_UPLOAD: False
TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-webrtc
+TRYSERVER_PROJECT: webrtc
TRYSERVER_ROOT: src/talk
PROJECT: webrtc
-FORCE_HTTPS_COMMIT_URL: True
+PENDING_REF_PREFIX: refs/pending/
diff --git a/chromium/third_party/libjingle/source/talk/examples/android/jni/Android.mk b/chromium/third_party/libjingle/source/talk/examples/android/jni/Android.mk
deleted file mode 100644
index 8e80160039b..00000000000
--- a/chromium/third_party/libjingle/source/talk/examples/android/jni/Android.mk
+++ /dev/null
@@ -1,2 +0,0 @@
-# This space intentionally left blank (required for Android build system).
-
diff --git a/chromium/third_party/libjingle/source/talk/examples/android/res/values/strings.xml b/chromium/third_party/libjingle/source/talk/examples/android/res/values/strings.xml
index 428b0eb8be3..4f2f3777e22 100644
--- a/chromium/third_party/libjingle/source/talk/examples/android/res/values/strings.xml
+++ b/chromium/third_party/libjingle/source/talk/examples/android/res/values/strings.xml
@@ -87,6 +87,11 @@
<string name="pref_audiocodec_dlg">Select default audio codec.</string>
<string name="pref_audiocodec_default">OPUS</string>
+ <string name="pref_noaudioprocessing_key">audioprocessing_preference</string>
+ <string name="pref_noaudioprocessing_title">Disable audio processing.</string>
+ <string name="pref_noaudioprocessing_dlg">Disable audio processing pipeline.</string>
+ <string name="pref_noaudioprocessing_default">false</string>
+
<string name="pref_miscsettings_key">misc_settings_key</string>
<string name="pref_miscsettings_title">Miscellaneous settings.</string>
diff --git a/chromium/third_party/libjingle/source/talk/examples/android/res/xml/preferences.xml b/chromium/third_party/libjingle/source/talk/examples/android/res/xml/preferences.xml
index ad0affb478e..73d8d5e254d 100644
--- a/chromium/third_party/libjingle/source/talk/examples/android/res/xml/preferences.xml
+++ b/chromium/third_party/libjingle/source/talk/examples/android/res/xml/preferences.xml
@@ -82,6 +82,12 @@
android:dialogTitle="@string/pref_audiocodec_dlg"
android:entries="@array/audioCodecs"
android:entryValues="@array/audioCodecs" />
+
+ <CheckBoxPreference
+ android:key="@string/pref_noaudioprocessing_key"
+ android:title="@string/pref_noaudioprocessing_title"
+ android:dialogTitle="@string/pref_noaudioprocessing_dlg"
+ android:defaultValue="@string/pref_noaudioprocessing_default" />
</PreferenceCategory>
<PreferenceCategory
diff --git a/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ARDAppClient.m b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ARDAppClient.m
index e4c2f81b481..ac99ca29971 100644
--- a/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ARDAppClient.m
+++ b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ARDAppClient.m
@@ -34,6 +34,7 @@
#import "RTCMediaConstraints.h"
#import "RTCMediaStream.h"
#import "RTCPair.h"
+#import "RTCPeerConnectionInterface.h"
#import "RTCVideoCapturer.h"
#import "RTCAVFoundationVideoSource.h"
@@ -41,6 +42,7 @@
#import "ARDCEODTURNClient.h"
#import "ARDJoinResponse.h"
#import "ARDMessageResponse.h"
+#import "ARDSDPUtils.h"
#import "ARDSignalingMessage.h"
#import "ARDUtilities.h"
#import "ARDWebSocketChannel.h"
@@ -343,10 +345,15 @@ static NSInteger const kARDAppClientErrorInvalidRoom = -6;
[_delegate appClient:self didError:sdpError];
return;
}
+ // Prefer H264 if available.
+ RTCSessionDescription *sdpPreferringH264 =
+ [ARDSDPUtils descriptionForDescription:sdp
+ preferredVideoCodec:@"H264"];
[_peerConnection setLocalDescriptionWithDelegate:self
- sessionDescription:sdp];
+ sessionDescription:sdpPreferringH264];
ARDSessionDescriptionMessage *message =
- [[ARDSessionDescriptionMessage alloc] initWithDescription:sdp];
+ [[ARDSessionDescriptionMessage alloc]
+ initWithDescription:sdpPreferringH264];
[self sendSignalingMessage:message];
});
}
@@ -397,9 +404,11 @@ static NSInteger const kARDAppClientErrorInvalidRoom = -6;
// Create peer connection.
RTCMediaConstraints *constraints = [self defaultPeerConnectionConstraints];
- _peerConnection = [_factory peerConnectionWithICEServers:_iceServers
- constraints:constraints
- delegate:self];
+ RTCConfiguration *config = [[RTCConfiguration alloc] init];
+ config.iceServers = _iceServers;
+ _peerConnection = [_factory peerConnectionWithConfiguration:config
+ constraints:constraints
+ delegate:self];
// Create AV media stream and add it to the peer connection.
RTCMediaStream *localStream = [self createLocalMediaStream];
[_peerConnection addStream:localStream];
@@ -438,8 +447,12 @@ static NSInteger const kARDAppClientErrorInvalidRoom = -6;
ARDSessionDescriptionMessage *sdpMessage =
(ARDSessionDescriptionMessage *)message;
RTCSessionDescription *description = sdpMessage.sessionDescription;
+ // Prefer H264 if available.
+ RTCSessionDescription *sdpPreferringH264 =
+ [ARDSDPUtils descriptionForDescription:description
+ preferredVideoCodec:@"H264"];
[_peerConnection setRemoteDescriptionWithDelegate:self
- sessionDescription:description];
+ sessionDescription:sdpPreferringH264];
break;
}
case kARDSignalingMessageTypeCandidate: {
diff --git a/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ARDSDPUtils.h b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ARDSDPUtils.h
new file mode 100644
index 00000000000..2f14e6dec0b
--- /dev/null
+++ b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ARDSDPUtils.h
@@ -0,0 +1,41 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import <Foundation/Foundation.h>
+
+@class RTCSessionDescription;
+
+@interface ARDSDPUtils : NSObject
+
+// Updates the original SDP description to instead prefer the specified video
+// codec. We do this by placing the specified codec at the beginning of the
+// codec list if it exists in the SDP.
++ (RTCSessionDescription *)
+ descriptionForDescription:(RTCSessionDescription *)description
+ preferredVideoCodec:(NSString *)codec;
+
+@end
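
For illustration of the reordering described above (the payload type numbers are hypothetical): given a description containing

  m=video 9 RTP/SAVPF 100 116 117
  a=rtpmap:116 H264/90000

preferring "H264" rewrites the m-line to "m=video 9 RTP/SAVPF 116 100 117". If no m=video line or no matching rtpmap entry is present, the original description is returned unchanged.
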
diff --git a/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ARDSDPUtils.m b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ARDSDPUtils.m
new file mode 100644
index 00000000000..157d6fc1f61
--- /dev/null
+++ b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ARDSDPUtils.m
@@ -0,0 +1,108 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#import "ARDSDPUtils.h"
+
+#import "RTCSessionDescription.h"
+
+@implementation ARDSDPUtils
+
++ (RTCSessionDescription *)
+ descriptionForDescription:(RTCSessionDescription *)description
+ preferredVideoCodec:(NSString *)codec {
+ NSString *sdpString = description.description;
+ NSString *lineSeparator = @"\n";
+ NSString *mLineSeparator = @" ";
+ // Copied from PeerConnectionClient.java.
+ // TODO(tkchin): Move this to a shared C++ file.
+ NSMutableArray *lines =
+ [NSMutableArray arrayWithArray:
+ [sdpString componentsSeparatedByString:lineSeparator]];
+ int mLineIndex = -1;
+ NSString *codecRtpMap = nil;
+ // a=rtpmap:<payload type> <encoding name>/<clock rate>
+ // [/<encoding parameters>]
+ NSString *pattern =
+ [NSString stringWithFormat:@"^a=rtpmap:(\\d+) %@(/\\d+)+[\r]?$", codec];
+ NSRegularExpression *regex =
+ [NSRegularExpression regularExpressionWithPattern:pattern
+ options:0
+ error:nil];
+ for (NSInteger i = 0; (i < lines.count) && (mLineIndex == -1 || !codecRtpMap);
+ ++i) {
+ NSString *line = lines[i];
+ if ([line hasPrefix:@"m=video"]) {
+ mLineIndex = i;
+ continue;
+ }
+ NSTextCheckingResult *codecMatches =
+ [regex firstMatchInString:line
+ options:0
+ range:NSMakeRange(0, line.length)];
+ if (codecMatches) {
+ codecRtpMap =
+ [line substringWithRange:[codecMatches rangeAtIndex:1]];
+ continue;
+ }
+ }
+ if (mLineIndex == -1) {
+ NSLog(@"No m=video line, so can't prefer %@", codec);
+ return description;
+ }
+ if (!codecRtpMap) {
+ NSLog(@"No rtpmap for %@", codec);
+ return description;
+ }
+ NSArray *origMLineParts =
+ [lines[mLineIndex] componentsSeparatedByString:mLineSeparator];
+ if (origMLineParts.count > 3) {
+ NSMutableArray *newMLineParts =
+ [NSMutableArray arrayWithCapacity:origMLineParts.count];
+ NSInteger origPartIndex = 0;
+ // Format is: m=<media> <port> <proto> <fmt> ...
+ [newMLineParts addObject:origMLineParts[origPartIndex++]];
+ [newMLineParts addObject:origMLineParts[origPartIndex++]];
+ [newMLineParts addObject:origMLineParts[origPartIndex++]];
+ [newMLineParts addObject:codecRtpMap];
+ for (; origPartIndex < origMLineParts.count; ++origPartIndex) {
+ if (![codecRtpMap isEqualToString:origMLineParts[origPartIndex]]) {
+ [newMLineParts addObject:origMLineParts[origPartIndex]];
+ }
+ }
+ NSString *newMLine =
+ [newMLineParts componentsJoinedByString:mLineSeparator];
+ [lines replaceObjectAtIndex:mLineIndex
+ withObject:newMLine];
+ } else {
+ NSLog(@"Wrong SDP media description format: %@", lines[mLineIndex]);
+ }
+ NSString *mangledSdpString = [lines componentsJoinedByString:lineSeparator];
+ return [[RTCSessionDescription alloc] initWithType:description.type
+ sdp:mangledSdpString];
+}
+
+@end
diff --git a/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
index b12a61a6182..2f07c7a4cc4 100644
--- a/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
+++ b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m
@@ -151,8 +151,10 @@
self.remoteVideoTrack = nil;
self.localVideoTrack = nil;
[_client disconnect];
- [self.presentingViewController dismissViewControllerAnimated:YES
- completion:nil];
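+ // Avoid asking the presenting view controller to dismiss again if this
+ // view controller is already being dismissed.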
+ if (![self isBeingDismissed]) {
+ [self.presentingViewController dismissViewControllerAnimated:YES
+ completion:nil];
+ }
}
- (void)switchCamera {
diff --git a/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/Info.plist b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/Info.plist
index 0b66b1c68d4..fd1e26f8a54 100644
--- a/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/Info.plist
+++ b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/Info.plist
@@ -2,69 +2,96 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
- <key>BuildMachineOSBuild</key>
- <string>12E55</string>
- <key>CFBundleDevelopmentRegion</key>
- <string>en</string>
- <key>CFBundleDisplayName</key>
- <string>AppRTCDemo</string>
- <key>CFBundleExecutable</key>
- <string>AppRTCDemo</string>
- <key>CFBundleIcons</key>
- <dict>
- <key>CFBundlePrimaryIcon</key>
- <dict>
- <key>CFBundleIconFiles</key>
- <array>
- <string>Icon.png</string>
- </array>
- </dict>
- </dict>
- <key>CFBundleIdentifier</key>
- <string>com.google.AppRTCDemo</string>
- <key>CFBundleInfoDictionaryVersion</key>
- <string>6.0</string>
- <key>CFBundleName</key>
- <string>AppRTCDemo</string>
- <key>CFBundlePackageType</key>
- <string>APPL</string>
- <key>CFBundleResourceSpecification</key>
- <string>ResourceRules.plist</string>
- <key>CFBundleShortVersionString</key>
- <string>1.0</string>
- <key>CFBundleSignature</key>
- <string>????</string>
- <key>CFBundleSupportedPlatforms</key>
- <array>
- <string>iPhoneOS</string>
- </array>
- <key>CFBundleVersion</key>
- <string>1.0</string>
- <key>UIRequiredDeviceCapabilities</key>
- <array>
- <string>armv7</string>
- </array>
- <key>UIStatusBarTintParameters</key>
- <dict>
- <key>UINavigationBar</key>
- <dict>
- <key>Style</key>
- <string>UIBarStyleDefault</string>
- <key>Translucent</key>
- <false/>
- </dict>
- </dict>
- <key>UISupportedInterfaceOrientations</key>
- <array>
- <string>UIInterfaceOrientationPortrait</string>
- </array>
- <key>UIAppFonts</key>
- <array>
- <string>Roboto-Regular.ttf</string>
- </array>
- <key>UIBackgroundModes</key>
- <array>
- <string>voip</string>
- </array>
+ <key>BuildMachineOSBuild</key>
+ <string>12E55</string>
+ <key>CFBundleDevelopmentRegion</key>
+ <string>en</string>
+ <key>CFBundleDisplayName</key>
+ <string>AppRTCDemo</string>
+ <key>CFBundleExecutable</key>
+ <string>AppRTCDemo</string>
+ <key>CFBundleIcons</key>
+ <dict>
+ <key>CFBundlePrimaryIcon</key>
+ <dict>
+ <key>CFBundleIconFiles</key>
+ <array>
+ <string>Icon.png</string>
+ </array>
+ </dict>
+ </dict>
+ <key>CFBundleIdentifier</key>
+ <string>com.google.AppRTCDemo</string>
+ <key>CFBundleInfoDictionaryVersion</key>
+ <string>6.0</string>
+ <key>CFBundleName</key>
+ <string>AppRTCDemo</string>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleShortVersionString</key>
+ <string>1.0</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>CFBundleSupportedPlatforms</key>
+ <array>
+ <string>iPhoneOS</string>
+ </array>
+ <key>CFBundleVersion</key>
+ <string>1.0</string>
+ <key>UIStatusBarTintParameters</key>
+ <dict>
+ <key>UINavigationBar</key>
+ <dict>
+ <key>Style</key>
+ <string>UIBarStyleDefault</string>
+ <key>Translucent</key>
+ <false/>
+ </dict>
+ </dict>
+ <key>UISupportedInterfaceOrientations</key>
+ <array>
+ <string>UIInterfaceOrientationPortrait</string>
+ </array>
+ <key>UIAppFonts</key>
+ <array>
+ <string>Roboto-Regular.ttf</string>
+ </array>
+ <key>UIBackgroundModes</key>
+ <array>
+ <string>voip</string>
+ </array>
+ <key>UILaunchImages</key>
+ <array>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>7.0</string>
+ <key>UILaunchImageName</key>
+ <string>iPhone5</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Portrait</string>
+ <key>UILaunchImageSize</key>
+ <string>{320, 568}</string>
+ </dict>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>8.0</string>
+ <key>UILaunchImageName</key>
+ <string>iPhone6</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Portrait</string>
+ <key>UILaunchImageSize</key>
+ <string>{375, 667}</string>
+ </dict>
+ <dict>
+ <key>UILaunchImageMinimumOSVersion</key>
+ <string>8.0</string>
+ <key>UILaunchImageName</key>
+ <string>iPhone6p</string>
+ <key>UILaunchImageOrientation</key>
+ <string>Portrait</string>
+ <key>UILaunchImageSize</key>
+ <string>{414, 736}</string>
+ </dict>
+ </array>
</dict>
</plist>
diff --git a/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/Default-568h.png b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/Default-568h.png
deleted file mode 100644
index 2735148c93a..00000000000
--- a/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/Default-568h.png
+++ /dev/null
Binary files differ
diff --git a/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/iPhone5@2x.png b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/iPhone5@2x.png
new file mode 100644
index 00000000000..9d005fde061
--- /dev/null
+++ b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/iPhone5@2x.png
Binary files differ
diff --git a/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/iPhone6@2x.png b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/iPhone6@2x.png
new file mode 100644
index 00000000000..fce3eb95b3c
--- /dev/null
+++ b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/iPhone6@2x.png
Binary files differ
diff --git a/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/iPhone6p@3x.png b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/iPhone6p@3x.png
new file mode 100644
index 00000000000..aee20c22093
--- /dev/null
+++ b/chromium/third_party/libjingle/source/talk/examples/objc/AppRTCDemo/ios/resources/iPhone6p@3x.png
Binary files differ
diff --git a/chromium/third_party/libjingle/source/talk/libjingle.gyp b/chromium/third_party/libjingle/source/talk/libjingle.gyp
index 1c97f0f0a49..9de1c9bf6df 100755
--- a/chromium/third_party/libjingle/source/talk/libjingle.gyp
+++ b/chromium/third_party/libjingle/source/talk/libjingle.gyp
@@ -140,14 +140,18 @@
# included here, or better yet, build a proper .jar in webrtc
# and include it here.
'android_java_files': [
+ 'app/webrtc/java/android/org/webrtc/EglBase.java',
+ 'app/webrtc/java/android/org/webrtc/GlRectDrawer.java',
+ 'app/webrtc/java/android/org/webrtc/GlShader.java',
+ 'app/webrtc/java/android/org/webrtc/GlUtil.java',
'app/webrtc/java/android/org/webrtc/VideoRendererGui.java',
'app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java',
'app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java',
'app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java',
- '<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/AudioManagerAndroid.java',
'<(webrtc_modules_dir)/video_render/android/java/src/org/webrtc/videoengine/ViEAndroidGLES20.java',
'<(webrtc_modules_dir)/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java',
'<(webrtc_modules_dir)/video_render/android/java/src/org/webrtc/videoengine/ViESurfaceRenderer.java',
+ '<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/BuildInfo.java',
'<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioManager.java',
'<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioUtils.java',
'<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java',
@@ -257,6 +261,8 @@
'app/webrtc/objc/RTCPeerConnection+Internal.h',
'app/webrtc/objc/RTCPeerConnection.mm',
'app/webrtc/objc/RTCPeerConnectionFactory.mm',
+ 'app/webrtc/objc/RTCPeerConnectionInterface+Internal.h',
+ 'app/webrtc/objc/RTCPeerConnectionInterface.mm',
'app/webrtc/objc/RTCPeerConnectionObserver.h',
'app/webrtc/objc/RTCPeerConnectionObserver.mm',
'app/webrtc/objc/RTCSessionDescription+Internal.h',
@@ -286,6 +292,7 @@
'app/webrtc/objc/public/RTCPeerConnection.h',
'app/webrtc/objc/public/RTCPeerConnectionDelegate.h',
'app/webrtc/objc/public/RTCPeerConnectionFactory.h',
+ 'app/webrtc/objc/public/RTCPeerConnectionInterface.h',
'app/webrtc/objc/public/RTCSessionDescription.h',
'app/webrtc/objc/public/RTCSessionDescriptionDelegate.h',
'app/webrtc/objc/public/RTCStatsDelegate.h',
@@ -351,6 +358,9 @@
# Need to build against 10.7 framework for full ARC support
# on OSX.
'MACOSX_DEPLOYMENT_TARGET' : '10.7',
+ # RTCVideoTrack.mm uses code with partial availability.
+ # https://code.google.com/p/webrtc/issues/detail?id=4695
+ 'WARNING_CFLAGS!': ['-Wpartial-availability'],
},
'link_settings': {
'xcode_settings': {
@@ -429,8 +439,6 @@
'media/base/cryptoparams.h',
'media/base/device.h',
'media/base/fakescreencapturerfactory.h',
- 'media/base/filemediaengine.cc',
- 'media/base/filemediaengine.h',
'media/base/hybriddataengine.h',
'media/base/mediachannel.h',
'media/base/mediacommon.h',
@@ -470,22 +478,20 @@
'media/devices/videorendererfactory.h',
'media/devices/yuvframescapturer.cc',
'media/devices/yuvframescapturer.h',
- 'media/other/linphonemediaengine.h',
'media/sctp/sctpdataengine.cc',
'media/sctp/sctpdataengine.h',
'media/webrtc/simulcast.cc',
'media/webrtc/simulcast.h',
'media/webrtc/webrtccommon.h',
- 'media/webrtc/webrtcexport.h',
'media/webrtc/webrtcmediaengine.cc',
'media/webrtc/webrtcmediaengine.h',
'media/webrtc/webrtcmediaengine.cc',
'media/webrtc/webrtcpassthroughrender.cc',
'media/webrtc/webrtcpassthroughrender.h',
'media/webrtc/webrtcvideocapturer.cc',
+ 'media/webrtc/webrtcvideocapturer.h',
'media/webrtc/webrtcvideocapturerfactory.h',
'media/webrtc/webrtcvideocapturerfactory.cc',
- 'media/webrtc/webrtcvideocapturer.h',
'media/webrtc/webrtcvideodecoderfactory.h',
'media/webrtc/webrtcvideoencoderfactory.h',
'media/webrtc/webrtcvideoengine2.cc',
@@ -600,6 +606,10 @@
# deprecated functions and remove this flag.
'-Wno-deprecated-declarations',
],
+ # Disable partial availability warning to prevent errors
+ # in macdevicemanagermm.mm using AVFoundation.
+ # https://code.google.com/p/webrtc/issues/detail?id=4695
+ 'WARNING_CFLAGS!': ['-Wpartial-availability'],
},
'link_settings': {
'xcode_settings': {
@@ -666,15 +676,11 @@
'session/media/currentspeakermonitor.h',
'session/media/mediamonitor.cc',
'session/media/mediamonitor.h',
- 'session/media/mediarecorder.cc',
- 'session/media/mediarecorder.h',
'session/media/mediasession.cc',
'session/media/mediasession.h',
'session/media/mediasink.h',
'session/media/rtcpmuxfilter.cc',
'session/media/rtcpmuxfilter.h',
- 'session/media/soundclip.cc',
- 'session/media/soundclip.h',
'session/media/srtpfilter.cc',
'session/media/srtpfilter.h',
'session/media/typingmonitor.cc',
diff --git a/chromium/third_party/libjingle/source/talk/libjingle_examples.gyp b/chromium/third_party/libjingle/source/talk/libjingle_examples.gyp
index af2456fcd95..5cd31b6d108 100755
--- a/chromium/third_party/libjingle/source/talk/libjingle_examples.gyp
+++ b/chromium/third_party/libjingle/source/talk/libjingle_examples.gyp
@@ -173,6 +173,8 @@
'examples/objc/AppRTCDemo/ARDMessageResponse.m',
'examples/objc/AppRTCDemo/ARDMessageResponse+Internal.h',
'examples/objc/AppRTCDemo/ARDRoomServerClient.h',
+ 'examples/objc/AppRTCDemo/ARDSDPUtils.h',
+ 'examples/objc/AppRTCDemo/ARDSDPUtils.m',
'examples/objc/AppRTCDemo/ARDSignalingChannel.h',
'examples/objc/AppRTCDemo/ARDSignalingMessage.h',
'examples/objc/AppRTCDemo/ARDSignalingMessage.m',
@@ -220,7 +222,9 @@
'conditions': [
['OS=="ios"', {
'mac_bundle_resources': [
- 'examples/objc/AppRTCDemo/ios/resources/Default-568h.png',
+ 'examples/objc/AppRTCDemo/ios/resources/iPhone5@2x.png',
+ 'examples/objc/AppRTCDemo/ios/resources/iPhone6@2x.png',
+ 'examples/objc/AppRTCDemo/ios/resources/iPhone6p@3x.png',
'examples/objc/AppRTCDemo/ios/resources/Roboto-Regular.ttf',
'examples/objc/AppRTCDemo/ios/resources/ic_call_end_black_24dp.png',
'examples/objc/AppRTCDemo/ios/resources/ic_call_end_black_24dp@2x.png',
@@ -290,6 +294,9 @@
# warning so we can compile successfully.
'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
'MACOSX_DEPLOYMENT_TARGET' : '10.8',
+ # SRWebSocket.m uses code with partial availability.
+ # https://code.google.com/p/webrtc/issues/detail?id=4695
+ 'WARNING_CFLAGS!': ['-Wpartial-availability'],
},
}],
],
diff --git a/chromium/third_party/libjingle/source/talk/libjingle_tests.gyp b/chromium/third_party/libjingle/source/talk/libjingle_tests.gyp
index 3ca2f1ff367..d08a23682c3 100755
--- a/chromium/third_party/libjingle/source/talk/libjingle_tests.gyp
+++ b/chromium/third_party/libjingle/source/talk/libjingle_tests.gyp
@@ -84,7 +84,6 @@
'sources': [
'media/base/capturemanager_unittest.cc',
'media/base/codec_unittest.cc',
- 'media/base/filemediaengine_unittest.cc',
'media/base/rtpdataengine_unittest.cc',
'media/base/rtpdump_unittest.cc',
'media/base/rtputils_unittest.cc',
@@ -103,6 +102,7 @@
'media/webrtc/webrtcvideocapturer_unittest.cc',
'media/base/videoframe_unittest.h',
'media/webrtc/webrtcvideoframe_unittest.cc',
+ 'media/webrtc/webrtcvideoframefactory_unittest.cc',
# Disabled because some tests fail.
# TODO(ronghuawu): Reenable these tests.
@@ -156,7 +156,6 @@
'session/media/channel_unittest.cc',
'session/media/channelmanager_unittest.cc',
'session/media/currentspeakermonitor_unittest.cc',
- 'session/media/mediarecorder_unittest.cc',
'session/media/mediasession_unittest.cc',
'session/media/rtcpmuxfilter_unittest.cc',
'session/media/srtpfilter_unittest.cc',
@@ -199,6 +198,8 @@
'app/webrtc/datachannel_unittest.cc',
'app/webrtc/dtlsidentitystore_unittest.cc',
'app/webrtc/dtmfsender_unittest.cc',
+ 'app/webrtc/fakemetricsobserver.cc',
+ 'app/webrtc/fakemetricsobserver.h',
'app/webrtc/jsepsessiondescription_unittest.cc',
'app/webrtc/localaudiosource_unittest.cc',
'app/webrtc/mediastream_unittest.cc',
diff --git a/chromium/third_party/libjingle/source/talk/media/base/capturemanager.cc b/chromium/third_party/libjingle/source/talk/media/base/capturemanager.cc
index 95612187fa2..d3b3f82b556 100644
--- a/chromium/third_party/libjingle/source/talk/media/base/capturemanager.cc
+++ b/chromium/third_party/libjingle/source/talk/media/base/capturemanager.cc
@@ -375,8 +375,10 @@ void CaptureManager::UnregisterVideoCapturer(
// for the CaptureManager it doesn't matter as it will no longer receive any
// frames from the VideoCapturer.
SignalCapturerStateChange.stop(video_capturer->SignalStateChange);
- video_capturer->Stop();
- SignalCapturerStateChange(video_capturer, CS_STOPPED);
+ if (video_capturer->IsRunning()) {
+ video_capturer->Stop();
+ SignalCapturerStateChange(video_capturer, CS_STOPPED);
+ }
}
bool CaptureManager::StartWithBestCaptureFormat(
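Note: the UnregisterVideoCapturer() hunk above now stops the capturer and emits CS_STOPPED only when the capturer is actually running, so listeners no longer receive a spurious stop notification for a capturer that was never started. A minimal sketch of the guarded-stop pattern, using a hypothetical stand-in type rather than the real cricket::VideoCapturer:

    // Sketch only: mirrors the guard introduced in the hunk above.
    struct StubCapturer {
      bool running = false;
      bool IsRunning() const { return running; }
      void Stop() { running = false; }
    };

    void UnregisterStub(StubCapturer* capturer) {
      if (capturer->IsRunning()) {  // guard: skip capturers that never started
        capturer->Stop();
        // The real code also fires SignalCapturerStateChange(..., CS_STOPPED) here.
      }
    }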
diff --git a/chromium/third_party/libjingle/source/talk/media/base/constants.cc b/chromium/third_party/libjingle/source/talk/media/base/constants.cc
index 562dad4a3ea..0d0a33c18aa 100644
--- a/chromium/third_party/libjingle/source/talk/media/base/constants.cc
+++ b/chromium/third_party/libjingle/source/talk/media/base/constants.cc
@@ -128,9 +128,11 @@ const int kNumDefaultUnsignalledVideoRecvStreams = 0;
const char kVp8CodecName[] = "VP8";
const char kVp9CodecName[] = "VP9";
+const char kH264CodecName[] = "H264";
const int kDefaultVp8PlType = 100;
const int kDefaultVp9PlType = 101;
+const int kDefaultH264PlType = 107;
const int kDefaultRedPlType = 116;
const int kDefaultUlpfecType = 117;
const int kDefaultRtxVp8PlType = 96;
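Note: constants.cc gains kH264CodecName ("H264") and kDefaultH264PlType (107) alongside the existing VP8/VP9 entries; the matching extern declarations follow in constants.h below. A hedged usage sketch — the six-argument VideoCodec constructor (id, name, width, height, framerate, preference) follows the form already used elsewhere in this tree, and the 640x480@30 values are illustrative only:

    #include "talk/media/base/codec.h"
    #include "talk/media/base/constants.h"

    // Sketch: build a default H.264 codec entry from the new constants.
    cricket::VideoCodec MakeDefaultH264() {
      return cricket::VideoCodec(cricket::kDefaultH264PlType,  // 107
                                 cricket::kH264CodecName,      // "H264"
                                 640, 480, 30,                 // width, height, fps
                                 0);                           // preference
    }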
diff --git a/chromium/third_party/libjingle/source/talk/media/base/constants.h b/chromium/third_party/libjingle/source/talk/media/base/constants.h
index 84216fb534e..d92cb22514c 100644
--- a/chromium/third_party/libjingle/source/talk/media/base/constants.h
+++ b/chromium/third_party/libjingle/source/talk/media/base/constants.h
@@ -158,9 +158,11 @@ extern const int kNumDefaultUnsignalledVideoRecvStreams;
extern const char kVp8CodecName[];
extern const char kVp9CodecName[];
+extern const char kH264CodecName[];
extern const int kDefaultVp8PlType;
extern const int kDefaultVp9PlType;
+extern const int kDefaultH264PlType;
extern const int kDefaultRedPlType;
extern const int kDefaultUlpfecType;
extern const int kDefaultRtxVp8PlType;
diff --git a/chromium/third_party/libjingle/source/talk/media/base/fakemediaengine.h b/chromium/third_party/libjingle/source/talk/media/base/fakemediaengine.h
index ead9f79e33f..15fcb202f1c 100644
--- a/chromium/third_party/libjingle/source/talk/media/base/fakemediaengine.h
+++ b/chromium/third_party/libjingle/source/talk/media/base/fakemediaengine.h
@@ -630,11 +630,6 @@ class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
int max_bps_;
};
-class FakeSoundclipMedia : public SoundclipMedia {
- public:
- virtual bool PlaySound(const char* buf, int len, int flags) { return true; }
-};
-
class FakeDataMediaChannel : public RtpHelper<DataMediaChannel> {
public:
explicit FakeDataMediaChannel(void* unused)
@@ -716,9 +711,6 @@ class FakeBaseEngine {
: loglevel_(-1),
options_changed_(false),
fail_create_channel_(false) {}
- bool Init(rtc::Thread* worker_thread) { return true; }
- void Terminate() {}
-
void SetLogging(int level, const char* filter) {
loglevel_ = level;
logfilter_ = filter;
@@ -756,6 +748,8 @@ class FakeVoiceEngine : public FakeBaseEngine {
// sanity checks against that.
codecs_.push_back(AudioCodec(101, "fake_audio_codec", 0, 0, 1, 0));
}
+ bool Init(rtc::Thread* worker_thread) { return true; }
+ void Terminate() {}
int GetCapabilities() { return AUDIO_SEND | AUDIO_RECV; }
AudioOptions GetAudioOptions() const {
return options_;
@@ -769,12 +763,13 @@ class FakeVoiceEngine : public FakeBaseEngine {
return true;
}
- VoiceMediaChannel* CreateChannel() {
+ VoiceMediaChannel* CreateChannel(const AudioOptions& options) {
if (fail_create_channel_) {
- return NULL;
+ return nullptr;
}
FakeVoiceMediaChannel* ch = new FakeVoiceMediaChannel(this);
+ ch->SetOptions(options);
channels_.push_back(ch);
return ch;
}
@@ -784,7 +779,6 @@ class FakeVoiceEngine : public FakeBaseEngine {
void UnregisterChannel(VoiceMediaChannel* channel) {
channels_.erase(std::find(channels_.begin(), channels_.end(), channel));
}
- SoundclipMedia* CreateSoundclip() { return new FakeSoundclipMedia(); }
const std::vector<AudioCodec>& codecs() { return codecs_; }
void SetCodecs(const std::vector<AudioCodec> codecs) { codecs_ = codecs; }
@@ -867,6 +861,7 @@ class FakeVideoEngine : public FakeBaseEngine {
// sanity checks against that.
codecs_.push_back(VideoCodec(0, "fake_video_codec", 0, 0, 0, 0));
}
+ void Init() {}
bool GetOptions(VideoOptions* options) const {
*options = options_;
return true;
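Note: in fakemediaengine.h, Init()/Terminate() move from FakeBaseEngine onto FakeVoiceEngine, FakeVideoEngine gains a no-op Init(), the soundclip path (FakeSoundclipMedia/CreateSoundclip) is dropped, and CreateChannel() now takes the AudioOptions up front and applies them to the new channel. A hedged sketch of the new call shape in a test, using the fakes declared above (ownership handling omitted):

    #include "talk/media/base/fakemediaengine.h"

    // Sketch: audio options are now supplied at channel-creation time.
    void CreateFakeVoiceChannelForTest() {
      cricket::FakeVoiceEngine voice_engine;
      cricket::AudioOptions options;
      cricket::VoiceMediaChannel* channel = voice_engine.CreateChannel(options);
      // The fake forwards |options| to the channel via SetOptions(), so the
      // channel starts out configured with what the caller passed in.
      (void)channel;
    }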
diff --git a/chromium/third_party/libjingle/source/talk/media/base/filemediaengine.cc b/chromium/third_party/libjingle/source/talk/media/base/filemediaengine.cc
deleted file mode 100644
index 0fc8d5610b1..00000000000
--- a/chromium/third_party/libjingle/source/talk/media/base/filemediaengine.cc
+++ /dev/null
@@ -1,376 +0,0 @@
-/*
- * libjingle
- * Copyright 2004 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "talk/media/base/filemediaengine.h"
-
-#include <algorithm>
-#include <limits.h>
-
-#include "talk/media/base/rtpdump.h"
-#include "talk/media/base/rtputils.h"
-#include "talk/media/base/streamparams.h"
-#include "webrtc/base/buffer.h"
-#include "webrtc/base/event.h"
-#include "webrtc/base/logging.h"
-#include "webrtc/base/pathutils.h"
-#include "webrtc/base/stream.h"
-
-namespace cricket {
-
-///////////////////////////////////////////////////////////////////////////
-// Implementation of FileMediaEngine.
-///////////////////////////////////////////////////////////////////////////
-int FileMediaEngine::GetCapabilities() {
- int capabilities = 0;
- if (!voice_input_filename_.empty()) {
- capabilities |= AUDIO_SEND;
- }
- if (!voice_output_filename_.empty()) {
- capabilities |= AUDIO_RECV;
- }
- if (!video_input_filename_.empty()) {
- capabilities |= VIDEO_SEND;
- }
- if (!video_output_filename_.empty()) {
- capabilities |= VIDEO_RECV;
- }
- return capabilities;
-}
-
-VoiceMediaChannel* FileMediaEngine::CreateChannel() {
- rtc::FileStream* input_file_stream = NULL;
- rtc::FileStream* output_file_stream = NULL;
-
- if (voice_input_filename_.empty() && voice_output_filename_.empty())
- return NULL;
- if (!voice_input_filename_.empty()) {
- input_file_stream = rtc::Filesystem::OpenFile(
- rtc::Pathname(voice_input_filename_), "rb");
- if (!input_file_stream) {
- LOG(LS_ERROR) << "Not able to open the input audio stream file.";
- return NULL;
- }
- }
-
- if (!voice_output_filename_.empty()) {
- output_file_stream = rtc::Filesystem::OpenFile(
- rtc::Pathname(voice_output_filename_), "wb");
- if (!output_file_stream) {
- delete input_file_stream;
- LOG(LS_ERROR) << "Not able to open the output audio stream file.";
- return NULL;
- }
- }
-
- return new FileVoiceChannel(input_file_stream, output_file_stream,
- rtp_sender_thread_);
-}
-
-VideoMediaChannel* FileMediaEngine::CreateVideoChannel(
- const VideoOptions& options,
- VoiceMediaChannel* voice_ch) {
- rtc::FileStream* input_file_stream = NULL;
- rtc::FileStream* output_file_stream = NULL;
-
- if (video_input_filename_.empty() && video_output_filename_.empty())
- return NULL;
-
- if (!video_input_filename_.empty()) {
- input_file_stream = rtc::Filesystem::OpenFile(
- rtc::Pathname(video_input_filename_), "rb");
- if (!input_file_stream) {
- LOG(LS_ERROR) << "Not able to open the input video stream file.";
- return NULL;
- }
- }
-
- if (!video_output_filename_.empty()) {
- output_file_stream = rtc::Filesystem::OpenFile(
- rtc::Pathname(video_output_filename_), "wb");
- if (!output_file_stream) {
- delete input_file_stream;
- LOG(LS_ERROR) << "Not able to open the output video stream file.";
- return NULL;
- }
- }
-
- FileVideoChannel* channel = new FileVideoChannel(
- input_file_stream, output_file_stream, rtp_sender_thread_);
- channel->SetOptions(options);
- return channel;
-}
-
-///////////////////////////////////////////////////////////////////////////
-// Definition of RtpSenderReceiver.
-///////////////////////////////////////////////////////////////////////////
-class RtpSenderReceiver : public rtc::MessageHandler {
- public:
- RtpSenderReceiver(MediaChannel* channel,
- rtc::StreamInterface* input_file_stream,
- rtc::StreamInterface* output_file_stream,
- rtc::Thread* sender_thread);
- virtual ~RtpSenderReceiver();
-
- // Called by media channel. Context: media channel thread.
- bool SetSend(bool send);
- void SetSendSsrc(uint32 ssrc);
- void OnPacketReceived(rtc::Buffer* packet);
-
- // Override virtual method of parent MessageHandler. Context: Worker Thread.
- virtual void OnMessage(rtc::Message* pmsg);
-
- private:
- // Read the next RTP dump packet, whose RTP SSRC is the same as first_ssrc_.
- // Return true if successful.
- bool ReadNextPacket(RtpDumpPacket* packet);
- // Send a RTP packet to the network. The input parameter data points to the
- // start of the RTP packet and len is the packet size. Return true if the sent
- // size is equal to len.
- bool SendRtpPacket(const void* data, size_t len);
-
- MediaChannel* media_channel_;
- rtc::scoped_ptr<rtc::StreamInterface> input_stream_;
- rtc::scoped_ptr<rtc::StreamInterface> output_stream_;
- rtc::scoped_ptr<RtpDumpLoopReader> rtp_dump_reader_;
- rtc::scoped_ptr<RtpDumpWriter> rtp_dump_writer_;
- rtc::Thread* sender_thread_;
- bool own_sender_thread_;
- // RTP dump packet read from the input stream.
- RtpDumpPacket rtp_dump_packet_;
- uint32 start_send_time_;
- bool sending_;
- bool first_packet_;
- uint32 first_ssrc_;
-
- DISALLOW_COPY_AND_ASSIGN(RtpSenderReceiver);
-};
-
-///////////////////////////////////////////////////////////////////////////
-// Implementation of RtpSenderReceiver.
-///////////////////////////////////////////////////////////////////////////
-RtpSenderReceiver::RtpSenderReceiver(
- MediaChannel* channel,
- rtc::StreamInterface* input_file_stream,
- rtc::StreamInterface* output_file_stream,
- rtc::Thread* sender_thread)
- : media_channel_(channel),
- input_stream_(input_file_stream),
- output_stream_(output_file_stream),
- sending_(false),
- first_packet_(true) {
- if (sender_thread == NULL) {
- sender_thread_ = new rtc::Thread();
- own_sender_thread_ = true;
- } else {
- sender_thread_ = sender_thread;
- own_sender_thread_ = false;
- }
-
- if (input_stream_) {
- rtp_dump_reader_.reset(new RtpDumpLoopReader(input_stream_.get()));
- // Start the sender thread, which reads rtp dump records, waits based on
- // the record timestamps, and sends the RTP packets to the network.
- if (own_sender_thread_) {
- sender_thread_->Start();
- }
- }
-
- // Create a rtp dump writer for the output RTP dump stream.
- if (output_stream_) {
- rtp_dump_writer_.reset(new RtpDumpWriter(output_stream_.get()));
- }
-}
-
-RtpSenderReceiver::~RtpSenderReceiver() {
- if (own_sender_thread_) {
- sender_thread_->Stop();
- delete sender_thread_;
- }
-}
-
-bool RtpSenderReceiver::SetSend(bool send) {
- bool was_sending = sending_;
- sending_ = send;
- if (!was_sending && sending_) {
- sender_thread_->PostDelayed(0, this); // Wake up the send thread.
- start_send_time_ = rtc::Time();
- }
- return true;
-}
-
-void RtpSenderReceiver::SetSendSsrc(uint32 ssrc) {
- if (rtp_dump_reader_) {
- rtp_dump_reader_->SetSsrc(ssrc);
- }
-}
-
-void RtpSenderReceiver::OnPacketReceived(rtc::Buffer* packet) {
- if (rtp_dump_writer_) {
- rtp_dump_writer_->WriteRtpPacket(packet->data(), packet->size());
- }
-}
-
-void RtpSenderReceiver::OnMessage(rtc::Message* pmsg) {
- if (!sending_) {
- // If the sender thread is not sending, ignore this message. The thread goes
- // to sleep until SetSend(true) wakes it up.
- return;
- }
- if (!first_packet_) {
- // Send the previously read packet.
- SendRtpPacket(&rtp_dump_packet_.data[0], rtp_dump_packet_.data.size());
- }
-
- if (ReadNextPacket(&rtp_dump_packet_)) {
- int wait = rtc::TimeUntil(
- start_send_time_ + rtp_dump_packet_.elapsed_time);
- wait = std::max(0, wait);
- sender_thread_->PostDelayed(wait, this);
- } else {
- sender_thread_->Quit();
- }
-}
-
-bool RtpSenderReceiver::ReadNextPacket(RtpDumpPacket* packet) {
- while (rtc::SR_SUCCESS == rtp_dump_reader_->ReadPacket(packet)) {
- uint32 ssrc;
- if (!packet->GetRtpSsrc(&ssrc)) {
- return false;
- }
- if (first_packet_) {
- first_packet_ = false;
- first_ssrc_ = ssrc;
- }
- if (ssrc == first_ssrc_) {
- return true;
- }
- }
- return false;
-}
-
-bool RtpSenderReceiver::SendRtpPacket(const void* data, size_t len) {
- if (!media_channel_)
- return false;
-
- rtc::Buffer packet(reinterpret_cast<const uint8_t*>(data), len,
- kMaxRtpPacketLen);
- return media_channel_->SendPacket(&packet);
-}
-
-///////////////////////////////////////////////////////////////////////////
-// Implementation of FileVoiceChannel.
-///////////////////////////////////////////////////////////////////////////
-FileVoiceChannel::FileVoiceChannel(
- rtc::StreamInterface* input_file_stream,
- rtc::StreamInterface* output_file_stream,
- rtc::Thread* rtp_sender_thread)
- : send_ssrc_(0),
- rtp_sender_receiver_(new RtpSenderReceiver(this, input_file_stream,
- output_file_stream,
- rtp_sender_thread)) {}
-
-FileVoiceChannel::~FileVoiceChannel() {}
-
-bool FileVoiceChannel::SetSendCodecs(const std::vector<AudioCodec>& codecs) {
- // TODO(whyuan): Check the format of RTP dump input.
- return true;
-}
-
-bool FileVoiceChannel::SetSend(SendFlags flag) {
- return rtp_sender_receiver_->SetSend(flag != SEND_NOTHING);
-}
-
-bool FileVoiceChannel::AddSendStream(const StreamParams& sp) {
- if (send_ssrc_ != 0 || sp.ssrcs.size() != 1) {
- LOG(LS_ERROR) << "FileVoiceChannel only supports one send stream.";
- return false;
- }
- send_ssrc_ = sp.ssrcs[0];
- rtp_sender_receiver_->SetSendSsrc(send_ssrc_);
- return true;
-}
-
-bool FileVoiceChannel::RemoveSendStream(uint32 ssrc) {
- if (ssrc != send_ssrc_)
- return false;
- send_ssrc_ = 0;
- rtp_sender_receiver_->SetSendSsrc(send_ssrc_);
- return true;
-}
-
-void FileVoiceChannel::OnPacketReceived(
- rtc::Buffer* packet, const rtc::PacketTime& packet_time) {
- rtp_sender_receiver_->OnPacketReceived(packet);
-}
-
-///////////////////////////////////////////////////////////////////////////
-// Implementation of FileVideoChannel.
-///////////////////////////////////////////////////////////////////////////
-FileVideoChannel::FileVideoChannel(
- rtc::StreamInterface* input_file_stream,
- rtc::StreamInterface* output_file_stream,
- rtc::Thread* rtp_sender_thread)
- : send_ssrc_(0),
- rtp_sender_receiver_(new RtpSenderReceiver(this, input_file_stream,
- output_file_stream,
- rtp_sender_thread)) {}
-
-FileVideoChannel::~FileVideoChannel() {}
-
-bool FileVideoChannel::SetSendCodecs(const std::vector<VideoCodec>& codecs) {
- // TODO(whyuan): Check the format of RTP dump input.
- return true;
-}
-
-bool FileVideoChannel::SetSend(bool send) {
- return rtp_sender_receiver_->SetSend(send);
-}
-
-bool FileVideoChannel::AddSendStream(const StreamParams& sp) {
- if (send_ssrc_ != 0 || sp.ssrcs.size() != 1) {
- LOG(LS_ERROR) << "FileVideoChannel only support one send stream.";
- return false;
- }
- send_ssrc_ = sp.ssrcs[0];
- rtp_sender_receiver_->SetSendSsrc(send_ssrc_);
- return true;
-}
-
-bool FileVideoChannel::RemoveSendStream(uint32 ssrc) {
- if (ssrc != send_ssrc_)
- return false;
- send_ssrc_ = 0;
- rtp_sender_receiver_->SetSendSsrc(send_ssrc_);
- return true;
-}
-
-void FileVideoChannel::OnPacketReceived(
- rtc::Buffer* packet, const rtc::PacketTime& packet_time) {
- rtp_sender_receiver_->OnPacketReceived(packet);
-}
-
-} // namespace cricket
diff --git a/chromium/third_party/libjingle/source/talk/media/base/filemediaengine.h b/chromium/third_party/libjingle/source/talk/media/base/filemediaengine.h
deleted file mode 100644
index c2e80c383e6..00000000000
--- a/chromium/third_party/libjingle/source/talk/media/base/filemediaengine.h
+++ /dev/null
@@ -1,330 +0,0 @@
-/*
- * libjingle
- * Copyright 2004 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef TALK_MEDIA_BASE_FILEMEDIAENGINE_H_
-#define TALK_MEDIA_BASE_FILEMEDIAENGINE_H_
-
-#include <string>
-#include <vector>
-
-#include "talk/media/base/codec.h"
-#include "talk/media/base/mediachannel.h"
-#include "talk/media/base/mediaengine.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/stream.h"
-
-namespace rtc {
-class StreamInterface;
-}
-
-namespace cricket {
-
-// A media engine contains a capturer, an encoder, and a sender in the sender
-// side and a receiver, a decoder, and a renderer in the receiver side.
-// FileMediaEngine simulates the capturer and the encoder via an input RTP dump
-// stream and simulates the decoder and the renderer via an output RTP dump
-// stream. Depending on the parameters of the constructor, FileMediaEngine can
-// act as file voice engine, file video engine, or both. Currently, we use
-// only the RTP dump packets. TODO(whyuan): Enable RTCP packets.
-class FileMediaEngine : public MediaEngineInterface {
- public:
- FileMediaEngine() : rtp_sender_thread_(NULL) {}
- virtual ~FileMediaEngine() {}
-
- // Set the file name of the input or output RTP dump for voice or video.
- // Should be called before the channel is created.
- void set_voice_input_filename(const std::string& filename) {
- voice_input_filename_ = filename;
- }
- void set_voice_output_filename(const std::string& filename) {
- voice_output_filename_ = filename;
- }
- void set_video_input_filename(const std::string& filename) {
- video_input_filename_ = filename;
- }
- void set_video_output_filename(const std::string& filename) {
- video_output_filename_ = filename;
- }
-
- // Should be called before codecs() and video_codecs() are called. We need to
- // set the voice and video codecs; otherwise, Jingle initiation will fail.
- void set_voice_codecs(const std::vector<AudioCodec>& codecs) {
- voice_codecs_ = codecs;
- }
- void set_video_codecs(const std::vector<VideoCodec>& codecs) {
- video_codecs_ = codecs;
- }
-
- // Implement pure virtual methods of MediaEngine.
- virtual bool Init(rtc::Thread* worker_thread) {
- return true;
- }
- virtual void Terminate() {}
- virtual int GetCapabilities();
- virtual VoiceMediaChannel* CreateChannel();
- virtual VideoMediaChannel* CreateVideoChannel(const VideoOptions& options,
- VoiceMediaChannel* voice_ch);
- virtual SoundclipMedia* CreateSoundclip() { return NULL; }
- virtual AudioOptions GetAudioOptions() const { return AudioOptions(); }
- virtual bool SetAudioOptions(const AudioOptions& options) { return true; }
- virtual bool SetAudioDelayOffset(int offset) { return true; }
- virtual bool SetDefaultVideoEncoderConfig(const VideoEncoderConfig& config) {
- return true;
- }
- virtual bool SetSoundDevices(const Device* in_dev, const Device* out_dev) {
- return true;
- }
- virtual bool SetVideoCaptureDevice(const Device* cam_device) { return true; }
- virtual bool SetVideoCapturer(VideoCapturer* /*capturer*/) {
- return true;
- }
- virtual VideoCapturer* GetVideoCapturer() const {
- return NULL;
- }
- virtual bool GetOutputVolume(int* level) {
- *level = 0;
- return true;
- }
- virtual bool SetOutputVolume(int level) { return true; }
- virtual int GetInputLevel() { return 0; }
- virtual bool SetLocalMonitor(bool enable) { return true; }
- // TODO(whyuan): control channel send?
- virtual bool SetVideoCapture(bool capture) { return true; }
- virtual const std::vector<AudioCodec>& audio_codecs() {
- return voice_codecs_;
- }
- virtual const std::vector<VideoCodec>& video_codecs() {
- return video_codecs_;
- }
- virtual const std::vector<RtpHeaderExtension>& audio_rtp_header_extensions() {
- return audio_rtp_header_extensions_;
- }
- virtual const std::vector<RtpHeaderExtension>& video_rtp_header_extensions() {
- return video_rtp_header_extensions_;
- }
-
- virtual bool FindAudioCodec(const AudioCodec& codec) { return true; }
- virtual bool FindVideoCodec(const VideoCodec& codec) { return true; }
- virtual void SetVoiceLogging(int min_sev, const char* filter) {}
- virtual void SetVideoLogging(int min_sev, const char* filter) {}
- virtual bool StartAecDump(rtc::PlatformFile) { return false; }
-
- virtual bool RegisterVideoProcessor(VideoProcessor* processor) {
- return true;
- }
- virtual bool UnregisterVideoProcessor(VideoProcessor* processor) {
- return true;
- }
- virtual bool RegisterVoiceProcessor(uint32 ssrc,
- VoiceProcessor* processor,
- MediaProcessorDirection direction) {
- return true;
- }
- virtual bool UnregisterVoiceProcessor(uint32 ssrc,
- VoiceProcessor* processor,
- MediaProcessorDirection direction) {
- return true;
- }
-
- virtual sigslot::repeater2<VideoCapturer*, CaptureState>&
- SignalVideoCaptureStateChange() {
- return signal_state_change_;
- }
-
- void set_rtp_sender_thread(rtc::Thread* thread) {
- rtp_sender_thread_ = thread;
- }
-
- private:
- std::string voice_input_filename_;
- std::string voice_output_filename_;
- std::string video_input_filename_;
- std::string video_output_filename_;
- std::vector<AudioCodec> voice_codecs_;
- std::vector<VideoCodec> video_codecs_;
- std::vector<RtpHeaderExtension> audio_rtp_header_extensions_;
- std::vector<RtpHeaderExtension> video_rtp_header_extensions_;
- sigslot::repeater2<VideoCapturer*, CaptureState>
- signal_state_change_;
- rtc::Thread* rtp_sender_thread_;
-
- DISALLOW_COPY_AND_ASSIGN(FileMediaEngine);
-};
-
-class RtpSenderReceiver; // Forward declaration. Defined in the .cc file.
-
-class FileVoiceChannel : public VoiceMediaChannel {
- public:
- FileVoiceChannel(rtc::StreamInterface* input_file_stream,
- rtc::StreamInterface* output_file_stream,
- rtc::Thread* rtp_sender_thread);
- virtual ~FileVoiceChannel();
-
- // Implement pure virtual methods of VoiceMediaChannel.
- virtual bool SetRecvCodecs(const std::vector<AudioCodec>& codecs) {
- return true;
- }
- virtual bool SetSendCodecs(const std::vector<AudioCodec>& codecs);
- virtual bool SetRecvRtpHeaderExtensions(
- const std::vector<RtpHeaderExtension>& extensions) {
- return true;
- }
- virtual bool SetSendRtpHeaderExtensions(
- const std::vector<RtpHeaderExtension>& extensions) {
- return true;
- }
- virtual bool SetPlayout(bool playout) { return true; }
- virtual bool SetSend(SendFlags flag);
- virtual bool SetRemoteRenderer(uint32 ssrc, AudioRenderer* renderer) {
- return false;
- }
- virtual bool SetLocalRenderer(uint32 ssrc, AudioRenderer* renderer) {
- return false;
- }
- virtual bool GetActiveStreams(AudioInfo::StreamList* actives) { return true; }
- virtual int GetOutputLevel() { return 0; }
- virtual int GetTimeSinceLastTyping() { return -1; }
- virtual void SetTypingDetectionParameters(int time_window,
- int cost_per_typing, int reporting_threshold, int penalty_decay,
- int type_event_delay) {}
-
- virtual bool SetOutputScaling(uint32 ssrc, double left, double right) {
- return false;
- }
- virtual bool GetOutputScaling(uint32 ssrc, double* left, double* right) {
- return false;
- }
- virtual bool SetRingbackTone(const char* buf, int len) { return true; }
- virtual bool PlayRingbackTone(uint32 ssrc, bool play, bool loop) {
- return true;
- }
- virtual bool InsertDtmf(uint32 ssrc, int event, int duration, int flags) {
- return false;
- }
- virtual bool GetStats(VoiceMediaInfo* info) { return true; }
-
- // Implement pure virtual methods of MediaChannel.
- virtual void OnPacketReceived(rtc::Buffer* packet,
- const rtc::PacketTime& packet_time);
- virtual void OnRtcpReceived(rtc::Buffer* packet,
- const rtc::PacketTime& packet_time) {}
- virtual void OnReadyToSend(bool ready) {}
- virtual bool AddSendStream(const StreamParams& sp);
- virtual bool RemoveSendStream(uint32 ssrc);
- virtual bool AddRecvStream(const StreamParams& sp) { return true; }
- virtual bool RemoveRecvStream(uint32 ssrc) { return true; }
- virtual bool MuteStream(uint32 ssrc, bool on) { return false; }
- virtual bool SetMaxSendBandwidth(int bps) { return true; }
- virtual bool SetOptions(const AudioOptions& options) {
- options_ = options;
- return true;
- }
- virtual bool GetOptions(AudioOptions* options) const {
- *options = options_;
- return true;
- }
-
- private:
- uint32 send_ssrc_;
- rtc::scoped_ptr<RtpSenderReceiver> rtp_sender_receiver_;
- AudioOptions options_;
-
- DISALLOW_COPY_AND_ASSIGN(FileVoiceChannel);
-};
-
-class FileVideoChannel : public VideoMediaChannel {
- public:
- FileVideoChannel(rtc::StreamInterface* input_file_stream,
- rtc::StreamInterface* output_file_stream,
- rtc::Thread* rtp_sender_thread);
- virtual ~FileVideoChannel();
- // Implement pure virtual methods of VideoMediaChannel.
- void DetachVoiceChannel() override {}
- virtual bool SetRecvCodecs(const std::vector<VideoCodec>& codecs) {
- return true;
- }
- virtual bool SetSendCodecs(const std::vector<VideoCodec>& codecs);
- virtual bool GetSendCodec(VideoCodec* send_codec) {
- *send_codec = VideoCodec();
- return true;
- }
- virtual bool SetSendStreamFormat(uint32 ssrc, const VideoFormat& format) {
- return true;
- }
- virtual bool SetRecvRtpHeaderExtensions(
- const std::vector<RtpHeaderExtension>& extensions) {
- return true;
- }
- virtual bool SetSendRtpHeaderExtensions(
- const std::vector<RtpHeaderExtension>& extensions) {
- return true;
- }
- virtual bool SetRender(bool render) { return true; }
- virtual bool SetSend(bool send);
- virtual bool SetRenderer(uint32 ssrc, VideoRenderer* renderer) {
- return true;
- }
- virtual bool SetCapturer(uint32 ssrc, VideoCapturer* capturer) {
- return false;
- }
- virtual bool GetStats(VideoMediaInfo* info) { return true; }
- virtual bool SendIntraFrame() { return false; }
- virtual bool RequestIntraFrame() { return false; }
-
- // Implement pure virtual methods of MediaChannel.
- virtual void OnPacketReceived(rtc::Buffer* packet,
- const rtc::PacketTime& packet_time);
- virtual void OnRtcpReceived(rtc::Buffer* packet,
- const rtc::PacketTime& packet_time) {}
- virtual void OnReadyToSend(bool ready) {}
- virtual bool AddSendStream(const StreamParams& sp);
- virtual bool RemoveSendStream(uint32 ssrc);
- virtual bool AddRecvStream(const StreamParams& sp) { return true; }
- virtual bool RemoveRecvStream(uint32 ssrc) { return true; }
- virtual bool MuteStream(uint32 ssrc, bool on) { return false; }
- virtual bool SetMaxSendBandwidth(int bps) { return true; }
- virtual bool SetOptions(const VideoOptions& options) {
- options_ = options;
- return true;
- }
- virtual bool GetOptions(VideoOptions* options) const {
- *options = options_;
- return true;
- }
- virtual void UpdateAspectRatio(int ratio_w, int ratio_h) {}
-
- private:
- uint32 send_ssrc_;
- rtc::scoped_ptr<RtpSenderReceiver> rtp_sender_receiver_;
- VideoOptions options_;
-
- DISALLOW_COPY_AND_ASSIGN(FileVideoChannel);
-};
-
-} // namespace cricket
-
-#endif // TALK_MEDIA_BASE_FILEMEDIAENGINE_H_
diff --git a/chromium/third_party/libjingle/source/talk/media/base/filemediaengine_unittest.cc b/chromium/third_party/libjingle/source/talk/media/base/filemediaengine_unittest.cc
deleted file mode 100644
index 43c2c841076..00000000000
--- a/chromium/third_party/libjingle/source/talk/media/base/filemediaengine_unittest.cc
+++ /dev/null
@@ -1,459 +0,0 @@
-/*
- * libjingle
- * Copyright 2004 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include <set>
-
-#include "talk/media/base/filemediaengine.h"
-#include "talk/media/base/rtpdump.h"
-#include "talk/media/base/streamparams.h"
-#include "talk/media/base/testutils.h"
-#include "webrtc/base/buffer.h"
-#include "webrtc/base/gunit.h"
-#include "webrtc/base/helpers.h"
-#include "webrtc/base/pathutils.h"
-#include "webrtc/base/stream.h"
-
-namespace cricket {
-
-static const int kWaitTimeMs = 100;
-static const std::string kFakeFileName = "foobar";
-
-//////////////////////////////////////////////////////////////////////////////
-// Media channel sends RTP packets via NetworkInterface. Rather than sending
-// packets to the network, FileNetworkInterface writes packets to a stream and
-// feeds packets back to the channel via OnPacketReceived.
-//////////////////////////////////////////////////////////////////////////////
-class FileNetworkInterface : public MediaChannel::NetworkInterface {
- public:
- FileNetworkInterface(rtc::StreamInterface* output, MediaChannel* ch)
- : media_channel_(ch),
- num_sent_packets_(0) {
- if (output) {
- dump_writer_.reset(new RtpDumpWriter(output));
- }
- }
-
- // Implement pure virtual methods of NetworkInterface.
- virtual bool SendPacket(rtc::Buffer* packet,
- rtc::DiffServCodePoint dscp) {
- if (!packet) return false;
-
- if (media_channel_) {
- media_channel_->OnPacketReceived(packet, rtc::PacketTime());
- }
- if (dump_writer_.get() &&
- rtc::SR_SUCCESS !=
- dump_writer_->WriteRtpPacket(packet->data(), packet->size())) {
- return false;
- }
-
- ++num_sent_packets_;
- return true;
- }
-
- virtual bool SendRtcp(rtc::Buffer* packet,
- rtc::DiffServCodePoint dscp) { return false; }
- virtual int SetOption(MediaChannel::NetworkInterface::SocketType type,
- rtc::Socket::Option opt, int option) {
- return 0;
- }
- virtual void SetDefaultDSCPCode(rtc::DiffServCodePoint dscp) {}
-
- size_t num_sent_packets() const { return num_sent_packets_; }
-
- private:
- MediaChannel* media_channel_;
- rtc::scoped_ptr<RtpDumpWriter> dump_writer_;
- size_t num_sent_packets_;
-
- DISALLOW_COPY_AND_ASSIGN(FileNetworkInterface);
-};
-
-class FileMediaEngineTest : public testing::Test {
- public:
- virtual void SetUp() {
- ASSERT_TRUE(GetTempFilename(&voice_input_filename_));
- ASSERT_TRUE(GetTempFilename(&voice_output_filename_));
- ASSERT_TRUE(GetTempFilename(&video_input_filename_));
- ASSERT_TRUE(GetTempFilename(&video_output_filename_));
- }
- virtual void TearDown() {
- // Force to close the dump files, if opened.
- voice_channel_.reset();
- video_channel_.reset();
-
- DeleteTempFile(voice_input_filename_);
- DeleteTempFile(voice_output_filename_);
- DeleteTempFile(video_input_filename_);
- DeleteTempFile(video_output_filename_);
- }
-
- protected:
- void CreateEngineAndChannels(const std::string& voice_in,
- const std::string& voice_out,
- const std::string& video_in,
- const std::string& video_out,
- size_t ssrc_count) {
- // Force to close the dump files, if opened.
- voice_channel_.reset();
- video_channel_.reset();
-
- if (!voice_in.empty()) {
- EXPECT_TRUE(WriteTestPacketsToFile(voice_in, ssrc_count));
- }
- if (!video_in.empty()) {
- EXPECT_TRUE(WriteTestPacketsToFile(video_in, ssrc_count));
- }
-
- engine_.reset(new FileMediaEngine);
- engine_->set_voice_input_filename(voice_in);
- engine_->set_voice_output_filename(voice_out);
- engine_->set_video_input_filename(video_in);
- engine_->set_video_output_filename(video_out);
- engine_->set_rtp_sender_thread(rtc::Thread::Current());
-
- voice_channel_.reset(engine_->CreateChannel());
- video_channel_.reset(engine_->CreateVideoChannel(VideoOptions(), NULL));
- }
-
- bool GetTempFilename(std::string* filename) {
- rtc::Pathname temp_path;
- if (!rtc::Filesystem::GetTemporaryFolder(temp_path, true, NULL)) {
- return false;
- }
- temp_path.SetPathname(
- rtc::Filesystem::TempFilename(temp_path, "fme-test-"));
-
- if (filename) {
- *filename = temp_path.pathname();
- }
- return true;
- }
-
- bool WriteTestPacketsToFile(const std::string& filename, size_t ssrc_count) {
- rtc::scoped_ptr<rtc::StreamInterface> stream(
- rtc::Filesystem::OpenFile(rtc::Pathname(filename), "wb"));
- bool ret = (NULL != stream.get());
- RtpDumpWriter writer(stream.get());
-
- for (size_t i = 0; i < ssrc_count; ++i) {
- ret &= RtpTestUtility::WriteTestPackets(
- RtpTestUtility::GetTestPacketCount(), false,
- static_cast<uint32>(RtpTestUtility::kDefaultSsrc + i),
- &writer);
- }
- return ret;
- }
-
- void DeleteTempFile(std::string filename) {
- rtc::Pathname pathname(filename);
- if (rtc::Filesystem::IsFile(rtc::Pathname(pathname))) {
- rtc::Filesystem::DeleteFile(pathname);
- }
- }
-
- bool GetSsrcAndPacketCounts(rtc::StreamInterface* stream,
- size_t* ssrc_count, size_t* packet_count) {
- rtc::scoped_ptr<RtpDumpReader> reader(new RtpDumpReader(stream));
- size_t count = 0;
- RtpDumpPacket packet;
- std::set<uint32> ssrcs;
- while (rtc::SR_SUCCESS == reader->ReadPacket(&packet)) {
- count++;
- uint32 ssrc;
- if (!packet.GetRtpSsrc(&ssrc)) {
- return false;
- }
- ssrcs.insert(ssrc);
- }
- if (ssrc_count) {
- *ssrc_count = ssrcs.size();
- }
- if (packet_count) {
- *packet_count = count;
- }
- return true;
- }
-
- static const uint32 kWaitTimeout = 3000;
- std::string voice_input_filename_;
- std::string voice_output_filename_;
- std::string video_input_filename_;
- std::string video_output_filename_;
- rtc::scoped_ptr<FileMediaEngine> engine_;
- rtc::scoped_ptr<VoiceMediaChannel> voice_channel_;
- rtc::scoped_ptr<VideoMediaChannel> video_channel_;
-};
-
-TEST_F(FileMediaEngineTest, TestDefaultImplementation) {
- CreateEngineAndChannels("", "", "", "", 1);
- EXPECT_TRUE(engine_->Init(rtc::Thread::Current()));
- EXPECT_EQ(0, engine_->GetCapabilities());
- EXPECT_TRUE(NULL == voice_channel_.get());
- EXPECT_TRUE(NULL == video_channel_.get());
- EXPECT_TRUE(NULL == engine_->CreateSoundclip());
- cricket::AudioOptions audio_options;
- EXPECT_TRUE(engine_->SetAudioOptions(audio_options));
- VideoEncoderConfig video_encoder_config;
- EXPECT_TRUE(engine_->SetDefaultVideoEncoderConfig(video_encoder_config));
- EXPECT_TRUE(engine_->SetSoundDevices(NULL, NULL));
- EXPECT_TRUE(engine_->SetVideoCaptureDevice(NULL));
- EXPECT_TRUE(engine_->SetOutputVolume(0));
- EXPECT_EQ(0, engine_->GetInputLevel());
- EXPECT_TRUE(engine_->SetLocalMonitor(true));
- EXPECT_TRUE(engine_->SetVideoCapture(true));
- EXPECT_EQ(0U, engine_->audio_codecs().size());
- EXPECT_EQ(0U, engine_->video_codecs().size());
- AudioCodec voice_codec;
- EXPECT_TRUE(engine_->FindAudioCodec(voice_codec));
- VideoCodec video_codec;
- EXPECT_TRUE(engine_->FindVideoCodec(video_codec));
- engine_->Terminate();
-}
-
-// Test that when file path is not pointing to a valid stream file, the channel
-// creation function should fail and return NULL.
-TEST_F(FileMediaEngineTest, TestBadFilePath) {
- engine_.reset(new FileMediaEngine);
- engine_->set_voice_input_filename(kFakeFileName);
- engine_->set_video_input_filename(kFakeFileName);
- EXPECT_TRUE(engine_->CreateChannel() == NULL);
- EXPECT_TRUE(engine_->CreateVideoChannel(VideoOptions(), NULL) == NULL);
-}
-
-TEST_F(FileMediaEngineTest, TestCodecs) {
- CreateEngineAndChannels("", "", "", "", 1);
- std::vector<AudioCodec> voice_codecs = engine_->audio_codecs();
- std::vector<VideoCodec> video_codecs = engine_->video_codecs();
- EXPECT_EQ(0U, voice_codecs.size());
- EXPECT_EQ(0U, video_codecs.size());
-
- AudioCodec voice_codec(103, "ISAC", 16000, 0, 1, 0);
- voice_codecs.push_back(voice_codec);
- engine_->set_voice_codecs(voice_codecs);
- voice_codecs = engine_->audio_codecs();
- ASSERT_EQ(1U, voice_codecs.size());
- EXPECT_EQ(voice_codec, voice_codecs[0]);
-
- VideoCodec video_codec(96, "H264-SVC", 320, 240, 30, 0);
- video_codecs.push_back(video_codec);
- engine_->set_video_codecs(video_codecs);
- video_codecs = engine_->video_codecs();
- ASSERT_EQ(1U, video_codecs.size());
- EXPECT_EQ(video_codec, video_codecs[0]);
-}
-
-// Test that the capabilities and channel creation of the Filemedia engine
-// depend on the stream parameters passed to its constructor.
-TEST_F(FileMediaEngineTest, TestGetCapabilities) {
- CreateEngineAndChannels(voice_input_filename_, "", "", "", 1);
- EXPECT_EQ(AUDIO_SEND, engine_->GetCapabilities());
- EXPECT_TRUE(NULL != voice_channel_.get());
- EXPECT_TRUE(NULL == video_channel_.get());
-
- CreateEngineAndChannels(voice_input_filename_, voice_output_filename_, "", "",
- 1);
- EXPECT_EQ(AUDIO_SEND | AUDIO_RECV, engine_->GetCapabilities());
- EXPECT_TRUE(NULL != voice_channel_.get());
- EXPECT_TRUE(NULL == video_channel_.get());
-
- CreateEngineAndChannels("", "", video_input_filename_, "", 1);
- EXPECT_EQ(VIDEO_SEND, engine_->GetCapabilities());
- EXPECT_TRUE(NULL == voice_channel_.get());
- EXPECT_TRUE(NULL != video_channel_.get());
-
- CreateEngineAndChannels(voice_input_filename_, voice_output_filename_,
- video_input_filename_, video_output_filename_, 1);
- EXPECT_EQ(AUDIO_SEND | AUDIO_RECV | VIDEO_SEND | VIDEO_RECV,
- engine_->GetCapabilities());
- EXPECT_TRUE(NULL != voice_channel_.get());
- EXPECT_TRUE(NULL != video_channel_.get());
-}
-
-// FileVideoChannel is the same as FileVoiceChannel in terms of receiving and
-// sending the RTP packets. We therefore test only FileVoiceChannel.
-
-// Test that SetSend() controls whether a voice channel sends RTP packets.
-TEST_F(FileMediaEngineTest, TestVoiceChannelSetSend) {
- CreateEngineAndChannels(voice_input_filename_, voice_output_filename_, "", "",
- 1);
- EXPECT_TRUE(NULL != voice_channel_.get());
- rtc::MemoryStream net_dump;
- FileNetworkInterface net_interface(&net_dump, voice_channel_.get());
- voice_channel_->SetInterface(&net_interface);
-
- // The channel is not sending yet.
- rtc::Thread::Current()->ProcessMessages(kWaitTimeMs);
- EXPECT_EQ(0U, net_interface.num_sent_packets());
-
- // The channel starts sending.
- voice_channel_->SetSend(SEND_MICROPHONE);
- EXPECT_TRUE_WAIT(net_interface.num_sent_packets() >= 1U, kWaitTimeout);
-
- // The channel stops sending.
- voice_channel_->SetSend(SEND_NOTHING);
- // Wait until packets are all delivered.
- rtc::Thread::Current()->ProcessMessages(kWaitTimeMs);
- size_t old_number = net_interface.num_sent_packets();
- rtc::Thread::Current()->ProcessMessages(kWaitTimeMs);
- EXPECT_EQ(old_number, net_interface.num_sent_packets());
-
- // The channel starts sending again.
- voice_channel_->SetSend(SEND_MICROPHONE);
- EXPECT_TRUE_WAIT(net_interface.num_sent_packets() > old_number, kWaitTimeout);
-
- // When the function exits, the net_interface object is released. The sender
- // thread may call net_interface to send packets, which results in a segment
- // fault. We hence stop sending and wait until all packets are delivered
- // before we exit this function.
- voice_channel_->SetSend(SEND_NOTHING);
- rtc::Thread::Current()->ProcessMessages(kWaitTimeMs);
-}
-
-// Test the sender thread of the channel. The sender sends RTP packets
-// continuously with proper sequence number, timestamp, and payload.
-TEST_F(FileMediaEngineTest, TestVoiceChannelSenderThread) {
- CreateEngineAndChannels(voice_input_filename_, voice_output_filename_, "", "",
- 1);
- EXPECT_TRUE(NULL != voice_channel_.get());
- rtc::MemoryStream net_dump;
- FileNetworkInterface net_interface(&net_dump, voice_channel_.get());
- voice_channel_->SetInterface(&net_interface);
-
- voice_channel_->SetSend(SEND_MICROPHONE);
- // Wait until the number of sent packets is no less than 2 * kPacketNumber.
- EXPECT_TRUE_WAIT(
- net_interface.num_sent_packets() >=
- 2 * RtpTestUtility::GetTestPacketCount(),
- kWaitTimeout);
- voice_channel_->SetSend(SEND_NOTHING);
- // Wait until packets are all delivered.
- rtc::Thread::Current()->ProcessMessages(kWaitTimeMs);
- EXPECT_TRUE(RtpTestUtility::VerifyTestPacketsFromStream(
- 2 * RtpTestUtility::GetTestPacketCount(), &net_dump,
- RtpTestUtility::kDefaultSsrc));
-
- // Each sent packet is dumped to net_dump and is also fed to the channel
- // via OnPacketReceived, which in turn writes the packets into voice_output_.
- // We next verify the packets in voice_output_.
- voice_channel_.reset(); // Force to close the files.
- rtc::scoped_ptr<rtc::StreamInterface> voice_output_;
- voice_output_.reset(rtc::Filesystem::OpenFile(
- rtc::Pathname(voice_output_filename_), "rb"));
- EXPECT_TRUE(voice_output_.get() != NULL);
- EXPECT_TRUE(RtpTestUtility::VerifyTestPacketsFromStream(
- 2 * RtpTestUtility::GetTestPacketCount(), voice_output_.get(),
- RtpTestUtility::kDefaultSsrc));
-}
-
-// Test that we can specify the ssrc for outgoing RTP packets.
-TEST_F(FileMediaEngineTest, TestVoiceChannelSendSsrc) {
- CreateEngineAndChannels(voice_input_filename_, voice_output_filename_, "", "",
- 1);
- EXPECT_TRUE(NULL != voice_channel_.get());
- const uint32 send_ssrc = RtpTestUtility::kDefaultSsrc + 1;
- voice_channel_->AddSendStream(StreamParams::CreateLegacy(send_ssrc));
-
- rtc::MemoryStream net_dump;
- FileNetworkInterface net_interface(&net_dump, voice_channel_.get());
- voice_channel_->SetInterface(&net_interface);
-
- voice_channel_->SetSend(SEND_MICROPHONE);
- // Wait until the number of sent packets is no less than 2 * kPacketNumber.
- EXPECT_TRUE_WAIT(
- net_interface.num_sent_packets() >=
- 2 * RtpTestUtility::GetTestPacketCount(),
- kWaitTimeout);
- voice_channel_->SetSend(SEND_NOTHING);
- // Wait until packets are all delivered.
- rtc::Thread::Current()->ProcessMessages(kWaitTimeMs);
- EXPECT_TRUE(RtpTestUtility::VerifyTestPacketsFromStream(
- 2 * RtpTestUtility::GetTestPacketCount(), &net_dump, send_ssrc));
-
- // Each sent packet is dumped to net_dump and is also fed to the channel
- // via OnPacketReceived, which in turn writes the packets into voice_output_.
- // We next verify the packets in voice_output_.
- voice_channel_.reset(); // Force to close the files.
- rtc::scoped_ptr<rtc::StreamInterface> voice_output_;
- voice_output_.reset(rtc::Filesystem::OpenFile(
- rtc::Pathname(voice_output_filename_), "rb"));
- EXPECT_TRUE(voice_output_.get() != NULL);
- EXPECT_TRUE(RtpTestUtility::VerifyTestPacketsFromStream(
- 2 * RtpTestUtility::GetTestPacketCount(), voice_output_.get(),
- send_ssrc));
-}
-
-// Test the sender thread of the channel, where the input rtpdump has two SSRCs.
-TEST_F(FileMediaEngineTest, TestVoiceChannelSenderThreadTwoSsrcs) {
- CreateEngineAndChannels(voice_input_filename_, voice_output_filename_, "", "",
- 2);
- // Verify that voice_input_filename_ contains 2 *
- // RtpTestUtility::GetTestPacketCount() packets
- // with different SSRCs.
- rtc::scoped_ptr<rtc::StreamInterface> input_stream(
- rtc::Filesystem::OpenFile(
- rtc::Pathname(voice_input_filename_), "rb"));
- ASSERT_TRUE(NULL != input_stream.get());
- size_t ssrc_count;
- size_t packet_count;
- EXPECT_TRUE(GetSsrcAndPacketCounts(input_stream.get(), &ssrc_count,
- &packet_count));
- EXPECT_EQ(2U, ssrc_count);
- EXPECT_EQ(2 * RtpTestUtility::GetTestPacketCount(), packet_count);
- input_stream.reset();
-
- // Send 2 * RtpTestUtility::GetTestPacketCount() packets and verify that all
- // these packets have the same SSRCs (that is, the packets with different
- // SSRCs are skipped by the filemediaengine).
- EXPECT_TRUE(NULL != voice_channel_.get());
- rtc::MemoryStream net_dump;
- FileNetworkInterface net_interface(&net_dump, voice_channel_.get());
- voice_channel_->SetInterface(&net_interface);
- voice_channel_->SetSend(SEND_MICROPHONE);
- EXPECT_TRUE_WAIT(
- net_interface.num_sent_packets() >=
- 2 * RtpTestUtility::GetTestPacketCount(),
- kWaitTimeout);
- voice_channel_->SetSend(SEND_NOTHING);
- // Wait until packets are all delivered.
- rtc::Thread::Current()->ProcessMessages(kWaitTimeMs);
- net_dump.Rewind();
- EXPECT_TRUE(GetSsrcAndPacketCounts(&net_dump, &ssrc_count, &packet_count));
- EXPECT_EQ(1U, ssrc_count);
- EXPECT_GE(packet_count, 2 * RtpTestUtility::GetTestPacketCount());
-}
-
-// Test SendIntraFrame() and RequestIntraFrame() of video channel.
-TEST_F(FileMediaEngineTest, TestVideoChannelIntraFrame) {
- CreateEngineAndChannels("", "", video_input_filename_, video_output_filename_,
- 1);
- EXPECT_TRUE(NULL != video_channel_.get());
- EXPECT_FALSE(video_channel_->SendIntraFrame());
- EXPECT_FALSE(video_channel_->RequestIntraFrame());
-}
-
-} // namespace cricket
diff --git a/chromium/third_party/libjingle/source/talk/media/base/mediachannel.h b/chromium/third_party/libjingle/source/talk/media/base/mediachannel.h
index d77ddbb2483..e7af7a76aa6 100644
--- a/chromium/third_party/libjingle/source/talk/media/base/mediachannel.h
+++ b/chromium/third_party/libjingle/source/talk/media/base/mediachannel.h
@@ -152,12 +152,14 @@ struct AudioOptions {
stereo_swapping.SetFrom(change.stereo_swapping);
audio_jitter_buffer_max_packets.SetFrom(
change.audio_jitter_buffer_max_packets);
+ audio_jitter_buffer_fast_accelerate.SetFrom(
+ change.audio_jitter_buffer_fast_accelerate);
typing_detection.SetFrom(change.typing_detection);
aecm_generate_comfort_noise.SetFrom(change.aecm_generate_comfort_noise);
conference_mode.SetFrom(change.conference_mode);
adjust_agc_delta.SetFrom(change.adjust_agc_delta);
experimental_agc.SetFrom(change.experimental_agc);
- experimental_aec.SetFrom(change.experimental_aec);
+ extended_filter_aec.SetFrom(change.extended_filter_aec);
delay_agnostic_aec.SetFrom(change.delay_agnostic_aec);
experimental_ns.SetFrom(change.experimental_ns);
aec_dump.SetFrom(change.aec_dump);
@@ -183,11 +185,13 @@ struct AudioOptions {
highpass_filter == o.highpass_filter &&
stereo_swapping == o.stereo_swapping &&
audio_jitter_buffer_max_packets == o.audio_jitter_buffer_max_packets &&
+ audio_jitter_buffer_fast_accelerate ==
+ o.audio_jitter_buffer_fast_accelerate &&
typing_detection == o.typing_detection &&
aecm_generate_comfort_noise == o.aecm_generate_comfort_noise &&
conference_mode == o.conference_mode &&
experimental_agc == o.experimental_agc &&
- experimental_aec == o.experimental_aec &&
+ extended_filter_aec == o.extended_filter_aec &&
delay_agnostic_aec == o.delay_agnostic_aec &&
experimental_ns == o.experimental_ns &&
adjust_agc_delta == o.adjust_agc_delta &&
@@ -215,12 +219,14 @@ struct AudioOptions {
ost << ToStringIfSet("swap", stereo_swapping);
ost << ToStringIfSet("audio_jitter_buffer_max_packets",
audio_jitter_buffer_max_packets);
+ ost << ToStringIfSet("audio_jitter_buffer_fast_accelerate",
+ audio_jitter_buffer_fast_accelerate);
ost << ToStringIfSet("typing", typing_detection);
ost << ToStringIfSet("comfort_noise", aecm_generate_comfort_noise);
ost << ToStringIfSet("conference", conference_mode);
ost << ToStringIfSet("agc_delta", adjust_agc_delta);
ost << ToStringIfSet("experimental_agc", experimental_agc);
- ost << ToStringIfSet("experimental_aec", experimental_aec);
+ ost << ToStringIfSet("extended_filter_aec", extended_filter_aec);
ost << ToStringIfSet("delay_agnostic_aec", delay_agnostic_aec);
ost << ToStringIfSet("experimental_ns", experimental_ns);
ost << ToStringIfSet("aec_dump", aec_dump);
@@ -255,13 +261,15 @@ struct AudioOptions {
Settable<bool> stereo_swapping;
// Audio receiver jitter buffer (NetEq) max capacity in number of packets.
Settable<int> audio_jitter_buffer_max_packets;
+ // Audio receiver jitter buffer (NetEq) fast accelerate mode.
+ Settable<bool> audio_jitter_buffer_fast_accelerate;
// Audio processing to detect typing.
Settable<bool> typing_detection;
Settable<bool> aecm_generate_comfort_noise;
Settable<bool> conference_mode;
Settable<int> adjust_agc_delta;
Settable<bool> experimental_agc;
- Settable<bool> experimental_aec;
+ Settable<bool> extended_filter_aec;
Settable<bool> delay_agnostic_aec;
Settable<bool> experimental_ns;
Settable<bool> aec_dump;
@@ -446,22 +454,6 @@ struct VideoOptions {
Settable<int> screencast_min_bitrate;
};
-// A class for playing out soundclips.
-class SoundclipMedia {
- public:
- enum SoundclipFlags {
- SF_LOOP = 1,
- };
-
- virtual ~SoundclipMedia() {}
-
- // Plays a sound out to the speakers with the given audio stream. The stream
- // must be 16-bit little-endian 16 kHz PCM. If a stream is already playing
- // on this SoundclipMedia, it is stopped. If clip is NULL, nothing is played.
- // Returns whether it was successful.
- virtual bool PlaySound(const char *clip, int len, int flags) = 0;
-};
-
struct RtpHeaderExtension {
RtpHeaderExtension() : id(0) {}
RtpHeaderExtension(const std::string& u, int i) : uri(u), id(i) {}
@@ -792,14 +784,15 @@ struct VoiceReceiverInfo : public MediaReceiverInfo {
expand_rate(0),
speech_expand_rate(0),
secondary_decoded_rate(0),
+ accelerate_rate(0),
+ preemptive_expand_rate(0),
decoding_calls_to_silence_generator(0),
decoding_calls_to_neteq(0),
decoding_normal(0),
decoding_plc(0),
decoding_cng(0),
decoding_plc_cng(0),
- capture_start_ntp_time_ms(-1) {
- }
+ capture_start_ntp_time_ms(-1) {}
int ext_seqnum;
int jitter_ms;
@@ -813,6 +806,10 @@ struct VoiceReceiverInfo : public MediaReceiverInfo {
float speech_expand_rate;
// fraction of data out of secondary decoding, including FEC and RED.
float secondary_decoded_rate;
+ // Fraction of data removed through time compression.
+ float accelerate_rate;
+ // Fraction of data inserted through time stretching.
+ float preemptive_expand_rate;
int decoding_calls_to_silence_generator;
int decoding_calls_to_neteq;
int decoding_normal;
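
The mediachannel.h hunks above add a NetEq fast-accelerate knob next to the existing jitter-buffer capacity option, rename experimental_aec to extended_filter_aec, and extend VoiceReceiverInfo with accelerate_rate and preemptive_expand_rate. A minimal sketch of how a caller might opt in, assuming the Settable<T>::Set() accessor declared elsewhere in this header; the helper name is illustrative:

    #include "talk/media/base/mediachannel.h"

    // Illustrative helper, not part of the patch: builds AudioOptions with the
    // new jitter-buffer flag and the renamed AEC flag enabled.
    cricket::AudioOptions MakeFastAccelerateAudioOptions() {
      cricket::AudioOptions options;
      options.audio_jitter_buffer_max_packets.Set(50);
      options.audio_jitter_buffer_fast_accelerate.Set(true);  // added here
      options.extended_filter_aec.Set(true);  // formerly experimental_aec
      return options;
    }
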
diff --git a/chromium/third_party/libjingle/source/talk/media/base/mediaengine.h b/chromium/third_party/libjingle/source/talk/media/base/mediaengine.h
index 25adcee7019..501333f94d6 100644
--- a/chromium/third_party/libjingle/source/talk/media/base/mediaengine.h
+++ b/chromium/third_party/libjingle/source/talk/media/base/mediaengine.h
@@ -77,16 +77,13 @@ class MediaEngineInterface {
// MediaChannel creation
// Creates a voice media channel. Returns NULL on failure.
- virtual VoiceMediaChannel *CreateChannel() = 0;
+ virtual VoiceMediaChannel* CreateChannel(const AudioOptions& options) = 0;
// Creates a video media channel, paired with the specified voice channel.
// Returns NULL on failure.
virtual VideoMediaChannel* CreateVideoChannel(
const VideoOptions& options,
VoiceMediaChannel* voice_media_channel) = 0;
- // Creates a soundclip object for playing sounds on. Returns NULL on failure.
- virtual SoundclipMedia *CreateSoundclip() = 0;
-
// Configuration
// Gets global audio options.
virtual AudioOptions GetAudioOptions() const = 0;
@@ -101,7 +98,6 @@ class MediaEngineInterface {
= 0;
// Device selection
- // TODO(tschmelcher): Add method for selecting the soundclip device.
virtual bool SetSoundDevices(const Device* in_device,
const Device* out_device) = 0;
@@ -172,30 +168,23 @@ class CompositeMediaEngine : public MediaEngineInterface {
virtual bool Init(rtc::Thread* worker_thread) {
if (!voice_.Init(worker_thread))
return false;
- if (!video_.Init(worker_thread)) {
- voice_.Terminate();
- return false;
- }
+ video_.Init();
return true;
}
virtual void Terminate() {
- video_.Terminate();
voice_.Terminate();
}
virtual int GetCapabilities() {
return (voice_.GetCapabilities() | video_.GetCapabilities());
}
- virtual VoiceMediaChannel *CreateChannel() {
- return voice_.CreateChannel();
+ virtual VoiceMediaChannel* CreateChannel(const AudioOptions& options) {
+ return voice_.CreateChannel(options);
}
virtual VideoMediaChannel* CreateVideoChannel(const VideoOptions& options,
VoiceMediaChannel* channel) {
return video_.CreateChannel(options, channel);
}
- virtual SoundclipMedia *CreateSoundclip() {
- return voice_.CreateSoundclip();
- }
virtual AudioOptions GetAudioOptions() const {
return voice_.GetOptions();
@@ -276,11 +265,8 @@ class NullVoiceEngine {
void Terminate() {}
int GetCapabilities() { return 0; }
// If you need this to return an actual channel, use FakeMediaEngine instead.
- VoiceMediaChannel* CreateChannel() {
- return NULL;
- }
- SoundclipMedia* CreateSoundclip() {
- return NULL;
+ VoiceMediaChannel* CreateChannel(const AudioOptions& options) {
+ return nullptr;
}
bool SetDelayOffset(int offset) { return true; }
AudioOptions GetOptions() const { return AudioOptions(); }
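
With CreateSoundclip() removed and CreateChannel() now taking AudioOptions, callers of MediaEngineInterface need a small adaptation. A sketch under the assumption that any AudioOptions flag (echo_cancellation below, which is not shown in this hunk) can be preset before channel creation; the helper name is illustrative:

    #include "talk/media/base/mediaengine.h"

    // Illustrative helper, not part of the patch.
    cricket::VoiceMediaChannel* CreateDefaultVoiceChannel(
        cricket::MediaEngineInterface* engine) {
      cricket::AudioOptions options;
      options.echo_cancellation.Set(true);  // assumed field, for illustration
      // Before this change the call was engine->CreateChannel() with no
      // arguments; CreateSoundclip() no longer exists at all.
      return engine->CreateChannel(options);
    }
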
diff --git a/chromium/third_party/libjingle/source/talk/media/base/videoengine_unittest.h b/chromium/third_party/libjingle/source/talk/media/base/videoengine_unittest.h
index 90db020abec..e4d2829e88a 100644
--- a/chromium/third_party/libjingle/source/talk/media/base/videoengine_unittest.h
+++ b/chromium/third_party/libjingle/source/talk/media/base/videoengine_unittest.h
@@ -496,7 +496,7 @@ class VideoMediaChannelTest : public testing::Test,
virtual void SetUp() {
cricket::Device device("test", "device");
- EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
+ engine_.Init();
channel_.reset(engine_.CreateChannel(cricket::VideoOptions(), NULL));
EXPECT_TRUE(channel_.get() != NULL);
ConnectVideoChannelError();
@@ -551,7 +551,6 @@ class VideoMediaChannelTest : public testing::Test,
}
virtual void TearDown() {
channel_.reset();
- engine_.Terminate();
}
void ConnectVideoChannelError() {
channel_->SignalMediaError.connect(this,
diff --git a/chromium/third_party/libjingle/source/talk/media/base/videoframe.cc b/chromium/third_party/libjingle/source/talk/media/base/videoframe.cc
index 79f52fcb4e1..7e79bf698c6 100644
--- a/chromium/third_party/libjingle/source/talk/media/base/videoframe.cc
+++ b/chromium/third_party/libjingle/source/talk/media/base/videoframe.cc
@@ -190,28 +190,6 @@ void VideoFrame::StretchToPlanes(
static_cast<int>(width), static_cast<int>(height), interpolate);
}
-size_t VideoFrame::StretchToBuffer(size_t dst_width, size_t dst_height,
- uint8* dst_buffer, size_t size,
- bool interpolate, bool vert_crop) const {
- if (!dst_buffer) {
- LOG(LS_ERROR) << "NULL dst_buffer pointer.";
- return 0;
- }
-
- size_t needed = SizeOf(dst_width, dst_height);
- if (needed <= size) {
- uint8* dst_y = dst_buffer;
- uint8* dst_u = dst_y + dst_width * dst_height;
- uint8* dst_v = dst_u + ((dst_width + 1) >> 1) * ((dst_height + 1) >> 1);
- StretchToPlanes(dst_y, dst_u, dst_v,
- static_cast<int32>(dst_width),
- static_cast<int32>((dst_width + 1) >> 1),
- static_cast<int32>((dst_width + 1) >> 1),
- dst_width, dst_height, interpolate, vert_crop);
- }
- return needed;
-}
-
void VideoFrame::StretchToFrame(VideoFrame* dst,
bool interpolate, bool vert_crop) const {
if (!dst) {
@@ -225,6 +203,8 @@ void VideoFrame::StretchToFrame(VideoFrame* dst,
interpolate, vert_crop);
dst->SetElapsedTime(GetElapsedTime());
dst->SetTimeStamp(GetTimeStamp());
+ // Stretched frame should have the same rotation as the source.
+ dst->SetRotation(GetVideoRotation());
}
VideoFrame* VideoFrame::Stretch(size_t dst_width, size_t dst_height,
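
Besides dropping StretchToBuffer(), the only behavioural change in videoframe.cc is that StretchToFrame() now copies the source rotation into the destination frame. A sketch of the caller-visible effect; the function name is illustrative and src/dst stand for any concrete VideoFrame implementations:

    #include "talk/media/base/videoframe.h"

    // Illustrative helper, not part of the patch.
    void StretchPreservingRotation(const cricket::VideoFrame& src,
                                   cricket::VideoFrame* dst) {
      src.StretchToFrame(dst, /*interpolate=*/true, /*vert_crop=*/false);
      // dst->GetVideoRotation() now matches src.GetVideoRotation(); before
      // this change the caller had to forward the rotation itself.
    }
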
diff --git a/chromium/third_party/libjingle/source/talk/media/base/videoframe.h b/chromium/third_party/libjingle/source/talk/media/base/videoframe.h
index 0aac645128c..ac53ca201ba 100644
--- a/chromium/third_party/libjingle/source/talk/media/base/videoframe.h
+++ b/chromium/third_party/libjingle/source/talk/media/base/videoframe.h
@@ -188,16 +188,6 @@ class VideoFrame {
uint8 *y, uint8 *u, uint8 *v, int32 pitchY, int32 pitchU, int32 pitchV,
size_t width, size_t height, bool interpolate, bool crop) const;
- // Writes the frame into the given frame buffer, stretched to the given width
- // and height, provided that it is of sufficient size. Returns the frame's
- // actual size, regardless of whether it was written or not (like snprintf).
- // If there is insufficient space, nothing is written. The parameter
- // "interpolate" controls whether to interpolate or just take the
- // nearest-point. The parameter "crop" controls whether to crop this frame to
- // the aspect ratio of the given dimensions before stretching.
- virtual size_t StretchToBuffer(size_t w, size_t h, uint8 *buffer, size_t size,
- bool interpolate, bool crop) const;
-
// Writes the frame into the target VideoFrame, stretched to the size of that
// frame. The parameter "interpolate" controls whether to interpolate or just
// take the nearest-point. The parameter "crop" controls whether to crop this
@@ -230,6 +220,7 @@ class VideoFrame {
size_t pixel_height,
int64_t elapsed_time,
int64_t time_stamp) const = 0;
+ virtual void SetRotation(webrtc::VideoRotation rotation) = 0;
};
} // namespace cricket
diff --git a/chromium/third_party/libjingle/source/talk/media/other/linphonemediaengine.cc b/chromium/third_party/libjingle/source/talk/media/other/linphonemediaengine.cc
deleted file mode 100644
index 06b6d76f12a..00000000000
--- a/chromium/third_party/libjingle/source/talk/media/other/linphonemediaengine.cc
+++ /dev/null
@@ -1,276 +0,0 @@
-/*
- * libjingle
- * Copyright 2010 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef MSILBC_LIBRARY
-#define MSILBC_LIBRARY "/usr/lib/mediastreamer/plugins/libmsilbc.so"
-#endif
-
-// LinphoneMediaEngine is a Linphone implementation of MediaEngine
-extern "C" {
-#include <mediastreamer2/mediastream.h>
-#include <mediastreamer2/msfilter.h>
-#include <mediastreamer2/mssndcard.h>
-}
-
-#include "talk/media/other/linphonemediaengine.h"
-
-#include "talk/media/base/rtpdump.h"
-#include "webrtc/base/buffer.h"
-#include "webrtc/base/event.h"
-#include "webrtc/base/logging.h"
-#include "webrtc/base/pathutils.h"
-#include "webrtc/base/stream.h"
-
-#ifndef WIN32
-#include <libgen.h>
-#endif
-
-namespace cricket {
-
-///////////////////////////////////////////////////////////////////////////
-// Implementation of LinphoneMediaEngine.
-///////////////////////////////////////////////////////////////////////////
-LinphoneMediaEngine::LinphoneMediaEngine(const std::string& ringWav, const std::string& callWav) : ring_wav_(ringWav), call_wav_(callWav) { }
-
-bool LinphoneMediaEngine::Init() {
- ortp_init();
- ms_init();
-
-#ifdef HAVE_ILBC
-#ifndef WIN32
- char * path = strdup(MSILBC_LIBRARY);
- char * dirc = dirname(path);
- ms_load_plugins(dirc);
-#endif
- if (ms_filter_codec_supported("iLBC"))
- have_ilbc = 1;
- else
- have_ilbc = 0;
-#else
- have_ilbc = 0;
-#endif
-
-#ifdef HAVE_SPEEX
- voice_codecs_.push_back(AudioCodec(110, payload_type_speex_wb.mime_type, payload_type_speex_wb.clock_rate, 0, 1, 8));
- voice_codecs_.push_back(AudioCodec(111, payload_type_speex_nb.mime_type, payload_type_speex_nb.clock_rate, 0, 1, 7));
-#endif
-
-#ifdef HAVE_ILBC
- if (have_ilbc)
- voice_codecs_.push_back(AudioCodec(102, payload_type_ilbc.mime_type, payload_type_ilbc.clock_rate, 0, 1, 4));
-#endif
-
- voice_codecs_.push_back(AudioCodec(0, payload_type_pcmu8000.mime_type, payload_type_pcmu8000.clock_rate, 0, 1, 2));
- voice_codecs_.push_back(AudioCodec(101, payload_type_telephone_event.mime_type, payload_type_telephone_event.clock_rate, 0, 1, 1));
- return true;
-}
-
-void LinphoneMediaEngine::Terminate() {
- fflush(stdout);
-}
-
-
-int LinphoneMediaEngine::GetCapabilities() {
- int capabilities = 0;
- capabilities |= AUDIO_SEND;
- capabilities |= AUDIO_RECV;
- return capabilities;
-}
-
-VoiceMediaChannel* LinphoneMediaEngine::CreateChannel() {
- return new LinphoneVoiceChannel(this);
-}
-
-VideoMediaChannel* LinphoneMediaEngine::CreateVideoChannel(VoiceMediaChannel* voice_ch) {
- return NULL;
-}
-
-bool LinphoneMediaEngine::FindAudioCodec(const AudioCodec &c) {
- if (c.id == 0)
- return true;
- if (c.name == payload_type_telephone_event.mime_type)
- return true;
-#ifdef HAVE_SPEEX
- if (c.name == payload_type_speex_wb.mime_type && c.clockrate == payload_type_speex_wb.clock_rate)
- return true;
- if (c.name == payload_type_speex_nb.mime_type && c.clockrate == payload_type_speex_nb.clock_rate)
- return true;
-#endif
-#ifdef HAVE_ILBC
- if (have_ilbc && c.name == payload_type_ilbc.mime_type)
- return true;
-#endif
- return false;
-}
-
-///////////////////////////////////////////////////////////////////////////
-// Implementation of LinphoneVoiceChannel.
-///////////////////////////////////////////////////////////////////////////
-LinphoneVoiceChannel::LinphoneVoiceChannel(LinphoneMediaEngine*eng)
- : pt_(-1),
- audio_stream_(0),
- engine_(eng),
- ring_stream_(0)
-{
-
- rtc::Thread *thread = rtc::ThreadManager::CurrentThread();
- rtc::SocketServer *ss = thread->socketserver();
- socket_.reset(ss->CreateAsyncSocket(SOCK_DGRAM));
-
- socket_->Bind(rtc::SocketAddress("localhost",3000));
- socket_->SignalReadEvent.connect(this, &LinphoneVoiceChannel::OnIncomingData);
-
-}
-
-LinphoneVoiceChannel::~LinphoneVoiceChannel()
-{
- fflush(stdout);
- StopRing();
-
- if (audio_stream_)
- audio_stream_stop(audio_stream_);
-}
-
-bool LinphoneVoiceChannel::SetPlayout(bool playout) {
- play_ = playout;
- return true;
-}
-
-bool LinphoneVoiceChannel::SetSendCodecs(const std::vector<AudioCodec>& codecs) {
-
- bool first = true;
- std::vector<AudioCodec>::const_iterator i;
-
- ortp_set_log_level_mask(ORTP_MESSAGE|ORTP_WARNING|ORTP_ERROR|ORTP_FATAL);
-
- for (i = codecs.begin(); i < codecs.end(); i++) {
-
- if (!engine_->FindAudioCodec(*i))
- continue;
-#ifdef HAVE_ILBC
- if (engine_->have_ilbc && i->name == payload_type_ilbc.mime_type) {
- rtp_profile_set_payload(&av_profile, i->id, &payload_type_ilbc);
- }
-#endif
-#ifdef HAVE_SPEEX
- if (i->name == payload_type_speex_wb.mime_type && i->clockrate == payload_type_speex_wb.clock_rate) {
- rtp_profile_set_payload(&av_profile, i->id, &payload_type_speex_wb);
- } else if (i->name == payload_type_speex_nb.mime_type && i->clockrate == payload_type_speex_nb.clock_rate) {
- rtp_profile_set_payload(&av_profile, i->id, &payload_type_speex_nb);
- }
-#endif
-
- if (i->id == 0)
- rtp_profile_set_payload(&av_profile, 0, &payload_type_pcmu8000);
-
- if (i->name == payload_type_telephone_event.mime_type) {
- rtp_profile_set_payload(&av_profile, i->id, &payload_type_telephone_event);
- }
-
- if (first) {
- StopRing();
- LOG(LS_INFO) << "Using " << i->name << "/" << i->clockrate;
- pt_ = i->id;
- audio_stream_ = audio_stream_start(&av_profile, 2000, "127.0.0.1", 3000, i->id, 250, 0);
- first = false;
- }
- }
-
- if (first) {
- StopRing();
- // We're being asked to set an empty list of codecs. This will only happen when
- // working with a buggy client; let's try PCMU.
- LOG(LS_WARNING) << "Received empty list of codces; using PCMU/8000";
- audio_stream_ = audio_stream_start(&av_profile, 2000, "127.0.0.1", 3000, 0, 250, 0);
- }
-
- return true;
-}
-
-bool LinphoneVoiceChannel::SetSend(SendFlags flag) {
- mute_ = !flag;
- return true;
-}
-
-void LinphoneVoiceChannel::OnPacketReceived(rtc::Buffer* packet) {
- const void* data = packet->data();
- int len = packet->length();
- uint8 buf[2048];
- memcpy(buf, data, len);
-
- /* We may receive packets with payload type 13: comfort noise. Linphone can't
- * handle them, so let's ignore those packets.
- */
- int payloadtype = buf[1] & 0x7f;
- if (play_ && payloadtype != 13)
- socket_->SendTo(buf, len, rtc::SocketAddress("localhost",2000));
-}
-
-void LinphoneVoiceChannel::StartRing(bool bIncomingCall)
-{
- MSSndCard *sndcard = NULL;
- sndcard=ms_snd_card_manager_get_default_card(ms_snd_card_manager_get());
- if (sndcard)
- {
- if (bIncomingCall)
- {
- if (engine_->GetRingWav().size() > 0)
- {
- LOG(LS_VERBOSE) << "incoming ring. sound file: " << engine_->GetRingWav().c_str() << "\n";
- ring_stream_ = ring_start (engine_->GetRingWav().c_str(), 1, sndcard);
- }
- }
- else
- {
- if (engine_->GetCallWav().size() > 0)
- {
- LOG(LS_VERBOSE) << "outgoing ring. sound file: " << engine_->GetCallWav().c_str() << "\n";
- ring_stream_ = ring_start (engine_->GetCallWav().c_str(), 1, sndcard);
- }
- }
- }
-}
-
-void LinphoneVoiceChannel::StopRing()
-{
- if (ring_stream_) {
- ring_stop(ring_stream_);
- ring_stream_ = 0;
- }
-}
-
-void LinphoneVoiceChannel::OnIncomingData(rtc::AsyncSocket *s)
-{
- char *buf[2048];
- int len;
- len = s->Recv(buf, sizeof(buf));
- rtc::Buffer packet(buf, len);
- if (network_interface_ && !mute_)
- network_interface_->SendPacket(&packet);
-}
-
-}
diff --git a/chromium/third_party/libjingle/source/talk/media/other/linphonemediaengine.h b/chromium/third_party/libjingle/source/talk/media/other/linphonemediaengine.h
deleted file mode 100644
index b17f4062dfa..00000000000
--- a/chromium/third_party/libjingle/source/talk/media/other/linphonemediaengine.h
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * libjingle
- * Copyright 2010 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-// LinphoneMediaEngine is a Linphone implementation of MediaEngine
-
-#ifndef TALK_SESSION_PHONE_LINPHONEMEDIAENGINE_H_
-#define TALK_SESSION_PHONE_LINPHONEMEDIAENGINE_H_
-
-#include <string>
-#include <vector>
-
-extern "C" {
-#include <mediastreamer2/mediastream.h>
-}
-
-#include "talk/media/base/codec.h"
-#include "talk/media/base/mediachannel.h"
-#include "talk/media/base/mediaengine.h"
-#include "webrtc/base/scoped_ptr.h"
-
-namespace rtc {
-class StreamInterface;
-}
-
-namespace cricket {
-
-class LinphoneMediaEngine : public MediaEngineInterface {
- public:
- LinphoneMediaEngine(const std::string& ringWav, const std::string& callWav);
- virtual ~LinphoneMediaEngine() {}
-
- // Should be called before codecs() and video_codecs() are called. We need to
- // set the voice and video codecs; otherwise, Jingle initiation will fail.
- void set_voice_codecs(const std::vector<AudioCodec>& codecs) {
- voice_codecs_ = codecs;
- }
- void set_video_codecs(const std::vector<VideoCodec>& codecs) {
- video_codecs_ = codecs;
- }
-
- // Implement pure virtual methods of MediaEngine.
- virtual bool Init();
- virtual void Terminate();
- virtual int GetCapabilities();
- virtual VoiceMediaChannel* CreateChannel();
- virtual VideoMediaChannel* CreateVideoChannel(VoiceMediaChannel* voice_ch);
- virtual SoundclipMedia* CreateSoundclip() { return NULL; }
- virtual bool SetAudioOptions(int options) { return true; }
- virtual bool SetDefaultVideoEncoderConfig(const VideoEncoderConfig& config) {
- return true;
- }
- virtual bool SetSoundDevices(const Device* in_dev, const Device* out_dev) {
- return true;
- }
- virtual bool SetVideoCaptureDevice(const Device* cam_device) { return true; }
- virtual bool SetOutputVolume(int level) { return true; }
- virtual int GetInputLevel() { return 0; }
- virtual bool SetLocalMonitor(bool enable) { return true; }
- // TODO: control channel send?
- virtual bool SetVideoCapture(bool capture) { return true; }
- virtual const std::vector<AudioCodec>& audio_codecs() {
- return voice_codecs_;
- }
- virtual const std::vector<VideoCodec>& video_codecs() {
- return video_codecs_;
- }
- virtual bool FindAudioCodec(const AudioCodec& codec);
- virtual bool FindVideoCodec(const VideoCodec& codec) { return true; }
- virtual void SetVoiceLogging(int min_sev, const char* filter) {}
- virtual void SetVideoLogging(int min_sev, const char* filter) {}
-
- std::string GetRingWav(){return ring_wav_;}
- std::string GetCallWav(){return call_wav_;}
-
- int have_ilbc;
-
- private:
- std::string voice_input_filename_;
- std::string voice_output_filename_;
- std::string video_input_filename_;
- std::string video_output_filename_;
- std::vector<AudioCodec> voice_codecs_;
- std::vector<VideoCodec> video_codecs_;
-
- std::string ring_wav_;
- std::string call_wav_;
-
- DISALLOW_COPY_AND_ASSIGN(LinphoneMediaEngine);
-};
-
-class LinphoneVoiceChannel : public VoiceMediaChannel {
- public:
- LinphoneVoiceChannel(LinphoneMediaEngine *eng);
- virtual ~LinphoneVoiceChannel();
-
- // Implement pure virtual methods of VoiceMediaChannel.
- virtual bool SetRecvCodecs(const std::vector<AudioCodec>& codecs) { return true; }
- virtual bool SetSendCodecs(const std::vector<AudioCodec>& codecs);
- virtual bool SetPlayout(bool playout);
- virtual bool SetSend(SendFlags flag);
- virtual bool AddStream(uint32 ssrc) { return true; }
- virtual bool RemoveStream(uint32 ssrc) { return true; }
- virtual bool GetActiveStreams(AudioInfo::StreamList* actives) { return true; }
- virtual int GetOutputLevel() { return 0; }
- virtual bool SetOutputScaling(uint32 ssrc, double left, double right) {
- return false;
- }
- virtual bool GetOutputScaling(uint32 ssrc, double* left, double* right) {
- return false;
- }
- virtual void SetRingbackTone(const char* buf, int len) {}
- virtual bool PlayRingbackTone(bool play, bool loop) { return true; }
- virtual bool PressDTMF(int event, bool playout) { return true; }
- virtual bool GetStats(VoiceMediaInfo* info) { return true; }
-
- // Implement pure virtual methods of MediaChannel.
- virtual void OnPacketReceived(rtc::Buffer* packet);
- virtual void OnRtcpReceived(rtc::Buffer* packet) {}
- virtual void SetSendSsrc(uint32 id) {} // TODO: change RTP packet?
- virtual bool SetRtcpCName(const std::string& cname) { return true; }
- virtual bool Mute(bool on) { return mute_; }
- virtual bool SetMaxSendBandwidth(int bps) { return true; }
- virtual bool SetOptions(int options) { return true; }
- virtual bool SetRecvRtpHeaderExtensions(
- const std::vector<RtpHeaderExtension>& extensions) { return true; }
- virtual bool SetSendRtpHeaderExtensions(
- const std::vector<RtpHeaderExtension>& extensions) { return true; }
-
- virtual void StartRing(bool bIncomingCall);
- virtual void StopRing();
-
- private:
- int pt_;
- bool mute_;
- bool play_;
- AudioStream *audio_stream_;
- LinphoneMediaEngine *engine_;
- RingStream* ring_stream_;
- rtc::scoped_ptr<rtc::AsyncSocket> socket_;
- void OnIncomingData(rtc::AsyncSocket *s);
-
- DISALLOW_COPY_AND_ASSIGN(LinphoneVoiceChannel);
-};
-
-} // namespace cricket
-
-#endif // TALK_SESSION_PHONE_LINPHONEMEDIAENGINE_H_
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtccall.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtccall.cc
index f78e8146dd8..9f6a0ff7409 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtccall.cc
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtccall.cc
@@ -39,6 +39,10 @@ FakeAudioReceiveStream::FakeAudioReceiveStream(
: config_(config), received_packets_(0) {
}
+webrtc::AudioReceiveStream::Stats FakeAudioReceiveStream::GetStats() const {
+ return webrtc::AudioReceiveStream::Stats();
+}
+
const webrtc::AudioReceiveStream::Config&
FakeAudioReceiveStream::GetConfig() const {
return config_;
@@ -108,7 +112,7 @@ int FakeVideoSendStream::GetLastHeight() const {
}
void FakeVideoSendStream::IncomingCapturedFrame(
- const webrtc::I420VideoFrame& frame) {
+ const webrtc::VideoFrame& frame) {
++num_swapped_frames_;
last_frame_.ShallowCopy(frame);
}
@@ -141,7 +145,7 @@ bool FakeVideoSendStream::ReconfigureVideoEncoder(
return true;
}
-webrtc::VideoSendStreamInput* FakeVideoSendStream::Input() {
+webrtc::VideoCaptureInput* FakeVideoSendStream::Input() {
return this;
}
@@ -166,7 +170,7 @@ bool FakeVideoReceiveStream::IsReceiving() const {
return receiving_;
}
-void FakeVideoReceiveStream::InjectFrame(const webrtc::I420VideoFrame& frame,
+void FakeVideoReceiveStream::InjectFrame(const webrtc::VideoFrame& frame,
int time_to_render_ms) {
config_.renderer->RenderFrame(frame, time_to_render_ms);
}
@@ -230,6 +234,14 @@ webrtc::Call::NetworkState FakeCall::GetNetworkState() const {
return network_state_;
}
+webrtc::AudioSendStream* FakeCall::CreateAudioSendStream(
+ const webrtc::AudioSendStream::Config& config) {
+ return nullptr;
+}
+
+void FakeCall::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) {
+}
+
webrtc::AudioReceiveStream* FakeCall::CreateAudioReceiveStream(
const webrtc::AudioReceiveStream::Config& config) {
audio_receive_streams_.push_back(new FakeAudioReceiveStream(config));
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtccall.h b/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtccall.h
index e4b00d2517a..c500416d347 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtccall.h
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtccall.h
@@ -42,6 +42,8 @@ class FakeAudioReceiveStream : public webrtc::AudioReceiveStream {
explicit FakeAudioReceiveStream(
const webrtc::AudioReceiveStream::Config& config);
+ webrtc::AudioReceiveStream::Stats GetStats() const override;
+
const webrtc::AudioReceiveStream::Config& GetConfig() const;
int received_packets() const { return received_packets_; }
@@ -53,7 +55,7 @@ class FakeAudioReceiveStream : public webrtc::AudioReceiveStream {
};
class FakeVideoSendStream : public webrtc::VideoSendStream,
- public webrtc::VideoSendStreamInput {
+ public webrtc::VideoCaptureInput {
public:
FakeVideoSendStream(const webrtc::VideoSendStream::Config& config,
const webrtc::VideoEncoderConfig& encoder_config);
@@ -71,13 +73,13 @@ class FakeVideoSendStream : public webrtc::VideoSendStream,
void SetStats(const webrtc::VideoSendStream::Stats& stats);
private:
- void IncomingCapturedFrame(const webrtc::I420VideoFrame& frame) override;
+ void IncomingCapturedFrame(const webrtc::VideoFrame& frame) override;
webrtc::VideoSendStream::Stats GetStats() override;
bool ReconfigureVideoEncoder(
const webrtc::VideoEncoderConfig& config) override;
- webrtc::VideoSendStreamInput* Input() override;
+ webrtc::VideoCaptureInput* Input() override;
void Start() override;
void Stop() override;
@@ -91,7 +93,7 @@ class FakeVideoSendStream : public webrtc::VideoSendStream,
webrtc::VideoCodecVP9 vp9;
} vpx_settings_;
int num_swapped_frames_;
- webrtc::I420VideoFrame last_frame_;
+ webrtc::VideoFrame last_frame_;
webrtc::VideoSendStream::Stats stats_;
};
@@ -104,7 +106,7 @@ class FakeVideoReceiveStream : public webrtc::VideoReceiveStream {
bool IsReceiving() const;
- void InjectFrame(const webrtc::I420VideoFrame& frame, int time_to_render_ms);
+ void InjectFrame(const webrtc::VideoFrame& frame, int time_to_render_ms);
void SetStats(const webrtc::VideoReceiveStream::Stats& stats);
@@ -137,6 +139,10 @@ class FakeCall : public webrtc::Call, public webrtc::PacketReceiver {
void SetStats(const webrtc::Call::Stats& stats);
private:
+ webrtc::AudioSendStream* CreateAudioSendStream(
+ const webrtc::AudioSendStream::Config& config) override;
+ void DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) override;
+
webrtc::AudioReceiveStream* CreateAudioReceiveStream(
const webrtc::AudioReceiveStream::Config& config) override;
void DestroyAudioReceiveStream(
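
The fakes above track two upstream renames: webrtc::VideoSendStreamInput is now webrtc::VideoCaptureInput, and webrtc::I420VideoFrame is now webrtc::VideoFrame. A minimal test sink written against the new names, assuming the webrtc headers already pulled in by fakewebrtccall.h and that IncomingCapturedFrame() is the interface's only pure virtual (as mirrored by FakeVideoSendStream); the class name is illustrative:

    // Illustrative test helper, not part of the patch.
    class CountingCaptureInput : public webrtc::VideoCaptureInput {
     public:
      void IncomingCapturedFrame(const webrtc::VideoFrame& frame) override {
        ++frames_;
      }
      int frames() const { return frames_; }

     private:
      int frames_ = 0;
    };
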
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvideocapturemodule.h b/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvideocapturemodule.h
index 93406cd55ba..3c515555e7d 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvideocapturemodule.h
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvideocapturemodule.h
@@ -109,7 +109,7 @@ class FakeWebRtcVideoCaptureModule : public webrtc::VideoCaptureModule {
bool SendFrame(int w, int h) {
if (!running_) return false;
- webrtc::I420VideoFrame sample;
+ webrtc::VideoFrame sample;
// Setting stride based on width.
if (sample.CreateEmptyFrame(w, h, w, (w + 1) / 2, (w + 1) / 2) < 0) {
return false;
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvideoengine.h b/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvideoengine.h
index fed19c97076..755a3def14c 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvideoengine.h
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvideoengine.h
@@ -56,8 +56,6 @@ static const int kMaxVideoBitrate = 1000;
// renderer for a channel or it is adding a renderer for a capturer.
static const int kViEChannelIdBase = 0;
static const int kViEChannelIdMax = 1000;
-static const int kViECaptureIdBase = 10000; // Make sure there is a gap.
-static const int kViECaptureIdMax = 11000;
// Fake class for mocking out webrtc::VideoDecoder
class FakeWebRtcVideoDecoder : public webrtc::VideoDecoder {
@@ -159,10 +157,9 @@ class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder {
return codec_settings_;
}
- virtual int32 Encode(
- const webrtc::I420VideoFrame& inputImage,
- const webrtc::CodecSpecificInfo* codecSpecificInfo,
- const std::vector<webrtc::VideoFrameType>* frame_types) {
+ virtual int32 Encode(const webrtc::VideoFrame& inputImage,
+ const webrtc::CodecSpecificInfo* codecSpecificInfo,
+ const std::vector<webrtc::VideoFrameType>* frame_types) {
rtc::CritScope lock(&crit_);
++num_frames_encoded_;
return WEBRTC_VIDEO_CODEC_OK;
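
FakeWebRtcVideoEncoder::Encode() now takes webrtc::VideoFrame directly. A sketch of driving it from a test, reusing the CreateEmptyFrame() stride pattern shown in fakewebrtcvideocapturemodule.h above; the function name is illustrative and the sketch assumes the fake is default-constructible as declared in this header:

    // Illustrative test snippet, not part of the patch.
    void EncodeOneFakeFrame() {
      cricket::FakeWebRtcVideoEncoder encoder;
      webrtc::VideoFrame frame;
      frame.CreateEmptyFrame(640, 480, 640, (640 + 1) / 2, (640 + 1) / 2);
      // The fake only counts frames, so null codec info and frame types suffice.
      encoder.Encode(frame, nullptr, nullptr);
    }
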
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvoiceengine.h b/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvoiceengine.h
index c6d39bbb223..419170b24dc 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvoiceengine.h
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/fakewebrtcvoiceengine.h
@@ -152,6 +152,7 @@ class FakeAudioProcessing : public webrtc::AudioProcessing {
WEBRTC_STUB(StartDebugRecording, (const char filename[kMaxFilenameSize]));
WEBRTC_STUB(StartDebugRecording, (FILE* handle));
WEBRTC_STUB(StopDebugRecording, ());
+ WEBRTC_VOID_STUB(UpdateHistogramsOnCallEnd, ());
webrtc::EchoCancellation* echo_cancellation() const override { return NULL; }
webrtc::EchoControlMobile* echo_control_mobile() const override {
return NULL;
@@ -216,7 +217,8 @@ class FakeWebRtcVoiceEngine
send_absolute_sender_time_ext_(-1),
receive_absolute_sender_time_ext_(-1),
associate_send_channel(-1),
- neteq_capacity(-1) {
+ neteq_capacity(-1),
+ neteq_fast_accelerate(false) {
memset(&send_codec, 0, sizeof(send_codec));
memset(&rx_agc_config, 0, sizeof(rx_agc_config));
}
@@ -254,6 +256,7 @@ class FakeWebRtcVoiceEngine
webrtc::PacketTime last_rtp_packet_time;
std::list<std::string> packets;
int neteq_capacity;
+ bool neteq_fast_accelerate;
};
FakeWebRtcVoiceEngine(const cricket::AudioCodec* const* codecs,
@@ -409,6 +412,8 @@ class FakeWebRtcVoiceEngine
if (config.Get<webrtc::NetEqCapacityConfig>().enabled) {
ch->neteq_capacity = config.Get<webrtc::NetEqCapacityConfig>().capacity;
}
+ ch->neteq_fast_accelerate =
+ config.Get<webrtc::NetEqFastAccelerate>().enabled;
channels_[++last_channel_] = ch;
return last_channel_;
}
@@ -512,8 +517,6 @@ class FakeWebRtcVoiceEngine
}
WEBRTC_STUB(GetVersion, (char version[1024]));
WEBRTC_STUB(LastError, ());
- WEBRTC_STUB(SetOnHoldStatus, (int, bool, webrtc::OnHoldModes));
- WEBRTC_STUB(GetOnHoldStatus, (int, bool&, webrtc::OnHoldModes&));
WEBRTC_FUNC(AssociateSendChannel, (int channel,
int accociate_send_channel)) {
WEBRTC_CHECK_CHANNEL(channel);
@@ -577,14 +580,6 @@ class FakeWebRtcVoiceEngine
}
return -1;
}
- WEBRTC_STUB(SetAMREncFormat, (int channel, webrtc::AmrMode mode));
- WEBRTC_STUB(SetAMRDecFormat, (int channel, webrtc::AmrMode mode));
- WEBRTC_STUB(SetAMRWbEncFormat, (int channel, webrtc::AmrMode mode));
- WEBRTC_STUB(SetAMRWbDecFormat, (int channel, webrtc::AmrMode mode));
- WEBRTC_STUB(SetISACInitTargetRate, (int channel, int rateBps,
- bool useFixedFrameSize));
- WEBRTC_STUB(SetISACMaxRate, (int channel, int rateBps));
- WEBRTC_STUB(SetISACMaxPayloadSize, (int channel, int sizeBytes));
WEBRTC_FUNC(SetRecPayloadType, (int channel,
const webrtc::CodecInst& codec)) {
WEBRTC_CHECK_CHANNEL(channel);
@@ -722,9 +717,6 @@ class FakeWebRtcVoiceEngine
dtmf_info_.dtmf_length_ms = length_ms;
return 0;
}
- WEBRTC_STUB(StartPlayingDtmfTone,
- (int eventCode, int attenuationDb = 10));
- WEBRTC_STUB(StopPlayingDtmfTone, ());
// webrtc::VoEFile
WEBRTC_FUNC(StartPlayingFileLocally, (int channel, const char* fileNameUTF8,
@@ -752,7 +744,6 @@ class FakeWebRtcVoiceEngine
WEBRTC_CHECK_CHANNEL(channel);
return (channels_[channel]->file) ? 1 : 0;
}
- WEBRTC_STUB(ScaleLocalFilePlayout, (int channel, float scale));
WEBRTC_STUB(StartPlayingFileAsMicrophone, (int channel,
const char* fileNameUTF8,
bool loop,
@@ -766,7 +757,6 @@ class FakeWebRtcVoiceEngine
float volumeScaling));
WEBRTC_STUB(StopPlayingFileAsMicrophone, (int channel));
WEBRTC_STUB(IsPlayingFileAsMicrophone, (int channel));
- WEBRTC_STUB(ScaleFileAsMicrophonePlayout, (int channel, float scale));
WEBRTC_STUB(StartRecordingPlayout, (int channel, const char* fileNameUTF8,
webrtc::CodecInst* compression,
int maxSizeBytes));
@@ -797,30 +787,8 @@ class FakeWebRtcVoiceEngine
recording_microphone_ = false;
return 0;
}
- WEBRTC_STUB(ConvertPCMToWAV, (const char* fileNameInUTF8,
- const char* fileNameOutUTF8));
- WEBRTC_STUB(ConvertPCMToWAV, (webrtc::InStream* streamIn,
- webrtc::OutStream* streamOut));
- WEBRTC_STUB(ConvertWAVToPCM, (const char* fileNameInUTF8,
- const char* fileNameOutUTF8));
- WEBRTC_STUB(ConvertWAVToPCM, (webrtc::InStream* streamIn,
- webrtc::OutStream* streamOut));
- WEBRTC_STUB(ConvertPCMToCompressed, (const char* fileNameInUTF8,
- const char* fileNameOutUTF8,
- webrtc::CodecInst* compression));
- WEBRTC_STUB(ConvertPCMToCompressed, (webrtc::InStream* streamIn,
- webrtc::OutStream* streamOut,
- webrtc::CodecInst* compression));
- WEBRTC_STUB(ConvertCompressedToPCM, (const char* fileNameInUTF8,
- const char* fileNameOutUTF8));
- WEBRTC_STUB(ConvertCompressedToPCM, (webrtc::InStream* streamIn,
- webrtc::OutStream* streamOut));
- WEBRTC_STUB(GetFileDuration, (const char* fileNameUTF8, int& durationMs,
- webrtc::FileFormats format));
- WEBRTC_STUB(GetPlaybackPosition, (int channel, int& positionMs));
// webrtc::VoEHardware
- WEBRTC_STUB(GetCPULoad, (int&));
WEBRTC_FUNC(GetNumOfRecordingDevices, (int& num)) {
return GetNumDevices(num);
}
@@ -837,12 +805,6 @@ class FakeWebRtcVoiceEngine
WEBRTC_STUB(SetPlayoutDevice, (int));
WEBRTC_STUB(SetAudioDeviceLayer, (webrtc::AudioLayers));
WEBRTC_STUB(GetAudioDeviceLayer, (webrtc::AudioLayers&));
- WEBRTC_STUB(GetPlayoutDeviceStatus, (bool&));
- WEBRTC_STUB(GetRecordingDeviceStatus, (bool&));
- WEBRTC_STUB(ResetAudioDevice, ());
- WEBRTC_STUB(AudioDeviceControl, (unsigned int, unsigned int, unsigned int));
- WEBRTC_STUB(SetLoudspeakerStatus, (bool enable));
- WEBRTC_STUB(GetLoudspeakerStatus, (bool& enabled));
WEBRTC_FUNC(SetRecordingSampleRate, (unsigned int samples_per_sec)) {
recording_sample_rate_ = samples_per_sec;
return 0;
@@ -860,7 +822,6 @@ class FakeWebRtcVoiceEngine
return 0;
}
WEBRTC_STUB(EnableBuiltInAEC, (bool enable));
- virtual bool BuiltInAECIsEnabled() const { return true; }
virtual bool BuiltInAECIsAvailable() const { return false; }
// webrtc::VoENetEqStats
@@ -912,9 +873,6 @@ class FakeWebRtcVoiceEngine
size_t length));
// webrtc::VoERTP_RTCP
- WEBRTC_STUB(RegisterRTPObserver, (int channel,
- webrtc::VoERTPObserver& observer));
- WEBRTC_STUB(DeRegisterRTPObserver, (int channel));
WEBRTC_FUNC(SetLocalSSRC, (int channel, unsigned int ssrc)) {
WEBRTC_CHECK_CHANNEL(channel);
channels_[channel]->send_ssrc = ssrc;
@@ -955,7 +913,6 @@ class FakeWebRtcVoiceEngine
return 0;
}
- WEBRTC_STUB(GetRemoteCSRCs, (int channel, unsigned int arrCSRC[15]));
WEBRTC_STUB(SetRTCPStatus, (int channel, bool enable));
WEBRTC_STUB(GetRTCPStatus, (int channel, bool& enabled));
WEBRTC_STUB(SetRTCP_CNAME, (int channel, const char cname[256]));
@@ -967,8 +924,6 @@ class FakeWebRtcVoiceEngine
unsigned int& playoutTimestamp,
unsigned int* jitter,
unsigned short* fractionLost));
- WEBRTC_STUB(GetRemoteRTCPSenderInfo, (int channel,
- webrtc::SenderInfo* sender_info));
WEBRTC_FUNC(GetRemoteRTCPReportBlocks,
(int channel, std::vector<webrtc::ReportBlock>* receive_blocks)) {
WEBRTC_CHECK_CHANNEL(channel);
@@ -986,11 +941,6 @@ class FakeWebRtcVoiceEngine
}
return 0;
}
- WEBRTC_STUB(SendApplicationDefinedRTCPPacket, (int channel,
- unsigned char subType,
- unsigned int name,
- const char* data,
- unsigned short dataLength));
WEBRTC_STUB(GetRTPStatistics, (int channel, unsigned int& averageJitterMs,
unsigned int& maxJitterMs,
unsigned int& discardedPackets));
@@ -1035,15 +985,6 @@ class FakeWebRtcVoiceEngine
channels_[channel]->nack_max_packets = maxNoPackets;
return 0;
}
- WEBRTC_STUB(StartRTPDump, (int channel, const char* fileNameUTF8,
- webrtc::RTPDirections direction));
- WEBRTC_STUB(StopRTPDump, (int channel, webrtc::RTPDirections direction));
- WEBRTC_STUB(RTPDumpIsActive, (int channel, webrtc::RTPDirections direction));
- WEBRTC_STUB(InsertExtraRTPPacket, (int channel, unsigned char payloadType,
- bool markerBit, const char* payloadData,
- unsigned short payloadSize));
- WEBRTC_STUB(GetLastRemoteTimeStamp, (int channel,
- uint32_t* lastRemoteTimeStamp));
// webrtc::VoEVideoSync
WEBRTC_STUB(GetPlayoutBufferSize, (int& bufferMs));
@@ -1060,14 +1001,10 @@ class FakeWebRtcVoiceEngine
// webrtc::VoEVolumeControl
WEBRTC_STUB(SetSpeakerVolume, (unsigned int));
WEBRTC_STUB(GetSpeakerVolume, (unsigned int&));
- WEBRTC_STUB(SetSystemOutputMute, (bool));
- WEBRTC_STUB(GetSystemOutputMute, (bool&));
WEBRTC_STUB(SetMicVolume, (unsigned int));
WEBRTC_STUB(GetMicVolume, (unsigned int&));
WEBRTC_STUB(SetInputMute, (int, bool));
WEBRTC_STUB(GetInputMute, (int, bool&));
- WEBRTC_STUB(SetSystemInputMute, (bool));
- WEBRTC_STUB(GetSystemInputMute, (bool&));
WEBRTC_STUB(GetSpeechInputLevel, (unsigned int&));
WEBRTC_STUB(GetSpeechOutputLevel, (int, unsigned int&));
WEBRTC_STUB(GetSpeechInputLevelFullRange, (unsigned int&));
@@ -1253,14 +1190,6 @@ class FakeWebRtcVoiceEngine
media_processor_ = NULL;
return 0;
}
- WEBRTC_STUB(SetExternalRecordingStatus, (bool enable));
- WEBRTC_STUB(SetExternalPlayoutStatus, (bool enable));
- WEBRTC_STUB(ExternalRecordingInsertData,
- (const int16_t speechData10ms[], int lengthSamples,
- int samplingFreqHz, int current_delay_ms));
- WEBRTC_STUB(ExternalPlayoutGetData,
- (int16_t speechData10ms[], int samplingFreqHz,
- int current_delay_ms, int& lengthSamples));
WEBRTC_STUB(GetAudioFrame, (int channel, int desired_sample_rate_hz,
webrtc::AudioFrame* frame));
WEBRTC_STUB(SetExternalMixing, (int channel, bool enable));
@@ -1269,6 +1198,11 @@ class FakeWebRtcVoiceEngine
ASSERT(ch != channels_.end());
return ch->second->neteq_capacity;
}
+ bool GetNetEqFastAccelerate() const {
+ auto ch = channels_.find(last_channel_);
+ ASSERT(ch != channels_.end());
+ return ch->second->neteq_fast_accelerate;
+ }
private:
int GetNumDevices(int& num) {
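
The voice-engine fake now records, per channel, whether the NetEqFastAccelerate config flag was set at creation time, mirroring the existing NetEqCapacityConfig handling, and exposes it through GetNetEqFastAccelerate(). A sketch of how the real engine would be expected to enable it via webrtc::Config, assuming NetEqFastAccelerate takes its enabled state in the constructor the same way NetEqCapacityConfig takes its capacity (construction details are illustrative):

    // Illustrative helper, not part of the patch; relies on the headers
    // already included by fakewebrtcvoiceengine.h for webrtc::Config.
    void EnableNetEqFastAccelerate(webrtc::Config* config) {
      // webrtc::Config takes ownership of the pointer passed to Set<T>().
      config->Set<webrtc::NetEqFastAccelerate>(
          new webrtc::NetEqFastAccelerate(true));
    }

A channel created from such a config should then make GetNetEqFastAccelerate() return true in tests, matching the config.Get<>() read added above.
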
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/simulcast.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/simulcast.cc
index 6a822b8ad29..759cfef93b2 100755
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/simulcast.cc
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/simulcast.cc
@@ -411,7 +411,7 @@ void LogSimulcastSubstreams(const webrtc::VideoCodec& codec) {
static const int kScreenshareMinBitrateKbps = 50;
static const int kScreenshareMaxBitrateKbps = 6000;
-static const int kScreenshareDefaultTl0BitrateKbps = 100;
+static const int kScreenshareDefaultTl0BitrateKbps = 200;
static const int kScreenshareDefaultTl1BitrateKbps = 1000;
static const char* kScreencastLayerFieldTrialName =
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcexport.h b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcexport.h
deleted file mode 100644
index a2997dcf077..00000000000
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcexport.h
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * libjingle
- * Copyright 2004--2013 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef TALK_MEDIA_WEBRTC_WEBRTCEXPORT_H_
-#define TALK_MEDIA_WEBRTC_WEBRTCEXPORT_H_
-
-// When building for Chrome a part of the code can be built into
-// a shared library, which is controlled by these macros.
-// For all other builds, we always build a static library.
-#if !defined(GOOGLE_CHROME_BUILD) && !defined(CHROMIUM_BUILD)
-#define LIBPEERCONNECTION_LIB 1
-#endif
-
-#ifndef NON_EXPORTED_BASE
-#ifdef WIN32
-
-// MSVC_SUPPRESS_WARNING disables warning |n| for the remainder of the line and
-// for the next line of the source file.
-#define MSVC_SUPPRESS_WARNING(n) __pragma(warning(suppress:n))
-
-// Allows exporting a class that inherits from a non-exported base class.
-// This uses suppress instead of push/pop because the delimiter after the
-// declaration (either "," or "{") has to be placed before the pop macro.
-//
-// Example usage:
-// class EXPORT_API Foo : NON_EXPORTED_BASE(public Bar) {
-//
-// MSVC Compiler warning C4275:
-// non dll-interface class 'Bar' used as base for dll-interface class 'Foo'.
-// Note that this is intended to be used only when no access to the base class'
-// static data is done through derived classes or inline methods. For more info,
-// see http://msdn.microsoft.com/en-us/library/3tdb471s(VS.80).aspx
-#define NON_EXPORTED_BASE(code) MSVC_SUPPRESS_WARNING(4275) \
- code
-
-#else // Not WIN32
-#define NON_EXPORTED_BASE(code) code
-#endif // WIN32
-#endif // NON_EXPORTED_BASE
-
-#if defined (LIBPEERCONNECTION_LIB)
- #define WRME_EXPORT
-#else
- #if defined(WIN32)
- #if defined(LIBPEERCONNECTION_IMPLEMENTATION)
- #define WRME_EXPORT __declspec(dllexport)
- #else
- #define WRME_EXPORT __declspec(dllimport)
- #endif
- #else // defined(WIN32)
- #if defined(LIBPEERCONNECTION_IMPLEMENTATION)
- #define WRME_EXPORT __attribute__((visibility("default")))
- #else
- #define WRME_EXPORT
- #endif
- #endif
-#endif // LIBPEERCONNECTION_LIB
-
-#endif // TALK_MEDIA_WEBRTC_WEBRTCEXPORT_H_
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcmediaengine.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcmediaengine.cc
index ca36f7331d8..cf6a5cf2e59 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcmediaengine.cc
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcmediaengine.cc
@@ -25,8 +25,6 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#if defined(LIBPEERCONNECTION_LIB) || defined(LIBPEERCONNECTION_IMPLEMENTATION)
-
#include "talk/media/webrtc/webrtcmediaengine.h"
#include "talk/media/webrtc/webrtcvideoengine2.h"
#include "talk/media/webrtc/webrtcvoiceengine.h"
@@ -37,10 +35,9 @@ class WebRtcMediaEngine2
: public CompositeMediaEngine<WebRtcVoiceEngine, WebRtcVideoEngine2> {
public:
WebRtcMediaEngine2(webrtc::AudioDeviceModule* adm,
- webrtc::AudioDeviceModule* adm_sc,
WebRtcVideoEncoderFactory* encoder_factory,
WebRtcVideoDecoderFactory* decoder_factory) {
- voice_.SetAudioDeviceModule(adm, adm_sc);
+ voice_.SetAudioDeviceModule(adm);
video_.SetExternalDecoderFactory(decoder_factory);
video_.SetExternalEncoderFactory(encoder_factory);
}
@@ -48,17 +45,14 @@ class WebRtcMediaEngine2
} // namespace cricket
-WRME_EXPORT
cricket::MediaEngineInterface* CreateWebRtcMediaEngine(
webrtc::AudioDeviceModule* adm,
- webrtc::AudioDeviceModule* adm_sc,
cricket::WebRtcVideoEncoderFactory* encoder_factory,
cricket::WebRtcVideoDecoderFactory* decoder_factory) {
- return new cricket::WebRtcMediaEngine2(adm, adm_sc, encoder_factory,
+ return new cricket::WebRtcMediaEngine2(adm, encoder_factory,
decoder_factory);
}
-WRME_EXPORT
void DestroyWebRtcMediaEngine(cricket::MediaEngineInterface* media_engine) {
delete media_engine;
}
@@ -69,13 +63,9 @@ namespace cricket {
// ChannelManager.
MediaEngineInterface* WebRtcMediaEngineFactory::Create(
webrtc::AudioDeviceModule* adm,
- webrtc::AudioDeviceModule* adm_sc,
WebRtcVideoEncoderFactory* encoder_factory,
WebRtcVideoDecoderFactory* decoder_factory) {
- return CreateWebRtcMediaEngine(adm, adm_sc, encoder_factory, decoder_factory);
+ return CreateWebRtcMediaEngine(adm, encoder_factory, decoder_factory);
}
} // namespace cricket
-
-#endif // defined(LIBPEERCONNECTION_LIB) ||
- // defined(LIBPEERCONNECTION_IMPLEMENTATION)
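
With the LIBPEERCONNECTION shared-library split gone, CreateWebRtcMediaEngine() loses both its WRME_EXPORT annotation and the separate soundclip AudioDeviceModule. A sketch of the updated factory call; passing null encoder/decoder factories and the wrapper name are illustrative assumptions:

    #include "talk/media/webrtc/webrtcmediaengine.h"

    // Illustrative wrapper, not part of the patch.
    cricket::MediaEngineInterface* CreateEngine(webrtc::AudioDeviceModule* adm) {
      // Before this change the factory also took a second "adm_sc" soundclip ADM.
      return cricket::WebRtcMediaEngineFactory::Create(
          adm, /*encoder_factory=*/nullptr, /*decoder_factory=*/nullptr);
    }
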
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcmediaengine.h b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcmediaengine.h
index 48da1763b59..0fd8a74eb66 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcmediaengine.h
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcmediaengine.h
@@ -29,7 +29,6 @@
#define TALK_MEDIA_WEBRTCMEDIAENGINE_H_
#include "talk/media/base/mediaengine.h"
-#include "talk/media/webrtc/webrtcexport.h"
namespace webrtc {
class AudioDeviceModule;
@@ -40,162 +39,17 @@ class WebRtcVideoDecoderFactory;
class WebRtcVideoEncoderFactory;
}
-#if !defined(LIBPEERCONNECTION_LIB) && \
- !defined(LIBPEERCONNECTION_IMPLEMENTATION)
-
-WRME_EXPORT
-cricket::MediaEngineInterface* CreateWebRtcMediaEngine(
- webrtc::AudioDeviceModule* adm, webrtc::AudioDeviceModule* adm_sc,
- cricket::WebRtcVideoEncoderFactory* encoder_factory,
- cricket::WebRtcVideoDecoderFactory* decoder_factory);
-
-WRME_EXPORT
-void DestroyWebRtcMediaEngine(cricket::MediaEngineInterface* media_engine);
-
-#endif // !defined(LIBPEERCONNECTION_LIB) &&
- // !defined(LIBPEERCONNECTION_IMPLEMENTATION)
-
namespace cricket {
class WebRtcMediaEngineFactory {
public:
-#if !defined(LIBPEERCONNECTION_LIB) && \
- !defined(LIBPEERCONNECTION_IMPLEMENTATION)
-// A bare Create() isn't supported when using the delegating media
-// engine.
-#else
static MediaEngineInterface* Create();
-#endif // !defined(LIBPEERCONNECTION_LIB) &&
- // !defined(LIBPEERCONNECTION_IMPLEMENTATION)
static MediaEngineInterface* Create(
webrtc::AudioDeviceModule* adm,
- webrtc::AudioDeviceModule* adm_sc,
WebRtcVideoEncoderFactory* encoder_factory,
WebRtcVideoDecoderFactory* decoder_factory);
};
} // namespace cricket
-
-#if !defined(LIBPEERCONNECTION_LIB) && \
- !defined(LIBPEERCONNECTION_IMPLEMENTATION)
-
-namespace cricket {
-
-// TODO(pthacther): Move this code into webrtcmediaengine.cc once
-// Chrome compiles it. Right now it relies on only the .h file.
-class DelegatingWebRtcMediaEngine : public cricket::MediaEngineInterface {
- public:
- DelegatingWebRtcMediaEngine(
- webrtc::AudioDeviceModule* adm,
- webrtc::AudioDeviceModule* adm_sc,
- WebRtcVideoEncoderFactory* encoder_factory,
- WebRtcVideoDecoderFactory* decoder_factory)
- : delegate_(CreateWebRtcMediaEngine(
- adm, adm_sc, encoder_factory, decoder_factory)) {
- }
- virtual ~DelegatingWebRtcMediaEngine() {
- DestroyWebRtcMediaEngine(delegate_);
- }
- bool Init(rtc::Thread* worker_thread) override {
- return delegate_->Init(worker_thread);
- }
- void Terminate() override { delegate_->Terminate(); }
- int GetCapabilities() override { return delegate_->GetCapabilities(); }
- VoiceMediaChannel* CreateChannel() override {
- return delegate_->CreateChannel();
- }
- VideoMediaChannel* CreateVideoChannel(
- const VideoOptions& options,
- VoiceMediaChannel* voice_media_channel) override {
- return delegate_->CreateVideoChannel(options, voice_media_channel);
- }
- SoundclipMedia* CreateSoundclip() override {
- return delegate_->CreateSoundclip();
- }
- AudioOptions GetAudioOptions() const override {
- return delegate_->GetAudioOptions();
- }
- bool SetAudioOptions(const AudioOptions& options) override {
- return delegate_->SetAudioOptions(options);
- }
- bool SetAudioDelayOffset(int offset) override {
- return delegate_->SetAudioDelayOffset(offset);
- }
- bool SetDefaultVideoEncoderConfig(const VideoEncoderConfig& config) override {
- return delegate_->SetDefaultVideoEncoderConfig(config);
- }
- bool SetSoundDevices(const Device* in_device,
- const Device* out_device) override {
- return delegate_->SetSoundDevices(in_device, out_device);
- }
- bool GetOutputVolume(int* level) override {
- return delegate_->GetOutputVolume(level);
- }
- bool SetOutputVolume(int level) override {
- return delegate_->SetOutputVolume(level);
- }
- int GetInputLevel() override { return delegate_->GetInputLevel(); }
- bool SetLocalMonitor(bool enable) override {
- return delegate_->SetLocalMonitor(enable);
- }
- const std::vector<AudioCodec>& audio_codecs() override {
- return delegate_->audio_codecs();
- }
- const std::vector<RtpHeaderExtension>& audio_rtp_header_extensions()
- override {
- return delegate_->audio_rtp_header_extensions();
- }
- const std::vector<VideoCodec>& video_codecs() override {
- return delegate_->video_codecs();
- }
- const std::vector<RtpHeaderExtension>& video_rtp_header_extensions()
- override {
- return delegate_->video_rtp_header_extensions();
- }
- void SetVoiceLogging(int min_sev, const char* filter) override {
- delegate_->SetVoiceLogging(min_sev, filter);
- }
- void SetVideoLogging(int min_sev, const char* filter) override {
- delegate_->SetVideoLogging(min_sev, filter);
- }
- bool StartAecDump(rtc::PlatformFile file) override {
- return delegate_->StartAecDump(file);
- }
- bool RegisterVoiceProcessor(uint32 ssrc,
- VoiceProcessor* video_processor,
- MediaProcessorDirection direction) override {
- return delegate_->RegisterVoiceProcessor(ssrc, video_processor, direction);
- }
- bool UnregisterVoiceProcessor(uint32 ssrc,
- VoiceProcessor* video_processor,
- MediaProcessorDirection direction) override {
- return delegate_->UnregisterVoiceProcessor(ssrc, video_processor,
- direction);
- }
- virtual sigslot::repeater2<VideoCapturer*, CaptureState>&
- SignalVideoCaptureStateChange() {
- return delegate_->SignalVideoCaptureStateChange();
- }
-
- private:
- cricket::MediaEngineInterface* delegate_;
-};
-
-// Used by PeerConnectionFactory to create a media engine passed into
-// ChannelManager.
-MediaEngineInterface* WebRtcMediaEngineFactory::Create(
- webrtc::AudioDeviceModule* adm,
- webrtc::AudioDeviceModule* adm_sc,
- WebRtcVideoEncoderFactory* encoder_factory,
- WebRtcVideoDecoderFactory* decoder_factory) {
- return new cricket::DelegatingWebRtcMediaEngine(
- adm, adm_sc, encoder_factory, decoder_factory);
-}
-
-} // namespace cricket
-
-#endif // !defined(LIBPEERCONNECTION_LIB) &&
- // !defined(LIBPEERCONNECTION_IMPLEMENTATION)
-
#endif // TALK_MEDIA_WEBRTCMEDIAENGINE_H_
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender.cc
index 89772884c95..e3178607629 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender.cc
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender.cc
@@ -41,7 +41,7 @@ class PassthroughStream: public webrtc::VideoRenderCallback {
virtual ~PassthroughStream() {
}
virtual int32_t RenderFrame(const uint32_t stream_id,
- const webrtc::I420VideoFrame& videoFrame) {
+ const webrtc::VideoFrame& videoFrame) {
rtc::CritScope cs(&stream_critical_);
// Send frame for rendering directly
if (running_ && renderer_) {
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender.h b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender.h
index b1e4fd3380a..685cfe881c2 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender.h
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender.h
@@ -161,12 +161,12 @@ class WebRtcPassthroughRender : public webrtc::VideoRender {
}
int32_t SetStartImage(const uint32_t stream_id,
- const webrtc::I420VideoFrame& videoFrame) override {
+ const webrtc::VideoFrame& videoFrame) override {
return -1;
}
int32_t SetTimeoutImage(const uint32_t stream_id,
- const webrtc::I420VideoFrame& videoFrame,
+ const webrtc::VideoFrame& videoFrame,
const uint32_t timeout) override {
return -1;
}
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender_unittest.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender_unittest.cc
index 9bbba0e340c..65eed05f8ed 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcpassthroughrender_unittest.cc
@@ -44,7 +44,7 @@ class WebRtcPassthroughRenderTest : public testing::Test {
}
virtual int32_t RenderFrame(const uint32_t stream_id,
- const webrtc::I420VideoFrame& videoFrame) {
+ const webrtc::VideoFrame& videoFrame) {
++frame_num_;
LOG(INFO) << "RenderFrame stream_id: " << stream_id
<< " frame_num: " << frame_num_;
@@ -121,7 +121,7 @@ TEST_F(WebRtcPassthroughRenderTest, Streams) {
}
TEST_F(WebRtcPassthroughRenderTest, Renderer) {
- webrtc::I420VideoFrame frame;
+ webrtc::VideoFrame frame;
const int stream_id1 = 1234;
const int stream_id2 = 5678;
const int stream_id3 = 9012; // A stream that doesn't exist.
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideocapturer.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideocapturer.cc
index 285ed01966b..f8c373db36b 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideocapturer.cc
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideocapturer.cc
@@ -129,17 +129,19 @@ static bool FormatToCapability(const VideoFormat& format,
WebRtcVideoCapturer::WebRtcVideoCapturer()
: factory_(new WebRtcVcmFactory),
- module_(NULL),
+ module_(nullptr),
captured_frames_(0),
- start_thread_(nullptr) {
+ start_thread_(nullptr),
+ async_invoker_(nullptr) {
set_frame_factory(new WebRtcVideoFrameFactory());
}
WebRtcVideoCapturer::WebRtcVideoCapturer(WebRtcVcmFactoryInterface* factory)
: factory_(factory),
- module_(NULL),
+ module_(nullptr),
captured_frames_(0),
- start_thread_(nullptr) {
+ start_thread_(nullptr),
+ async_invoker_(nullptr) {
set_frame_factory(new WebRtcVideoFrameFactory());
}
@@ -200,15 +202,12 @@ bool WebRtcVideoCapturer::Init(const Device& device) {
}
}
factory_->DestroyDeviceInfo(info);
-// TODO(fischman): Remove the following check
-// when capabilities for iOS are implemented
-// https://code.google.com/p/webrtc/issues/detail?id=2968
-#if !defined(IOS)
+
if (supported.empty()) {
LOG(LS_ERROR) << "Failed to find usable formats for id: " << device.id;
return false;
}
-#endif
+
module_ = factory_->Create(0, vcm_id);
if (!module_) {
LOG(LS_ERROR) << "Failed to create capturer for id: " << device.id;
@@ -284,16 +283,17 @@ CaptureState WebRtcVideoCapturer::Start(const VideoFormat& capture_format) {
LOG(LS_ERROR) << "The capturer has not been initialized";
return CS_NO_DEVICE;
}
-
- rtc::CritScope cs(&critical_section_stopping_);
- if (IsRunning()) {
+ if (start_thread_) {
LOG(LS_ERROR) << "The capturer is already running";
+ DCHECK(start_thread_->IsCurrent())
+ << "Trying to start capturer on different threads";
return CS_FAILED;
}
- DCHECK(!start_thread_);
-
start_thread_ = rtc::Thread::Current();
+ DCHECK(!async_invoker_);
+ async_invoker_.reset(new rtc::AsyncInvoker());
+ captured_frames_ = 0;
SetCaptureFormat(&capture_format);
@@ -303,44 +303,50 @@ CaptureState WebRtcVideoCapturer::Start(const VideoFormat& capture_format) {
return CS_FAILED;
}
- std::string camera_id(GetId());
uint32 start = rtc::Time();
module_->RegisterCaptureDataCallback(*this);
if (module_->StartCapture(cap) != 0) {
- LOG(LS_ERROR) << "Camera '" << camera_id << "' failed to start";
+ LOG(LS_ERROR) << "Camera '" << GetId() << "' failed to start";
+ module_->DeRegisterCaptureDataCallback();
+ async_invoker_.reset();
+ SetCaptureFormat(nullptr);
start_thread_ = nullptr;
return CS_FAILED;
}
- LOG(LS_INFO) << "Camera '" << camera_id << "' started with format "
+ LOG(LS_INFO) << "Camera '" << GetId() << "' started with format "
<< capture_format.ToString() << ", elapsed time "
<< rtc::TimeSince(start) << " ms";
- captured_frames_ = 0;
SetCaptureState(CS_RUNNING);
return CS_STARTING;
}
-// Critical section blocks Stop from shutting down during callbacks from capture
-// thread to OnIncomingCapturedFrame. Note that the crit is try-locked in
-// OnFrameCaptured, as the lock ordering between this and the system component
-// controlling the camera is reversed: system frame -> OnIncomingCapturedFrame;
-// Stop -> system stop camera).
void WebRtcVideoCapturer::Stop() {
- rtc::CritScope cs(&critical_section_stopping_);
+ if (!start_thread_) {
+ LOG(LS_ERROR) << "The capturer is already stopped";
+ return;
+ }
+ DCHECK(start_thread_);
+ DCHECK(start_thread_->IsCurrent());
+ DCHECK(async_invoker_);
if (IsRunning()) {
- DCHECK(start_thread_);
- rtc::Thread::Current()->Clear(this);
+ // The module is responsible for OnIncomingCapturedFrame being called, if
+ // we stop it we will get no further callbacks.
module_->StopCapture();
- module_->DeRegisterCaptureDataCallback();
-
- // TODO(juberti): Determine if the VCM exposes any drop stats we can use.
- double drop_ratio = 0.0;
- std::string camera_id(GetId());
- LOG(LS_INFO) << "Camera '" << camera_id << "' stopped after capturing "
- << captured_frames_ << " frames and dropping "
- << drop_ratio << "%";
}
+ module_->DeRegisterCaptureDataCallback();
+
+ // TODO(juberti): Determine if the VCM exposes any drop stats we can use.
+ double drop_ratio = 0.0;
+ LOG(LS_INFO) << "Camera '" << GetId() << "' stopped after capturing "
+ << captured_frames_ << " frames and dropping "
+ << drop_ratio << "%";
+
+ // Clear any pending async invokes (that OnIncomingCapturedFrame may have
+ // caused).
+ async_invoker_.reset();
+
SetCaptureFormat(NULL);
start_thread_ = nullptr;
}
@@ -364,38 +370,23 @@ bool WebRtcVideoCapturer::GetPreferredFourccs(
void WebRtcVideoCapturer::OnIncomingCapturedFrame(
const int32_t id,
- const webrtc::I420VideoFrame& sample) {
- // This would be a normal CritScope, except that it's possible that:
- // (1) whatever system component producing this frame has taken a lock, and
- // (2) Stop() probably calls back into that system component, which may take
- // the same lock. Due to the reversed order, we have to try-lock in order to
- // avoid a potential deadlock. Besides, if we can't enter because we're
- // stopping, we may as well drop the frame.
- rtc::TryCritScope cs(&critical_section_stopping_);
- if (!cs.locked() || !IsRunning()) {
- // Capturer has been stopped or is in the process of stopping.
- return;
- }
-
- ++captured_frames_;
- // Log the size and pixel aspect ratio of the first captured frame.
- if (1 == captured_frames_) {
- LOG(LS_INFO) << "Captured frame size "
- << sample.width() << "x" << sample.height()
- << ". Expected format " << GetCaptureFormat()->ToString();
- }
-
+ const webrtc::VideoFrame& sample) {
+ // This can only happen between Start() and Stop().
+ DCHECK(start_thread_);
+ DCHECK(async_invoker_);
if (start_thread_->IsCurrent()) {
- SignalFrameCapturedOnStartThread(&sample);
+ SignalFrameCapturedOnStartThread(sample);
} else {
// This currently happens on with at least VideoCaptureModuleV4L2 and
// possibly other implementations of WebRTC's VideoCaptureModule.
// In order to maintain the threading contract with the upper layers and
// consistency with other capturers such as in Chrome, we need to do a
// thread hop.
- start_thread_->Invoke<void>(
+ // Note that Stop() can cause the async invoke call to be cancelled.
+ async_invoker_->AsyncInvoke<void>(start_thread_,
+ // Note that this results in a shallow copying of the frame.
rtc::Bind(&WebRtcVideoCapturer::SignalFrameCapturedOnStartThread,
- this, &sample));
+ this, sample));
}
}
@@ -405,23 +396,35 @@ void WebRtcVideoCapturer::OnCaptureDelayChanged(const int32_t id,
}
void WebRtcVideoCapturer::SignalFrameCapturedOnStartThread(
- const webrtc::I420VideoFrame* frame) {
+ const webrtc::VideoFrame frame) {
+ // This can only happen between Start() and Stop().
+ DCHECK(start_thread_);
DCHECK(start_thread_->IsCurrent());
+ DCHECK(async_invoker_);
+
+ ++captured_frames_;
+ // Log the size and pixel aspect ratio of the first captured frame.
+ if (1 == captured_frames_) {
+ LOG(LS_INFO) << "Captured frame size "
+ << frame.width() << "x" << frame.height()
+ << ". Expected format " << GetCaptureFormat()->ToString();
+ }
+
// Signal down stream components on captured frame.
// The CapturedFrame class doesn't support planes. We have to ExtractBuffer
// to one block for it.
size_t length =
- webrtc::CalcBufferSize(webrtc::kI420, frame->width(), frame->height());
+ webrtc::CalcBufferSize(webrtc::kI420, frame.width(), frame.height());
capture_buffer_.resize(length);
// TODO(magjed): Refactor the WebRtcCapturedFrame to avoid memory copy or
// take over ownership of the buffer held by |frame| if that's possible.
- webrtc::ExtractBuffer(*frame, length, &capture_buffer_[0]);
- WebRtcCapturedFrame webrtc_frame(*frame, &capture_buffer_[0], length);
+ webrtc::ExtractBuffer(frame, length, &capture_buffer_[0]);
+ WebRtcCapturedFrame webrtc_frame(frame, &capture_buffer_[0], length);
SignalFrameCaptured(this, &webrtc_frame);
}
// WebRtcCapturedFrame
-WebRtcCapturedFrame::WebRtcCapturedFrame(const webrtc::I420VideoFrame& sample,
+WebRtcCapturedFrame::WebRtcCapturedFrame(const webrtc::VideoFrame& sample,
void* buffer,
size_t length) {
width = sample.width();
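
The webrtcvideocapturer.cc change above drops the try-locked critical section and instead owns an rtc::AsyncInvoker between Start() and Stop(), hopping capture-thread frames back to the start thread; resetting the invoker in Stop() cancels any pending hops. A minimal sketch of that pattern follows — the CapturerSketch class and frame_id parameter are hypothetical, and only the calls visible in the hunks above (rtc::Thread, rtc::AsyncInvoker, rtc::Bind) are used.

// Minimal sketch of the Start()/Stop()/AsyncInvoke pattern from this diff.
#include "webrtc/base/asyncinvoker.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread.h"

class CapturerSketch {
 public:
  void Start() {
    start_thread_ = rtc::Thread::Current();
    async_invoker_.reset(new rtc::AsyncInvoker());
  }
  void Stop() {
    // Destroying the invoker cancels invokes still queued by the capture
    // thread, so no frame callback can outlive Stop().
    async_invoker_.reset();
    start_thread_ = nullptr;
  }
  // Called by the capture module, possibly on a different thread. The real
  // code DCHECKs that this only happens between Start() and Stop().
  void OnFrame(int frame_id) {
    if (start_thread_->IsCurrent()) {
      DeliverOnStartThread(frame_id);
    } else {
      // Thread hop back to the thread that called Start().
      async_invoker_->AsyncInvoke<void>(
          start_thread_,
          rtc::Bind(&CapturerSketch::DeliverOnStartThread, this, frame_id));
    }
  }

 private:
  void DeliverOnStartThread(int frame_id) { /* signal downstream here */ }

  rtc::Thread* start_thread_ = nullptr;
  rtc::scoped_ptr<rtc::AsyncInvoker> async_invoker_;
};
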
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideocapturer.h b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideocapturer.h
index 56896f9cd69..fe545ad7474 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideocapturer.h
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideocapturer.h
@@ -35,8 +35,9 @@
#include "talk/media/base/videocapturer.h"
#include "talk/media/webrtc/webrtcvideoframe.h"
-#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/asyncinvoker.h"
#include "webrtc/base/messagehandler.h"
+#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_capture/include/video_capture.h"
@@ -81,7 +82,7 @@ class WebRtcVideoCapturer : public VideoCapturer,
private:
// Callback when a frame is captured by camera.
virtual void OnIncomingCapturedFrame(const int32_t id,
- const webrtc::I420VideoFrame& frame);
+ const webrtc::VideoFrame& frame);
virtual void OnCaptureDelayChanged(const int32_t id,
const int32_t delay);
@@ -91,7 +92,7 @@ class WebRtcVideoCapturer : public VideoCapturer,
// directly from OnIncomingCapturedFrame.
// TODO(tommi): Remove this workaround when we've updated the WebRTC capturers
// to follow the same contract.
- void SignalFrameCapturedOnStartThread(const webrtc::I420VideoFrame* frame);
+ void SignalFrameCapturedOnStartThread(const webrtc::VideoFrame frame);
rtc::scoped_ptr<WebRtcVcmFactoryInterface> factory_;
webrtc::VideoCaptureModule* module_;
@@ -99,14 +100,14 @@ class WebRtcVideoCapturer : public VideoCapturer,
std::vector<uint8_t> capture_buffer_;
rtc::Thread* start_thread_; // Set in Start(), unset in Stop();
- // Critical section to avoid Stop during an OnIncomingCapturedFrame callback.
- rtc::CriticalSection critical_section_stopping_;
+ rtc::scoped_ptr<rtc::AsyncInvoker> async_invoker_;
};
struct WebRtcCapturedFrame : public CapturedFrame {
public:
- WebRtcCapturedFrame(const webrtc::I420VideoFrame& frame,
- void* buffer, size_t length);
+ WebRtcCapturedFrame(const webrtc::VideoFrame& frame,
+ void* buffer,
+ size_t length);
};
} // namespace cricket
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine.cc
deleted file mode 100644
index 6ff09a3c8a1..00000000000
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine.cc
+++ /dev/null
@@ -1 +0,0 @@
-// TODO(pbos): Remove this file when it's no longer built in Chromium.
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2.cc
index b296b194556..0a2152e5e4c 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2.cc
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2.cc
@@ -36,7 +36,6 @@
#include "talk/media/base/videorenderer.h"
#include "talk/media/webrtc/constants.h"
#include "talk/media/webrtc/simulcast.h"
-#include "talk/media/webrtc/webrtcvideocapturer.h"
#include "talk/media/webrtc/webrtcvideoencoderfactory.h"
#include "talk/media/webrtc/webrtcvideoframe.h"
#include "talk/media/webrtc/webrtcvoiceengine.h"
@@ -44,6 +43,7 @@
#include "webrtc/base/logging.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/call.h"
+#include "webrtc/modules/video_coding/codecs/h264/include/h264.h"
#include "webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h"
#include "webrtc/system_wrappers/interface/field_trial.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
@@ -158,6 +158,10 @@ bool CodecIsInternallySupported(const std::string& codec_name) {
webrtc::field_trial::FindFullName("WebRTC-SupportVP9");
return group_name == "Enabled" || group_name == "EnabledByFlag";
}
+ if (CodecNamesEq(codec_name, kH264CodecName)) {
+ return webrtc::H264Encoder::IsSupported() &&
+ webrtc::H264Decoder::IsSupported();
+ }
return false;
}
@@ -317,8 +321,6 @@ static const int kDefaultQpMax = 56;
static const int kDefaultRtcpReceiverReportSsrc = 1;
-const char kH264CodecName[] = "H264";
-
const int kMinBandwidthBps = 30000;
const int kStartBandwidthBps = 300000;
const int kMaxBandwidthBps = 2000000;
@@ -332,6 +334,10 @@ std::vector<VideoCodec> DefaultVideoCodecList() {
}
codecs.push_back(MakeVideoCodecWithDefaultFeedbackParams(kDefaultVp8PlType,
kVp8CodecName));
+ if (CodecIsInternallySupported(kH264CodecName)) {
+ codecs.push_back(MakeVideoCodecWithDefaultFeedbackParams(kDefaultH264PlType,
+ kH264CodecName));
+ }
codecs.push_back(
VideoCodec::CreateRtxCodec(kDefaultRtxVp8PlType, kDefaultVp8PlType));
codecs.push_back(VideoCodec(kDefaultRedPlType, kRedCodecName));
@@ -530,8 +536,7 @@ void DefaultUnsignalledSsrcHandler::SetDefaultRenderer(
}
WebRtcVideoEngine2::WebRtcVideoEngine2(WebRtcVoiceEngine* voice_engine)
- : worker_thread_(NULL),
- voice_engine_(voice_engine),
+ : voice_engine_(voice_engine),
initialized_(false),
call_factory_(&default_call_factory_),
external_decoder_factory_(NULL),
@@ -551,10 +556,6 @@ WebRtcVideoEngine2::WebRtcVideoEngine2(WebRtcVoiceEngine* voice_engine)
WebRtcVideoEngine2::~WebRtcVideoEngine2() {
LOG(LS_INFO) << "WebRtcVideoEngine2::~WebRtcVideoEngine2";
-
- if (initialized_) {
- Terminate();
- }
}
void WebRtcVideoEngine2::SetCallFactory(WebRtcCallFactory* call_factory) {
@@ -562,19 +563,9 @@ void WebRtcVideoEngine2::SetCallFactory(WebRtcCallFactory* call_factory) {
call_factory_ = call_factory;
}
-bool WebRtcVideoEngine2::Init(rtc::Thread* worker_thread) {
+void WebRtcVideoEngine2::Init() {
LOG(LS_INFO) << "WebRtcVideoEngine2::Init";
- worker_thread_ = worker_thread;
- DCHECK(worker_thread_ != NULL);
-
initialized_ = true;
- return true;
-}
-
-void WebRtcVideoEngine2::Terminate() {
- LOG(LS_INFO) << "WebRtcVideoEngine2::Terminate";
-
- initialized_ = false;
}
int WebRtcVideoEngine2::GetCapabilities() { return VIDEO_RECV | VIDEO_SEND; }
@@ -1181,7 +1172,7 @@ bool WebRtcVideoChannel2::AddRecvStream(const StreamParams& sp,
}
receive_streams_[ssrc] = new WebRtcVideoReceiveStream(
- call_.get(), sp.ssrcs, external_decoder_factory_, default_stream, config,
+ call_.get(), sp, external_decoder_factory_, default_stream, config,
recv_codecs_);
return true;
@@ -1660,6 +1651,7 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
const StreamParams& sp,
const std::vector<webrtc::RtpExtension>& rtp_extensions)
: ssrcs_(sp.ssrcs),
+ ssrc_groups_(sp.ssrc_groups),
call_(call),
external_encoder_factory_(external_encoder_factory),
stream_(NULL),
@@ -1694,7 +1686,7 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::~WebRtcVideoSendStream() {
DestroyVideoEncoder(&allocated_encoder_);
}
-static void CreateBlackFrame(webrtc::I420VideoFrame* video_frame,
+static void CreateBlackFrame(webrtc::VideoFrame* video_frame,
int width,
int height) {
video_frame->CreateEmptyFrame(width, height, width, (width + 1) / 2,
@@ -1711,8 +1703,8 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::InputFrame(
VideoCapturer* capturer,
const VideoFrame* frame) {
TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::InputFrame");
- webrtc::I420VideoFrame video_frame(frame->GetVideoFrameBuffer(), 0, 0,
- frame->GetVideoRotation());
+ webrtc::VideoFrame video_frame(frame->GetVideoFrameBuffer(), 0, 0,
+ frame->GetVideoRotation());
rtc::CritScope cs(&lock_);
if (stream_ == NULL) {
// Frame input before send codecs are configured, dropping frame.
@@ -1759,7 +1751,7 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer(
if (capturer == NULL) {
if (stream_ != NULL) {
LOG(LS_VERBOSE) << "Disabling capturer, sending black frame.";
- webrtc::I420VideoFrame black_frame;
+ webrtc::VideoFrame black_frame;
CreateBlackFrame(&black_frame, last_dimensions_.width,
last_dimensions_.height);
@@ -1891,6 +1883,9 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::CreateVideoEncoder(
} else if (type == webrtc::kVideoCodecVP9) {
return AllocatedEncoder(
webrtc::VideoEncoder::Create(webrtc::VideoEncoder::kVp9), type, false);
+ } else if (type == webrtc::kVideoCodecH264) {
+ return AllocatedEncoder(
+ webrtc::VideoEncoder::Create(webrtc::VideoEncoder::kH264), type, false);
}
// This shouldn't happen, we should not be trying to create something we don't
@@ -2116,6 +2111,7 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo() {
}
}
}
+ info.ssrc_groups = ssrc_groups_;
info.framerate_input = stats.input_frame_rate;
info.framerate_sent = stats.encode_frame_rate;
info.avg_encode_ms = stats.avg_encode_time_ms;
@@ -2218,13 +2214,14 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::RecreateWebRtcStream() {
WebRtcVideoChannel2::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
webrtc::Call* call,
- const std::vector<uint32>& ssrcs,
+ const StreamParams& sp,
WebRtcVideoDecoderFactory* external_decoder_factory,
bool default_stream,
const webrtc::VideoReceiveStream::Config& config,
const std::vector<VideoCodecSettings>& recv_codecs)
: call_(call),
- ssrcs_(ssrcs),
+ ssrcs_(sp.ssrcs),
+ ssrc_groups_(sp.ssrc_groups),
stream_(NULL),
default_stream_(default_stream),
config_(config),
@@ -2297,6 +2294,11 @@ WebRtcVideoChannel2::WebRtcVideoReceiveStream::CreateOrReuseVideoDecoder(
webrtc::VideoDecoder::Create(webrtc::VideoDecoder::kVp9), type, false);
}
+ if (type == webrtc::kVideoCodecH264) {
+ return AllocatedDecoder(
+ webrtc::VideoDecoder::Create(webrtc::VideoDecoder::kH264), type, false);
+ }
+
// This shouldn't happen, we should not be trying to create something we don't
// support.
DCHECK(false);
@@ -2381,7 +2383,7 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::ClearDecoders(
}
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RenderFrame(
- const webrtc::I420VideoFrame& frame,
+ const webrtc::VideoFrame& frame,
int time_to_render_ms) {
rtc::CritScope crit(&renderer_lock_);
@@ -2448,6 +2450,7 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetSize(int width,
VideoReceiverInfo
WebRtcVideoChannel2::WebRtcVideoReceiveStream::GetVideoReceiverInfo() {
VideoReceiverInfo info;
+ info.ssrc_groups = ssrc_groups_;
info.add_ssrc(config_.rtp.remote_ssrc);
webrtc::VideoReceiveStream::Stats stats = stream_->GetStats();
info.bytes_rcvd = stats.rtp_stats.transmitted.payload_bytes +
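
The webrtcvideoengine2.cc hunks above gate H.264 on runtime support and, when available, append it to the default codec list. A condensed sketch of that gating is shown below; the CodecEntry struct and payload-type numbers are placeholders for illustration (the real code builds cricket::VideoCodec entries via MakeVideoCodecWithDefaultFeedbackParams using kDefaultVp8PlType / kDefaultH264PlType), while the encoder/decoder support checks are exactly those added in this diff.

#include <string>
#include <vector>
#include "webrtc/modules/video_coding/codecs/h264/include/h264.h"

// Placeholder stand-in for cricket::VideoCodec in this sketch.
struct CodecEntry { int payload_type; std::string name; };

// Mirrors CodecIsInternallySupported() for H264: advertised only when both
// the encoder and the decoder report runtime support.
bool H264InternallySupported() {
  return webrtc::H264Encoder::IsSupported() &&
         webrtc::H264Decoder::IsSupported();
}

std::vector<CodecEntry> DefaultCodecsSketch() {
  std::vector<CodecEntry> codecs;
  codecs.push_back({100, "VP8"});          // Placeholder payload type.
  if (H264InternallySupported())
    codecs.push_back({107, "H264"});       // Placeholder payload type.
  return codecs;
}
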
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2.h b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2.h
index 5ee2c1e185d..e3059758400 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2.h
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2.h
@@ -123,8 +123,7 @@ class WebRtcVideoEngine2 : public sigslot::has_slots<> {
void SetCallFactory(WebRtcCallFactory* call_factory);
// Basic video engine implementation.
- bool Init(rtc::Thread* worker_thread);
- void Terminate();
+ void Init();
int GetCapabilities();
bool SetDefaultEncoderConfig(const VideoEncoderConfig& config);
@@ -158,7 +157,6 @@ class WebRtcVideoEngine2 : public sigslot::has_slots<> {
private:
std::vector<VideoCodec> GetSupportedCodecs() const;
- rtc::Thread* worker_thread_;
WebRtcVoiceEngine* voice_engine_;
std::vector<VideoCodec> video_codecs_;
std::vector<RtpHeaderExtension> rtp_header_extensions_;
@@ -378,6 +376,7 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
EXCLUSIVE_LOCKS_REQUIRED(lock_);
const std::vector<uint32> ssrcs_;
+ const std::vector<SsrcGroup> ssrc_groups_;
webrtc::Call* const call_;
WebRtcVideoEncoderFactory* const external_encoder_factory_
GUARDED_BY(lock_);
@@ -402,8 +401,8 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
class WebRtcVideoReceiveStream : public webrtc::VideoRenderer {
public:
WebRtcVideoReceiveStream(
- webrtc::Call*,
- const std::vector<uint32>& ssrcs,
+ webrtc::Call* call,
+ const StreamParams& sp,
WebRtcVideoDecoderFactory* external_decoder_factory,
bool default_stream,
const webrtc::VideoReceiveStream::Config& config,
@@ -417,7 +416,7 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
void SetRecvCodecs(const std::vector<VideoCodecSettings>& recv_codecs);
void SetRtpExtensions(const std::vector<webrtc::RtpExtension>& extensions);
- void RenderFrame(const webrtc::I420VideoFrame& frame,
+ void RenderFrame(const webrtc::VideoFrame& frame,
int time_to_render_ms) override;
bool IsTextureSupported() const override;
bool IsDefaultStream() const;
@@ -449,6 +448,7 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
webrtc::Call* const call_;
const std::vector<uint32> ssrcs_;
+ const std::vector<SsrcGroup> ssrc_groups_;
webrtc::VideoReceiveStream* stream_;
const bool default_stream_;
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2_unittest.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2_unittest.cc
index 21693eccc81..230ca3b2b06 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoengine2_unittest.cc
@@ -74,7 +74,7 @@ void VerifyCodecHasDefaultFeedbackParams(const cricket::VideoCodec& codec) {
cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir)));
}
-static void CreateBlackFrame(webrtc::I420VideoFrame* video_frame,
+static void CreateBlackFrame(webrtc::VideoFrame* video_frame,
int width,
int height) {
video_frame->CreateEmptyFrame(
@@ -173,13 +173,12 @@ class WebRtcVideoEngine2VoiceTest : public WebRtcVideoEngine2Test {
TEST_F(WebRtcVideoEngine2VoiceTest, ConfiguresAvSyncForFirstReceiveChannel) {
FakeCallFactory call_factory;
engine_.SetCallFactory(&call_factory);
-
voice_engine_.Init(rtc::Thread::Current());
- engine_.Init(rtc::Thread::Current());
+ engine_.Init();
rtc::scoped_ptr<VoiceMediaChannel> voice_channel(
- voice_engine_.CreateChannel());
- ASSERT_TRUE(voice_channel.get() != NULL);
+ voice_engine_.CreateChannel(cricket::AudioOptions()));
+ ASSERT_TRUE(voice_channel.get() != nullptr);
WebRtcVoiceMediaChannel* webrtc_voice_channel =
static_cast<WebRtcVoiceMediaChannel*>(voice_channel.get());
ASSERT_NE(webrtc_voice_channel->voe_channel(), -1);
@@ -187,11 +186,11 @@ TEST_F(WebRtcVideoEngine2VoiceTest, ConfiguresAvSyncForFirstReceiveChannel) {
engine_.CreateChannel(cricket::VideoOptions(), voice_channel.get()));
FakeCall* fake_call = call_factory.GetCall();
- ASSERT_TRUE(fake_call != NULL);
+ ASSERT_TRUE(fake_call != nullptr);
webrtc::Call::Config call_config = fake_call->GetConfig();
- ASSERT_TRUE(voice_engine_.voe()->engine() != NULL);
+ ASSERT_TRUE(voice_engine_.voe()->engine() != nullptr);
ASSERT_EQ(voice_engine_.voe()->engine(), call_config.voice_engine);
EXPECT_TRUE(channel->AddRecvStream(StreamParams::CreateLegacy(kSsrc)));
@@ -386,7 +385,7 @@ TEST_F(WebRtcVideoEngine2Test, CVOSetHeaderExtensionAfterCapturer) {
}
TEST_F(WebRtcVideoEngine2Test, SetSendFailsBeforeSettingCodecs) {
- engine_.Init(rtc::Thread::Current());
+ engine_.Init();
rtc::scoped_ptr<VideoMediaChannel> channel(
engine_.CreateChannel(cricket::VideoOptions(), NULL));
@@ -399,7 +398,7 @@ TEST_F(WebRtcVideoEngine2Test, SetSendFailsBeforeSettingCodecs) {
}
TEST_F(WebRtcVideoEngine2Test, GetStatsWithoutSendCodecsSetDoesNotCrash) {
- engine_.Init(rtc::Thread::Current());
+ engine_.Init();
rtc::scoped_ptr<VideoMediaChannel> channel(
engine_.CreateChannel(cricket::VideoOptions(), NULL));
EXPECT_TRUE(channel->AddSendStream(StreamParams::CreateLegacy(123)));
@@ -461,7 +460,7 @@ VideoMediaChannel* WebRtcVideoEngine2Test::SetUpForExternalEncoderFactory(
cricket::WebRtcVideoEncoderFactory* encoder_factory,
const std::vector<VideoCodec>& codecs) {
engine_.SetExternalEncoderFactory(encoder_factory);
- engine_.Init(rtc::Thread::Current());
+ engine_.Init();
VideoMediaChannel* channel =
engine_.CreateChannel(cricket::VideoOptions(), NULL);
@@ -474,7 +473,7 @@ VideoMediaChannel* WebRtcVideoEngine2Test::SetUpForExternalDecoderFactory(
cricket::WebRtcVideoDecoderFactory* decoder_factory,
const std::vector<VideoCodec>& codecs) {
engine_.SetExternalDecoderFactory(decoder_factory);
- engine_.Init(rtc::Thread::Current());
+ engine_.Init();
VideoMediaChannel* channel =
engine_.CreateChannel(cricket::VideoOptions(), NULL);
@@ -623,8 +622,7 @@ TEST_F(WebRtcVideoEngine2Test, ReportSupportedExternalCodecs) {
cricket::FakeWebRtcVideoEncoderFactory encoder_factory;
encoder_factory.AddSupportedVideoCodecType(webrtc::kVideoCodecH264, "H264");
engine_.SetExternalEncoderFactory(&encoder_factory);
-
- engine_.Init(rtc::Thread::Current());
+ engine_.Init();
std::vector<cricket::VideoCodec> codecs(engine_.codecs());
ASSERT_GE(codecs.size(), 2u);
@@ -822,7 +820,7 @@ class WebRtcVideoChannel2Test : public WebRtcVideoEngine2Test,
WebRtcVideoChannel2Test() : fake_call_(NULL), last_ssrc_(0) {}
void SetUp() override {
engine_.SetCallFactory(this);
- engine_.Init(rtc::Thread::Current());
+ engine_.Init();
channel_.reset(engine_.CreateChannel(cricket::VideoOptions(), NULL));
ASSERT_TRUE(fake_call_ != NULL) << "Call not created through factory.";
last_ssrc_ = 123;
@@ -1134,11 +1132,11 @@ TEST_F(WebRtcVideoChannel2Test, IdenticalRecvExtensionsDoesntRecreateStream) {
kRtpVideoRotationHeaderExtension, kVideoRotationId));
EXPECT_TRUE(channel_->SetRecvRtpHeaderExtensions(extensions));
- FakeVideoReceiveStream* send_stream =
+ FakeVideoReceiveStream* recv_stream =
AddRecvStream(cricket::StreamParams::CreateLegacy(123));
EXPECT_EQ(1, fake_call_->GetNumCreatedReceiveStreams());
- ASSERT_EQ(3u, send_stream->GetConfig().rtp.extensions.size());
+ ASSERT_EQ(3u, recv_stream->GetConfig().rtp.extensions.size());
// Setting the same extensions (even if in different order) shouldn't
// reallocate the stream.
@@ -1440,7 +1438,8 @@ TEST_F(WebRtcVideoChannel2Test, UsesCorrectSettingsForScreencast) {
TEST_F(WebRtcVideoChannel2Test,
ConferenceModeScreencastConfiguresTemporalLayer) {
- static const int kConferenceScreencastTemporalBitrateBps = 100000;
+ static const int kConferenceScreencastTemporalBitrateBps =
+ ScreenshareLayerConfig::GetDefault().tl0_bitrate_kbps * 1000;
VideoOptions options;
options.conference_mode.Set(true);
channel_->SetOptions(options);
@@ -1592,8 +1591,6 @@ class Vp9SettingsTest : public WebRtcVideoChannel2Test {
void TearDown() override {
// Remove references to encoder_factory_ since this will be destroyed
// before channel_ and engine_.
- engine_.Terminate();
- engine_.SetExternalEncoderFactory(nullptr);
ASSERT_TRUE(channel_->SetSendCodecs(engine_.codecs()));
}
@@ -1736,7 +1733,7 @@ TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeAndElapsedTimeCorrectly) {
EXPECT_TRUE(channel_->SetRenderer(last_ssrc_, &renderer));
EXPECT_TRUE(channel_->SetRender(true));
- webrtc::I420VideoFrame video_frame;
+ webrtc::VideoFrame video_frame;
CreateBlackFrame(&video_frame, 4, 4);
video_frame.set_timestamp(kInitialTimestamp);
// Initial NTP time is not available on the first frame, but should still be
@@ -2556,6 +2553,35 @@ TEST_F(WebRtcVideoChannel2Test,
EXPECT_TRUE(channel_->AddRecvStream(sp));
}
+TEST_F(WebRtcVideoChannel2Test, ReportsSsrcGroupsInStats) {
+ EXPECT_TRUE(channel_->SetSendCodecs(engine_.codecs()));
+
+ static const uint32_t kSenderSsrcs[] = {4, 7, 10};
+ static const uint32_t kSenderRtxSsrcs[] = {5, 8, 11};
+
+ StreamParams sender_sp = cricket::CreateSimWithRtxStreamParams(
+ "cname", MAKE_VECTOR(kSenderSsrcs), MAKE_VECTOR(kSenderRtxSsrcs));
+
+ EXPECT_TRUE(channel_->AddSendStream(sender_sp));
+
+ static const uint32_t kReceiverSsrcs[] = {3};
+ static const uint32_t kReceiverRtxSsrcs[] = {2};
+
+ StreamParams receiver_sp = cricket::CreateSimWithRtxStreamParams(
+ "cname", MAKE_VECTOR(kReceiverSsrcs), MAKE_VECTOR(kReceiverRtxSsrcs));
+ EXPECT_TRUE(channel_->AddRecvStream(receiver_sp));
+
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(&info));
+
+ ASSERT_EQ(1u, info.senders.size());
+ ASSERT_EQ(1u, info.receivers.size());
+
+ EXPECT_NE(sender_sp.ssrc_groups, receiver_sp.ssrc_groups);
+ EXPECT_EQ(sender_sp.ssrc_groups, info.senders[0].ssrc_groups);
+ EXPECT_EQ(receiver_sp.ssrc_groups, info.receivers[0].ssrc_groups);
+}
+
void WebRtcVideoChannel2Test::TestReceiverLocalSsrcConfiguration(
bool receiver_first) {
EXPECT_TRUE(channel_->SetSendCodecs(engine_.codecs()));
@@ -2603,7 +2629,7 @@ class WebRtcVideoChannel2SimulcastTest : public WebRtcVideoEngine2SimulcastTest,
void SetUp() override {
engine_.SetCallFactory(this);
- engine_.Init(rtc::Thread::Current());
+ engine_.Init();
channel_.reset(engine_.CreateChannel(VideoOptions(), NULL));
ASSERT_TRUE(fake_call_ != NULL) << "Call not created through factory.";
last_ssrc_ = 123;
@@ -2899,16 +2925,6 @@ TEST_F(WebRtcVideoEngine2SimulcastTest, DISABLED_TestAdaptToOutputFormat) {
FAIL() << "Not implemented.";
}
-TEST_F(WebRtcVideoEngine2SimulcastTest, DISABLED_TestAdaptToCpuLoad) {
- // TODO(pbos): Implement.
- FAIL() << "Not implemented.";
-}
-
-TEST_F(WebRtcVideoEngine2SimulcastTest, DISABLED_TestAdaptToCpuLoadDisabled) {
- // TODO(pbos): Implement.
- FAIL() << "Not implemented.";
-}
-
TEST_F(WebRtcVideoEngine2SimulcastTest,
DISABLED_TestAdaptWithCpuOveruseObserver) {
// TODO(pbos): Implement.
@@ -2929,7 +2945,7 @@ TEST_F(WebRtcVideoEngine2SimulcastTest,
}
TEST_F(WebRtcVideoEngine2SimulcastTest,
- DISABLED_DontUseSimulcastAdapterOnNoneVp8Factory) {
+ DISABLED_DontUseSimulcastAdapterOnNonVp8Factory) {
// TODO(pbos): Implement.
FAIL() << "Not implemented.";
}
@@ -2969,8 +2985,7 @@ TEST_F(WebRtcVideoChannel2SimulcastTest, DISABLED_SimulcastSend_480x300) {
FAIL() << "Not implemented.";
}
-TEST_F(WebRtcVideoChannel2SimulcastTest,
- DISABLED_DISABLED_SimulcastSend_480x270) {
+TEST_F(WebRtcVideoChannel2SimulcastTest, DISABLED_SimulcastSend_480x270) {
// TODO(pbos): Implement.
FAIL() << "Not implemented.";
}
@@ -2985,14 +3000,6 @@ TEST_F(WebRtcVideoChannel2SimulcastTest, DISABLED_SimulcastSend_320x180) {
FAIL() << "Not implemented.";
}
-// Test reset send codec with simulcast.
-// Disabled per b/6773425
-TEST_F(WebRtcVideoChannel2SimulcastTest,
- DISABLED_DISABLED_SimulcastResetSendCodec) {
- // TODO(pbos): Implement.
- FAIL() << "Not implemented.";
-}
-
// Test simulcast streams are decodeable with expected sizes.
TEST_F(WebRtcVideoChannel2SimulcastTest, DISABLED_SimulcastStreams) {
// TODO(pbos): Implement.
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe.h b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe.h
index 06ac52bfb2b..790047277dd 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe.h
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe.h
@@ -131,7 +131,9 @@ class WebRtcVideoFrame : public VideoFrame {
const VideoFrame* GetCopyWithRotationApplied() const override;
protected:
- void SetRotation(webrtc::VideoRotation rotation) { rotation_ = rotation; }
+ void SetRotation(webrtc::VideoRotation rotation) override {
+ rotation_ = rotation;
+ }
private:
virtual VideoFrame* CreateEmptyFrame(int w, int h, size_t pixel_width,
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe_unittest.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe_unittest.cc
index 7386f51e7b9..daa8ffa46ef 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe_unittest.cc
@@ -29,6 +29,7 @@
#include "talk/media/base/videoframe_unittest.h"
#include "talk/media/webrtc/webrtcvideoframe.h"
+#include "webrtc/test/fake_texture_frame.h"
namespace {
@@ -297,10 +298,11 @@ TEST_F(WebRtcVideoFrameTest, InitRotated90DontApplyRotation) {
}
TEST_F(WebRtcVideoFrameTest, TextureInitialValues) {
- void* dummy_handle = reinterpret_cast<void*>(0x1);
- webrtc::TextureBuffer* buffer =
- new rtc::RefCountedObject<webrtc::TextureBuffer>(dummy_handle, 640, 480,
- rtc::Callback0<void>());
+ webrtc::test::FakeNativeHandle* dummy_handle =
+ new webrtc::test::FakeNativeHandle();
+ webrtc::NativeHandleBuffer* buffer =
+ new rtc::RefCountedObject<webrtc::test::FakeNativeHandleBuffer>(
+ dummy_handle, 640, 480);
cricket::WebRtcVideoFrame frame(buffer, 100, 200, webrtc::kVideoRotation_0);
EXPECT_EQ(dummy_handle, frame.GetNativeHandle());
EXPECT_EQ(640u, frame.GetWidth());
@@ -314,10 +316,11 @@ TEST_F(WebRtcVideoFrameTest, TextureInitialValues) {
}
TEST_F(WebRtcVideoFrameTest, CopyTextureFrame) {
- void* dummy_handle = reinterpret_cast<void*>(0x1);
- webrtc::TextureBuffer* buffer =
- new rtc::RefCountedObject<webrtc::TextureBuffer>(dummy_handle, 640, 480,
- rtc::Callback0<void>());
+ webrtc::test::FakeNativeHandle* dummy_handle =
+ new webrtc::test::FakeNativeHandle();
+ webrtc::NativeHandleBuffer* buffer =
+ new rtc::RefCountedObject<webrtc::test::FakeNativeHandleBuffer>(
+ dummy_handle, 640, 480);
cricket::WebRtcVideoFrame frame1(buffer, 100, 200, webrtc::kVideoRotation_0);
cricket::VideoFrame* frame2 = frame1.Copy();
EXPECT_EQ(frame1.GetNativeHandle(), frame2->GetNativeHandle());
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoframefactory_unittest.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoframefactory_unittest.cc
new file mode 100644
index 00000000000..4dbad0556f2
--- /dev/null
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvideoframefactory_unittest.cc
@@ -0,0 +1,125 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string.h>
+
+#include "talk/media/base/videoframe_unittest.h"
+#include "talk/media/webrtc/webrtcvideoframe.h"
+#include "talk/media/webrtc/webrtcvideoframefactory.h"
+
+class WebRtcVideoFrameFactoryTest
+ : public VideoFrameTest<cricket::WebRtcVideoFrameFactory> {
+ public:
+ WebRtcVideoFrameFactoryTest() {}
+
+ void InitFrame(webrtc::VideoRotation frame_rotation) {
+ const int frame_width = 1920;
+ const int frame_height = 1080;
+
+ // Build the CapturedFrame.
+ captured_frame_.fourcc = cricket::FOURCC_I420;
+ captured_frame_.pixel_width = 1;
+ captured_frame_.pixel_height = 1;
+ captured_frame_.elapsed_time = 1234;
+ captured_frame_.time_stamp = 5678;
+ captured_frame_.rotation = frame_rotation;
+ captured_frame_.width = frame_width;
+ captured_frame_.height = frame_height;
+ captured_frame_.data_size =
+ (frame_width * frame_height) +
+ ((frame_width + 1) / 2) * ((frame_height + 1) / 2) * 2;
+ captured_frame_buffer_.reset(new uint8[captured_frame_.data_size]);
+ // Initialize memory to satisfy DrMemory tests.
+ memset(captured_frame_buffer_.get(), 0, captured_frame_.data_size);
+ captured_frame_.data = captured_frame_buffer_.get();
+ }
+
+ void VerifyFrame(cricket::VideoFrame* dest_frame,
+ webrtc::VideoRotation src_rotation,
+ int src_width,
+ int src_height,
+ bool apply_rotation) {
+ if (!apply_rotation) {
+ EXPECT_EQ(dest_frame->GetRotation(), src_rotation);
+ EXPECT_EQ(dest_frame->GetWidth(), src_width);
+ EXPECT_EQ(dest_frame->GetHeight(), src_height);
+ } else {
+ EXPECT_EQ(dest_frame->GetRotation(), webrtc::kVideoRotation_0);
+ if (src_rotation == webrtc::kVideoRotation_90 ||
+ src_rotation == webrtc::kVideoRotation_270) {
+ EXPECT_EQ(dest_frame->GetWidth(), src_height);
+ EXPECT_EQ(dest_frame->GetHeight(), src_width);
+ } else {
+ EXPECT_EQ(dest_frame->GetWidth(), src_width);
+ EXPECT_EQ(dest_frame->GetHeight(), src_height);
+ }
+ }
+ }
+
+ void TestCreateAliasedFrame(bool apply_rotation) {
+ cricket::VideoFrameFactory& factory = factory_;
+ factory.SetApplyRotation(apply_rotation);
+ InitFrame(webrtc::kVideoRotation_270);
+ const cricket::CapturedFrame& captured_frame = get_captured_frame();
+ // Create the new frame from the CapturedFrame.
+ rtc::scoped_ptr<cricket::VideoFrame> frame;
+ int new_width = captured_frame.width / 2;
+ int new_height = captured_frame.height / 2;
+ frame.reset(factory.CreateAliasedFrame(&captured_frame, new_width,
+ new_height, new_width, new_height));
+ VerifyFrame(frame.get(), webrtc::kVideoRotation_270, new_width, new_height,
+ apply_rotation);
+
+ frame.reset(factory.CreateAliasedFrame(
+ &captured_frame, new_width, new_height, new_width / 2, new_height / 2));
+ VerifyFrame(frame.get(), webrtc::kVideoRotation_270, new_width / 2,
+ new_height / 2, apply_rotation);
+
+ // Reset the frame first so it's exclusive hence we could go through the
+ // StretchToFrame code path in CreateAliasedFrame.
+ frame.reset();
+ frame.reset(factory.CreateAliasedFrame(
+ &captured_frame, new_width, new_height, new_width / 2, new_height / 2));
+ VerifyFrame(frame.get(), webrtc::kVideoRotation_270, new_width / 2,
+ new_height / 2, apply_rotation);
+ }
+
+ const cricket::CapturedFrame& get_captured_frame() { return captured_frame_; }
+
+ private:
+ cricket::CapturedFrame captured_frame_;
+ rtc::scoped_ptr<uint8[]> captured_frame_buffer_;
+ cricket::WebRtcVideoFrameFactory factory_;
+};
+
+TEST_F(WebRtcVideoFrameFactoryTest, NoApplyRotation) {
+ TestCreateAliasedFrame(false);
+}
+
+TEST_F(WebRtcVideoFrameFactoryTest, ApplyRotation) {
+ TestCreateAliasedFrame(true);
+}
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.cc
index 62404db9f21..284afed4d11 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.cc
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.cc
@@ -103,7 +103,6 @@ static const CodecPref kCodecPrefs[] = {
#ifdef WIN32
static const int kDefaultAudioDeviceId = -1;
-static const int kDefaultSoundclipDeviceId = -2;
#else
static const int kDefaultAudioDeviceId = 0;
#endif
@@ -353,11 +352,12 @@ static AudioOptions GetDefaultEngineOptions() {
options.highpass_filter.Set(true);
options.stereo_swapping.Set(false);
options.audio_jitter_buffer_max_packets.Set(50);
+ options.audio_jitter_buffer_fast_accelerate.Set(false);
options.typing_detection.Set(true);
options.conference_mode.Set(false);
options.adjust_agc_delta.Set(0);
options.experimental_agc.Set(false);
- options.experimental_aec.Set(false);
+ options.extended_filter_aec.Set(false);
options.delay_agnostic_aec.Set(false);
options.experimental_ns.Set(false);
options.aec_dump.Set(false);
@@ -368,103 +368,10 @@ static std::string GetEnableString(bool enable) {
return enable ? "enable" : "disable";
}
-class WebRtcSoundclipMedia : public SoundclipMedia {
- public:
- explicit WebRtcSoundclipMedia(WebRtcVoiceEngine *engine)
- : engine_(engine), webrtc_channel_(-1) {
- engine_->RegisterSoundclip(this);
- }
-
- ~WebRtcSoundclipMedia() override {
- engine_->UnregisterSoundclip(this);
- if (webrtc_channel_ != -1) {
- // We shouldn't have to call Disable() here. DeleteChannel() should call
- // StopPlayout() while deleting the channel. We should fix the bug
- // inside WebRTC and remove the Disable() call bellow. This work is
- // tracked by bug http://b/issue?id=5382855.
- PlaySound(NULL, 0, 0);
- Disable();
- if (engine_->voe_sc()->base()->DeleteChannel(webrtc_channel_)
- == -1) {
- LOG_RTCERR1(DeleteChannel, webrtc_channel_);
- }
- }
- }
-
- bool Init() {
- if (!engine_->voe_sc()) {
- return false;
- }
- webrtc_channel_ = engine_->CreateSoundclipVoiceChannel();
- if (webrtc_channel_ == -1) {
- LOG_RTCERR0(CreateChannel);
- return false;
- }
- return true;
- }
-
- bool Enable() {
- if (engine_->voe_sc()->base()->StartPlayout(webrtc_channel_) == -1) {
- LOG_RTCERR1(StartPlayout, webrtc_channel_);
- return false;
- }
- return true;
- }
-
- bool Disable() {
- if (engine_->voe_sc()->base()->StopPlayout(webrtc_channel_) == -1) {
- LOG_RTCERR1(StopPlayout, webrtc_channel_);
- return false;
- }
- return true;
- }
-
- bool PlaySound(const char* buf, int len, int flags) override {
- // The voe file api is not available in chrome.
- if (!engine_->voe_sc()->file()) {
- return false;
- }
- // Must stop playing the current sound (if any), because we are about to
- // modify the stream.
- if (engine_->voe_sc()->file()->StopPlayingFileLocally(webrtc_channel_)
- == -1) {
- LOG_RTCERR1(StopPlayingFileLocally, webrtc_channel_);
- return false;
- }
-
- if (buf) {
- stream_.reset(new WebRtcSoundclipStream(buf, len));
- stream_->set_loop((flags & SF_LOOP) != 0);
- stream_->Rewind();
-
- // Play it.
- if (engine_->voe_sc()->file()->StartPlayingFileLocally(
- webrtc_channel_, stream_.get()) == -1) {
- LOG_RTCERR2(StartPlayingFileLocally, webrtc_channel_, stream_.get());
- LOG(LS_ERROR) << "Unable to start soundclip";
- return false;
- }
- } else {
- stream_.reset();
- }
- return true;
- }
-
- int GetLastEngineError() const { return engine_->voe_sc()->error(); }
-
- private:
- WebRtcVoiceEngine *engine_;
- int webrtc_channel_;
- rtc::scoped_ptr<WebRtcSoundclipStream> stream_;
-};
-
WebRtcVoiceEngine::WebRtcVoiceEngine()
: voe_wrapper_(new VoEWrapper()),
- voe_wrapper_sc_(new VoEWrapper()),
- voe_wrapper_sc_initialized_(false),
tracing_(new VoETraceWrapper()),
adm_(NULL),
- adm_sc_(NULL),
log_filter_(SeverityToFilter(kDefaultLogSeverity)),
is_dumping_aec_(false),
desired_local_monitor_enable_(false),
@@ -474,14 +381,10 @@ WebRtcVoiceEngine::WebRtcVoiceEngine()
}
WebRtcVoiceEngine::WebRtcVoiceEngine(VoEWrapper* voe_wrapper,
- VoEWrapper* voe_wrapper_sc,
VoETraceWrapper* tracing)
: voe_wrapper_(voe_wrapper),
- voe_wrapper_sc_(voe_wrapper_sc),
- voe_wrapper_sc_initialized_(false),
tracing_(tracing),
adm_(NULL),
- adm_sc_(NULL),
log_filter_(SeverityToFilter(kDefaultLogSeverity)),
is_dumping_aec_(false),
desired_local_monitor_enable_(false),
@@ -593,11 +496,6 @@ WebRtcVoiceEngine::~WebRtcVoiceEngine() {
adm_->Release();
adm_ = NULL;
}
- if (adm_sc_) {
- voe_wrapper_sc_.reset();
- adm_sc_->Release();
- adm_sc_ = NULL;
- }
// Test to see if the media processor was deregistered properly
DCHECK(SignalRxMediaFrame.is_empty());
@@ -673,61 +571,12 @@ bool WebRtcVoiceEngine::InitInternal() {
return true;
}
-bool WebRtcVoiceEngine::EnsureSoundclipEngineInit() {
- if (voe_wrapper_sc_initialized_) {
- return true;
- }
- // Note that, if initialization fails, voe_wrapper_sc_initialized_ will still
- // be false, so subsequent calls to EnsureSoundclipEngineInit will
- // probably just fail again. That's acceptable behavior.
-#if defined(LINUX) && !defined(HAVE_LIBPULSE)
- voe_wrapper_sc_->hw()->SetAudioDeviceLayer(webrtc::kAudioLinuxAlsa);
-#endif
-
- // Initialize the VoiceEngine instance that we'll use to play out sound clips.
- if (voe_wrapper_sc_->base()->Init(adm_sc_) == -1) {
- LOG_RTCERR0_EX(Init, voe_wrapper_sc_->error());
- return false;
- }
-
- // On Windows, tell it to use the default sound (not communication) devices.
- // First check whether there is a valid sound device for playback.
- // TODO(juberti): Clean this up when we support setting the soundclip device.
-#ifdef WIN32
- // The SetPlayoutDevice may not be implemented in the case of external ADM.
- // TODO(ronghuawu): We should only check the adm_sc_ here, but current
- // PeerConnection interface never set the adm_sc_, so need to check both
- // in order to determine if the external adm is used.
- if (!adm_ && !adm_sc_) {
- int num_of_devices = 0;
- if (voe_wrapper_sc_->hw()->GetNumOfPlayoutDevices(num_of_devices) != -1 &&
- num_of_devices > 0) {
- if (voe_wrapper_sc_->hw()->SetPlayoutDevice(kDefaultSoundclipDeviceId)
- == -1) {
- LOG_RTCERR1_EX(SetPlayoutDevice, kDefaultSoundclipDeviceId,
- voe_wrapper_sc_->error());
- return false;
- }
- } else {
- LOG(LS_WARNING) << "No valid sound playout device found.";
- }
- }
-#endif
- voe_wrapper_sc_initialized_ = true;
- LOG(LS_INFO) << "Initialized WebRtc soundclip engine.";
- return true;
-}
-
void WebRtcVoiceEngine::Terminate() {
LOG(LS_INFO) << "WebRtcVoiceEngine::Terminate";
initialized_ = false;
StopAecDump();
- if (voe_wrapper_sc_) {
- voe_wrapper_sc_initialized_ = false;
- voe_wrapper_sc_->base()->Terminate();
- }
voe_wrapper_->base()->Terminate();
desired_local_monitor_enable_ = false;
}
@@ -736,27 +585,17 @@ int WebRtcVoiceEngine::GetCapabilities() {
return AUDIO_SEND | AUDIO_RECV;
}
-VoiceMediaChannel *WebRtcVoiceEngine::CreateChannel() {
+VoiceMediaChannel* WebRtcVoiceEngine::CreateChannel(
+ const AudioOptions& options) {
WebRtcVoiceMediaChannel* ch = new WebRtcVoiceMediaChannel(this);
if (!ch->valid()) {
delete ch;
- ch = NULL;
- }
- return ch;
-}
-
-SoundclipMedia *WebRtcVoiceEngine::CreateSoundclip() {
- if (!EnsureSoundclipEngineInit()) {
- LOG(LS_ERROR) << "Unable to create soundclip: soundclip engine failed to "
- << "initialize.";
- return NULL;
+ return nullptr;
}
- WebRtcSoundclipMedia *soundclip = new WebRtcSoundclipMedia(this);
- if (!soundclip->Init() || !soundclip->Enable()) {
- delete soundclip;
- return NULL;
+ if (!ch->SetOptions(options)) {
+ LOG(LS_WARNING) << "Failed to set options while creating channel.";
}
- return soundclip;
+ return ch;
}
bool WebRtcVoiceEngine::SetOptions(const AudioOptions& options) {
@@ -822,7 +661,7 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
agc_mode = webrtc::kAgcFixedDigital;
options.typing_detection.Set(false);
options.experimental_agc.Set(false);
- options.experimental_aec.Set(false);
+ options.extended_filter_aec.Set(false);
options.experimental_ns.Set(false);
#endif
@@ -833,7 +672,7 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
if (options.delay_agnostic_aec.Get(&use_delay_agnostic_aec)) {
if (use_delay_agnostic_aec) {
options.echo_cancellation.Set(true);
- options.experimental_aec.Set(true);
+ options.extended_filter_aec.Set(true);
ec_mode = webrtc::kEcConference;
}
}
@@ -850,12 +689,14 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
// TODO(henrika): investigate possibility to support built-in EC also
// in combination with Open SL ES audio.
const bool built_in_aec = voe_wrapper_->hw()->BuiltInAECIsAvailable();
- if (built_in_aec && !use_delay_agnostic_aec) {
+ if (built_in_aec) {
// Built-in EC exists on this device and use_delay_agnostic_aec is not
// overriding it. Enable/Disable it according to the echo_cancellation
// audio option.
- if (voe_wrapper_->hw()->EnableBuiltInAEC(echo_cancellation) == 0 &&
- echo_cancellation) {
+ const bool enable_built_in_aec =
+ echo_cancellation && !use_delay_agnostic_aec;
+ if (voe_wrapper_->hw()->EnableBuiltInAEC(enable_built_in_aec) == 0 &&
+ enable_built_in_aec) {
// Disable internal software EC if built-in EC is enabled,
// i.e., replace the software EC with the built-in EC.
options.echo_cancellation.Set(false);
@@ -963,6 +804,14 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
new webrtc::NetEqCapacityConfig(audio_jitter_buffer_max_packets));
}
+ bool audio_jitter_buffer_fast_accelerate;
+ if (options.audio_jitter_buffer_fast_accelerate.Get(
+ &audio_jitter_buffer_fast_accelerate)) {
+ LOG(LS_INFO) << "NetEq fast mode? " << audio_jitter_buffer_fast_accelerate;
+ voe_config_.Set<webrtc::NetEqFastAccelerate>(
+ new webrtc::NetEqFastAccelerate(audio_jitter_buffer_fast_accelerate));
+ }
+
bool typing_detection;
if (options.typing_detection.Get(&typing_detection)) {
LOG(LS_INFO) << "Typing detection is enabled? " << typing_detection;
@@ -995,16 +844,16 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
bool delay_agnostic_aec;
if (delay_agnostic_aec_.Get(&delay_agnostic_aec)) {
LOG(LS_INFO) << "Delay agnostic aec is enabled? " << delay_agnostic_aec;
- config.Set<webrtc::ReportedDelay>(
- new webrtc::ReportedDelay(!delay_agnostic_aec));
+ config.Set<webrtc::DelayAgnostic>(
+ new webrtc::DelayAgnostic(delay_agnostic_aec));
}
- experimental_aec_.SetFrom(options.experimental_aec);
- bool experimental_aec;
- if (experimental_aec_.Get(&experimental_aec)) {
- LOG(LS_INFO) << "Experimental aec is enabled? " << experimental_aec;
- config.Set<webrtc::DelayCorrection>(
- new webrtc::DelayCorrection(experimental_aec));
+ extended_filter_aec_.SetFrom(options.extended_filter_aec);
+ bool extended_filter;
+ if (extended_filter_aec_.Get(&extended_filter)) {
+ LOG(LS_INFO) << "Extended filter aec is enabled? " << extended_filter;
+ config.Set<webrtc::ExtendedFilter>(
+ new webrtc::ExtendedFilter(extended_filter));
}
experimental_ns_.SetFrom(options.experimental_ns);
@@ -1532,19 +1381,6 @@ void WebRtcVoiceEngine::UnregisterChannel(WebRtcVoiceMediaChannel *channel) {
}
}
-void WebRtcVoiceEngine::RegisterSoundclip(WebRtcSoundclipMedia *soundclip) {
- soundclips_.push_back(soundclip);
-}
-
-void WebRtcVoiceEngine::UnregisterSoundclip(WebRtcSoundclipMedia *soundclip) {
- SoundclipList::iterator i = std::find(soundclips_.begin(),
- soundclips_.end(),
- soundclip);
- if (i != soundclips_.end()) {
- soundclips_.erase(i);
- }
-}
-
// Adjusts the default AGC target level by the specified delta.
// NB: If we start messing with other config fields, we'll want
// to save the current webrtc::AgcConfig as well.
@@ -1563,8 +1399,7 @@ bool WebRtcVoiceEngine::AdjustAgcLevel(int delta) {
return true;
}
-bool WebRtcVoiceEngine::SetAudioDeviceModule(webrtc::AudioDeviceModule* adm,
- webrtc::AudioDeviceModule* adm_sc) {
+bool WebRtcVoiceEngine::SetAudioDeviceModule(webrtc::AudioDeviceModule* adm) {
if (initialized_) {
LOG(LS_WARNING) << "SetAudioDeviceModule can not be called after Init.";
return false;
@@ -1577,15 +1412,6 @@ bool WebRtcVoiceEngine::SetAudioDeviceModule(webrtc::AudioDeviceModule* adm,
adm_ = adm;
adm_->AddRef();
}
-
- if (adm_sc_) {
- adm_sc_->Release();
- adm_sc_ = NULL;
- }
- if (adm_sc) {
- adm_sc_ = adm_sc;
- adm_sc_->AddRef();
- }
return true;
}
@@ -1791,10 +1617,6 @@ int WebRtcVoiceEngine::CreateMediaVoiceChannel() {
return CreateVoiceChannel(voe_wrapper_.get());
}
-int WebRtcVoiceEngine::CreateSoundclipVoiceChannel() {
- return CreateVoiceChannel(voe_wrapper_sc_.get());
-}
-
class WebRtcVoiceMediaChannel::WebRtcVoiceChannelRenderer
: public AudioRenderer::Sink {
public:
@@ -3525,6 +3347,10 @@ bool WebRtcVoiceMediaChannel::GetStats(VoiceMediaInfo* info) {
static_cast<float>(ns.currentSpeechExpandRate) / (1 << 14);
rinfo.secondary_decoded_rate =
static_cast<float>(ns.currentSecondaryDecodedRate) / (1 << 14);
+ rinfo.accelerate_rate =
+ static_cast<float>(ns.currentAccelerateRate) / (1 << 14);
+ rinfo.preemptive_expand_rate =
+ static_cast<float>(ns.currentPreemptiveRate) / (1 << 14);
}
webrtc::AudioDecodingCallStats ds;
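
The audio_jitter_buffer_fast_accelerate option added in ApplyOptions() above is forwarded into the voice-engine configuration as a webrtc::NetEqFastAccelerate setting. A minimal sketch of that plumbing follows — the helper function is hypothetical, includes are as in webrtcvoiceengine.cc, and only the option/config calls visible in this hunk are used.

// Sketch: read the jitter-buffer option and forward it to the VoE config.
// |voe_config| corresponds to the engine's voe_config_ member.
static void ApplyFastAccelerateSketch(const cricket::AudioOptions& options,
                                      webrtc::Config* voe_config) {
  bool fast_accelerate;
  if (options.audio_jitter_buffer_fast_accelerate.Get(&fast_accelerate)) {
    // Set() stores the new'd settings struct in the Config, matching the
    // NetEqCapacityConfig usage a few lines earlier in ApplyOptions().
    voe_config->Set<webrtc::NetEqFastAccelerate>(
        new webrtc::NetEqFastAccelerate(fast_accelerate));
  }
}
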
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.h b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.h
index 242467dd990..35f2dbc93ef 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.h
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine.h
@@ -35,7 +35,6 @@
#include "talk/media/base/rtputils.h"
#include "talk/media/webrtc/webrtccommon.h"
-#include "talk/media/webrtc/webrtcexport.h"
#include "talk/media/webrtc/webrtcvoe.h"
#include "talk/session/media/channel.h"
#include "webrtc/base/buffer.h"
@@ -48,15 +47,6 @@
#include "webrtc/common.h"
#include "webrtc/config.h"
-#if !defined(LIBPEERCONNECTION_LIB) && \
- !defined(LIBPEERCONNECTION_IMPLEMENTATION)
-// If you hit this, then you've tried to include this header from outside
-// the shared library. An instance of this class must only be created from
-// within the library that actually implements it. Otherwise use the
-// WebRtcMediaEngine to construct an instance.
-#error "Bogus include."
-#endif
-
namespace webrtc {
class VideoEngine;
}
@@ -91,7 +81,6 @@ class AudioRenderer;
class VoETraceWrapper;
class VoEWrapper;
class VoiceProcessor;
-class WebRtcSoundclipMedia;
class WebRtcVoiceMediaChannel;
// WebRtcVoiceEngine is a class to be used with CompositeMediaEngine.
@@ -100,36 +89,21 @@ class WebRtcVoiceEngine
: public webrtc::VoiceEngineObserver,
public webrtc::TraceCallback,
public webrtc::VoEMediaProcess {
+ friend class WebRtcVoiceMediaChannel;
+
public:
WebRtcVoiceEngine();
// Dependency injection for testing.
- WebRtcVoiceEngine(VoEWrapper* voe_wrapper,
- VoEWrapper* voe_wrapper_sc,
- VoETraceWrapper* tracing);
+ WebRtcVoiceEngine(VoEWrapper* voe_wrapper, VoETraceWrapper* tracing);
~WebRtcVoiceEngine();
bool Init(rtc::Thread* worker_thread);
void Terminate();
int GetCapabilities();
- VoiceMediaChannel* CreateChannel();
-
- SoundclipMedia* CreateSoundclip();
+ VoiceMediaChannel* CreateChannel(const AudioOptions& options);
AudioOptions GetOptions() const { return options_; }
bool SetOptions(const AudioOptions& options);
- // Overrides, when set, take precedence over the options on a
- // per-option basis. For example, if AGC is set in options and AEC
- // is set in overrides, AGC and AEC will be both be set. Overrides
- // can also turn off options. For example, if AGC is set to "on" in
- // options and AGC is set to "off" in overrides, the result is that
- // AGC will be off until different overrides are applied or until
- // the overrides are cleared. Only one set of overrides is present
- // at a time (they do not "stack"). And when the overrides are
- // cleared, the media engine's state reverts back to the options set
- // via SetOptions. This allows us to have both "persistent options"
- // (the normal options) and "temporary options" (overrides).
- bool SetOptionOverrides(const AudioOptions& options);
- bool ClearOptionOverrides();
bool SetDelayOffset(int offset);
bool SetDevices(const Device* in_device, const Device* out_device);
bool GetOutputVolume(int* level);
@@ -166,21 +140,15 @@ class WebRtcVoiceEngine
void RegisterChannel(WebRtcVoiceMediaChannel *channel);
void UnregisterChannel(WebRtcVoiceMediaChannel *channel);
- // May only be called by WebRtcSoundclipMedia.
- void RegisterSoundclip(WebRtcSoundclipMedia *channel);
- void UnregisterSoundclip(WebRtcSoundclipMedia *channel);
-
// Called by WebRtcVoiceMediaChannel to set a gain offset from
// the default AGC target level.
bool AdjustAgcLevel(int delta);
VoEWrapper* voe() { return voe_wrapper_.get(); }
- VoEWrapper* voe_sc() { return voe_wrapper_sc_.get(); }
int GetLastEngineError();
- // Set the external ADMs. This can only be called before Init.
- bool SetAudioDeviceModule(webrtc::AudioDeviceModule* adm,
- webrtc::AudioDeviceModule* adm_sc);
+ // Set the external ADM. This can only be called before Init.
+ bool SetAudioDeviceModule(webrtc::AudioDeviceModule* adm);
// Starts AEC dump using existing file.
bool StartAecDump(rtc::PlatformFile file);
@@ -190,10 +158,8 @@ class WebRtcVoiceEngine
// Create a VoiceEngine Channel.
int CreateMediaVoiceChannel();
- int CreateSoundclipVoiceChannel();
private:
- typedef std::vector<WebRtcSoundclipMedia *> SoundclipList;
typedef std::vector<WebRtcVoiceMediaChannel *> ChannelList;
typedef sigslot::
signal3<uint32, MediaProcessorDirection, AudioFrame*> FrameSignal;
@@ -202,7 +168,6 @@ class WebRtcVoiceEngine
void ConstructCodecs();
bool GetVoeCodec(int index, webrtc::CodecInst* codec);
bool InitInternal();
- bool EnsureSoundclipEngineInit();
void SetTraceFilter(int filter);
void SetTraceOptions(const std::string& options);
// Applies either options or overrides. Every option that is "set"
@@ -210,6 +175,19 @@ class WebRtcVoiceEngine
// allows us to selectively turn on and off different options easily
// at any time.
bool ApplyOptions(const AudioOptions& options);
+ // Overrides, when set, take precedence over the options on a
+ // per-option basis. For example, if AGC is set in options and AEC
+ // is set in overrides, AGC and AEC will both be set. Overrides
+ // can also turn off options. For example, if AGC is set to "on" in
+ // options and AGC is set to "off" in overrides, the result is that
+ // AGC will be off until different overrides are applied or until
+ // the overrides are cleared. Only one set of overrides is present
+ // at a time (they do not "stack"). And when the overrides are
+ // cleared, the media engine's state reverts back to the options set
+ // via SetOptions. This allows us to have both "persistent options"
+ // (the normal options) and "temporary options" (overrides).
+ bool SetOptionOverrides(const AudioOptions& options);
+ bool ClearOptionOverrides();
// webrtc::TraceCallback:
void Print(webrtc::TraceLevel level, const char* trace, int length) override;
@@ -250,13 +228,9 @@ class WebRtcVoiceEngine
// The primary instance of WebRtc VoiceEngine.
rtc::scoped_ptr<VoEWrapper> voe_wrapper_;
- // A secondary instance, for playing out soundclips (on the 'ring' device).
- rtc::scoped_ptr<VoEWrapper> voe_wrapper_sc_;
- bool voe_wrapper_sc_initialized_;
rtc::scoped_ptr<VoETraceWrapper> tracing_;
// The external audio device manager
webrtc::AudioDeviceModule* adm_;
- webrtc::AudioDeviceModule* adm_sc_;
int log_filter_;
std::string log_options_;
bool is_dumping_aec_;
@@ -264,7 +238,6 @@ class WebRtcVoiceEngine
std::vector<RtpHeaderExtension> rtp_header_extensions_;
bool desired_local_monitor_enable_;
rtc::scoped_ptr<WebRtcMonitorStream> monitor_;
- SoundclipList soundclips_;
ChannelList channels_;
// channels_ can be read from WebRtc callback thread. We need a lock on that
// callback as well as the RegisterChannel/UnregisterChannel.
@@ -293,11 +266,11 @@ class WebRtcVoiceEngine
rtc::CriticalSection signal_media_critical_;
- // Cache received experimental_aec, delay_agnostic_aec and experimental_ns
+ // Cache received extended_filter_aec, delay_agnostic_aec and experimental_ns
// values, and apply them in case they are missing in the audio options. We
// need to do this because SetExtraOptions() will revert to defaults for
// options which are not provided.
- Settable<bool> experimental_aec_;
+ Settable<bool> extended_filter_aec_;
Settable<bool> delay_agnostic_aec_;
Settable<bool> experimental_ns_;
};
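Note on the interface change above: CreateChannel() now takes the per-channel AudioOptions up front, and the override machinery (SetOptionOverrides/ClearOptionOverrides) becomes a private detail driven by the channels themselves, hence the new friend declaration. A minimal caller-side sketch, assuming only the signatures visible in this header; the engine setup is simplified and purely illustrative:

    // Sketch only -- assumes talk/media/webrtc/webrtcvoiceengine.h from this tree.
    cricket::WebRtcVoiceEngine engine;
    if (engine.Init(rtc::Thread::Current())) {
      cricket::AudioOptions options;
      options.echo_cancellation.Set(true);  // per-channel option, applied at creation
      cricket::VoiceMediaChannel* channel = engine.CreateChannel(options);
      // ... use the channel ...
      delete channel;  // the caller owns the returned channel
      engine.Terminate();
    }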
diff --git a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine_unittest.cc b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine_unittest.cc
index bc14770ea40..0dc7b510eea 100644
--- a/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/media/webrtc/webrtcvoiceengine_unittest.cc
@@ -128,12 +128,9 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
WebRtcVoiceEngineTestFake()
: voe_(kAudioCodecs, ARRAY_SIZE(kAudioCodecs)),
- voe_sc_(kAudioCodecs, ARRAY_SIZE(kAudioCodecs)),
trace_wrapper_(new FakeVoETraceWrapper()),
- engine_(new FakeVoEWrapper(&voe_),
- new FakeVoEWrapper(&voe_sc_),
- trace_wrapper_),
- channel_(NULL), soundclip_(NULL) {
+ engine_(new FakeVoEWrapper(&voe_), trace_wrapper_),
+ channel_(nullptr) {
options_conference_.conference_mode.Set(true);
options_adjust_agc_.adjust_agc_delta.Set(-10);
}
@@ -141,8 +138,8 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
if (!engine_.Init(rtc::Thread::Current())) {
return false;
}
- channel_ = engine_.CreateChannel();
- return (channel_ != NULL);
+ channel_ = engine_.CreateChannel(cricket::AudioOptions());
+ return (channel_ != nullptr);
}
bool SetupEngine() {
if (!SetupEngineWithoutStream()) {
@@ -168,15 +165,14 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
channel_->OnPacketReceived(&packet, rtc::PacketTime());
}
void TearDown() override {
- delete soundclip_;
delete channel_;
engine_.Terminate();
}
void TestInsertDtmf(uint32 ssrc, bool caller) {
EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
- channel_ = engine_.CreateChannel();
- EXPECT_TRUE(channel_ != NULL);
+ channel_ = engine_.CreateChannel(cricket::AudioOptions());
+ EXPECT_TRUE(channel_ != nullptr);
if (caller) {
// if this is a caller, local description will be applied and add the
// send stream.
@@ -335,11 +331,9 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
protected:
cricket::FakeWebRtcVoiceEngine voe_;
- cricket::FakeWebRtcVoiceEngine voe_sc_;
FakeVoETraceWrapper* trace_wrapper_;
cricket::WebRtcVoiceEngine engine_;
cricket::VoiceMediaChannel* channel_;
- cricket::SoundclipMedia* soundclip_;
cricket::AudioOptions options_conference_;
cricket::AudioOptions options_adjust_agc_;
@@ -348,29 +342,25 @@ class WebRtcVoiceEngineTestFake : public testing::Test {
// Tests that our stub library "works".
TEST_F(WebRtcVoiceEngineTestFake, StartupShutdown) {
EXPECT_FALSE(voe_.IsInited());
- EXPECT_FALSE(voe_sc_.IsInited());
EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
EXPECT_TRUE(voe_.IsInited());
- // The soundclip engine is lazily initialized.
- EXPECT_FALSE(voe_sc_.IsInited());
engine_.Terminate();
EXPECT_FALSE(voe_.IsInited());
- EXPECT_FALSE(voe_sc_.IsInited());
}
// Tests that we can create and destroy a channel.
TEST_F(WebRtcVoiceEngineTestFake, CreateChannel) {
EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
- channel_ = engine_.CreateChannel();
- EXPECT_TRUE(channel_ != NULL);
+ channel_ = engine_.CreateChannel(cricket::AudioOptions());
+ EXPECT_TRUE(channel_ != nullptr);
}
// Tests that we properly handle failures in CreateChannel.
TEST_F(WebRtcVoiceEngineTestFake, CreateChannelFail) {
voe_.set_fail_create_channel(true);
EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
- channel_ = engine_.CreateChannel();
- EXPECT_TRUE(channel_ == NULL);
+ channel_ = engine_.CreateChannel(cricket::AudioOptions());
+ EXPECT_TRUE(channel_ == nullptr);
}
// Tests that the list of supported codecs is created properly and ordered
@@ -678,8 +668,8 @@ TEST_F(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthFixedRateAsCaller) {
TEST_F(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthMultiRateAsCallee) {
EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
- channel_ = engine_.CreateChannel();
- EXPECT_TRUE(channel_ != NULL);
+ channel_ = engine_.CreateChannel(cricket::AudioOptions());
+ EXPECT_TRUE(channel_ != nullptr);
EXPECT_TRUE(channel_->SetSendCodecs(engine_.codecs()));
int desired_bitrate = 128000;
@@ -1052,8 +1042,8 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackAsCaller) {
// Test that we can enable NACK with opus as callee.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackAsCallee) {
EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
- channel_ = engine_.CreateChannel();
- EXPECT_TRUE(channel_ != NULL);
+ channel_ = engine_.CreateChannel(cricket::AudioOptions());
+ EXPECT_TRUE(channel_ != nullptr);
int channel_num = voe_.GetLastChannel();
std::vector<cricket::AudioCodec> codecs;
@@ -1632,8 +1622,8 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCaller) {
// Test that we set VAD and DTMF types correctly as callee.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCallee) {
EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
- channel_ = engine_.CreateChannel();
- EXPECT_TRUE(channel_ != NULL);
+ channel_ = engine_.CreateChannel(cricket::AudioOptions());
+ EXPECT_TRUE(channel_ != nullptr);
int channel_num = voe_.GetLastChannel();
std::vector<cricket::AudioCodec> codecs;
@@ -1749,8 +1739,8 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsREDAsCaller) {
// Test that we set up RED correctly as callee.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsREDAsCallee) {
EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
- channel_ = engine_.CreateChannel();
- EXPECT_TRUE(channel_ != NULL);
+ channel_ = engine_.CreateChannel(cricket::AudioOptions());
+ EXPECT_TRUE(channel_ != nullptr);
int channel_num = voe_.GetLastChannel();
std::vector<cricket::AudioCodec> codecs;
@@ -2076,6 +2066,12 @@ TEST_F(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) {
(1 << 14), info.receivers[0].speech_expand_rate);
EXPECT_EQ(static_cast<float>(cricket::kNetStats.currentSecondaryDecodedRate) /
(1 << 14), info.receivers[0].secondary_decoded_rate);
+ EXPECT_EQ(
+ static_cast<float>(cricket::kNetStats.currentAccelerateRate) / (1 << 14),
+ info.receivers[0].accelerate_rate);
+ EXPECT_EQ(
+ static_cast<float>(cricket::kNetStats.currentPreemptiveRate) / (1 << 14),
+ info.receivers[0].preemptive_expand_rate);
}
// Test that we can add and remove receive streams, and do proper send/playout.
@@ -2432,7 +2428,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendSsrcWithMultipleStreams) {
// receive channel is created before the send channel.
TEST_F(WebRtcVoiceEngineTestFake, SetSendSsrcAfterCreatingReceiveChannel) {
EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
- channel_ = engine_.CreateChannel();
+ channel_ = engine_.CreateChannel(cricket::AudioOptions());
EXPECT_TRUE(channel_->SetOptions(options_conference_));
EXPECT_TRUE(channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1)));
@@ -2652,35 +2648,6 @@ TEST_F(WebRtcVoiceEngineTestFake, PlayRingbackWithMultipleStreams) {
EXPECT_EQ(0, voe_.IsPlayingFileLocally(channel_num));
}
-// Tests creating soundclips, and make sure they come from the right engine.
-TEST_F(WebRtcVoiceEngineTestFake, CreateSoundclip) {
- EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
- EXPECT_FALSE(voe_sc_.IsInited());
- soundclip_ = engine_.CreateSoundclip();
- EXPECT_TRUE(voe_sc_.IsInited());
- ASSERT_TRUE(soundclip_ != NULL);
- EXPECT_EQ(0, voe_.GetNumChannels());
- EXPECT_EQ(1, voe_sc_.GetNumChannels());
- int channel_num = voe_sc_.GetLastChannel();
- EXPECT_TRUE(voe_sc_.GetPlayout(channel_num));
- delete soundclip_;
- soundclip_ = NULL;
- EXPECT_EQ(0, voe_sc_.GetNumChannels());
- // Make sure the soundclip engine is uninitialized on shutdown, now that
- // we've initialized it by creating a soundclip.
- engine_.Terminate();
- EXPECT_FALSE(voe_sc_.IsInited());
-}
-
-// Tests playing out a fake sound.
-TEST_F(WebRtcVoiceEngineTestFake, PlaySoundclip) {
- static const char kZeroes[16000] = {};
- EXPECT_TRUE(engine_.Init(rtc::Thread::Current()));
- soundclip_ = engine_.CreateSoundclip();
- ASSERT_TRUE(soundclip_ != NULL);
- EXPECT_TRUE(soundclip_->PlaySound(kZeroes, sizeof(kZeroes), 0));
-}
-
TEST_F(WebRtcVoiceEngineTestFake, MediaEngineCallbackOnError) {
rtc::scoped_ptr<ChannelErrorListener> listener;
cricket::WebRtcVoiceMediaChannel* media_channel;
@@ -2882,6 +2849,8 @@ TEST_F(WebRtcVoiceEngineTestFake, SetAudioOptions) {
EXPECT_EQ(ec_mode, webrtc::kEcConference);
EXPECT_EQ(ns_mode, webrtc::kNsHighSuppression);
EXPECT_EQ(50, voe_.GetNetEqCapacity()); // From GetDefaultEngineOptions().
+ EXPECT_FALSE(
+ voe_.GetNetEqFastAccelerate()); // From GetDefaultEngineOptions().
// Turn echo cancellation off
options.echo_cancellation.Set(false);
@@ -2926,7 +2895,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetAudioOptions) {
// Turn off echo cancellation and delay agnostic aec.
options.delay_agnostic_aec.Set(false);
- options.experimental_aec.Set(false);
+ options.extended_filter_aec.Set(false);
options.echo_cancellation.Set(false);
ASSERT_TRUE(engine_.SetOptions(options));
voe_.GetEcStatus(ec_enabled, ec_mode);
@@ -3029,9 +2998,9 @@ TEST_F(WebRtcVoiceEngineTestFake, InitDoesNotOverwriteDefaultAgcConfig) {
TEST_F(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) {
EXPECT_TRUE(SetupEngine());
rtc::scoped_ptr<cricket::VoiceMediaChannel> channel1(
- engine_.CreateChannel());
+ engine_.CreateChannel(cricket::AudioOptions()));
rtc::scoped_ptr<cricket::VoiceMediaChannel> channel2(
- engine_.CreateChannel());
+ engine_.CreateChannel(cricket::AudioOptions()));
// Have to add a stream to make SetSend work.
cricket::StreamParams stream1;
@@ -3149,7 +3118,7 @@ TEST_F(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) {
TEST_F(WebRtcVoiceEngineTestFake, TestSetDscpOptions) {
EXPECT_TRUE(SetupEngine());
rtc::scoped_ptr<cricket::VoiceMediaChannel> channel(
- engine_.CreateChannel());
+ engine_.CreateChannel(cricket::AudioOptions()));
rtc::scoped_ptr<cricket::FakeNetworkInterface> network_interface(
new cricket::FakeNetworkInterface);
channel->SetInterface(network_interface.get());
@@ -3231,8 +3200,9 @@ TEST_F(WebRtcVoiceEngineTestFake, SetOutputScaling) {
TEST(WebRtcVoiceEngineTest, StartupShutdown) {
cricket::WebRtcVoiceEngine engine;
EXPECT_TRUE(engine.Init(rtc::Thread::Current()));
- cricket::VoiceMediaChannel* channel = engine.CreateChannel();
- EXPECT_TRUE(channel != NULL);
+ cricket::VoiceMediaChannel* channel =
+ engine.CreateChannel(cricket::AudioOptions());
+ EXPECT_TRUE(channel != nullptr);
delete channel;
engine.Terminate();
@@ -3241,30 +3211,6 @@ TEST(WebRtcVoiceEngineTest, StartupShutdown) {
engine.Terminate();
}
-// Tests that the logging from the library is cleartext.
-TEST(WebRtcVoiceEngineTest, DISABLED_HasUnencryptedLogging) {
- cricket::WebRtcVoiceEngine engine;
- rtc::scoped_ptr<rtc::MemoryStream> stream(
- new rtc::MemoryStream);
- size_t size = 0;
- bool cleartext = true;
- rtc::LogMessage::AddLogToStream(stream.get(), rtc::LS_VERBOSE);
- engine.SetLogging(rtc::LS_VERBOSE, "");
- EXPECT_TRUE(engine.Init(rtc::Thread::Current()));
- EXPECT_TRUE(stream->GetSize(&size));
- EXPECT_GT(size, 0U);
- engine.Terminate();
- rtc::LogMessage::RemoveLogToStream(stream.get());
- const char* buf = stream->GetBuffer();
- for (size_t i = 0; i < size && cleartext; ++i) {
- int ch = static_cast<int>(buf[i]);
- ASSERT_GE(ch, 0) << "Out of bounds character in WebRtc VoE log: "
- << std::hex << ch;
- cleartext = (isprint(ch) || isspace(ch));
- }
- EXPECT_TRUE(cleartext);
-}
-
// Tests that the library is configured with the codecs we want.
TEST(WebRtcVoiceEngineTest, HasCorrectCodecs) {
cricket::WebRtcVoiceEngine engine;
@@ -3354,7 +3300,8 @@ TEST(WebRtcVoiceEngineTest, Has32Channels) {
int num_channels = 0;
while (num_channels < ARRAY_SIZE(channels)) {
- cricket::VoiceMediaChannel* channel = engine.CreateChannel();
+ cricket::VoiceMediaChannel* channel =
+ engine.CreateChannel(cricket::AudioOptions());
if (!channel)
break;
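The rewritten fixture above also leans on the Settable<T> pattern behind AudioOptions: an option only takes effect if it has been explicitly Set(), which is how ApplyOptions()/SetExtraOptions() can tell "off" apart from "not specified". A small sketch, restricted to fields that appear elsewhere in this diff:

    cricket::AudioOptions options;
    options.conference_mode.Set(true);    // explicitly set, will be applied
    options.adjust_agc_delta.Set(-10);    // explicitly set, will be applied
    bool conference = false;
    if (options.conference_mode.Get(&conference)) {
      // Get() succeeds only for options that were Set(); conference is now true.
    }
    // echo_cancellation was never Set(), so IsSet() is false and the engine
    // treats it as "unspecified" rather than "disabled".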
diff --git a/chromium/third_party/libjingle/source/talk/session/media/channel.cc b/chromium/third_party/libjingle/source/talk/session/media/channel.cc
index 0034f15d9b1..d30972db06f 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/channel.cc
+++ b/chromium/third_party/libjingle/source/talk/session/media/channel.cc
@@ -204,11 +204,6 @@ bool BaseChannel::Init() {
return false;
}
- session_->SignalNewLocalDescription.connect(
- this, &BaseChannel::OnNewLocalDescription);
- session_->SignalNewRemoteDescription.connect(
- this, &BaseChannel::OnNewRemoteDescription);
-
// Both RTP and RTCP channels are set, we can call SetInterface on
// media channel and it can set network options.
media_channel_->SetInterface(this);
@@ -502,12 +497,6 @@ bool BaseChannel::SendPacket(bool rtcp, rtc::Buffer* packet,
return false;
}
- // Signal to the media sink before protecting the packet.
- {
- rtc::CritScope cs(&signal_send_packet_cs_);
- SignalSendPacketPreCrypto(packet->data(), packet->size(), rtcp);
- }
-
rtc::PacketOptions options(dscp);
// Protect if needed.
if (srtp_filter_.IsActive()) {
@@ -576,12 +565,6 @@ bool BaseChannel::SendPacket(bool rtcp, rtc::Buffer* packet,
return false;
}
- // Signal to the media sink after protecting the packet.
- {
- rtc::CritScope cs(&signal_send_packet_cs_);
- SignalSendPacketPostCrypto(packet->data(), packet->size(), rtcp);
- }
-
// Bon voyage.
int ret =
channel->SendPacket(packet->data<char>(), packet->size(), options,
@@ -622,12 +605,6 @@ void BaseChannel::HandlePacket(bool rtcp, rtc::Buffer* packet,
signaling_thread()->Post(this, MSG_FIRSTPACKETRECEIVED);
}
- // Signal to the media sink before unprotecting the packet.
- {
- rtc::CritScope cs(&signal_recv_packet_cs_);
- SignalRecvPacketPostCrypto(packet->data(), packet->size(), rtcp);
- }
-
// Unprotect the packet, if needed.
if (srtp_filter_.IsActive()) {
char* data = packet->data<char>();
@@ -673,12 +650,6 @@ void BaseChannel::HandlePacket(bool rtcp, rtc::Buffer* packet,
return;
}
- // Signal to the media sink after unprotecting the packet.
- {
- rtc::CritScope cs(&signal_recv_packet_cs_);
- SignalRecvPacketPreCrypto(packet->data(), packet->size(), rtcp);
- }
-
// Push it down to the media channel.
if (!rtcp) {
media_channel_->OnPacketReceived(packet, packet_time);
@@ -687,24 +658,6 @@ void BaseChannel::HandlePacket(bool rtcp, rtc::Buffer* packet,
}
}
-void BaseChannel::OnNewLocalDescription(
- BaseSession* session, ContentAction action) {
- std::string error_desc;
- if (!PushdownLocalDescription(
- session->local_description(), action, &error_desc)) {
- SetSessionError(session_, BaseSession::ERROR_CONTENT, error_desc);
- }
-}
-
-void BaseChannel::OnNewRemoteDescription(
- BaseSession* session, ContentAction action) {
- std::string error_desc;
- if (!PushdownRemoteDescription(
- session->remote_description(), action, &error_desc)) {
- SetSessionError(session_, BaseSession::ERROR_CONTENT, error_desc);
- }
-}
-
bool BaseChannel::PushdownLocalDescription(
const SessionDescription* local_desc, ContentAction action,
std::string* error_desc) {
@@ -1042,6 +995,18 @@ bool BaseChannel::SetSrtp_w(const std::vector<CryptoParams>& cryptos,
return true;
}
+void BaseChannel::ActivateRtcpMux() {
+ worker_thread_->Invoke<void>(Bind(
+ &BaseChannel::ActivateRtcpMux_w, this));
+}
+
+void BaseChannel::ActivateRtcpMux_w() {
+ if (!rtcp_mux_filter_.IsActive()) {
+ rtcp_mux_filter_.SetActive();
+ set_rtcp_transport_channel(NULL);
+ }
+}
+
bool BaseChannel::SetRtcpMux_w(bool enable, ContentAction action,
ContentSource src,
std::string* error_desc) {
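The new ActivateRtcpMux()/ActivateRtcpMux_w() pair follows this file's usual threading idiom: the public method marshals the call onto the worker thread with Invoke() and Bind(), and the _w variant assumes it is already running there. A self-contained sketch of that idiom outside BaseChannel (the class and member names here are illustrative only):

    // Assumes webrtc/base/thread.h and webrtc/base/bind.h from this tree.
    class MuxToggle {
     public:
      explicit MuxToggle(rtc::Thread* worker) : worker_thread_(worker) {}
      void Activate() {
        // Blocks the calling thread until the worker has run Activate_w().
        worker_thread_->Invoke<void>(rtc::Bind(&MuxToggle::Activate_w, this));
      }
     private:
      void Activate_w() { active_ = true; }  // May only run on worker_thread_.
      rtc::Thread* worker_thread_;
      bool active_ = false;
    };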
diff --git a/chromium/third_party/libjingle/source/talk/session/media/channel.h b/chromium/third_party/libjingle/source/talk/session/media/channel.h
index 441fe64b684..bb2dffdc905 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/channel.h
+++ b/chromium/third_party/libjingle/source/talk/session/media/channel.h
@@ -108,6 +108,11 @@ class BaseChannel
bool writable() const { return writable_; }
bool IsStreamMuted(uint32 ssrc);
+ // Activate RTCP mux, regardless of the state so far. Once
+ // activated, it cannot be deactivated, and if the remote
+ // description doesn't support RTCP mux, setting the remote
+ // description will fail.
+ void ActivateRtcpMux();
bool PushdownLocalDescription(const SessionDescription* local_desc,
ContentAction action,
std::string* error_desc);
@@ -143,72 +148,6 @@ class BaseChannel
srtp_filter_.set_signal_silent_time(silent_time);
}
- template <class T>
- void RegisterSendSink(T* sink,
- void (T::*OnPacket)(const void*, size_t, bool),
- SinkType type) {
- rtc::CritScope cs(&signal_send_packet_cs_);
- if (SINK_POST_CRYPTO == type) {
- SignalSendPacketPostCrypto.disconnect(sink);
- SignalSendPacketPostCrypto.connect(sink, OnPacket);
- } else {
- SignalSendPacketPreCrypto.disconnect(sink);
- SignalSendPacketPreCrypto.connect(sink, OnPacket);
- }
- }
-
- void UnregisterSendSink(sigslot::has_slots<>* sink,
- SinkType type) {
- rtc::CritScope cs(&signal_send_packet_cs_);
- if (SINK_POST_CRYPTO == type) {
- SignalSendPacketPostCrypto.disconnect(sink);
- } else {
- SignalSendPacketPreCrypto.disconnect(sink);
- }
- }
-
- bool HasSendSinks(SinkType type) {
- rtc::CritScope cs(&signal_send_packet_cs_);
- if (SINK_POST_CRYPTO == type) {
- return !SignalSendPacketPostCrypto.is_empty();
- } else {
- return !SignalSendPacketPreCrypto.is_empty();
- }
- }
-
- template <class T>
- void RegisterRecvSink(T* sink,
- void (T::*OnPacket)(const void*, size_t, bool),
- SinkType type) {
- rtc::CritScope cs(&signal_recv_packet_cs_);
- if (SINK_POST_CRYPTO == type) {
- SignalRecvPacketPostCrypto.disconnect(sink);
- SignalRecvPacketPostCrypto.connect(sink, OnPacket);
- } else {
- SignalRecvPacketPreCrypto.disconnect(sink);
- SignalRecvPacketPreCrypto.connect(sink, OnPacket);
- }
- }
-
- void UnregisterRecvSink(sigslot::has_slots<>* sink,
- SinkType type) {
- rtc::CritScope cs(&signal_recv_packet_cs_);
- if (SINK_POST_CRYPTO == type) {
- SignalRecvPacketPostCrypto.disconnect(sink);
- } else {
- SignalRecvPacketPreCrypto.disconnect(sink);
- }
- }
-
- bool HasRecvSinks(SinkType type) {
- rtc::CritScope cs(&signal_recv_packet_cs_);
- if (SINK_POST_CRYPTO == type) {
- return !SignalRecvPacketPostCrypto.is_empty();
- } else {
- return !SignalRecvPacketPreCrypto.is_empty();
- }
- }
-
BundleFilter* bundle_filter() { return &bundle_filter_; }
const std::vector<StreamParams>& local_streams() const {
@@ -350,6 +289,7 @@ class BaseChannel
ContentAction action,
ContentSource src,
std::string* error_desc);
+ void ActivateRtcpMux_w();
bool SetRtcpMux_w(bool enable,
ContentAction action,
ContentSource src,
@@ -371,13 +311,6 @@ class BaseChannel
}
private:
- sigslot::signal3<const void*, size_t, bool> SignalSendPacketPreCrypto;
- sigslot::signal3<const void*, size_t, bool> SignalSendPacketPostCrypto;
- sigslot::signal3<const void*, size_t, bool> SignalRecvPacketPreCrypto;
- sigslot::signal3<const void*, size_t, bool> SignalRecvPacketPostCrypto;
- rtc::CriticalSection signal_send_packet_cs_;
- rtc::CriticalSection signal_recv_packet_cs_;
-
rtc::Thread* worker_thread_;
MediaEngineInterface* media_engine_;
BaseSession* session_;
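The comment on ActivateRtcpMux() above pins down the contract: activation is one-way, and a remote description that does not accept RTCP mux will subsequently be rejected. A hypothetical caller-side helper (not part of this diff) showing what that looks like with the Pushdown API declared in this header:

    // Force rtcp-mux, then apply the remote answer; the pushdown fails if the
    // answer does not accept rtcp-mux, matching the documented contract.
    bool ApplyAnswerRequiringMux(cricket::BaseChannel* channel,
                                 const cricket::SessionDescription* answer) {
      channel->ActivateRtcpMux();  // irreversible: RTCP shares the RTP channel
      std::string err;
      if (!channel->PushdownRemoteDescription(answer, cricket::CA_ANSWER, &err)) {
        LOG(LS_WARNING) << "rtcp-mux required but not accepted: " << err;
        return false;
      }
      return true;
    }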
diff --git a/chromium/third_party/libjingle/source/talk/session/media/channel_unittest.cc b/chromium/third_party/libjingle/source/talk/session/media/channel_unittest.cc
index a55f6e8cb03..2573454b22f 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/channel_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/session/media/channel_unittest.cc
@@ -35,7 +35,6 @@
#include "talk/media/base/testutils.h"
#include "webrtc/p2p/base/fakesession.h"
#include "talk/session/media/channel.h"
-#include "talk/session/media/mediarecorder.h"
#include "talk/session/media/typingmonitor.h"
#include "webrtc/base/fileutils.h"
#include "webrtc/base/gunit.h"
@@ -1140,6 +1139,89 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
EXPECT_TRUE(CheckNoRtcp2());
}
+ // Check that RTP and RTCP are transmitted ok when both sides
+ // support mux and one the offerer requires mux.
+ void SendRequireRtcpMuxToRtcpMux() {
+ CreateChannels(RTCP | RTCP_MUX, RTCP | RTCP_MUX);
+ channel1_->ActivateRtcpMux();
+ EXPECT_TRUE(SendInitiate());
+ EXPECT_EQ(1U, GetTransport1()->channels().size());
+ EXPECT_EQ(1U, GetTransport2()->channels().size());
+ EXPECT_TRUE(SendAccept());
+ EXPECT_TRUE(SendRtp1());
+ EXPECT_TRUE(SendRtp2());
+ EXPECT_TRUE(SendRtcp1());
+ EXPECT_TRUE(SendRtcp2());
+ EXPECT_TRUE(CheckRtp1());
+ EXPECT_TRUE(CheckRtp2());
+ EXPECT_TRUE(CheckNoRtp1());
+ EXPECT_TRUE(CheckNoRtp2());
+ EXPECT_TRUE(CheckRtcp1());
+ EXPECT_TRUE(CheckRtcp2());
+ EXPECT_TRUE(CheckNoRtcp1());
+ EXPECT_TRUE(CheckNoRtcp2());
+ }
+
+ // Check that RTP and RTCP are transmitted ok when both sides
+ // support mux and the answerer requires RTCP mux.
+ void SendRtcpMuxToRequireRtcpMux() {
+ CreateChannels(RTCP | RTCP_MUX, RTCP | RTCP_MUX);
+ channel2_->ActivateRtcpMux();
+ EXPECT_TRUE(SendInitiate());
+ EXPECT_EQ(2U, GetTransport1()->channels().size());
+ EXPECT_EQ(1U, GetTransport2()->channels().size());
+ EXPECT_TRUE(SendAccept());
+ EXPECT_EQ(1U, GetTransport1()->channels().size());
+ EXPECT_TRUE(SendRtp1());
+ EXPECT_TRUE(SendRtp2());
+ EXPECT_TRUE(SendRtcp1());
+ EXPECT_TRUE(SendRtcp2());
+ EXPECT_TRUE(CheckRtp1());
+ EXPECT_TRUE(CheckRtp2());
+ EXPECT_TRUE(CheckNoRtp1());
+ EXPECT_TRUE(CheckNoRtp2());
+ EXPECT_TRUE(CheckRtcp1());
+ EXPECT_TRUE(CheckRtcp2());
+ EXPECT_TRUE(CheckNoRtcp1());
+ EXPECT_TRUE(CheckNoRtcp2());
+ }
+
+ // Check that RTP and RTCP are transmitted ok when both sides
+ // require mux.
+ void SendRequireRtcpMuxToRequireRtcpMux() {
+ CreateChannels(RTCP | RTCP_MUX, RTCP | RTCP_MUX);
+ channel1_->ActivateRtcpMux();
+ channel2_->ActivateRtcpMux();
+ EXPECT_TRUE(SendInitiate());
+ EXPECT_EQ(1U, GetTransport1()->channels().size());
+ EXPECT_EQ(1U, GetTransport2()->channels().size());
+ EXPECT_TRUE(SendAccept());
+ EXPECT_EQ(1U, GetTransport1()->channels().size());
+ EXPECT_TRUE(SendRtp1());
+ EXPECT_TRUE(SendRtp2());
+ EXPECT_TRUE(SendRtcp1());
+ EXPECT_TRUE(SendRtcp2());
+ EXPECT_TRUE(CheckRtp1());
+ EXPECT_TRUE(CheckRtp2());
+ EXPECT_TRUE(CheckNoRtp1());
+ EXPECT_TRUE(CheckNoRtp2());
+ EXPECT_TRUE(CheckRtcp1());
+ EXPECT_TRUE(CheckRtcp2());
+ EXPECT_TRUE(CheckNoRtcp1());
+ EXPECT_TRUE(CheckNoRtcp2());
+ }
+
+ // Check that SendAccept fails if the answerer doesn't support mux
+ // and the offerer requires it.
+ void SendRequireRtcpMuxToNoRtcpMux() {
+ CreateChannels(RTCP | RTCP_MUX, RTCP);
+ channel1_->ActivateRtcpMux();
+ EXPECT_TRUE(SendInitiate());
+ EXPECT_EQ(1U, GetTransport1()->channels().size());
+ EXPECT_EQ(2U, GetTransport2()->channels().size());
+ EXPECT_FALSE(SendAccept());
+ }
+
// Check that RTCP data sent by the initiator before the accept is not muxed.
void SendEarlyRtcpMuxToRtcp() {
CreateChannels(RTCP | RTCP_MUX, RTCP);
@@ -1475,125 +1557,44 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
channel1_->StopMediaMonitor();
}
- void TestMediaSinks() {
- CreateChannels(0, 0);
- EXPECT_TRUE(SendInitiate());
- EXPECT_TRUE(SendAccept());
- EXPECT_FALSE(channel1_->HasSendSinks(cricket::SINK_POST_CRYPTO));
- EXPECT_FALSE(channel1_->HasRecvSinks(cricket::SINK_POST_CRYPTO));
- EXPECT_FALSE(channel1_->HasSendSinks(cricket::SINK_PRE_CRYPTO));
- EXPECT_FALSE(channel1_->HasRecvSinks(cricket::SINK_PRE_CRYPTO));
-
- rtc::Pathname path;
- EXPECT_TRUE(rtc::Filesystem::GetTemporaryFolder(path, true, NULL));
- path.SetFilename("sink-test.rtpdump");
- rtc::scoped_ptr<cricket::RtpDumpSink> sink(
- new cricket::RtpDumpSink(Open(path.pathname())));
- sink->set_packet_filter(cricket::PF_ALL);
- EXPECT_TRUE(sink->Enable(true));
- channel1_->RegisterSendSink(
- sink.get(), &cricket::RtpDumpSink::OnPacket, cricket::SINK_POST_CRYPTO);
- EXPECT_TRUE(channel1_->HasSendSinks(cricket::SINK_POST_CRYPTO));
- EXPECT_FALSE(channel1_->HasRecvSinks(cricket::SINK_POST_CRYPTO));
- EXPECT_FALSE(channel1_->HasSendSinks(cricket::SINK_PRE_CRYPTO));
- EXPECT_FALSE(channel1_->HasRecvSinks(cricket::SINK_PRE_CRYPTO));
-
- // The first packet is recorded with header + data.
- EXPECT_TRUE(SendRtp1());
- // The second packet is recorded with header only.
- sink->set_packet_filter(cricket::PF_RTPHEADER);
- EXPECT_TRUE(SendRtp1());
- // The third packet is not recorded since sink is disabled.
- EXPECT_TRUE(sink->Enable(false));
- EXPECT_TRUE(SendRtp1());
- // The fourth packet is not recorded since sink is unregistered.
- EXPECT_TRUE(sink->Enable(true));
- channel1_->UnregisterSendSink(sink.get(), cricket::SINK_POST_CRYPTO);
- EXPECT_TRUE(SendRtp1());
- sink.reset(); // This will close the file.
-
- // Read the recorded file and verify two packets.
- rtc::scoped_ptr<rtc::StreamInterface> stream(
- rtc::Filesystem::OpenFile(path, "rb"));
-
- cricket::RtpDumpReader reader(stream.get());
- cricket::RtpDumpPacket packet;
- EXPECT_EQ(rtc::SR_SUCCESS, reader.ReadPacket(&packet));
- std::string read_packet(reinterpret_cast<const char*>(&packet.data[0]),
- packet.data.size());
- EXPECT_EQ(rtp_packet_, read_packet);
-
- EXPECT_EQ(rtc::SR_SUCCESS, reader.ReadPacket(&packet));
- size_t len = 0;
- packet.GetRtpHeaderLen(&len);
- EXPECT_EQ(len, packet.data.size());
- EXPECT_EQ(0, memcmp(&packet.data[0], rtp_packet_.c_str(), len));
-
- EXPECT_EQ(rtc::SR_EOS, reader.ReadPacket(&packet));
-
- // Delete the file for media recording.
- stream.reset();
- EXPECT_TRUE(rtc::Filesystem::DeleteFile(path));
- }
-
void TestSetContentFailure() {
CreateChannels(0, 0);
- typename T::Content content;
- cricket::SessionDescription* sdesc_loc = new cricket::SessionDescription();
- cricket::SessionDescription* sdesc_rem = new cricket::SessionDescription();
- // Set up the session description.
- CreateContent(0, kPcmuCodec, kH264Codec, &content);
- sdesc_loc->AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP,
- new cricket::AudioContentDescription());
- sdesc_loc->AddContent(cricket::CN_VIDEO, cricket::NS_JINGLE_RTP,
- new cricket::VideoContentDescription());
- session1_.set_local_description(sdesc_loc);
- sdesc_rem->AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP,
- new cricket::AudioContentDescription());
- sdesc_rem->AddContent(cricket::CN_VIDEO, cricket::NS_JINGLE_RTP,
- new cricket::VideoContentDescription());
- session1_.set_remote_description(sdesc_rem);
-
- // Test failures in SetLocalContent.
- media_channel1_->set_fail_set_recv_codecs(true);
- session1_.SetError(cricket::BaseSession::ERROR_NONE, "");
- session1_.SetState(cricket::BaseSession::STATE_SENTINITIATE);
- EXPECT_EQ(cricket::BaseSession::ERROR_CONTENT, session1_.error());
+ auto sdesc = cricket::SessionDescription();
+ sdesc.AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP,
+ new cricket::AudioContentDescription());
+ sdesc.AddContent(cricket::CN_VIDEO, cricket::NS_JINGLE_RTP,
+ new cricket::VideoContentDescription());
+
+ std::string err;
media_channel1_->set_fail_set_recv_codecs(true);
- session1_.SetError(cricket::BaseSession::ERROR_NONE, "");
- session1_.SetState(cricket::BaseSession::STATE_SENTACCEPT);
- EXPECT_EQ(cricket::BaseSession::ERROR_CONTENT, session1_.error());
+ EXPECT_FALSE(channel1_->PushdownLocalDescription(
+ &sdesc, cricket::CA_OFFER, &err));
+ EXPECT_FALSE(channel1_->PushdownLocalDescription(
+ &sdesc, cricket::CA_ANSWER, &err));
- // Test failures in SetRemoteContent.
media_channel1_->set_fail_set_send_codecs(true);
- session1_.SetError(cricket::BaseSession::ERROR_NONE, "");
- session1_.SetState(cricket::BaseSession::STATE_RECEIVEDINITIATE);
- EXPECT_EQ(cricket::BaseSession::ERROR_CONTENT, session1_.error());
+ EXPECT_FALSE(channel1_->PushdownRemoteDescription(
+ &sdesc, cricket::CA_OFFER, &err));
media_channel1_->set_fail_set_send_codecs(true);
- session1_.SetError(cricket::BaseSession::ERROR_NONE, "");
- session1_.SetState(cricket::BaseSession::STATE_RECEIVEDACCEPT);
- EXPECT_EQ(cricket::BaseSession::ERROR_CONTENT, session1_.error());
+ EXPECT_FALSE(channel1_->PushdownRemoteDescription(
+ &sdesc, cricket::CA_ANSWER, &err));
}
void TestSendTwoOffers() {
CreateChannels(0, 0);
- // Set up the initial session description.
- cricket::SessionDescription* sdesc = CreateSessionDescriptionWithStream(1);
- session1_.set_local_description(sdesc);
-
- session1_.SetError(cricket::BaseSession::ERROR_NONE, "");
- session1_.SetState(cricket::BaseSession::STATE_SENTINITIATE);
- EXPECT_EQ(cricket::BaseSession::ERROR_NONE, session1_.error());
+ std::string err;
+ rtc::scoped_ptr<cricket::SessionDescription> sdesc1(
+ CreateSessionDescriptionWithStream(1));
+ EXPECT_TRUE(channel1_->PushdownLocalDescription(
+ sdesc1.get(), cricket::CA_OFFER, &err));
EXPECT_TRUE(media_channel1_->HasSendStream(1));
- // Update the local description and set the state again.
- sdesc = CreateSessionDescriptionWithStream(2);
- session1_.set_local_description(sdesc);
-
- session1_.SetState(cricket::BaseSession::STATE_SENTINITIATE);
- EXPECT_EQ(cricket::BaseSession::ERROR_NONE, session1_.error());
+ rtc::scoped_ptr<cricket::SessionDescription> sdesc2(
+ CreateSessionDescriptionWithStream(2));
+ EXPECT_TRUE(channel1_->PushdownLocalDescription(
+ sdesc2.get(), cricket::CA_OFFER, &err));
EXPECT_FALSE(media_channel1_->HasSendStream(1));
EXPECT_TRUE(media_channel1_->HasSendStream(2));
}
@@ -1601,19 +1602,17 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
void TestReceiveTwoOffers() {
CreateChannels(0, 0);
- // Set up the initial session description.
- cricket::SessionDescription* sdesc = CreateSessionDescriptionWithStream(1);
- session1_.set_remote_description(sdesc);
-
- session1_.SetError(cricket::BaseSession::ERROR_NONE, "");
- session1_.SetState(cricket::BaseSession::STATE_RECEIVEDINITIATE);
- EXPECT_EQ(cricket::BaseSession::ERROR_NONE, session1_.error());
+ std::string err;
+ rtc::scoped_ptr<cricket::SessionDescription> sdesc1(
+ CreateSessionDescriptionWithStream(1));
+ EXPECT_TRUE(channel1_->PushdownRemoteDescription(
+ sdesc1.get(), cricket::CA_OFFER, &err));
EXPECT_TRUE(media_channel1_->HasRecvStream(1));
- sdesc = CreateSessionDescriptionWithStream(2);
- session1_.set_remote_description(sdesc);
- session1_.SetState(cricket::BaseSession::STATE_RECEIVEDINITIATE);
- EXPECT_EQ(cricket::BaseSession::ERROR_NONE, session1_.error());
+ rtc::scoped_ptr<cricket::SessionDescription> sdesc2(
+ CreateSessionDescriptionWithStream(2));
+ EXPECT_TRUE(channel1_->PushdownRemoteDescription(
+ sdesc2.get(), cricket::CA_OFFER, &err));
EXPECT_FALSE(media_channel1_->HasRecvStream(1));
EXPECT_TRUE(media_channel1_->HasRecvStream(2));
}
@@ -1621,30 +1620,27 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
void TestSendPrAnswer() {
CreateChannels(0, 0);
- // Set up the initial session description.
- cricket::SessionDescription* sdesc = CreateSessionDescriptionWithStream(1);
- session1_.set_remote_description(sdesc);
-
- session1_.SetError(cricket::BaseSession::ERROR_NONE, "");
- session1_.SetState(cricket::BaseSession::STATE_RECEIVEDINITIATE);
- EXPECT_EQ(cricket::BaseSession::ERROR_NONE, session1_.error());
+ std::string err;
+ // Receive offer
+ rtc::scoped_ptr<cricket::SessionDescription> sdesc1(
+ CreateSessionDescriptionWithStream(1));
+ EXPECT_TRUE(channel1_->PushdownRemoteDescription(
+ sdesc1.get(), cricket::CA_OFFER, &err));
EXPECT_TRUE(media_channel1_->HasRecvStream(1));
- // Send PRANSWER
- sdesc = CreateSessionDescriptionWithStream(2);
- session1_.set_local_description(sdesc);
-
- session1_.SetState(cricket::BaseSession::STATE_SENTPRACCEPT);
- EXPECT_EQ(cricket::BaseSession::ERROR_NONE, session1_.error());
+ // Send PR answer
+ rtc::scoped_ptr<cricket::SessionDescription> sdesc2(
+ CreateSessionDescriptionWithStream(2));
+ EXPECT_TRUE(channel1_->PushdownLocalDescription(
+ sdesc2.get(), cricket::CA_PRANSWER, &err));
EXPECT_TRUE(media_channel1_->HasRecvStream(1));
EXPECT_TRUE(media_channel1_->HasSendStream(2));
- // Send ACCEPT
- sdesc = CreateSessionDescriptionWithStream(3);
- session1_.set_local_description(sdesc);
-
- session1_.SetState(cricket::BaseSession::STATE_SENTACCEPT);
- EXPECT_EQ(cricket::BaseSession::ERROR_NONE, session1_.error());
+ // Send answer
+ rtc::scoped_ptr<cricket::SessionDescription> sdesc3(
+ CreateSessionDescriptionWithStream(3));
+ EXPECT_TRUE(channel1_->PushdownLocalDescription(
+ sdesc3.get(), cricket::CA_ANSWER, &err));
EXPECT_TRUE(media_channel1_->HasRecvStream(1));
EXPECT_FALSE(media_channel1_->HasSendStream(2));
EXPECT_TRUE(media_channel1_->HasSendStream(3));
@@ -1653,30 +1649,27 @@ class ChannelTest : public testing::Test, public sigslot::has_slots<> {
void TestReceivePrAnswer() {
CreateChannels(0, 0);
- // Set up the initial session description.
- cricket::SessionDescription* sdesc = CreateSessionDescriptionWithStream(1);
- session1_.set_local_description(sdesc);
-
- session1_.SetError(cricket::BaseSession::ERROR_NONE, "");
- session1_.SetState(cricket::BaseSession::STATE_SENTINITIATE);
- EXPECT_EQ(cricket::BaseSession::ERROR_NONE, session1_.error());
+ std::string err;
+ // Send offer
+ rtc::scoped_ptr<cricket::SessionDescription> sdesc1(
+ CreateSessionDescriptionWithStream(1));
+ EXPECT_TRUE(channel1_->PushdownLocalDescription(
+ sdesc1.get(), cricket::CA_OFFER, &err));
EXPECT_TRUE(media_channel1_->HasSendStream(1));
- // Receive PRANSWER
- sdesc = CreateSessionDescriptionWithStream(2);
- session1_.set_remote_description(sdesc);
-
- session1_.SetState(cricket::BaseSession::STATE_RECEIVEDPRACCEPT);
- EXPECT_EQ(cricket::BaseSession::ERROR_NONE, session1_.error());
+ // Receive PR answer
+ rtc::scoped_ptr<cricket::SessionDescription> sdesc2(
+ CreateSessionDescriptionWithStream(2));
+ EXPECT_TRUE(channel1_->PushdownRemoteDescription(
+ sdesc2.get(), cricket::CA_PRANSWER, &err));
EXPECT_TRUE(media_channel1_->HasSendStream(1));
EXPECT_TRUE(media_channel1_->HasRecvStream(2));
- // Receive ACCEPT
- sdesc = CreateSessionDescriptionWithStream(3);
- session1_.set_remote_description(sdesc);
-
- session1_.SetState(cricket::BaseSession::STATE_RECEIVEDACCEPT);
- EXPECT_EQ(cricket::BaseSession::ERROR_NONE, session1_.error());
+ // Receive answer
+ rtc::scoped_ptr<cricket::SessionDescription> sdesc3(
+ CreateSessionDescriptionWithStream(3));
+ EXPECT_TRUE(channel1_->PushdownRemoteDescription(
+ sdesc3.get(), cricket::CA_ANSWER, &err));
EXPECT_TRUE(media_channel1_->HasSendStream(1));
EXPECT_FALSE(media_channel1_->HasRecvStream(2));
EXPECT_TRUE(media_channel1_->HasRecvStream(3));
@@ -2085,6 +2078,22 @@ TEST_F(VoiceChannelTest, SendRtcpMuxToRtcpMux) {
Base::SendRtcpMuxToRtcpMux();
}
+TEST_F(VoiceChannelTest, SendRequireRtcpMuxToRtcpMux) {
+ Base::SendRequireRtcpMuxToRtcpMux();
+}
+
+TEST_F(VoiceChannelTest, SendRtcpMuxToRequireRtcpMux) {
+ Base::SendRtcpMuxToRequireRtcpMux();
+}
+
+TEST_F(VoiceChannelTest, SendRequireRtcpMuxToRequireRtcpMux) {
+ Base::SendRequireRtcpMuxToRequireRtcpMux();
+}
+
+TEST_F(VoiceChannelTest, SendRequireRtcpMuxToNoRtcpMux) {
+ Base::SendRequireRtcpMuxToNoRtcpMux();
+}
+
TEST_F(VoiceChannelTest, SendEarlyRtcpMuxToRtcp) {
Base::SendEarlyRtcpMuxToRtcp();
}
@@ -2217,10 +2226,6 @@ TEST_F(VoiceChannelTest, TestInsertDtmf) {
3, 7, 120, cricket::DF_PLAY | cricket::DF_SEND));
}
-TEST_F(VoiceChannelTest, TestMediaSinks) {
- Base::TestMediaSinks();
-}
-
TEST_F(VoiceChannelTest, TestSetContentFailure) {
Base::TestSetContentFailure();
}
@@ -2507,6 +2512,22 @@ TEST_F(VideoChannelTest, SendRtcpMuxToRtcpMux) {
Base::SendRtcpMuxToRtcpMux();
}
+TEST_F(VideoChannelTest, SendRequireRtcpMuxToRtcpMux) {
+ Base::SendRequireRtcpMuxToRtcpMux();
+}
+
+TEST_F(VideoChannelTest, SendRtcpMuxToRequireRtcpMux) {
+ Base::SendRtcpMuxToRequireRtcpMux();
+}
+
+TEST_F(VideoChannelTest, SendRequireRtcpMuxToRequireRtcpMux) {
+ Base::SendRequireRtcpMuxToRequireRtcpMux();
+}
+
+TEST_F(VideoChannelTest, SendRequireRtcpMuxToNoRtcpMux) {
+ Base::SendRequireRtcpMuxToNoRtcpMux();
+}
+
TEST_F(VideoChannelTest, SendEarlyRtcpMuxToRtcp) {
Base::SendEarlyRtcpMuxToRtcp();
}
@@ -2562,10 +2583,6 @@ TEST_F(VideoChannelTest, TestMediaMonitor) {
Base::TestMediaMonitor();
}
-TEST_F(VideoChannelTest, TestMediaSinks) {
- Base::TestMediaSinks();
-}
-
TEST_F(VideoChannelTest, TestSetContentFailure) {
Base::TestSetContentFailure();
}
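With the SignalNewLocalDescription/SignalNewRemoteDescription hookup gone from BaseChannel::Init(), the tests above now drive the channel directly through PushdownLocalDescription()/PushdownRemoteDescription() with explicit ContentAction values, and own the descriptions via rtc::scoped_ptr instead of handing raw pointers to the session. The shape of that pattern, condensed from the fixtures above:

    std::string err;
    rtc::scoped_ptr<cricket::SessionDescription> offer(
        CreateSessionDescriptionWithStream(1));  // fixture helper, see above
    // The channel only borrows the description; err reports failures that the
    // old code surfaced as BaseSession::ERROR_CONTENT.
    EXPECT_TRUE(channel1_->PushdownLocalDescription(
        offer.get(), cricket::CA_OFFER, &err));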
diff --git a/chromium/third_party/libjingle/source/talk/session/media/channelmanager.cc b/chromium/third_party/libjingle/source/talk/session/media/channelmanager.cc
index 5013fc48b3b..52116ddb39c 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/channelmanager.cc
+++ b/chromium/third_party/libjingle/source/talk/session/media/channelmanager.cc
@@ -41,7 +41,6 @@
#ifdef HAVE_SCTP
#include "talk/media/sctp/sctpdataengine.h"
#endif
-#include "talk/session/media/soundclip.h"
#include "talk/session/media/srtpfilter.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/common.h"
@@ -311,9 +310,6 @@ void ChannelManager::Terminate_w() {
while (!voice_channels_.empty()) {
DestroyVoiceChannel_w(voice_channels_.back(), nullptr);
}
- while (!soundclips_.empty()) {
- DestroySoundclip_w(soundclips_.back());
- }
if (!SetCaptureDevice_w(NULL)) {
LOG(LS_WARNING) << "failed to delete video capturer";
}
@@ -321,26 +317,32 @@ void ChannelManager::Terminate_w() {
}
VoiceChannel* ChannelManager::CreateVoiceChannel(
- BaseSession* session, const std::string& content_name, bool rtcp) {
+ BaseSession* session,
+ const std::string& content_name,
+ bool rtcp,
+ const AudioOptions& options) {
return worker_thread_->Invoke<VoiceChannel*>(
- Bind(&ChannelManager::CreateVoiceChannel_w, this,
- session, content_name, rtcp));
+ Bind(&ChannelManager::CreateVoiceChannel_w, this, session, content_name,
+ rtcp, options));
}
VoiceChannel* ChannelManager::CreateVoiceChannel_w(
- BaseSession* session, const std::string& content_name, bool rtcp) {
+ BaseSession* session,
+ const std::string& content_name,
+ bool rtcp,
+ const AudioOptions& options) {
ASSERT(initialized_);
ASSERT(worker_thread_ == rtc::Thread::Current());
- VoiceMediaChannel* media_channel = media_engine_->CreateChannel();
- if (media_channel == NULL)
- return NULL;
+ VoiceMediaChannel* media_channel = media_engine_->CreateChannel(options);
+ if (!media_channel)
+ return nullptr;
VoiceChannel* voice_channel = new VoiceChannel(
worker_thread_, media_engine_.get(), media_channel,
session, content_name, rtcp);
if (!voice_channel->Init()) {
delete voice_channel;
- return NULL;
+ return nullptr;
}
voice_channels_.push_back(voice_channel);
return voice_channel;
@@ -504,45 +506,6 @@ void ChannelManager::DestroyDataChannel_w(DataChannel* data_channel) {
delete data_channel;
}
-Soundclip* ChannelManager::CreateSoundclip() {
- return worker_thread_->Invoke<Soundclip*>(
- Bind(&ChannelManager::CreateSoundclip_w, this));
-}
-
-Soundclip* ChannelManager::CreateSoundclip_w() {
- ASSERT(initialized_);
- ASSERT(worker_thread_ == rtc::Thread::Current());
-
- SoundclipMedia* soundclip_media = media_engine_->CreateSoundclip();
- if (!soundclip_media) {
- return NULL;
- }
-
- Soundclip* soundclip = new Soundclip(worker_thread_, soundclip_media);
- soundclips_.push_back(soundclip);
- return soundclip;
-}
-
-void ChannelManager::DestroySoundclip(Soundclip* soundclip) {
- if (soundclip) {
- worker_thread_->Invoke<void>(
- Bind(&ChannelManager::DestroySoundclip_w, this, soundclip));
- }
-}
-
-void ChannelManager::DestroySoundclip_w(Soundclip* soundclip) {
- // Destroy soundclip.
- ASSERT(initialized_);
- Soundclips::iterator it = std::find(soundclips_.begin(),
- soundclips_.end(), soundclip);
- ASSERT(it != soundclips_.end());
- if (it == soundclips_.end())
- return;
-
- soundclips_.erase(it);
- delete soundclip;
-}
-
bool ChannelManager::GetAudioOptions(std::string* in_name,
std::string* out_name,
AudioOptions* options) {
@@ -615,29 +578,6 @@ bool ChannelManager::SetAudioOptions_w(
return ret;
}
-// Sets Engine-specific audio options according to enabled experiments.
-bool ChannelManager::SetEngineAudioOptions(const AudioOptions& options) {
- // If we're initialized, pass the settings to the media engine.
- bool ret = false;
- if (initialized_) {
- ret = worker_thread_->Invoke<bool>(
- Bind(&ChannelManager::SetEngineAudioOptions_w, this, options));
- }
-
- // If all worked well, save the audio options.
- if (ret) {
- audio_options_ = options;
- }
- return ret;
-}
-
-bool ChannelManager::SetEngineAudioOptions_w(const AudioOptions& options) {
- ASSERT(worker_thread_ == rtc::Thread::Current());
- ASSERT(initialized_);
-
- return media_engine_->SetAudioOptions(options);
-}
-
bool ChannelManager::GetOutputVolume(int* level) {
if (!initialized_) {
return false;
diff --git a/chromium/third_party/libjingle/source/talk/session/media/channelmanager.h b/chromium/third_party/libjingle/source/talk/session/media/channelmanager.h
index 27f874b59d6..898ae1cb2db 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/channelmanager.h
+++ b/chromium/third_party/libjingle/source/talk/session/media/channelmanager.h
@@ -44,7 +44,6 @@ namespace cricket {
const int kDefaultAudioDelayOffset = 0;
-class Soundclip;
class VideoProcessor;
class VoiceChannel;
class VoiceProcessor;
@@ -104,8 +103,10 @@ class ChannelManager : public rtc::MessageHandler,
// The operations below all occur on the worker thread.
// Creates a voice channel, to be associated with the specified session.
- VoiceChannel* CreateVoiceChannel(
- BaseSession* session, const std::string& content_name, bool rtcp);
+ VoiceChannel* CreateVoiceChannel(BaseSession* session,
+ const std::string& content_name,
+ bool rtcp,
+ const AudioOptions& options);
// Destroys a voice channel created with the Create API.
void DestroyVoiceChannel(VoiceChannel* voice_channel,
VideoChannel* video_channel);
@@ -129,15 +130,9 @@ class ChannelManager : public rtc::MessageHandler,
// Destroys a data channel created with the Create API.
void DestroyDataChannel(DataChannel* data_channel);
- // Creates a soundclip.
- Soundclip* CreateSoundclip();
- // Destroys a soundclip created with the Create API.
- void DestroySoundclip(Soundclip* soundclip);
-
// Indicates whether any channels exist.
bool has_channels() const {
- return (!voice_channels_.empty() || !video_channels_.empty() ||
- !soundclips_.empty());
+ return (!voice_channels_.empty() || !video_channels_.empty());
}
// Configures the audio and video devices. A null pointer can be passed to
@@ -148,8 +143,6 @@ class ChannelManager : public rtc::MessageHandler,
bool SetAudioOptions(const std::string& wave_in_device,
const std::string& wave_out_device,
const AudioOptions& options);
- // Sets Engine-specific audio options according to enabled experiments.
- bool SetEngineAudioOptions(const AudioOptions& options);
bool GetOutputVolume(int* level);
bool SetOutputVolume(int level);
bool IsSameCapturer(const std::string& capturer_name,
@@ -253,7 +246,6 @@ class ChannelManager : public rtc::MessageHandler,
typedef std::vector<VoiceChannel*> VoiceChannels;
typedef std::vector<VideoChannel*> VideoChannels;
typedef std::vector<DataChannel*> DataChannels;
- typedef std::vector<Soundclip*> Soundclips;
void Construct(MediaEngineInterface* me,
DataEngineInterface* dme,
@@ -263,8 +255,10 @@ class ChannelManager : public rtc::MessageHandler,
bool InitMediaEngine_w();
void DestructorDeletes_w();
void Terminate_w();
- VoiceChannel* CreateVoiceChannel_w(
- BaseSession* session, const std::string& content_name, bool rtcp);
+ VoiceChannel* CreateVoiceChannel_w(BaseSession* session,
+ const std::string& content_name,
+ bool rtcp,
+ const AudioOptions& options);
void DestroyVoiceChannel_w(VoiceChannel* voice_channel,
VideoChannel* video_channel);
VideoChannel* CreateVideoChannel_w(BaseSession* session,
@@ -277,11 +271,8 @@ class ChannelManager : public rtc::MessageHandler,
BaseSession* session, const std::string& content_name,
bool rtcp, DataChannelType data_channel_type);
void DestroyDataChannel_w(DataChannel* data_channel);
- Soundclip* CreateSoundclip_w();
- void DestroySoundclip_w(Soundclip* soundclip);
bool SetAudioOptions_w(const AudioOptions& options, int delay_offset,
const Device* in_dev, const Device* out_dev);
- bool SetEngineAudioOptions_w(const AudioOptions& options);
bool SetCaptureDevice_w(const Device* cam_device);
void OnVideoCaptureStateChange(VideoCapturer* capturer,
CaptureState result);
@@ -306,7 +297,6 @@ class ChannelManager : public rtc::MessageHandler,
VoiceChannels voice_channels_;
VideoChannels video_channels_;
DataChannels data_channels_;
- Soundclips soundclips_;
std::string audio_in_device_;
std::string audio_out_device_;
diff --git a/chromium/third_party/libjingle/source/talk/session/media/channelmanager_unittest.cc b/chromium/third_party/libjingle/source/talk/session/media/channelmanager_unittest.cc
index b0abf0487ee..1ffdaf28369 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/channelmanager_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/session/media/channelmanager_unittest.cc
@@ -125,15 +125,15 @@ TEST_F(ChannelManagerTest, StartupShutdownOnThread) {
TEST_F(ChannelManagerTest, CreateDestroyChannels) {
EXPECT_TRUE(cm_->Init());
cricket::VoiceChannel* voice_channel = cm_->CreateVoiceChannel(
- session_, cricket::CN_AUDIO, false);
- EXPECT_TRUE(voice_channel != NULL);
+ session_, cricket::CN_AUDIO, false, AudioOptions());
+ EXPECT_TRUE(voice_channel != nullptr);
cricket::VideoChannel* video_channel = cm_->CreateVideoChannel(
session_, cricket::CN_VIDEO, false, VideoOptions(), voice_channel);
- EXPECT_TRUE(video_channel != NULL);
+ EXPECT_TRUE(video_channel != nullptr);
cricket::DataChannel* data_channel =
cm_->CreateDataChannel(session_, cricket::CN_DATA,
false, cricket::DCT_RTP);
- EXPECT_TRUE(data_channel != NULL);
+ EXPECT_TRUE(data_channel != nullptr);
cm_->DestroyVideoChannel(video_channel);
cm_->DestroyVoiceChannel(voice_channel, nullptr);
cm_->DestroyDataChannel(data_channel);
@@ -148,15 +148,15 @@ TEST_F(ChannelManagerTest, CreateDestroyChannelsOnThread) {
delete session_;
session_ = new cricket::FakeSession(&worker_, true);
cricket::VoiceChannel* voice_channel = cm_->CreateVoiceChannel(
- session_, cricket::CN_AUDIO, false);
- EXPECT_TRUE(voice_channel != NULL);
+ session_, cricket::CN_AUDIO, false, AudioOptions());
+ EXPECT_TRUE(voice_channel != nullptr);
cricket::VideoChannel* video_channel = cm_->CreateVideoChannel(
session_, cricket::CN_VIDEO, false, VideoOptions(), voice_channel);
- EXPECT_TRUE(video_channel != NULL);
+ EXPECT_TRUE(video_channel != nullptr);
cricket::DataChannel* data_channel =
cm_->CreateDataChannel(session_, cricket::CN_DATA,
false, cricket::DCT_RTP);
- EXPECT_TRUE(data_channel != NULL);
+ EXPECT_TRUE(data_channel != nullptr);
cm_->DestroyVideoChannel(video_channel);
cm_->DestroyVoiceChannel(voice_channel, nullptr);
cm_->DestroyDataChannel(data_channel);
@@ -171,18 +171,18 @@ TEST_F(ChannelManagerTest, NoTransportChannelTest) {
// The test is useless unless the session does not fail creating
// cricket::TransportChannel.
ASSERT_TRUE(session_->CreateChannel(
- "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP) == NULL);
+ "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP) == nullptr);
cricket::VoiceChannel* voice_channel = cm_->CreateVoiceChannel(
- session_, cricket::CN_AUDIO, false);
- EXPECT_TRUE(voice_channel == NULL);
+ session_, cricket::CN_AUDIO, false, AudioOptions());
+ EXPECT_TRUE(voice_channel == nullptr);
cricket::VideoChannel* video_channel = cm_->CreateVideoChannel(
session_, cricket::CN_VIDEO, false, VideoOptions(), voice_channel);
- EXPECT_TRUE(video_channel == NULL);
+ EXPECT_TRUE(video_channel == nullptr);
cricket::DataChannel* data_channel =
cm_->CreateDataChannel(session_, cricket::CN_DATA,
false, cricket::DCT_RTP);
- EXPECT_TRUE(data_channel == NULL);
+ EXPECT_TRUE(data_channel == nullptr);
cm_->Terminate();
}
@@ -309,25 +309,6 @@ TEST_F(ChannelManagerTest, SetAudioOptions) {
EXPECT_FALSE(cm_->SetAudioOptions("audio-in9", "audio-out2", options));
}
-TEST_F(ChannelManagerTest, SetEngineAudioOptions) {
- EXPECT_TRUE(cm_->Init());
- // Test setting specific values.
- AudioOptions options;
- options.experimental_ns.Set(true);
- EXPECT_TRUE(cm_->SetEngineAudioOptions(options));
- bool experimental_ns = false;
- EXPECT_TRUE(fme_->audio_options().experimental_ns.Get(&experimental_ns));
- EXPECT_TRUE(experimental_ns);
-}
-
-TEST_F(ChannelManagerTest, SetEngineAudioOptionsBeforeInitFails) {
- // Test that values that we set before Init are not applied.
- AudioOptions options;
- options.experimental_ns.Set(true);
- EXPECT_FALSE(cm_->SetEngineAudioOptions(options));
- EXPECT_FALSE(fme_->audio_options().experimental_ns.IsSet());
-}
-
TEST_F(ChannelManagerTest, SetCaptureDeviceBeforeInit) {
// Test that values that we set before Init are applied.
EXPECT_TRUE(cm_->SetCaptureDevice("video-in2"));
diff --git a/chromium/third_party/libjingle/source/talk/session/media/mediarecorder.cc b/chromium/third_party/libjingle/source/talk/session/media/mediarecorder.cc
deleted file mode 100644
index 9ce84f38389..00000000000
--- a/chromium/third_party/libjingle/source/talk/session/media/mediarecorder.cc
+++ /dev/null
@@ -1,224 +0,0 @@
-/*
- * libjingle
- * Copyright 2010 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "talk/session/media/mediarecorder.h"
-
-#include <limits.h>
-
-#include <string>
-
-#include "talk/media/base/rtpdump.h"
-#include "webrtc/base/fileutils.h"
-#include "webrtc/base/logging.h"
-#include "webrtc/base/pathutils.h"
-
-
-namespace cricket {
-
-///////////////////////////////////////////////////////////////////////////
-// Implementation of RtpDumpSink.
-///////////////////////////////////////////////////////////////////////////
-RtpDumpSink::RtpDumpSink(rtc::StreamInterface* stream)
- : max_size_(INT_MAX),
- recording_(false),
- packet_filter_(PF_NONE) {
- stream_.reset(stream);
-}
-
-RtpDumpSink::~RtpDumpSink() {}
-
-void RtpDumpSink::SetMaxSize(size_t size) {
- rtc::CritScope cs(&critical_section_);
- max_size_ = size;
-}
-
-bool RtpDumpSink::Enable(bool enable) {
- rtc::CritScope cs(&critical_section_);
-
- recording_ = enable;
-
- // Create a file and the RTP writer if we have not done yet.
- if (recording_ && !writer_) {
- if (!stream_) {
- return false;
- }
- writer_.reset(new RtpDumpWriter(stream_.get()));
- writer_->set_packet_filter(packet_filter_);
- } else if (!recording_ && stream_) {
- stream_->Flush();
- }
- return true;
-}
-
-void RtpDumpSink::OnPacket(const void* data, size_t size, bool rtcp) {
- rtc::CritScope cs(&critical_section_);
-
- if (recording_ && writer_) {
- size_t current_size;
- if (writer_->GetDumpSize(&current_size) &&
- current_size + RtpDumpPacket::kHeaderLength + size <= max_size_) {
- if (!rtcp) {
- writer_->WriteRtpPacket(data, size);
- } else {
- // TODO(whyuan): Enable recording RTCP.
- }
- }
- }
-}
-
-void RtpDumpSink::set_packet_filter(int filter) {
- rtc::CritScope cs(&critical_section_);
- packet_filter_ = filter;
- if (writer_) {
- writer_->set_packet_filter(packet_filter_);
- }
-}
-
-void RtpDumpSink::Flush() {
- rtc::CritScope cs(&critical_section_);
- if (stream_) {
- stream_->Flush();
- }
-}
-
-///////////////////////////////////////////////////////////////////////////
-// Implementation of MediaRecorder.
-///////////////////////////////////////////////////////////////////////////
-MediaRecorder::MediaRecorder() {}
-
-MediaRecorder::~MediaRecorder() {
- rtc::CritScope cs(&critical_section_);
- std::map<BaseChannel*, SinkPair*>::iterator itr;
- for (itr = sinks_.begin(); itr != sinks_.end(); ++itr) {
- delete itr->second;
- }
-}
-
-bool MediaRecorder::AddChannel(VoiceChannel* channel,
- rtc::StreamInterface* send_stream,
- rtc::StreamInterface* recv_stream,
- int filter) {
- return InternalAddChannel(channel, false, send_stream, recv_stream,
- filter);
-}
-bool MediaRecorder::AddChannel(VideoChannel* channel,
- rtc::StreamInterface* send_stream,
- rtc::StreamInterface* recv_stream,
- int filter) {
- return InternalAddChannel(channel, true, send_stream, recv_stream,
- filter);
-}
-
-bool MediaRecorder::InternalAddChannel(BaseChannel* channel,
- bool video_channel,
- rtc::StreamInterface* send_stream,
- rtc::StreamInterface* recv_stream,
- int filter) {
- if (!channel) {
- return false;
- }
-
- rtc::CritScope cs(&critical_section_);
- if (sinks_.end() != sinks_.find(channel)) {
- return false; // The channel was added already.
- }
-
- SinkPair* sink_pair = new SinkPair;
- sink_pair->video_channel = video_channel;
- sink_pair->filter = filter;
- sink_pair->send_sink.reset(new RtpDumpSink(send_stream));
- sink_pair->send_sink->set_packet_filter(filter);
- sink_pair->recv_sink.reset(new RtpDumpSink(recv_stream));
- sink_pair->recv_sink->set_packet_filter(filter);
- sinks_[channel] = sink_pair;
-
- return true;
-}
-
-void MediaRecorder::RemoveChannel(BaseChannel* channel,
- SinkType type) {
- rtc::CritScope cs(&critical_section_);
- std::map<BaseChannel*, SinkPair*>::iterator itr = sinks_.find(channel);
- if (sinks_.end() != itr) {
- channel->UnregisterSendSink(itr->second->send_sink.get(), type);
- channel->UnregisterRecvSink(itr->second->recv_sink.get(), type);
- delete itr->second;
- sinks_.erase(itr);
- }
-}
-
-bool MediaRecorder::EnableChannel(
- BaseChannel* channel, bool enable_send, bool enable_recv,
- SinkType type) {
- rtc::CritScope cs(&critical_section_);
- std::map<BaseChannel*, SinkPair*>::iterator itr = sinks_.find(channel);
- if (sinks_.end() == itr) {
- return false;
- }
-
- SinkPair* sink_pair = itr->second;
- RtpDumpSink* sink = sink_pair->send_sink.get();
- sink->Enable(enable_send);
- if (enable_send) {
- channel->RegisterSendSink(sink, &RtpDumpSink::OnPacket, type);
- } else {
- channel->UnregisterSendSink(sink, type);
- }
-
- sink = sink_pair->recv_sink.get();
- sink->Enable(enable_recv);
- if (enable_recv) {
- channel->RegisterRecvSink(sink, &RtpDumpSink::OnPacket, type);
- } else {
- channel->UnregisterRecvSink(sink, type);
- }
-
- if (sink_pair->video_channel &&
- (sink_pair->filter & PF_RTPPACKET) == PF_RTPPACKET) {
- // Request a full intra frame.
- VideoChannel* video_channel = static_cast<VideoChannel*>(channel);
- if (enable_send) {
- video_channel->SendIntraFrame();
- }
- if (enable_recv) {
- video_channel->RequestIntraFrame();
- }
- }
-
- return true;
-}
-
-void MediaRecorder::FlushSinks() {
- rtc::CritScope cs(&critical_section_);
- std::map<BaseChannel*, SinkPair*>::iterator itr;
- for (itr = sinks_.begin(); itr != sinks_.end(); ++itr) {
- itr->second->send_sink->Flush();
- itr->second->recv_sink->Flush();
- }
-}
-
-} // namespace cricket
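
For reference, a minimal sketch of how the RtpDumpSink deleted above was typically driven. It is not taken from this change: the output path, packet pointer, and size cap are invented placeholders, and only the calls visible in the removed source are used.

#include <stddef.h>

#include "talk/session/media/mediarecorder.h"
#include "webrtc/base/fileutils.h"
#include "webrtc/base/pathutils.h"

// Sketch only: dumps a single RTP packet to a file-backed RtpDumpSink.
void DumpOnePacket(const void* packet, size_t packet_len) {
  // The sink takes ownership of the stream passed to its constructor.
  rtc::StreamInterface* stream =
      rtc::Filesystem::OpenFile(rtc::Pathname("example.rtpdump"), "wb");
  cricket::RtpDumpSink sink(stream);
  sink.set_packet_filter(cricket::PF_ALL);   // Record headers and payloads.
  sink.SetMaxSize(1024 * 1024);              // Stop writing once 1 MB is reached.
  sink.Enable(true);                         // Lazily creates the RtpDumpWriter.
  sink.OnPacket(packet, packet_len, false);  // false = RTP; RTCP recording is still a TODO.
  sink.Flush();
}
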
diff --git a/chromium/third_party/libjingle/source/talk/session/media/mediarecorder.h b/chromium/third_party/libjingle/source/talk/session/media/mediarecorder.h
deleted file mode 100644
index 095f355c389..00000000000
--- a/chromium/third_party/libjingle/source/talk/session/media/mediarecorder.h
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * libjingle
- * Copyright 2010 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef TALK_SESSION_MEDIA_MEDIARECORDER_H_
-#define TALK_SESSION_MEDIA_MEDIARECORDER_H_
-
-#include <map>
-#include <string>
-
-#include "talk/session/media/channel.h"
-#include "talk/session/media/mediasink.h"
-#include "webrtc/base/criticalsection.h"
-#include "webrtc/base/scoped_ptr.h"
-#include "webrtc/base/sigslot.h"
-
-namespace rtc {
-class Pathname;
-class FileStream;
-}
-
-namespace cricket {
-
-class BaseChannel;
-class VideoChannel;
-class VoiceChannel;
-class RtpDumpWriter;
-
-// RtpDumpSink implements MediaSinkInterface by dumping the RTP/RTCP packets to
-// a file.
-class RtpDumpSink : public MediaSinkInterface, public sigslot::has_slots<> {
- public:
- // Takes ownership of stream.
- explicit RtpDumpSink(rtc::StreamInterface* stream);
- virtual ~RtpDumpSink();
-
- virtual void SetMaxSize(size_t size);
- virtual bool Enable(bool enable);
- virtual bool IsEnabled() const { return recording_; }
- virtual void OnPacket(const void* data, size_t size, bool rtcp);
- virtual void set_packet_filter(int filter);
- int packet_filter() const { return packet_filter_; }
- void Flush();
-
- private:
- size_t max_size_;
- bool recording_;
- int packet_filter_;
- rtc::scoped_ptr<rtc::StreamInterface> stream_;
- rtc::scoped_ptr<RtpDumpWriter> writer_;
- rtc::CriticalSection critical_section_;
-
- DISALLOW_COPY_AND_ASSIGN(RtpDumpSink);
-};
-
-class MediaRecorder {
- public:
- MediaRecorder();
- virtual ~MediaRecorder();
-
- bool AddChannel(VoiceChannel* channel,
- rtc::StreamInterface* send_stream,
- rtc::StreamInterface* recv_stream,
- int filter);
- bool AddChannel(VideoChannel* channel,
- rtc::StreamInterface* send_stream,
- rtc::StreamInterface* recv_stream,
- int filter);
- void RemoveChannel(BaseChannel* channel, SinkType type);
- bool EnableChannel(BaseChannel* channel, bool enable_send, bool enable_recv,
- SinkType type);
- void FlushSinks();
-
- private:
- struct SinkPair {
- bool video_channel;
- int filter;
- rtc::scoped_ptr<RtpDumpSink> send_sink;
- rtc::scoped_ptr<RtpDumpSink> recv_sink;
- };
-
- bool InternalAddChannel(BaseChannel* channel,
- bool video_channel,
- rtc::StreamInterface* send_stream,
- rtc::StreamInterface* recv_stream,
- int filter);
-
- std::map<BaseChannel*, SinkPair*> sinks_;
- rtc::CriticalSection critical_section_;
-
- DISALLOW_COPY_AND_ASSIGN(MediaRecorder);
-};
-
-} // namespace cricket
-
-#endif // TALK_SESSION_MEDIA_MEDIARECORDER_H_
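
A condensed sketch of the MediaRecorder call sequence this header implies; the channel and the two writable streams are assumed to exist already, and the deleted unit test that follows exercises the same flow against fake channels.

#include "talk/session/media/mediarecorder.h"

// Sketch only: record both directions of a voice channel before encryption.
void RecordVoiceChannel(cricket::VoiceChannel* channel,
                        rtc::StreamInterface* send_stream,
                        rtc::StreamInterface* recv_stream) {
  cricket::MediaRecorder recorder;
  recorder.AddChannel(channel, send_stream, recv_stream,
                      cricket::PF_RTPPACKET);        // Full packets, not just headers.
  recorder.EnableChannel(channel, true /* send */, true /* recv */,
                         cricket::SINK_PRE_CRYPTO);  // Tap before SRTP.
  // ... media flows and is written to the two streams ...
  recorder.FlushSinks();
  recorder.RemoveChannel(channel, cricket::SINK_PRE_CRYPTO);
}
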
diff --git a/chromium/third_party/libjingle/source/talk/session/media/mediarecorder_unittest.cc b/chromium/third_party/libjingle/source/talk/session/media/mediarecorder_unittest.cc
deleted file mode 100644
index 52a92457fa2..00000000000
--- a/chromium/third_party/libjingle/source/talk/session/media/mediarecorder_unittest.cc
+++ /dev/null
@@ -1,356 +0,0 @@
-/*
- * libjingle
- * Copyright 2010 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include <string>
-
-#include "talk/media/base/fakemediaengine.h"
-#include "talk/media/base/rtpdump.h"
-#include "talk/media/base/testutils.h"
-#include "webrtc/p2p/base/fakesession.h"
-#include "talk/session/media/channel.h"
-#include "talk/session/media/mediarecorder.h"
-#include "webrtc/base/bytebuffer.h"
-#include "webrtc/base/fileutils.h"
-#include "webrtc/base/gunit.h"
-#include "webrtc/base/pathutils.h"
-#include "webrtc/base/thread.h"
-
-namespace cricket {
-
-rtc::StreamInterface* Open(const std::string& path) {
- return rtc::Filesystem::OpenFile(
- rtc::Pathname(path), "wb");
-}
-
-/////////////////////////////////////////////////////////////////////////
-// Test RtpDumpSink
-/////////////////////////////////////////////////////////////////////////
-class RtpDumpSinkTest : public testing::Test {
- public:
- virtual void SetUp() {
- EXPECT_TRUE(rtc::Filesystem::GetTemporaryFolder(path_, true, NULL));
- path_.SetPathname(rtc::Filesystem::TempFilename(path_, "sink-test"));
- sink_.reset(new RtpDumpSink(Open(path_.pathname())));
-
- for (int i = 0; i < ARRAY_SIZE(rtp_buf_); ++i) {
- RtpTestUtility::kTestRawRtpPackets[i].WriteToByteBuffer(
- RtpTestUtility::kDefaultSsrc, &rtp_buf_[i]);
- }
- }
-
- virtual void TearDown() {
- stream_.reset();
- EXPECT_TRUE(rtc::Filesystem::DeleteFile(path_));
- }
-
- protected:
- void OnRtpPacket(const RawRtpPacket& raw) {
- rtc::ByteBuffer buf;
- raw.WriteToByteBuffer(RtpTestUtility::kDefaultSsrc, &buf);
- sink_->OnPacket(buf.Data(), buf.Length(), false);
- }
-
- rtc::StreamResult ReadPacket(RtpDumpPacket* packet) {
- if (!stream_.get()) {
- sink_.reset();  // This will close the file so we can read it.
- stream_.reset(rtc::Filesystem::OpenFile(path_, "rb"));
- reader_.reset(new RtpDumpReader(stream_.get()));
- }
- return reader_->ReadPacket(packet);
- }
-
- rtc::Pathname path_;
- rtc::scoped_ptr<RtpDumpSink> sink_;
- rtc::ByteBuffer rtp_buf_[3];
- rtc::scoped_ptr<rtc::StreamInterface> stream_;
- rtc::scoped_ptr<RtpDumpReader> reader_;
-};
-
-TEST_F(RtpDumpSinkTest, TestRtpDumpSink) {
- // By default, the sink is disabled. The 1st packet is not written.
- EXPECT_FALSE(sink_->IsEnabled());
- sink_->set_packet_filter(PF_ALL);
- OnRtpPacket(RtpTestUtility::kTestRawRtpPackets[0]);
-
- // Enable the sink. The 2nd packet is written.
- EXPECT_TRUE(sink_->Enable(true));
- EXPECT_TRUE(sink_->IsEnabled());
- EXPECT_TRUE(rtc::Filesystem::IsFile(path_.pathname()));
- OnRtpPacket(RtpTestUtility::kTestRawRtpPackets[1]);
-
- // Disable the sink. The 3rd packet is not written.
- EXPECT_TRUE(sink_->Enable(false));
- EXPECT_FALSE(sink_->IsEnabled());
- OnRtpPacket(RtpTestUtility::kTestRawRtpPackets[2]);
-
- // Read the recorded file and verify it contains only the 2nd packet.
- RtpDumpPacket packet;
- EXPECT_EQ(rtc::SR_SUCCESS, ReadPacket(&packet));
- EXPECT_TRUE(RtpTestUtility::VerifyPacket(
- &packet, &RtpTestUtility::kTestRawRtpPackets[1], false));
- EXPECT_EQ(rtc::SR_EOS, ReadPacket(&packet));
-}
-
-TEST_F(RtpDumpSinkTest, TestRtpDumpSinkMaxSize) {
- EXPECT_TRUE(sink_->Enable(true));
- sink_->set_packet_filter(PF_ALL);
- sink_->SetMaxSize(strlen(RtpDumpFileHeader::kFirstLine) +
- RtpDumpFileHeader::kHeaderLength +
- RtpDumpPacket::kHeaderLength +
- RtpTestUtility::kTestRawRtpPackets[0].size());
- OnRtpPacket(RtpTestUtility::kTestRawRtpPackets[0]);
-
- // Exceed the size limit: the 2nd and 3rd packets are not written.
- OnRtpPacket(RtpTestUtility::kTestRawRtpPackets[1]);
- OnRtpPacket(RtpTestUtility::kTestRawRtpPackets[2]);
-
- // Read the recorded file and verify that it contains only the first packet.
- RtpDumpPacket packet;
- EXPECT_EQ(rtc::SR_SUCCESS, ReadPacket(&packet));
- EXPECT_TRUE(RtpTestUtility::VerifyPacket(
- &packet, &RtpTestUtility::kTestRawRtpPackets[0], false));
- EXPECT_EQ(rtc::SR_EOS, ReadPacket(&packet));
-}
-
-TEST_F(RtpDumpSinkTest, TestRtpDumpSinkFilter) {
- // The default filter is PF_NONE.
- EXPECT_EQ(PF_NONE, sink_->packet_filter());
-
- // Set to PF_RTPHEADER before enabling.
- sink_->set_packet_filter(PF_RTPHEADER);
- EXPECT_EQ(PF_RTPHEADER, sink_->packet_filter());
- EXPECT_TRUE(sink_->Enable(true));
- // We dump only the header of the first packet.
- OnRtpPacket(RtpTestUtility::kTestRawRtpPackets[0]);
-
- // Set the filter to PF_RTPPACKET. We dump the entire second packet.
- sink_->set_packet_filter(PF_RTPPACKET);
- EXPECT_EQ(PF_RTPPACKET, sink_->packet_filter());
- OnRtpPacket(RtpTestUtility::kTestRawRtpPackets[1]);
-
- // Set the filter to PF_NONE. We do not dump the third packet.
- sink_->set_packet_filter(PF_NONE);
- EXPECT_EQ(PF_NONE, sink_->packet_filter());
- OnRtpPacket(RtpTestUtility::kTestRawRtpPackets[2]);
-
- // Read the recorded file and verify the header of the first packet and
- // the whole packet for the second packet.
- RtpDumpPacket packet;
- EXPECT_EQ(rtc::SR_SUCCESS, ReadPacket(&packet));
- EXPECT_TRUE(RtpTestUtility::VerifyPacket(
- &packet, &RtpTestUtility::kTestRawRtpPackets[0], true));
- EXPECT_EQ(rtc::SR_SUCCESS, ReadPacket(&packet));
- EXPECT_TRUE(RtpTestUtility::VerifyPacket(
- &packet, &RtpTestUtility::kTestRawRtpPackets[1], false));
- EXPECT_EQ(rtc::SR_EOS, ReadPacket(&packet));
-}
-
-/////////////////////////////////////////////////////////////////////////
-// Test MediaRecorder
-/////////////////////////////////////////////////////////////////////////
-void TestMediaRecorder(BaseChannel* channel,
- FakeVideoMediaChannel* video_media_channel,
- int filter) {
- // Create media recorder.
- rtc::scoped_ptr<MediaRecorder> recorder(new MediaRecorder);
- // EnableChannel should fail before AddChannel.
- EXPECT_FALSE(recorder->EnableChannel(channel, true, true, SINK_PRE_CRYPTO));
- EXPECT_FALSE(channel->HasSendSinks(SINK_PRE_CRYPTO));
- EXPECT_FALSE(channel->HasRecvSinks(SINK_PRE_CRYPTO));
- EXPECT_FALSE(channel->HasSendSinks(SINK_POST_CRYPTO));
- EXPECT_FALSE(channel->HasRecvSinks(SINK_POST_CRYPTO));
-
- // Add the channel to the recorder.
- rtc::Pathname path;
- EXPECT_TRUE(rtc::Filesystem::GetTemporaryFolder(path, true, NULL));
- std::string send_file =
- rtc::Filesystem::TempFilename(path, "send");
- std::string recv_file =
- rtc::Filesystem::TempFilename(path, "recv");
- if (video_media_channel) {
- EXPECT_TRUE(recorder->AddChannel(static_cast<VideoChannel*>(channel),
- Open(send_file), Open(recv_file), filter));
- } else {
- EXPECT_TRUE(recorder->AddChannel(static_cast<VoiceChannel*>(channel),
- Open(send_file), Open(recv_file), filter));
- }
-
- // Enable recording only the sent media.
- EXPECT_TRUE(recorder->EnableChannel(channel, true, false, SINK_PRE_CRYPTO));
- EXPECT_TRUE(channel->HasSendSinks(SINK_PRE_CRYPTO));
- EXPECT_FALSE(channel->HasRecvSinks(SINK_POST_CRYPTO));
- EXPECT_FALSE(channel->HasSendSinks(SINK_POST_CRYPTO));
- EXPECT_FALSE(channel->HasRecvSinks(SINK_POST_CRYPTO));
- if (video_media_channel) {
- EXPECT_TRUE_WAIT(video_media_channel->sent_intra_frame(), 100);
- }
-
- // Enable recording only the received media.
- EXPECT_TRUE(recorder->EnableChannel(channel, false, true, SINK_PRE_CRYPTO));
- EXPECT_FALSE(channel->HasSendSinks(SINK_PRE_CRYPTO));
- EXPECT_TRUE(channel->HasRecvSinks(SINK_PRE_CRYPTO));
- if (video_media_channel) {
- EXPECT_TRUE(video_media_channel->requested_intra_frame());
- }
-
- // Enable recording both the sent and the received media.
- EXPECT_TRUE(recorder->EnableChannel(channel, true, true, SINK_PRE_CRYPTO));
- EXPECT_TRUE(channel->HasSendSinks(SINK_PRE_CRYPTO));
- EXPECT_TRUE(channel->HasRecvSinks(SINK_PRE_CRYPTO));
-
- // Enable recording only headers.
- if (video_media_channel) {
- video_media_channel->set_sent_intra_frame(false);
- video_media_channel->set_requested_intra_frame(false);
- }
- EXPECT_TRUE(recorder->EnableChannel(channel, true, true, SINK_PRE_CRYPTO));
- EXPECT_TRUE(channel->HasSendSinks(SINK_PRE_CRYPTO));
- EXPECT_TRUE(channel->HasRecvSinks(SINK_PRE_CRYPTO));
- if (video_media_channel) {
- if ((filter & PF_RTPPACKET) == PF_RTPPACKET) {
- // If recording the whole RTP packet, FIR is triggered.
- EXPECT_TRUE(video_media_channel->requested_intra_frame());
- EXPECT_TRUE(video_media_channel->sent_intra_frame());
- } else {
- // If recording only the RTP header, FIR is not triggered.
- EXPECT_FALSE(video_media_channel->requested_intra_frame());
- EXPECT_FALSE(video_media_channel->sent_intra_frame());
- }
- }
-
- // Remove the channel from the recorder.
- recorder->RemoveChannel(channel, SINK_PRE_CRYPTO);
- EXPECT_FALSE(channel->HasSendSinks(SINK_PRE_CRYPTO));
- EXPECT_FALSE(channel->HasRecvSinks(SINK_PRE_CRYPTO));
-
- // Delete all files.
- recorder.reset();
- EXPECT_TRUE(rtc::Filesystem::DeleteFile(send_file));
- EXPECT_TRUE(rtc::Filesystem::DeleteFile(recv_file));
-}
-
-// First start recording headers and then start recording media. Verify that
-// different files are created for headers and media.
-void TestRecordHeaderAndMedia(BaseChannel* channel,
- FakeVideoMediaChannel* video_media_channel) {
- // Create RTP header recorder.
- rtc::scoped_ptr<MediaRecorder> header_recorder(new MediaRecorder);
-
- rtc::Pathname path;
- EXPECT_TRUE(rtc::Filesystem::GetTemporaryFolder(path, true, NULL));
- std::string send_header_file =
- rtc::Filesystem::TempFilename(path, "send-header");
- std::string recv_header_file =
- rtc::Filesystem::TempFilename(path, "recv-header");
- if (video_media_channel) {
- EXPECT_TRUE(header_recorder->AddChannel(
- static_cast<VideoChannel*>(channel),
- Open(send_header_file), Open(recv_header_file), PF_RTPHEADER));
- } else {
- EXPECT_TRUE(header_recorder->AddChannel(
- static_cast<VoiceChannel*>(channel),
- Open(send_header_file), Open(recv_header_file), PF_RTPHEADER));
- }
-
- // Enable recording both sent and received.
- EXPECT_TRUE(
- header_recorder->EnableChannel(channel, true, true, SINK_POST_CRYPTO));
- EXPECT_TRUE(channel->HasSendSinks(SINK_POST_CRYPTO));
- EXPECT_TRUE(channel->HasRecvSinks(SINK_POST_CRYPTO));
- EXPECT_FALSE(channel->HasSendSinks(SINK_PRE_CRYPTO));
- EXPECT_FALSE(channel->HasRecvSinks(SINK_PRE_CRYPTO));
- if (video_media_channel) {
- EXPECT_FALSE(video_media_channel->sent_intra_frame());
- EXPECT_FALSE(video_media_channel->requested_intra_frame());
- }
-
- // Verify that header files are created.
- EXPECT_TRUE(rtc::Filesystem::IsFile(send_header_file));
- EXPECT_TRUE(rtc::Filesystem::IsFile(recv_header_file));
-
- // Create RTP media recorder.
- rtc::scoped_ptr<MediaRecorder> recorder(new MediaRecorder);
- std::string send_file =
- rtc::Filesystem::TempFilename(path, "send");
- std::string recv_file =
- rtc::Filesystem::TempFilename(path, "recv");
- if (video_media_channel) {
- EXPECT_TRUE(recorder->AddChannel(
- static_cast<VideoChannel*>(channel),
- Open(send_file), Open(recv_file), PF_RTPPACKET));
- } else {
- EXPECT_TRUE(recorder->AddChannel(
- static_cast<VoiceChannel*>(channel),
- Open(send_file), Open(recv_file), PF_RTPPACKET));
- }
-
- // Enable recording both sent and received.
- EXPECT_TRUE(recorder->EnableChannel(channel, true, true, SINK_PRE_CRYPTO));
- EXPECT_TRUE(channel->HasSendSinks(SINK_POST_CRYPTO));
- EXPECT_TRUE(channel->HasRecvSinks(SINK_POST_CRYPTO));
- EXPECT_TRUE(channel->HasSendSinks(SINK_PRE_CRYPTO));
- EXPECT_TRUE(channel->HasRecvSinks(SINK_PRE_CRYPTO));
- if (video_media_channel) {
- EXPECT_TRUE_WAIT(video_media_channel->sent_intra_frame(), 100);
- EXPECT_TRUE(video_media_channel->requested_intra_frame());
- }
-
- // Verify that media files are created.
- EXPECT_TRUE(rtc::Filesystem::IsFile(send_file));
- EXPECT_TRUE(rtc::Filesystem::IsFile(recv_file));
-
- // Delete all files.
- header_recorder.reset();
- recorder.reset();
- EXPECT_TRUE(rtc::Filesystem::DeleteFile(send_header_file));
- EXPECT_TRUE(rtc::Filesystem::DeleteFile(recv_header_file));
- EXPECT_TRUE(rtc::Filesystem::DeleteFile(send_file));
- EXPECT_TRUE(rtc::Filesystem::DeleteFile(recv_file));
-}
-
-TEST(MediaRecorderTest, TestMediaRecorderVoiceChannel) {
- // Create the voice channel.
- FakeMediaEngine media_engine;
- VoiceChannel channel(rtc::Thread::Current(), &media_engine,
- new FakeVoiceMediaChannel(NULL), NULL, "", false);
- TestMediaRecorder(&channel, NULL, PF_RTPPACKET);
- TestMediaRecorder(&channel, NULL, PF_RTPHEADER);
- TestRecordHeaderAndMedia(&channel, NULL);
-}
-
-TEST(MediaRecorderTest, TestMediaRecorderVideoChannel) {
- // Create the video channel.
- FakeMediaEngine media_engine;
- FakeVideoMediaChannel* media_channel = new FakeVideoMediaChannel(NULL);
- VideoChannel channel(rtc::Thread::Current(), &media_engine,
- media_channel, NULL, "", false);
- TestMediaRecorder(&channel, media_channel, PF_RTPPACKET);
- TestMediaRecorder(&channel, media_channel, PF_RTPHEADER);
- TestRecordHeaderAndMedia(&channel, media_channel);
-}
-
-} // namespace cricket
diff --git a/chromium/third_party/libjingle/source/talk/session/media/mediasession.cc b/chromium/third_party/libjingle/source/talk/session/media/mediasession.cc
index b7285179613..b2999445193 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/mediasession.cc
+++ b/chromium/third_party/libjingle/source/talk/session/media/mediasession.cc
@@ -436,8 +436,8 @@ static bool AddStreamParams(
StreamParamsVec* current_streams,
MediaContentDescriptionImpl<C>* content_description,
const bool add_legacy_stream) {
- const bool include_rtx_stream =
- ContainsRtxCodec(content_description->codecs());
+ const bool include_rtx_streams =
+ ContainsRtxCodec(content_description->codecs());
if (streams.empty() && add_legacy_stream) {
// TODO(perkj): Remove this legacy stream when all apps use StreamParams.
@@ -445,10 +445,10 @@ static bool AddStreamParams(
if (IsSctp(content_description)) {
GenerateSctpSids(*current_streams, &ssrcs);
} else {
- int num_ssrcs = include_rtx_stream ? 2 : 1;
+ int num_ssrcs = include_rtx_streams ? 2 : 1;
GenerateSsrcs(*current_streams, num_ssrcs, &ssrcs);
}
- if (include_rtx_stream) {
+ if (include_rtx_streams) {
content_description->AddLegacyStream(ssrcs[0], ssrcs[1]);
content_description->set_multistream(true);
} else {
@@ -492,11 +492,15 @@ static bool AddStreamParams(
SsrcGroup group(kSimSsrcGroupSemantics, stream_param.ssrcs);
stream_param.ssrc_groups.push_back(group);
}
- // Generate an extra ssrc for include_rtx_stream case.
- if (include_rtx_stream) {
- std::vector<uint32> rtx_ssrc;
- GenerateSsrcs(*current_streams, 1, &rtx_ssrc);
- stream_param.AddFidSsrc(ssrcs[0], rtx_ssrc[0]);
+ // Generate extra ssrcs for include_rtx_streams case.
+ if (include_rtx_streams) {
+ // Generate an RTX ssrc for every ssrc in the group.
+ std::vector<uint32> rtx_ssrcs;
+ GenerateSsrcs(*current_streams, static_cast<int>(ssrcs.size()),
+ &rtx_ssrcs);
+ for (size_t i = 0; i < ssrcs.size(); ++i) {
+ stream_param.AddFidSsrc(ssrcs[i], rtx_ssrcs[i]);
+ }
content_description->set_multistream(true);
}
stream_param.cname = cname;
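
To make the new pairing concrete, here is a hedged sketch of the stream parameters the loop above produces for a three-layer simulcast stream; the ssrc values are invented for illustration and the fragment mirrors, rather than quotes, the code in AddStreamParams.

// Sketch only: what a simulcast StreamParams looks like after RTX pairing.
void BuildExampleStreamParams(cricket::StreamParams* stream) {
  stream->ssrcs.push_back(10);  // Simulcast (primary) ssrcs.
  stream->ssrcs.push_back(11);
  stream->ssrcs.push_back(12);
  stream->ssrc_groups.push_back(
      cricket::SsrcGroup(cricket::kSimSsrcGroupSemantics, stream->ssrcs));
  stream->AddFidSsrc(10, 20);   // One FID (RTX) pairing per primary ssrc,
  stream->AddFidSsrc(11, 21);   // so a retransmission can be mapped back to
  stream->AddFidSsrc(12, 22);   // the simulcast layer it protects.
  // Result: 6 ssrcs in total, a SIM group {10, 11, 12} and FID pairs
  // {10, 20}, {11, 21}, {12, 22}, matching what the new unit test expects.
}
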
diff --git a/chromium/third_party/libjingle/source/talk/session/media/mediasession_unittest.cc b/chromium/third_party/libjingle/source/talk/session/media/mediasession_unittest.cc
index f487baa9c56..7fd1d638239 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/mediasession_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/session/media/mediasession_unittest.cc
@@ -1767,6 +1767,47 @@ TEST_F(MediaSessionDescriptionFactoryTest,
EXPECT_EQ(expected_codecs, vcd->codecs());
}
+// Test that when RTX is used in conjunction with simulcast, an RTX ssrc is
+// generated for each simulcast ssrc and correctly grouped.
+TEST_F(MediaSessionDescriptionFactoryTest, SimSsrcsGenerateMultipleRtxSsrcs) {
+ MediaSessionOptions opts;
+ opts.recv_video = true;
+ opts.recv_audio = false;
+
+ // Add simulcast streams.
+ opts.AddSendVideoStream("stream1", "stream1label", 3);
+
+ // Use a single real codec, and then add RTX for it.
+ std::vector<VideoCodec> f1_codecs;
+ f1_codecs.push_back(VideoCodec(97, "H264", 320, 200, 30, 1));
+ AddRtxCodec(VideoCodec::CreateRtxCodec(125, 97), &f1_codecs);
+ f1_.set_video_codecs(f1_codecs);
+
+ // Ensure that the offer has an RTX ssrc for each regular ssrc, and that there
+ // is a FID ssrc + grouping for each.
+ rtc::scoped_ptr<SessionDescription> offer(f1_.CreateOffer(opts, NULL));
+ ASSERT_TRUE(offer.get() != NULL);
+ VideoContentDescription* desc = static_cast<VideoContentDescription*>(
+ offer->GetContentDescriptionByName(cricket::CN_VIDEO));
+ ASSERT_TRUE(desc != NULL);
+ EXPECT_TRUE(desc->multistream());
+ const StreamParamsVec& streams = desc->streams();
+ // Single stream.
+ ASSERT_EQ(1u, streams.size());
+ // Stream should have 6 ssrcs: 3 for video, 3 for RTX.
+ EXPECT_EQ(6u, streams[0].ssrcs.size());
+ // And should have a SIM group for the simulcast.
+ EXPECT_TRUE(streams[0].has_ssrc_group("SIM"));
+ // And a FID group for RTX.
+ EXPECT_TRUE(streams[0].has_ssrc_group("FID"));
+ std::vector<uint32> primary_ssrcs;
+ streams[0].GetPrimarySsrcs(&primary_ssrcs);
+ EXPECT_EQ(3u, primary_ssrcs.size());
+ std::vector<uint32> fid_ssrcs;
+ streams[0].GetFidSsrcs(primary_ssrcs, &fid_ssrcs);
+ EXPECT_EQ(3u, fid_ssrcs.size());
+}
+
// Create an updated offer after creating an answer to the original offer and
// verify that the RTP header extensions that were part of the original answer
// are not changed in the updated offer.
diff --git a/chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter.cc b/chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter.cc
index f95199241e5..f21e0eeb5b7 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter.cc
+++ b/chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter.cc
@@ -40,7 +40,16 @@ bool RtcpMuxFilter::IsActive() const {
state_ == ST_ACTIVE;
}
+void RtcpMuxFilter::SetActive() {
+ state_ = ST_ACTIVE;
+}
+
bool RtcpMuxFilter::SetOffer(bool offer_enable, ContentSource src) {
+ if (state_ == ST_ACTIVE) {
+ // Fail if we try to deactivate and no-op if we try and activate.
+ return offer_enable;
+ }
+
if (!ExpectOffer(offer_enable, src)) {
LOG(LS_ERROR) << "Invalid state for change of RTCP mux offer";
return false;
@@ -53,6 +62,11 @@ bool RtcpMuxFilter::SetOffer(bool offer_enable, ContentSource src) {
bool RtcpMuxFilter::SetProvisionalAnswer(bool answer_enable,
ContentSource src) {
+ if (state_ == ST_ACTIVE) {
+ // Fail if we try to deactivate and no-op if we try and activate.
+ return answer_enable;
+ }
+
if (!ExpectAnswer(src)) {
LOG(LS_ERROR) << "Invalid state for RTCP mux provisional answer";
return false;
@@ -83,6 +97,11 @@ bool RtcpMuxFilter::SetProvisionalAnswer(bool answer_enable,
}
bool RtcpMuxFilter::SetAnswer(bool answer_enable, ContentSource src) {
+ if (state_ == ST_ACTIVE) {
+ // Fail if we try to deactivate and no-op if we try and activate.
+ return answer_enable;
+ }
+
if (!ExpectAnswer(src)) {
LOG(LS_ERROR) << "Invalid state for RTCP mux answer";
return false;
@@ -100,19 +119,24 @@ bool RtcpMuxFilter::SetAnswer(bool answer_enable, ContentSource src) {
return true;
}
-bool RtcpMuxFilter::DemuxRtcp(const char* data, int len) {
- // If we're muxing RTP/RTCP, we must inspect each packet delivered and
- // determine whether it is RTP or RTCP. We do so by checking the packet type,
- // and assuming RTP if type is 0-63 or 96-127. For additional details, see
- // http://tools.ietf.org/html/rfc5761.
- // Note that if we offer RTCP mux, we may receive muxed RTCP before we
- // receive the answer, so we operate in that state too.
- if (!offer_enable_ || state_ < ST_SENTOFFER) {
+// Check the RTP payload type. If 63 < payload type < 96, it's RTCP.
+// For additional details, see http://tools.ietf.org/html/rfc5761.
+bool IsRtcp(const char* data, int len) {
+ if (len < 2) {
return false;
}
+ char pt = data[1] & 0x7F;
+ return (63 < pt) && (pt < 96);
+}
- int type = (len >= 2) ? (static_cast<uint8>(data[1]) & 0x7F) : 0;
- return (type >= 64 && type < 96);
+bool RtcpMuxFilter::DemuxRtcp(const char* data, int len) {
+ // If we're muxing RTP/RTCP, we must inspect each packet delivered
+ // and determine whether it is RTP or RTCP. We do so by looking at
+ // the RTP payload type (see IsRtcp). Note that if we offer RTCP
+ // mux, we may receive muxed RTCP before we receive the answer, so
+ // we operate in that state too.
+ bool offered_mux = ((state_ == ST_SENTOFFER) && offer_enable_);
+ return (IsActive() || offered_mux) && IsRtcp(data, len);
}
bool RtcpMuxFilter::ExpectOffer(bool offer_enable, ContentSource source) {
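
The payload-type test introduced above can be exercised on its own; below is a minimal, self-contained sketch of the same RFC 5761 classification, independent of the filter's offer/answer state machine. The function name and unsigned byte type are choices made here, not part of the patch.

#include <stddef.h>
#include <stdint.h>

// Sketch only: mirrors the IsRtcp() helper added above. The second byte of
// the packet carries the payload type in its low 7 bits; RTCP packet types
// fall in 64-95, so values in that range are classified as RTCP.
bool LooksLikeRtcp(const uint8_t* data, size_t len) {
  if (len < 2)
    return false;
  const uint8_t payload_type = data[1] & 0x7F;
  return payload_type >= 64 && payload_type < 96;
}

With this rule, a compound RTCP packet starting with a Sender Report (packet type 200, and 200 & 0x7F == 72) is routed to RTCP handling, while a dynamic RTP payload type such as 96 masks to 96 and falls through to RTP.
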
diff --git a/chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter.h b/chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter.h
index 948b3c33f08..8888fd4ffb5 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter.h
+++ b/chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter.h
@@ -41,6 +41,9 @@ class RtcpMuxFilter {
// Whether the filter is active, i.e. has RTCP mux been properly negotiated.
bool IsActive() const;
+ // Make the filter active, regardless of the current state.
+ void SetActive();
+
// Specifies whether the offer indicates the use of RTCP mux.
bool SetOffer(bool offer_enable, ContentSource src);
diff --git a/chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter_unittest.cc b/chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter_unittest.cc
index 1305c891a26..d4e6376e6d7 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter_unittest.cc
+++ b/chromium/third_party/libjingle/source/talk/session/media/rtcpmuxfilter_unittest.cc
@@ -212,3 +212,44 @@ TEST(RtcpMuxFilterTest, KeepFilterDisabledDuringUpdate) {
EXPECT_TRUE(filter.SetAnswer(false, cricket::CS_LOCAL));
EXPECT_FALSE(filter.IsActive());
}
+
+// Test that we can SetActive and then can't deactivate.
+TEST(RtcpMuxFilterTest, SetActiveCantDeactivate) {
+ cricket::RtcpMuxFilter filter;
+ const char data[] = { 0, 73, 0, 0 };
+ const int len = 4;
+
+ filter.SetActive();
+ EXPECT_TRUE(filter.IsActive());
+ EXPECT_TRUE(filter.DemuxRtcp(data, len));
+
+ EXPECT_FALSE(filter.SetOffer(false, cricket::CS_LOCAL));
+ EXPECT_TRUE(filter.IsActive());
+ EXPECT_TRUE(filter.SetOffer(true, cricket::CS_LOCAL));
+ EXPECT_TRUE(filter.IsActive());
+
+ EXPECT_FALSE(filter.SetProvisionalAnswer(false, cricket::CS_REMOTE));
+ EXPECT_TRUE(filter.IsActive());
+ EXPECT_TRUE(filter.SetProvisionalAnswer(true, cricket::CS_REMOTE));
+ EXPECT_TRUE(filter.IsActive());
+
+ EXPECT_FALSE(filter.SetAnswer(false, cricket::CS_REMOTE));
+ EXPECT_TRUE(filter.IsActive());
+ EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_REMOTE));
+ EXPECT_TRUE(filter.IsActive());
+
+ EXPECT_FALSE(filter.SetOffer(false, cricket::CS_REMOTE));
+ EXPECT_TRUE(filter.IsActive());
+ EXPECT_TRUE(filter.SetOffer(true, cricket::CS_REMOTE));
+ EXPECT_TRUE(filter.IsActive());
+
+ EXPECT_FALSE(filter.SetProvisionalAnswer(false, cricket::CS_LOCAL));
+ EXPECT_TRUE(filter.IsActive());
+ EXPECT_TRUE(filter.SetProvisionalAnswer(true, cricket::CS_LOCAL));
+ EXPECT_TRUE(filter.IsActive());
+
+ EXPECT_FALSE(filter.SetAnswer(false, cricket::CS_LOCAL));
+ EXPECT_TRUE(filter.IsActive());
+ EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_LOCAL));
+ EXPECT_TRUE(filter.IsActive());
+}
diff --git a/chromium/third_party/libjingle/source/talk/session/media/soundclip.cc b/chromium/third_party/libjingle/source/talk/session/media/soundclip.cc
index 70a3b18e261..be8d0f99fce 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/soundclip.cc
+++ b/chromium/third_party/libjingle/source/talk/session/media/soundclip.cc
@@ -1,82 +1 @@
-/*
- * libjingle
- * Copyright 2004 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "talk/session/media/soundclip.h"
-
-namespace cricket {
-
-enum {
- MSG_PLAYSOUND = 1,
-};
-
-struct PlaySoundMessageData : rtc::MessageData {
- PlaySoundMessageData(const void *c,
- int l,
- SoundclipMedia::SoundclipFlags f)
- : clip(c),
- len(l),
- flags(f),
- result(false) {
- }
-
- const void *clip;
- int len;
- SoundclipMedia::SoundclipFlags flags;
- bool result;
-};
-
-Soundclip::Soundclip(rtc::Thread *thread, SoundclipMedia *soundclip_media)
- : worker_thread_(thread),
- soundclip_media_(soundclip_media) {
-}
-
-bool Soundclip::PlaySound(const void *clip,
- int len,
- SoundclipMedia::SoundclipFlags flags) {
- PlaySoundMessageData data(clip, len, flags);
- worker_thread_->Send(this, MSG_PLAYSOUND, &data);
- return data.result;
-}
-
-bool Soundclip::PlaySound_w(const void *clip,
- int len,
- SoundclipMedia::SoundclipFlags flags) {
- return soundclip_media_->PlaySound(static_cast<const char *>(clip),
- len,
- flags);
-}
-
-void Soundclip::OnMessage(rtc::Message *message) {
- ASSERT(message->message_id == MSG_PLAYSOUND);
- PlaySoundMessageData *data =
- static_cast<PlaySoundMessageData *>(message->pdata);
- data->result = PlaySound_w(data->clip,
- data->len,
- data->flags);
-}
-
-} // namespace cricket
+// TODO(solenberg): Remove this file when it's no longer built in Chromium.
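
The implementation gutted above used the standard rtc::Thread marshalling idiom: a synchronous Send() of a MessageData struct to the worker thread, with OnMessage() unpacking it and writing the result back. A stripped-down sketch of that pattern, with illustrative names that are not part of libjingle:

#include "webrtc/base/thread.h"

// Sketch only: marshal a blocking call onto a worker thread, in the style of
// the removed Soundclip::PlaySound / PlaySound_w pair.
enum { MSG_DO_WORK = 1 };

struct WorkData : rtc::MessageData {
  explicit WorkData(int input) : input(input), result(false) {}
  int input;
  bool result;
};

class WorkerProxy : private rtc::MessageHandler {
 public:
  explicit WorkerProxy(rtc::Thread* worker) : worker_(worker) {}

  // Blocks until DoWork_w() has run on the worker thread.
  bool DoWork(int input) {
    WorkData data(input);
    worker_->Send(this, MSG_DO_WORK, &data);
    return data.result;
  }

 private:
  virtual void OnMessage(rtc::Message* message) {
    WorkData* data = static_cast<WorkData*>(message->pdata);
    data->result = DoWork_w(data->input);  // Executes on the worker thread.
  }

  bool DoWork_w(int input) { return input > 0; }  // Placeholder work.

  rtc::Thread* worker_;
};
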
diff --git a/chromium/third_party/libjingle/source/talk/session/media/soundclip.h b/chromium/third_party/libjingle/source/talk/session/media/soundclip.h
index aaf85797d4e..818ea9d4ebe 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/soundclip.h
+++ b/chromium/third_party/libjingle/source/talk/session/media/soundclip.h
@@ -1,70 +1,2 @@
-/*
- * libjingle
- * Copyright 2004 Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- * derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
+// TODO(solenberg): Remove this file when it's no longer built in Chromium.
-#ifndef TALK_SESSION_MEDIA_SOUNDCLIP_H_
-#define TALK_SESSION_MEDIA_SOUNDCLIP_H_
-
-#include "talk/media/base/mediaengine.h"
-#include "webrtc/base/scoped_ptr.h"
-
-namespace rtc {
-
-class Thread;
-
-}
-
-namespace cricket {
-
-// Soundclip wraps SoundclipMedia to support marshalling calls to the proper
-// thread.
-class Soundclip : private rtc::MessageHandler {
- public:
- Soundclip(rtc::Thread* thread, SoundclipMedia* soundclip_media);
-
- // Plays a sound out to the speakers with the given audio stream. The stream
- // must be 16-bit little-endian 16 kHz PCM. If a stream is already playing
- // on this Soundclip, it is stopped. If clip is NULL, nothing is played.
- // Returns whether it was successful.
- bool PlaySound(const void* clip,
- int len,
- SoundclipMedia::SoundclipFlags flags);
-
- private:
- bool PlaySound_w(const void* clip,
- int len,
- SoundclipMedia::SoundclipFlags flags);
-
- // From MessageHandler
- virtual void OnMessage(rtc::Message* message);
-
- rtc::Thread* worker_thread_;
- rtc::scoped_ptr<SoundclipMedia> soundclip_media_;
-};
-
-} // namespace cricket
-
-#endif // TALK_SESSION_MEDIA_SOUNDCLIP_H_
diff --git a/chromium/third_party/libjingle/source/talk/session/media/srtpfilter.cc b/chromium/third_party/libjingle/source/talk/session/media/srtpfilter.cc
index dc93dd463d8..33e42c09d4a 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/srtpfilter.cc
+++ b/chromium/third_party/libjingle/source/talk/session/media/srtpfilter.cc
@@ -475,18 +475,27 @@ bool SrtpFilter::ParseKeyParams(const std::string& key_params,
bool SrtpSession::inited_ = false;
+// This lock protects SrtpSession::inited_ and SrtpSession::sessions_.
+rtc::GlobalLockPod SrtpSession::lock_;
+
SrtpSession::SrtpSession()
: session_(NULL),
rtp_auth_tag_len_(0),
rtcp_auth_tag_len_(0),
srtp_stat_(new SrtpStat()),
last_send_seq_num_(-1) {
- sessions()->push_back(this);
+ {
+ rtc::GlobalLockScope ls(&lock_);
+ sessions()->push_back(this);
+ }
SignalSrtpError.repeat(srtp_stat_->SignalSrtpError);
}
SrtpSession::~SrtpSession() {
- sessions()->erase(std::find(sessions()->begin(), sessions()->end(), this));
+ {
+ rtc::GlobalLockScope ls(&lock_);
+ sessions()->erase(std::find(sessions()->begin(), sessions()->end(), this));
+ }
if (session_) {
srtp_dealloc(session_);
}
@@ -691,6 +700,7 @@ bool SrtpSession::SetKey(int type, const std::string& cs,
int err = srtp_create(&session_, &policy);
if (err != err_status_ok) {
+ session_ = NULL;
LOG(LS_ERROR) << "Failed to create SRTP session, err=" << err;
return false;
}
@@ -702,6 +712,8 @@ bool SrtpSession::SetKey(int type, const std::string& cs,
}
bool SrtpSession::Init() {
+ rtc::GlobalLockScope ls(&lock_);
+
if (!inited_) {
int err;
err = srtp_init();
@@ -729,6 +741,8 @@ bool SrtpSession::Init() {
}
void SrtpSession::Terminate() {
+ rtc::GlobalLockScope ls(&lock_);
+
if (inited_) {
int err = srtp_shutdown();
if (err) {
@@ -760,6 +774,8 @@ void SrtpSession::HandleEvent(const srtp_event_data_t* ev) {
}
void SrtpSession::HandleEventThunk(srtp_event_data_t* ev) {
+ rtc::GlobalLockScope ls(&lock_);
+
for (std::list<SrtpSession*>::iterator it = sessions()->begin();
it != sessions()->end(); ++it) {
if ((*it)->session_ == ev->session) {
@@ -770,7 +786,7 @@ void SrtpSession::HandleEventThunk(srtp_event_data_t* ev) {
}
std::list<SrtpSession*>* SrtpSession::sessions() {
- LIBJINGLE_DEFINE_STATIC_LOCAL(std::list<SrtpSession*>, sessions, ());
+ RTC_DEFINE_STATIC_LOCAL(std::list<SrtpSession*>, sessions, ());
return &sessions;
}
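
The locking added above follows one simple rule: every access to the static inited_ flag and the static session list goes through a scope lock on a single process-wide GlobalLockPod. A hedged sketch of that pattern in isolation; the class and member names are illustrative, and only the rtc locking types and the overall shape come from the patch.

#include <algorithm>
#include <list>

#include "webrtc/base/criticalsection.h"

// Sketch only: static-state guarding in the style of SrtpSession above.
class Registry {
 public:
  Registry() {
    rtc::GlobalLockScope ls(&lock_);  // Same scope-lock idiom as the ctor above.
    instances()->push_back(this);
  }
  ~Registry() {
    rtc::GlobalLockScope ls(&lock_);
    instances()->erase(
        std::find(instances()->begin(), instances()->end(), this));
  }

 private:
  static std::list<Registry*>* instances() {
    static std::list<Registry*> list;  // Stand-in for RTC_DEFINE_STATIC_LOCAL.
    return &list;
  }
  static rtc::GlobalLockPod lock_;
};

// Declared without an initializer, as in the patch; being POD, the lock needs
// no constructor and relies on zero-initialized static storage.
rtc::GlobalLockPod Registry::lock_;
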
diff --git a/chromium/third_party/libjingle/source/talk/session/media/srtpfilter.h b/chromium/third_party/libjingle/source/talk/session/media/srtpfilter.h
index 43cb241b756..f171f5fe660 100644
--- a/chromium/third_party/libjingle/source/talk/session/media/srtpfilter.h
+++ b/chromium/third_party/libjingle/source/talk/session/media/srtpfilter.h
@@ -36,6 +36,7 @@
#include "talk/media/base/cryptoparams.h"
#include "webrtc/p2p/base/sessiondescription.h"
#include "webrtc/base/basictypes.h"
+#include "webrtc/base/criticalsection.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/sigslotrepeater.h"
@@ -242,6 +243,7 @@ class SrtpSession {
int rtcp_auth_tag_len_;
rtc::scoped_ptr<SrtpStat> srtp_stat_;
static bool inited_;
+ static rtc::GlobalLockPod lock_;
int last_send_seq_num_;
DISALLOW_COPY_AND_ASSIGN(SrtpSession);
};