path: root/chromium/media/base
author     Allan Sandfeld Jensen <allan.jensen@qt.io>  2019-02-13 15:05:36 +0100
committer  Allan Sandfeld Jensen <allan.jensen@qt.io>  2019-02-14 10:33:47 +0000
commit     e684a3455bcc29a6e3e66a004e352dea4e1141e7 (patch)
tree       d55b4003bde34d7d05f558f02cfd82b2a66a7aac /chromium/media/base
parent     2b94bfe47ccb6c08047959d1c26e392919550e86 (diff)
download   qtwebengine-chromium-e684a3455bcc29a6e3e66a004e352dea4e1141e7.tar.gz
BASELINE: Update Chromium to 72.0.3626.110 and Ninja to 1.9.0
Change-Id: Ic57220b00ecc929a893c91f5cc552f5d3e99e922
Reviewed-by: Michael Brüning <michael.bruning@qt.io>
Diffstat (limited to 'chromium/media/base')
-rw-r--r--  chromium/media/base/BUILD.gn | 2
-rw-r--r--  chromium/media/base/android/BUILD.gn | 1
-rw-r--r--  chromium/media/base/android/media_codec_bridge_impl.cc | 5
-rw-r--r--  chromium/media/base/android/media_codec_util.cc | 4
-rw-r--r--  chromium/media/base/android/media_drm_bridge.cc | 45
-rw-r--r--  chromium/media/base/android/media_drm_bridge.h | 20
-rw-r--r--  chromium/media/base/android/media_drm_bridge_client.h | 2
-rw-r--r--  chromium/media/base/android/media_drm_key_type.h | 25
-rw-r--r--  chromium/media/base/android/media_drm_storage.cc | 7
-rw-r--r--  chromium/media/base/android/media_drm_storage.h | 6
-rw-r--r--  chromium/media/base/android/media_drm_storage_bridge.cc | 26
-rw-r--r--  chromium/media/base/audio_buffer.cc | 5
-rw-r--r--  chromium/media/base/audio_buffer.h | 5
-rw-r--r--  chromium/media/base/audio_buffer_unittest.cc | 24
-rw-r--r--  chromium/media/base/audio_latency.cc | 87
-rw-r--r--  chromium/media/base/audio_latency.h | 13
-rw-r--r--  chromium/media/base/audio_latency_unittest.cc | 199
-rw-r--r--  chromium/media/base/audio_parameters.cc | 26
-rw-r--r--  chromium/media/base/audio_parameters.h | 32
-rw-r--r--  chromium/media/base/audio_renderer_mixer.cc | 46
-rw-r--r--  chromium/media/base/audio_renderer_mixer.h | 53
-rw-r--r--  chromium/media/base/audio_renderer_mixer_input.cc | 161
-rw-r--r--  chromium/media/base/audio_renderer_mixer_input.h | 36
-rw-r--r--  chromium/media/base/audio_renderer_mixer_input_unittest.cc | 120
-rw-r--r--  chromium/media/base/audio_renderer_mixer_pool.h | 27
-rw-r--r--  chromium/media/base/audio_renderer_mixer_unittest.cc | 31
-rw-r--r--  chromium/media/base/audio_renderer_sink.h | 27
-rw-r--r--  chromium/media/base/audio_shifter_unittest.cc | 4
-rw-r--r--  chromium/media/base/callback_registry.h | 5
-rw-r--r--  chromium/media/base/cdm_promise_adapter.cc | 7
-rw-r--r--  chromium/media/base/decode_capabilities.cc | 3
-rw-r--r--  chromium/media/base/decoder_buffer.cc | 5
-rw-r--r--  chromium/media/base/decoder_buffer.h | 4
-rw-r--r--  chromium/media/base/decrypt_config.cc | 7
-rw-r--r--  chromium/media/base/decrypt_config.h | 6
-rw-r--r--  chromium/media/base/demuxer_memory_limit_android.cc | 2
-rw-r--r--  chromium/media/base/eme_constants.h | 28
-rw-r--r--  chromium/media/base/fake_audio_renderer_sink.cc | 7
-rw-r--r--  chromium/media/base/fake_audio_renderer_sink.h | 1
-rw-r--r--  chromium/media/base/fake_audio_worker.cc | 3
-rw-r--r--  chromium/media/base/fake_demuxer_stream.cc | 2
-rw-r--r--  chromium/media/base/fallback_video_decoder.cc | 10
-rw-r--r--  chromium/media/base/fallback_video_decoder_unittest.cc | 3
-rw-r--r--  chromium/media/base/ipc/media_param_traits.cc | 52
-rw-r--r--  chromium/media/base/ipc/media_param_traits.h | 10
-rw-r--r--  chromium/media/base/ipc/media_param_traits_macros.h | 13
-rw-r--r--  chromium/media/base/key_systems.cc | 218
-rw-r--r--  chromium/media/base/key_systems.h | 16
-rw-r--r--  chromium/media/base/key_systems_unittest.cc | 12
-rw-r--r--  chromium/media/base/limits.h | 4
-rw-r--r--  chromium/media/base/media_client.h | 2
-rw-r--r--  chromium/media/base/media_export.h | 24
-rw-r--r--  chromium/media/base/media_log.cc | 8
-rw-r--r--  chromium/media/base/media_log.h | 7
-rw-r--r--  chromium/media/base/media_switches.cc | 35
-rw-r--r--  chromium/media/base/media_switches.h | 4
-rw-r--r--  chromium/media/base/media_util.cc | 40
-rw-r--r--  chromium/media/base/media_util.h | 5
-rw-r--r--  chromium/media/base/mime_util_internal.cc | 126
-rw-r--r--  chromium/media/base/mock_audio_renderer_sink.cc | 16
-rw-r--r--  chromium/media/base/mock_audio_renderer_sink.h | 3
-rw-r--r--  chromium/media/base/output_device_info.h | 2
-rw-r--r--  chromium/media/base/pipeline_impl.cc | 22
-rw-r--r--  chromium/media/base/renderer.h | 2
-rw-r--r--  chromium/media/base/scoped_async_trace.cc | 32
-rw-r--r--  chromium/media/base/scoped_async_trace.h | 43
-rw-r--r--  chromium/media/base/silent_sink_suspender.cc | 12
-rw-r--r--  chromium/media/base/silent_sink_suspender.h | 7
-rw-r--r--  chromium/media/base/silent_sink_suspender_unittest.cc | 14
-rw-r--r--  chromium/media/base/test_data_util.cc | 148
-rw-r--r--  chromium/media/base/test_data_util.h | 3
-rw-r--r--  chromium/media/base/test_helpers.cc | 45
-rw-r--r--  chromium/media/base/test_helpers.h | 3
-rw-r--r--  chromium/media/base/unaligned_shared_memory.cc | 2
-rw-r--r--  chromium/media/base/user_input_monitor_unittest.cc | 21
-rw-r--r--  chromium/media/base/video_codecs.cc | 2
-rw-r--r--  chromium/media/base/video_color_space.cc | 4
-rw-r--r--  chromium/media/base/video_decoder.cc | 33
-rw-r--r--  chromium/media/base/video_decoder.h | 7
-rw-r--r--  chromium/media/base/video_decoder_config.cc | 27
-rw-r--r--  chromium/media/base/video_decoder_config.h | 13
-rw-r--r--  chromium/media/base/video_decoder_config_unittest.cc | 38
-rw-r--r--  chromium/media/base/video_frame.cc | 308
-rw-r--r--  chromium/media/base/video_frame.h | 26
-rw-r--r--  chromium/media/base/video_frame_layout.cc | 138
-rw-r--r--  chromium/media/base/video_frame_layout.h | 97
-rw-r--r--  chromium/media/base/video_frame_layout_unittest.cc | 237
-rw-r--r--  chromium/media/base/video_frame_metadata.h | 10
-rw-r--r--  chromium/media/base/video_frame_pool.cc | 10
-rw-r--r--  chromium/media/base/video_frame_unittest.cc | 12
-rw-r--r--  chromium/media/base/video_rotation.h | 2
-rw-r--r--  chromium/media/base/video_thumbnail_decoder_unittest.cc | 2
-rw-r--r--  chromium/media/base/video_types.cc | 10
-rw-r--r--  chromium/media/base/video_types.h | 23
-rw-r--r--  chromium/media/base/wall_clock_time_source.cc | 6
-rw-r--r--  chromium/media/base/wall_clock_time_source.h | 17
-rw-r--r--  chromium/media/base/wall_clock_time_source_unittest.cc | 2
97 files changed, 2007 insertions, 1100 deletions
diff --git a/chromium/media/base/BUILD.gn b/chromium/media/base/BUILD.gn
index 2fa1fa5a5da..7afd69d42d8 100644
--- a/chromium/media/base/BUILD.gn
+++ b/chromium/media/base/BUILD.gn
@@ -223,6 +223,8 @@ jumbo_source_set("base") {
"routing_token_callback.h",
"sample_rates.cc",
"sample_rates.h",
+ "scoped_async_trace.cc",
+ "scoped_async_trace.h",
"seekable_buffer.cc",
"seekable_buffer.h",
"serial_runner.cc",
diff --git a/chromium/media/base/android/BUILD.gn b/chromium/media/base/android/BUILD.gn
index dfed9a50b07..56ea8d55c3e 100644
--- a/chromium/media/base/android/BUILD.gn
+++ b/chromium/media/base/android/BUILD.gn
@@ -47,6 +47,7 @@ if (is_android) {
"media_drm_bridge_delegate.h",
"media_drm_bridge_factory.cc",
"media_drm_bridge_factory.h",
+ "media_drm_key_type.h",
"media_drm_storage.cc",
"media_drm_storage.h",
"media_drm_storage_bridge.cc",
diff --git a/chromium/media/base/android/media_codec_bridge_impl.cc b/chromium/media/base/android/media_codec_bridge_impl.cc
index dbc41b707b7..24858d7f566 100644
--- a/chromium/media/base/android/media_codec_bridge_impl.cc
+++ b/chromium/media/base/android/media_codec_bridge_impl.cc
@@ -127,8 +127,9 @@ bool GetCodecSpecificDataForAudio(AudioCodec codec,
if (profile == 5 || profile == 29) {
// Read extension config.
- RETURN_ON_ERROR(reader.ReadBits(4, &frequency_index));
- if (frequency_index == 0xf)
+ uint8_t ext_frequency_index = 0;
+ RETURN_ON_ERROR(reader.ReadBits(4, &ext_frequency_index));
+ if (ext_frequency_index == 0xf)
RETURN_ON_ERROR(reader.SkipBits(24));
RETURN_ON_ERROR(reader.ReadBits(5, &profile));
}
diff --git a/chromium/media/base/android/media_codec_util.cc b/chromium/media/base/android/media_codec_util.cc
index 41572909005..51f30953c19 100644
--- a/chromium/media/base/android/media_codec_util.cc
+++ b/chromium/media/base/android/media_codec_util.cc
@@ -222,9 +222,9 @@ std::set<int> MediaCodecUtil::GetEncoderColorFormats(
ScopedJavaLocalRef<jintArray> j_color_format_array =
Java_MediaCodecUtil_getEncoderColorFormatsForMime(env, j_mime);
- if (j_color_format_array.obj()) {
+ if (!j_color_format_array.is_null()) {
std::vector<int> formats;
- JavaIntArrayToIntVector(env, j_color_format_array.obj(), &formats);
+ JavaIntArrayToIntVector(env, j_color_format_array, &formats);
color_formats = std::set<int>(formats.begin(), formats.end());
}
diff --git a/chromium/media/base/android/media_drm_bridge.cc b/chromium/media/base/android/media_drm_bridge.cc
index ac782a4934c..c576edf83f1 100644
--- a/chromium/media/base/android/media_drm_bridge.cc
+++ b/chromium/media/base/android/media_drm_bridge.cc
@@ -24,13 +24,14 @@
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "base/sys_byteorder.h"
-#include "base/sys_info.h"
+#include "base/system/sys_info.h"
#include "base/threading/thread_task_runner_handle.h"
#include "jni/MediaDrmBridge_jni.h"
#include "media/base/android/android_util.h"
#include "media/base/android/media_codec_util.h"
#include "media/base/android/media_drm_bridge_client.h"
#include "media/base/android/media_drm_bridge_delegate.h"
+#include "media/base/android/media_drm_key_type.h"
#include "media/base/cdm_key_information.h"
#include "media/base/media_switches.h"
#include "media/base/provision_fetcher.h"
@@ -71,14 +72,6 @@ enum class KeyStatus : uint32_t {
KEY_STATUS_INTERNAL_ERROR = 4,
};
-// These must be in sync with Android MediaDrm KEY_TYPE_XXX constants:
-// https://developer.android.com/reference/android/media/MediaDrm.html#KEY_TYPE_OFFLINE
-// KEY_TYPE_RELEASE is handled internally in Java.
-enum class KeyType : uint32_t {
- KEY_TYPE_STREAMING = 1,
- KEY_TYPE_OFFLINE = 2,
-};
-
const uint8_t kWidevineUuid[16] = {
0xED, 0xEF, 0x8B, 0xA9, 0x79, 0xD6, 0x4A, 0xCE, //
0xA3, 0xC8, 0x27, 0xDC, 0xD5, 0x1D, 0x21, 0xED};
@@ -102,18 +95,18 @@ std::string ConvertInitDataType(media::EmeInitDataType init_data_type) {
}
}
-// Convert CdmSessionType to KeyType supported by MediaDrm.
-KeyType ConvertCdmSessionType(CdmSessionType session_type) {
+// Convert CdmSessionType to MediaDrmKeyType supported by MediaDrm.
+MediaDrmKeyType ConvertCdmSessionType(CdmSessionType session_type) {
switch (session_type) {
case CdmSessionType::kTemporary:
- return KeyType::KEY_TYPE_STREAMING;
+ return MediaDrmKeyType::STREAMING;
case CdmSessionType::kPersistentLicense:
- return KeyType::KEY_TYPE_OFFLINE;
+ return MediaDrmKeyType::OFFLINE;
default:
LOG(WARNING) << "Unsupported session type "
<< static_cast<int>(session_type);
- return KeyType::KEY_TYPE_STREAMING;
+ return MediaDrmKeyType::STREAMING;
}
}
@@ -568,16 +561,6 @@ bool MediaDrmBridge::IsSecureCodecRequired() {
return true;
}
-void MediaDrmBridge::ResetDeviceCredentials(
- const ResetCredentialsCB& callback) {
- DVLOG(1) << __func__;
-
- DCHECK(!reset_credentials_cb_);
- reset_credentials_cb_ = callback;
- JNIEnv* env = AttachCurrentThread();
- Java_MediaDrmBridge_resetDeviceCredentials(env, j_media_drm_);
-}
-
void MediaDrmBridge::Unprovision() {
DVLOG(1) << __func__;
@@ -749,7 +732,7 @@ void MediaDrmBridge::OnSessionKeysChange(
ScopedJavaLocalRef<jbyteArray> j_key_id =
Java_KeyStatus_getKeyId(env, j_key_status);
std::vector<uint8_t> key_id;
- JavaByteArrayToByteVector(env, j_key_id.obj(), &key_id);
+ JavaByteArrayToByteVector(env, j_key_id, &key_id);
DCHECK(!key_id.empty());
jint j_status_code = Java_KeyStatus_getStatusCode(env, j_key_status);
@@ -803,16 +786,6 @@ void MediaDrmBridge::OnSessionExpirationUpdate(
base::Time::FromDoubleT(expiry_time_ms / 1000.0)));
}
-void MediaDrmBridge::OnResetDeviceCredentialsCompleted(
- JNIEnv* env,
- const JavaParamRef<jobject>&,
- bool success) {
- DVLOG(2) << __func__ << ": success:" << success;
- DCHECK(reset_credentials_cb_);
- task_runner_->PostTask(
- FROM_HERE, base::BindOnce(std::move(reset_credentials_cb_), success));
-}
-
//------------------------------------------------------------------------------
// The following are private methods.
@@ -859,7 +832,7 @@ MediaDrmBridge::MediaDrmBridge(
base::android::BuildInfo::GetInstance()->sdk_int() >=
base::android::SDK_VERSION_MARSHMALLOW &&
// origin id can be empty when MediaDrmBridge is created by
- // CreateWithoutSessionSupport, which is used to reset credentials.
+ // CreateWithoutSessionSupport, which is used for unprovisioning.
!origin_id.empty();
ScopedJavaLocalRef<jstring> j_security_origin = ConvertUTF8ToJavaString(
diff --git a/chromium/media/base/android/media_drm_bridge.h b/chromium/media/base/android/media_drm_bridge.h
index 20de8f7e738..6ee3b9861a6 100644
--- a/chromium/media/base/android/media_drm_bridge.h
+++ b/chromium/media/base/android/media_drm_bridge.h
@@ -58,7 +58,6 @@ class MEDIA_EXPORT MediaDrmBridge : public ContentDecryptionModule,
SECURITY_LEVEL_3 = 3,
};
- using ResetCredentialsCB = base::Callback<void(bool)>;
using MediaCryptoReadyCB = MediaCryptoContext::MediaCryptoReadyCB;
// Checks whether MediaDRM is available and usable, including for decoding.
@@ -143,11 +142,6 @@ class MEDIA_EXPORT MediaDrmBridge : public ContentDecryptionModule,
// video playback.
bool IsSecureCodecRequired();
- // Reset the device credentials. MediaDrmBridge must be created without
- // session support.
- // TODO(xhwang): Unify Unprovision() and ResetDeviceCredentials().
- void ResetDeviceCredentials(const ResetCredentialsCB& callback);
-
// Helper functions to resolve promises.
void ResolvePromise(uint32_t promise_id);
void ResolvePromiseWithSession(uint32_t promise_id,
@@ -231,12 +225,6 @@ class MEDIA_EXPORT MediaDrmBridge : public ContentDecryptionModule,
const base::android::JavaParamRef<jbyteArray>& j_session_id,
jlong expiry_time_ms);
- // Called by the java object when credential reset is completed.
- void OnResetDeviceCredentialsCompleted(
- JNIEnv* env,
- const base::android::JavaParamRef<jobject>&,
- bool success);
-
private:
friend class MediaDrmBridgeFactory;
// For DeleteSoon() in DeleteOnCorrectThread().
@@ -258,8 +246,7 @@ class MEDIA_EXPORT MediaDrmBridge : public ContentDecryptionModule,
// default security level will be used if |security_level| is
// SECURITY_LEVEL_DEFAULT.
//
- // |origin_id| is a random string that can identify an origin. It may be empty
- // when reseting device credential.
+ // |origin_id| is a random string that can identify an origin.
//
// If |requires_media_crypto| is true, MediaCrypto is expected to be created
// and notified via MediaCryptoReadyCB set in SetMediaCryptoReadyCB(). This
@@ -270,8 +257,7 @@ class MEDIA_EXPORT MediaDrmBridge : public ContentDecryptionModule,
//
// If |requires_media_crypto| is false, MediaCrypto will not be created. This
// object cannot be used for playback, but can be used to unprovision the
- // device/origin via Unprovision() and ResetDeviceCredentials(). Sessions
- // should not be created in this mode.
+ // device/origin via Unprovision(). Sessions are not created in this mode.
MediaDrmBridge(const std::vector<uint8_t>& scheme_uuid,
const std::string& origin_id,
SecurityLevel security_level,
@@ -334,8 +320,6 @@ class MEDIA_EXPORT MediaDrmBridge : public ContentDecryptionModule,
MediaCryptoReadyCB media_crypto_ready_cb_;
- ResetCredentialsCB reset_credentials_cb_;
-
PlayerTrackerImpl player_tracker_;
CdmPromiseAdapter cdm_promise_adapter_;
diff --git a/chromium/media/base/android/media_drm_bridge_client.h b/chromium/media/base/android/media_drm_bridge_client.h
index 7237f682ace..dca8161a0c5 100644
--- a/chromium/media/base/android/media_drm_bridge_client.h
+++ b/chromium/media/base/android/media_drm_bridge_client.h
@@ -25,7 +25,7 @@ class MediaDrmBridgeDelegate;
// media playback could occur.
MEDIA_EXPORT void SetMediaDrmBridgeClient(MediaDrmBridgeClient* media_client);
-#if defined(MEDIA_IMPLEMENTATION)
+#if defined(IS_MEDIA_IMPL)
// Getter for the client. Returns nullptr if no customized client is needed.
MediaDrmBridgeClient* GetMediaDrmBridgeClient();
#endif
diff --git a/chromium/media/base/android/media_drm_key_type.h b/chromium/media/base/android/media_drm_key_type.h
new file mode 100644
index 00000000000..4996085af0b
--- /dev/null
+++ b/chromium/media/base/android/media_drm_key_type.h
@@ -0,0 +1,25 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_ANDROID_MEDIA_DRM_KEY_TYPE_H_
+#define MEDIA_BASE_ANDROID_MEDIA_DRM_KEY_TYPE_H_
+
+#include <stdint.h>
+
+namespace media {
+
+// These must be in sync with Android MediaDrm KEY_TYPE_XXX constants, except
+// UNKNOWN and MAX:
+// https://developer.android.com/reference/android/media/MediaDrm.html#KEY_TYPE_OFFLINE
+enum class MediaDrmKeyType : uint32_t {
+ UNKNOWN = 0,
+ MIN = UNKNOWN,
+ STREAMING = 1,
+ OFFLINE = 2,
+ RELEASE = 3,
+ MAX = RELEASE,
+};
+
+} // namespace media
+#endif // MEDIA_BASE_ANDROID_MEDIA_DRM_KEY_TYPE_H_
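The "must be in sync" requirement in the new header above is enforced only by the comment. As a hedged sketch (not part of this commit), a caller sitting at the JNI boundary could add a compile-time guard against accidental renumbering, assuming only this header:

    #include "media/base/android/media_drm_key_type.h"

    // Catches accidental renumbering relative to the Android MediaDrm
    // KEY_TYPE_* constants the enum mirrors.
    static_assert(
        static_cast<uint32_t>(media::MediaDrmKeyType::STREAMING) == 1 &&
            static_cast<uint32_t>(media::MediaDrmKeyType::OFFLINE) == 2 &&
            static_cast<uint32_t>(media::MediaDrmKeyType::RELEASE) == 3,
        "MediaDrmKeyType values must match Android KEY_TYPE_* constants");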
diff --git a/chromium/media/base/android/media_drm_storage.cc b/chromium/media/base/android/media_drm_storage.cc
index 03e129c2c8d..9f4a92b9b01 100644
--- a/chromium/media/base/android/media_drm_storage.cc
+++ b/chromium/media/base/android/media_drm_storage.cc
@@ -9,8 +9,11 @@
namespace media {
MediaDrmStorage::SessionData::SessionData(std::vector<uint8_t> key_set_id,
- std::string mime_type)
- : key_set_id(std::move(key_set_id)), mime_type(std::move(mime_type)) {}
+ std::string mime_type,
+ MediaDrmKeyType key_type)
+ : key_set_id(std::move(key_set_id)),
+ mime_type(std::move(mime_type)),
+ key_type(key_type) {}
MediaDrmStorage::SessionData::SessionData(const SessionData& other) = default;
diff --git a/chromium/media/base/android/media_drm_storage.h b/chromium/media/base/android/media_drm_storage.h
index 120934c7e63..6a831e70f05 100644
--- a/chromium/media/base/android/media_drm_storage.h
+++ b/chromium/media/base/android/media_drm_storage.h
@@ -14,6 +14,7 @@
#include "base/callback.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
+#include "media/base/android/media_drm_key_type.h"
#include "media/base/media_export.h"
#include "url/origin.h"
@@ -29,12 +30,15 @@ class MEDIA_EXPORT MediaDrmStorage
: public base::SupportsWeakPtr<MediaDrmStorage> {
public:
struct SessionData {
- SessionData(std::vector<uint8_t> key_set_id, std::string mime_type);
+ SessionData(std::vector<uint8_t> key_set_id,
+ std::string mime_type,
+ MediaDrmKeyType key_type);
SessionData(const SessionData& other);
~SessionData();
std::vector<uint8_t> key_set_id;
std::string mime_type;
+ MediaDrmKeyType key_type;
};
MediaDrmStorage();
diff --git a/chromium/media/base/android/media_drm_storage_bridge.cc b/chromium/media/base/android/media_drm_storage_bridge.cc
index de6ed515101..611a6347490 100644
--- a/chromium/media/base/android/media_drm_storage_bridge.cc
+++ b/chromium/media/base/android/media_drm_storage_bridge.cc
@@ -18,6 +18,7 @@
#include "base/unguessable_token.h"
#include "jni/MediaDrmStorageBridge_jni.h"
#include "media/base/android/android_util.h"
+#include "media/base/android/media_drm_key_type.h"
using base::android::AttachCurrentThread;
using base::android::ConvertUTF8ToJavaString;
@@ -92,22 +93,30 @@ void MediaDrmStorageBridge::OnSaveInfo(
DCHECK(impl_);
std::vector<uint8_t> key_set_id;
JavaByteArrayToByteVector(
- env, Java_PersistentInfo_keySetId(env, j_persist_info).obj(),
- &key_set_id);
+ env, Java_PersistentInfo_keySetId(env, j_persist_info), &key_set_id);
std::string mime = ConvertJavaStringToUTF8(
env, Java_PersistentInfo_mimeType(env, j_persist_info));
std::string session_id;
- JavaByteArrayToString(
- env, Java_PersistentInfo_emeId(env, j_persist_info).obj(), &session_id);
+ JavaByteArrayToString(env, Java_PersistentInfo_emeId(env, j_persist_info),
+ &session_id);
+
+ // This function should only be called for licenses that need persistent
+ // storage (e.g. a persistent license). A STREAMING license doesn't require
+ // persistent storage support.
+ auto key_type = static_cast<MediaDrmKeyType>(
+ Java_PersistentInfo_keyType(env, j_persist_info));
+ DCHECK(key_type == MediaDrmKeyType::OFFLINE ||
+ key_type == MediaDrmKeyType::RELEASE);
task_runner_->PostTask(
FROM_HERE,
base::BindOnce(
&MediaDrmStorage::SavePersistentSession, impl_->AsWeakPtr(),
session_id,
- MediaDrmStorage::SessionData(std::move(key_set_id), std::move(mime)),
+ MediaDrmStorage::SessionData(std::move(key_set_id), std::move(mime),
+ key_type),
base::BindOnce(&MediaDrmStorageBridge::RunAndroidBoolCallback,
weak_factory_.GetWeakPtr(),
base::Passed(CreateJavaObjectPtr(j_callback.obj())))));
@@ -164,9 +173,10 @@ void MediaDrmStorageBridge::OnSessionDataLoaded(
ScopedJavaLocalRef<jstring> j_mime =
ConvertUTF8ToJavaString(env, session_data->mime_type);
- RunObjectCallbackAndroid(
- *j_callback,
- Java_PersistentInfo_create(env, j_eme_id, j_key_set_id, j_mime));
+ RunObjectCallbackAndroid(*j_callback,
+ Java_PersistentInfo_create(
+ env, j_eme_id, j_key_set_id, j_mime,
+ static_cast<uint32_t>(session_data->key_type)));
}
} // namespace media
diff --git a/chromium/media/base/audio_buffer.cc b/chromium/media/base/audio_buffer.cc
index cf94d82a14f..17adcb578eb 100644
--- a/chromium/media/base/audio_buffer.cc
+++ b/chromium/media/base/audio_buffer.cc
@@ -22,6 +22,11 @@ static base::TimeDelta CalculateDuration(int frames, double sample_rate) {
AudioBufferMemoryPool::AudioBufferMemoryPool() = default;
AudioBufferMemoryPool::~AudioBufferMemoryPool() = default;
+size_t AudioBufferMemoryPool::GetPoolSizeForTesting() {
+ base::AutoLock al(entry_lock_);
+ return entries_.size();
+}
+
AudioBufferMemoryPool::AudioMemory AudioBufferMemoryPool::CreateBuffer(
size_t size) {
base::AutoLock al(entry_lock_);
diff --git a/chromium/media/base/audio_buffer.h b/chromium/media/base/audio_buffer.h
index 05034668594..9059bf99990 100644
--- a/chromium/media/base/audio_buffer.h
+++ b/chromium/media/base/audio_buffer.h
@@ -17,6 +17,7 @@
#include "base/memory/aligned_memory.h"
#include "base/memory/ref_counted.h"
#include "base/synchronization/lock.h"
+#include "base/thread_annotations.h"
#include "base/time/time.h"
#include "media/base/channel_layout.h"
#include "media/base/media_export.h"
@@ -240,7 +241,7 @@ class MEDIA_EXPORT AudioBufferMemoryPool
public:
AudioBufferMemoryPool();
- size_t get_pool_size_for_testing() const { return entries_.size(); }
+ size_t GetPoolSizeForTesting();
private:
friend class AudioBuffer;
@@ -254,7 +255,7 @@ class MEDIA_EXPORT AudioBufferMemoryPool
base::Lock entry_lock_;
using MemoryEntry = std::pair<AudioMemory, size_t>;
- std::list<MemoryEntry> entries_;
+ std::list<MemoryEntry> entries_ GUARDED_BY(entry_lock_);
DISALLOW_COPY_AND_ASSIGN(AudioBufferMemoryPool);
};
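For context on the two audio_buffer changes above: GUARDED_BY(entry_lock_) lets clang's thread-safety analysis check that |entries_| is only touched with the lock held, which is why the lock-free get_pool_size_for_testing() accessor becomes a locking GetPoolSizeForTesting(). A minimal sketch of the pattern using the same base headers; the class and member names are illustrative, not from the commit:

    #include "base/synchronization/lock.h"
    #include "base/thread_annotations.h"

    class Counter {
     public:
      // OK: |lock_| is held while |value_| is read.
      int Get() {
        base::AutoLock auto_lock(lock_);
        return value_;
      }

      // With -Wthread-safety enabled, clang would warn here because |value_|
      // is written without holding |lock_|.
      // void BrokenIncrement() { ++value_; }

     private:
      base::Lock lock_;
      int value_ GUARDED_BY(lock_) = 0;
    };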
diff --git a/chromium/media/base/audio_buffer_unittest.cc b/chromium/media/base/audio_buffer_unittest.cc
index d423e357d38..31b5f4d0470 100644
--- a/chromium/media/base/audio_buffer_unittest.cc
+++ b/chromium/media/base/audio_buffer_unittest.cc
@@ -530,7 +530,7 @@ TEST(AudioBufferTest, TrimRangeInterleaved) {
TEST(AudioBufferTest, AudioBufferMemoryPool) {
scoped_refptr<AudioBufferMemoryPool> pool(new AudioBufferMemoryPool());
- EXPECT_EQ(0u, pool->get_pool_size_for_testing());
+ EXPECT_EQ(0u, pool->GetPoolSizeForTesting());
const ChannelLayout kChannelLayout = CHANNEL_LAYOUT_MONO;
scoped_refptr<AudioBuffer> buffer = MakeAudioBuffer<uint8_t>(
@@ -543,29 +543,29 @@ TEST(AudioBufferTest, AudioBufferMemoryPool) {
kSampleFormatU8, buffer->channel_layout(), buffer->channel_count(),
buffer->sample_rate(), buffer->frame_count(), &buffer->channel_data()[0],
buffer->timestamp(), pool);
- EXPECT_EQ(0u, pool->get_pool_size_for_testing());
+ EXPECT_EQ(0u, pool->GetPoolSizeForTesting());
b1 = nullptr;
- EXPECT_EQ(1u, pool->get_pool_size_for_testing());
+ EXPECT_EQ(1u, pool->GetPoolSizeForTesting());
// Even (especially) when used with CreateBuffer.
b1 = AudioBuffer::CreateBuffer(kSampleFormatU8, buffer->channel_layout(),
buffer->channel_count(), buffer->sample_rate(),
buffer->frame_count(), pool);
- EXPECT_EQ(0u, pool->get_pool_size_for_testing());
+ EXPECT_EQ(0u, pool->GetPoolSizeForTesting());
scoped_refptr<AudioBuffer> b2 = AudioBuffer::CreateBuffer(
kSampleFormatU8, buffer->channel_layout(), buffer->channel_count(),
buffer->sample_rate(), buffer->frame_count(), pool);
- EXPECT_EQ(0u, pool->get_pool_size_for_testing());
+ EXPECT_EQ(0u, pool->GetPoolSizeForTesting());
b2 = nullptr;
- EXPECT_EQ(1u, pool->get_pool_size_for_testing());
+ EXPECT_EQ(1u, pool->GetPoolSizeForTesting());
b1 = nullptr;
- EXPECT_EQ(2u, pool->get_pool_size_for_testing());
+ EXPECT_EQ(2u, pool->GetPoolSizeForTesting());
// A buffer of a different size should not reuse the buffer and drain pool.
b2 = AudioBuffer::CreateBuffer(kSampleFormatU8, buffer->channel_layout(),
buffer->channel_count(), buffer->sample_rate(),
buffer->frame_count() / 2, pool);
- EXPECT_EQ(0u, pool->get_pool_size_for_testing());
+ EXPECT_EQ(0u, pool->GetPoolSizeForTesting());
// Mark pool for destruction and ensure buffer is still valid.
pool = nullptr;
@@ -578,7 +578,7 @@ TEST(AudioBufferTest, AudioBufferMemoryPool) {
// Planar allocations use a different path, so make sure pool is used.
TEST(AudioBufferTest, AudioBufferMemoryPoolPlanar) {
scoped_refptr<AudioBufferMemoryPool> pool(new AudioBufferMemoryPool());
- EXPECT_EQ(0u, pool->get_pool_size_for_testing());
+ EXPECT_EQ(0u, pool->GetPoolSizeForTesting());
const ChannelLayout kChannelLayout = CHANNEL_LAYOUT_MONO;
scoped_refptr<AudioBuffer> buffer = MakeAudioBuffer<uint8_t>(
@@ -591,15 +591,15 @@ TEST(AudioBufferTest, AudioBufferMemoryPoolPlanar) {
kSampleFormatPlanarF32, buffer->channel_layout(), buffer->channel_count(),
buffer->sample_rate(), buffer->frame_count(), &buffer->channel_data()[0],
buffer->timestamp(), pool);
- EXPECT_EQ(0u, pool->get_pool_size_for_testing());
+ EXPECT_EQ(0u, pool->GetPoolSizeForTesting());
b1 = nullptr;
- EXPECT_EQ(1u, pool->get_pool_size_for_testing());
+ EXPECT_EQ(1u, pool->GetPoolSizeForTesting());
// Even (especially) when used with CreateBuffer.
b1 = AudioBuffer::CreateBuffer(kSampleFormatU8, buffer->channel_layout(),
buffer->channel_count(), buffer->sample_rate(),
buffer->frame_count(), pool);
- EXPECT_EQ(0u, pool->get_pool_size_for_testing());
+ EXPECT_EQ(0u, pool->GetPoolSizeForTesting());
// Mark pool for destruction and ensure buffer is still valid.
pool = nullptr;
diff --git a/chromium/media/base/audio_latency.cc b/chromium/media/base/audio_latency.cc
index 5816ad071ba..a2577071bf6 100644
--- a/chromium/media/base/audio_latency.cc
+++ b/chromium/media/base/audio_latency.cc
@@ -141,53 +141,56 @@ int AudioLatency::GetInteractiveBufferSize(int hardware_buffer_size) {
int AudioLatency::GetExactBufferSize(base::TimeDelta duration,
int sample_rate,
- int hardware_buffer_size) {
+ int hardware_buffer_size,
+ int min_hardware_buffer_size,
+ int max_hardware_buffer_size) {
DCHECK_NE(0, hardware_buffer_size);
+ DCHECK_GE(hardware_buffer_size, min_hardware_buffer_size);
+ DCHECK_GE(max_hardware_buffer_size, min_hardware_buffer_size);
+ DCHECK(max_hardware_buffer_size == 0 ||
+ hardware_buffer_size <= max_hardware_buffer_size);
+ DCHECK(max_hardware_buffer_size == 0 ||
+ max_hardware_buffer_size <= limits::kMaxWebAudioBufferSize);
- const int requested_buffer_size = duration.InSecondsF() * sample_rate;
+ int requested_buffer_size = std::round(duration.InSecondsF() * sample_rate);
-// On OSX and CRAS the preferred buffer size is larger than the minimum,
-// however we allow values down to the minimum if requested explicitly.
-#if defined(OS_MACOSX)
- const int minimum_buffer_size =
- GetMinAudioBufferSizeMacOS(limits::kMinAudioBufferSize, sample_rate);
- if (requested_buffer_size > limits::kMaxAudioBufferSize) {
- // Mac OS is currently the only platform with a max buffer size less than
- // kMaxWebAudioBufferSize. Since Mac OS audio hardware can run at
- // kMaxAudioBufferSize (currently 4096) and it only makes sense for Web
- // Audio to run at multiples of the hardware buffer size, tell Web Audio to
- // just use web audio max (8192) if the user requests >4096.
- static_assert(
- limits::kMaxWebAudioBufferSize % limits::kMaxAudioBufferSize == 0,
- "Returning kMaxWebAudioBufferSize here assumes it's a multiple of the "
- "hardware buffer size.");
- return limits::kMaxWebAudioBufferSize;
- }
-#elif defined(USE_CRAS)
- const int minimum_buffer_size = limits::kMinAudioBufferSize;
- static_assert(limits::kMaxAudioBufferSize >= limits::kMaxWebAudioBufferSize,
- "Algorithm needs refactoring if kMaxAudioBufferSize for CRAS "
- "is lowered.");
+ if (min_hardware_buffer_size &&
+ requested_buffer_size <= min_hardware_buffer_size)
+ return min_hardware_buffer_size;
+
+ if (requested_buffer_size <= hardware_buffer_size)
+ return hardware_buffer_size;
+
+#if defined(OS_WIN)
+ // On Windows we allow either exactly the minimum buffer size (using
+ // IAudioClient3) or multiples of the default buffer size using the previous
+ // IAudioClient API.
+ const int multiplier = hardware_buffer_size;
#else
- const int minimum_buffer_size = hardware_buffer_size;
+ const int multiplier = min_hardware_buffer_size > 0 ? min_hardware_buffer_size
+ : hardware_buffer_size;
#endif
- // Round requested size up to next multiple of the minimum hardware size. The
- // minimum hardware size is one that we know is allowed by the platform audio
- // layer and may be smaller than its preferred buffer size (the
- // hardware_buffer_size). For platforms where this is supported we know that
- // using a buffer size that is a multiple of this minimum is safe.
- const int buffer_size = std::ceil(std::max(requested_buffer_size, 1) /
- static_cast<double>(minimum_buffer_size)) *
- minimum_buffer_size;
-
- // The maximum must also be a multiple of the minimum hardware buffer size in
- // case the clamping below is required.
- const int maximum_buffer_size =
- (limits::kMaxWebAudioBufferSize / minimum_buffer_size) *
- minimum_buffer_size;
-
- return std::min(maximum_buffer_size,
- std::max(buffer_size, minimum_buffer_size));
+ int buffer_size =
+ std::ceil(requested_buffer_size / static_cast<double>(multiplier)) *
+ multiplier;
+
+ // If the user is requesting a buffer size >= max_hardware_buffer_size then we
+ // want the hardware to run at this max and then only return sizes that are
+ // multiples of this here so that we don't end up with Web Audio running with
+ // a period that's misaligned with the hardware one.
+ if (max_hardware_buffer_size && buffer_size >= max_hardware_buffer_size) {
+ buffer_size = std::ceil(requested_buffer_size /
+ static_cast<double>(max_hardware_buffer_size)) *
+ max_hardware_buffer_size;
+ }
+
+ const int platform_max_buffer_size =
+ max_hardware_buffer_size
+ ? (limits::kMaxWebAudioBufferSize / max_hardware_buffer_size) *
+ max_hardware_buffer_size
+ : (limits::kMaxWebAudioBufferSize / multiplier) * multiplier;
+
+ return std::min(buffer_size, platform_max_buffer_size);
}
} // namespace media
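A rough worked example of the rewritten rounding logic above, assuming a non-Windows build, a web audio maximum of 8192 frames (the value quoted in the removed Mac comment), and illustrative hardware numbers:

    // Hardware prefers 440 frames, the driver reports min 128 / max 4096
    // frames, and the caller asks for 15 ms of latency at 44100 Hz.
    //   requested = round(0.015 * 44100)   = 662 frames
    //   662 > 128 (min) and 662 > 440 (preferred), so round up to a multiple
    //   of the multiplier, which is min_hardware_buffer_size here:
    //   buffer    = ceil(662 / 128) * 128  = 768 frames
    //   768 < 4096 (max), so no realignment to the max is needed, and
    //   768 <= (8192 / 4096) * 4096 = 8192 (platform max), so 768 is returned.
    int frames = media::AudioLatency::GetExactBufferSize(
        base::TimeDelta::FromMilliseconds(15), 44100, 440, 128, 4096);
    // frames == 768 under these assumptions.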
diff --git a/chromium/media/base/audio_latency.h b/chromium/media/base/audio_latency.h
index 5ffa90eea97..fe0bc59ba55 100644
--- a/chromium/media/base/audio_latency.h
+++ b/chromium/media/base/audio_latency.h
@@ -43,9 +43,20 @@ class MEDIA_SHMEM_EXPORT AudioLatency {
static int GetInteractiveBufferSize(int hardware_buffer_size);
+ // Return the closest buffer size for this platform that will result in a
+ // latency not less than |duration| for the given |sample_rate|. The returned
+ // buffer size must be >= |min_hardware_buffer_size| and must be <=
+ // |kMaxWebAudioBufferSize|. |max_hardware_buffer_size| is used to help
+ // determine a buffer size that won't cause web audio and the hardware to run
+ // at unsynchronized buffer sizes (e.g. hardware running at 4096 and web audio
+ // running at 4224). |hardware_buffer_size| is the platform's preferred buffer
+ // size. It is valid for both the min and max to be zero in which case only
+ // |hardware_buffer_size| and multiples of it will be used.
static int GetExactBufferSize(base::TimeDelta duration,
int sample_rate,
- int hardware_buffer_size);
+ int hardware_buffer_size,
+ int min_hardware_buffer_size,
+ int max_hardware_buffer_size);
};
} // namespace media
diff --git a/chromium/media/base/audio_latency_unittest.cc b/chromium/media/base/audio_latency_unittest.cc
index e638958fef5..5b3e6812c1e 100644
--- a/chromium/media/base/audio_latency_unittest.cc
+++ b/chromium/media/base/audio_latency_unittest.cc
@@ -14,6 +14,116 @@
namespace media {
+// Tuple of <sample rate, hardware buffer size, min buffer size, max buffer
+// size>.
+using AudioLatencyTestData = std::tuple<int, int, int, int>;
+
+class AudioLatencyTest : public testing::TestWithParam<AudioLatencyTestData> {
+ public:
+ AudioLatencyTest() = default;
+ ~AudioLatencyTest() override = default;
+
+ void TestExactBufferSizes() {
+ const int hardware_sample_rate = std::get<0>(GetParam());
+ const int hardware_buffer_size = std::get<1>(GetParam());
+ const int min_buffer_size = std::get<2>(GetParam());
+ const int max_buffer_size = std::get<3>(GetParam());
+
+ const int platform_min_buffer_size =
+ min_buffer_size ? min_buffer_size : hardware_buffer_size;
+
+// Windows 10 may allow exactly the minimum buffer size using the IAudioClient3
+// API but any other buffer size must be a multiple of the hardware_buffer_size
+// and not the min_buffer_size.
+#if defined(OS_WIN)
+ const int multiplier = hardware_buffer_size;
+#else
+ const int multiplier = platform_min_buffer_size;
+#endif
+
+ const int platform_max_buffer_size =
+ max_buffer_size
+ ? (limits::kMaxWebAudioBufferSize / max_buffer_size) *
+ max_buffer_size
+ : (limits::kMaxWebAudioBufferSize / multiplier) * multiplier;
+
+ EXPECT_EQ(platform_min_buffer_size,
+ media::AudioLatency::GetExactBufferSize(
+ base::TimeDelta::FromSecondsD(0.0), hardware_sample_rate,
+ hardware_buffer_size, min_buffer_size, max_buffer_size));
+ EXPECT_EQ(
+ platform_min_buffer_size,
+ media::AudioLatency::GetExactBufferSize(
+ base::TimeDelta::FromSecondsD(
+ min_buffer_size / static_cast<double>(hardware_sample_rate)),
+ hardware_sample_rate, hardware_buffer_size, min_buffer_size,
+ max_buffer_size));
+ EXPECT_EQ(
+ multiplier * 2,
+ media::AudioLatency::GetExactBufferSize(
+ base::TimeDelta::FromSecondsD(
+ (multiplier * 2) / static_cast<double>(hardware_sample_rate)),
+ hardware_sample_rate, hardware_buffer_size, min_buffer_size,
+ max_buffer_size));
+ EXPECT_EQ(
+ multiplier * 2,
+ media::AudioLatency::GetExactBufferSize(
+ base::TimeDelta::FromSecondsD(
+ (multiplier * 1.1) / static_cast<double>(hardware_sample_rate)),
+ hardware_sample_rate, hardware_buffer_size, min_buffer_size,
+ max_buffer_size));
+ EXPECT_EQ(platform_max_buffer_size,
+ media::AudioLatency::GetExactBufferSize(
+ base::TimeDelta::FromSecondsD(10.0), hardware_sample_rate,
+ hardware_buffer_size, min_buffer_size, max_buffer_size));
+ if (max_buffer_size) {
+ EXPECT_EQ(
+ max_buffer_size,
+ media::AudioLatency::GetExactBufferSize(
+ base::TimeDelta::FromSecondsD(
+ max_buffer_size / static_cast<double>(hardware_sample_rate)),
+ hardware_sample_rate, hardware_buffer_size, min_buffer_size,
+ max_buffer_size));
+ }
+
+#if defined(OS_WIN)
+ if (min_buffer_size && min_buffer_size < hardware_buffer_size) {
+ EXPECT_EQ(hardware_buffer_size,
+ media::AudioLatency::GetExactBufferSize(
+ base::TimeDelta::FromSecondsD(
+ (min_buffer_size * 1.1) /
+ static_cast<double>(hardware_sample_rate)),
+ hardware_sample_rate, hardware_buffer_size, min_buffer_size,
+ max_buffer_size));
+ }
+#elif defined(OS_MACOSX)
+ EXPECT_EQ(limits::kMaxWebAudioBufferSize,
+ media::AudioLatency::GetExactBufferSize(
+ base::TimeDelta::FromSecondsD(
+ (limits::kMaxAudioBufferSize * 1.1) /
+ static_cast<double>(hardware_sample_rate)),
+ hardware_sample_rate, hardware_buffer_size, min_buffer_size,
+ max_buffer_size));
+#endif
+
+ int previous_buffer_size = 0;
+ for (int i = 0; i < 1000; i++) {
+ int buffer_size = media::AudioLatency::GetExactBufferSize(
+ base::TimeDelta::FromSecondsD(i / 1000.0), hardware_sample_rate,
+ hardware_buffer_size, min_buffer_size, max_buffer_size);
+ EXPECT_GE(buffer_size, previous_buffer_size);
+#if defined(OS_WIN)
+ EXPECT_TRUE(buffer_size == min_buffer_size ||
+ buffer_size % multiplier == 0 ||
+ buffer_size % max_buffer_size == 0);
+#else
+ EXPECT_EQ(buffer_size, buffer_size / multiplier * multiplier);
+#endif
+ previous_buffer_size = buffer_size;
+ }
+ }
+};
+
// TODO(olka): extend unit tests, use real-world sample rates.
TEST(AudioLatency, HighLatencyBufferSizes) {
@@ -47,68 +157,35 @@ TEST(AudioLatency, RtcBufferSizes) {
}
}
-TEST(AudioLatency, ExactBufferSizes) {
- const int hardware_buffer_size = 256;
- const int hardware_sample_rate = 44100;
- const int max_webaudio_buffer_size = 8192;
+TEST_P(AudioLatencyTest, ExactBufferSizes) {
+ TestExactBufferSizes();
+}
-#if defined(OS_MACOSX) || defined(USE_CRAS)
- const int minimum_buffer_size = limits::kMinAudioBufferSize;
+INSTANTIATE_TEST_CASE_P(
+ /* no prefix */,
+ AudioLatencyTest,
+#if defined(OS_WIN)
+ // Windows 10 with supported driver will have valid min and max buffer sizes
+ // whereas older Windows will have zeros. The specific min, max and hardware
+ // are device-dependent.
+ testing::Values(std::make_tuple(44100, 440, 128, 440),
+ std::make_tuple(44100, 440, 440, 440),
+ std::make_tuple(44100, 440, 440, 880),
+ std::make_tuple(44100, 440, 440, 4400),
+ std::make_tuple(44100, 440, 128, 4196),
+ std::make_tuple(44100, 440, 440, 4196),
+ std::make_tuple(44100, 440, 0, 0),
+ std::make_tuple(44100, 256, 128, 512),
+ std::make_tuple(44100, 256, 0, 0))
+#elif defined(OS_MACOSX) || defined(USE_CRAS)
+ // These values are constant on Mac and ChromeOS, regardless of device.
+ testing::Values(std::make_tuple(44100,
+ 256,
+ limits::kMinAudioBufferSize,
+ limits::kMaxAudioBufferSize))
#else
- const int minimum_buffer_size = hardware_buffer_size;
-#endif
-
- EXPECT_EQ(minimum_buffer_size,
- media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(0.0), hardware_sample_rate,
- hardware_buffer_size));
- EXPECT_EQ(
- minimum_buffer_size,
- media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(
- minimum_buffer_size / static_cast<double>(hardware_sample_rate)),
- hardware_sample_rate, hardware_buffer_size));
- EXPECT_EQ(minimum_buffer_size * 2,
- media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(
- (minimum_buffer_size * 2) /
- static_cast<double>(hardware_sample_rate)),
- hardware_sample_rate, hardware_buffer_size));
- EXPECT_EQ(minimum_buffer_size * 2,
- media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(
- (minimum_buffer_size * 1.1) /
- static_cast<double>(hardware_sample_rate)),
- hardware_sample_rate, hardware_buffer_size));
- EXPECT_EQ(max_webaudio_buffer_size,
- media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(10.0), hardware_sample_rate,
- hardware_buffer_size));
-
-#if defined(OS_MACOSX)
- EXPECT_EQ(limits::kMaxAudioBufferSize,
- media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(
- limits::kMaxAudioBufferSize /
- static_cast<double>(hardware_sample_rate)),
- hardware_sample_rate, hardware_buffer_size));
- EXPECT_EQ(max_webaudio_buffer_size,
- media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(
- (limits::kMaxAudioBufferSize * 1.1) /
- static_cast<double>(hardware_sample_rate)),
- hardware_sample_rate, hardware_buffer_size));
+ testing::Values(std::make_tuple(44100, 256, 0, 0),
+ std::make_tuple(44100, 440, 0, 0))
#endif
-
- int previous_buffer_size = 0;
- for (int i = 0; i < 1000; i++) {
- int buffer_size = media::AudioLatency::GetExactBufferSize(
- base::TimeDelta::FromSecondsD(i / 1000.0), hardware_sample_rate,
- hardware_buffer_size);
- EXPECT_GE(buffer_size, previous_buffer_size);
- EXPECT_EQ(buffer_size,
- buffer_size / minimum_buffer_size * minimum_buffer_size);
- previous_buffer_size = buffer_size;
- }
-}
+ );
} // namespace media
diff --git a/chromium/media/base/audio_parameters.cc b/chromium/media/base/audio_parameters.cc
index bacbef8a8d8..f05f18cb301 100644
--- a/chromium/media/base/audio_parameters.cc
+++ b/chromium/media/base/audio_parameters.cc
@@ -66,6 +66,17 @@ AudioParameters::AudioParameters(Format format,
Reset(format, channel_layout, sample_rate, frames_per_buffer);
}
+AudioParameters::AudioParameters(
+ Format format,
+ ChannelLayout channel_layout,
+ int sample_rate,
+ int frames_per_buffer,
+ const HardwareCapabilities& hardware_capabilities)
+ : latency_tag_(AudioLatency::LATENCY_COUNT),
+ hardware_capabilities_(hardware_capabilities) {
+ Reset(format, channel_layout, sample_rate, frames_per_buffer);
+}
+
AudioParameters::~AudioParameters() = default;
AudioParameters::AudioParameters(const AudioParameters&) = default;
@@ -91,6 +102,15 @@ bool AudioParameters::IsValid() const {
(sample_rate_ <= media::limits::kMaxSampleRate) &&
(frames_per_buffer_ > 0) &&
(frames_per_buffer_ <= media::limits::kMaxSamplesPerPacket) &&
+ (!hardware_capabilities_ ||
+ ((hardware_capabilities_->min_frames_per_buffer >= 0) &&
+ (hardware_capabilities_->min_frames_per_buffer <=
+ media::limits::kMaxSamplesPerPacket) &&
+ (hardware_capabilities_->max_frames_per_buffer >= 0) &&
+ (hardware_capabilities_->max_frames_per_buffer <=
+ media::limits::kMaxSamplesPerPacket) &&
+ (hardware_capabilities_->max_frames_per_buffer >=
+ hardware_capabilities_->min_frames_per_buffer))) &&
(channel_layout_ == CHANNEL_LAYOUT_DISCRETE ||
channels_ == ChannelLayoutToChannelCount(channel_layout_));
}
@@ -102,6 +122,12 @@ std::string AudioParameters::AsHumanReadableString() const {
<< " frames_per_buffer: " << frames_per_buffer()
<< " effects: " << effects()
<< " mic_positions: " << PointsToString(mic_positions_);
+ if (hardware_capabilities_) {
+ s << ", hw_cap.min_frames_per_buffer: "
+ << hardware_capabilities_->min_frames_per_buffer
+ << ", hw_cap.max_frames_per_buffer: "
+ << hardware_capabilities_->max_frames_per_buffer;
+ }
return s.str();
}
diff --git a/chromium/media/base/audio_parameters.h b/chromium/media/base/audio_parameters.h
index fa6eb8ea982..786275935db 100644
--- a/chromium/media/base/audio_parameters.h
+++ b/chromium/media/base/audio_parameters.h
@@ -11,6 +11,7 @@
#include "base/compiler_specific.h"
#include "base/numerics/checked_math.h"
+#include "base/optional.h"
#include "base/time/time.h"
#include "build/build_config.h"
#include "media/base/audio_bus.h"
@@ -147,15 +148,36 @@ class MEDIA_SHMEM_EXPORT AudioParameters {
MULTIZONE = 0x80,
};
+ struct HardwareCapabilities {
+ HardwareCapabilities(int min_frames_per_buffer, int max_frames_per_buffer)
+ : min_frames_per_buffer(min_frames_per_buffer),
+ max_frames_per_buffer(max_frames_per_buffer) {}
+ HardwareCapabilities()
+ : min_frames_per_buffer(0), max_frames_per_buffer(0) {}
+
+ // Minimum and maximum buffer sizes supported by the audio hardware. Opening
+ // a device with frames_per_buffer set to a value between min and max should
+ // result in the audio hardware running close to this buffer size, values
+ // above or below will be clamped to the min or max by the audio system.
+ // Either value can be 0 and means that the min or max is not known.
+ int min_frames_per_buffer;
+ int max_frames_per_buffer;
+ };
+
AudioParameters();
AudioParameters(Format format,
ChannelLayout channel_layout,
int sample_rate,
int frames_per_buffer);
+ AudioParameters(Format format,
+ ChannelLayout channel_layout,
+ int sample_rate,
+ int frames_per_buffer,
+ const HardwareCapabilities& hardware_capabilities);
~AudioParameters();
- // Re-initializes all members.
+ // Re-initializes all members except for |hardware_capabilities_|.
void Reset(Format format,
ChannelLayout channel_layout,
int sample_rate,
@@ -215,6 +237,10 @@ class MEDIA_SHMEM_EXPORT AudioParameters {
}
int frames_per_buffer() const { return frames_per_buffer_; }
+ base::Optional<HardwareCapabilities> hardware_capabilities() const {
+ return hardware_capabilities_;
+ }
+
void set_effects(int effects) { effects_ = effects; }
int effects() const { return effects_; }
@@ -259,6 +285,10 @@ class MEDIA_SHMEM_EXPORT AudioParameters {
// Optional tag to pass latency info from renderer to browser. Set to
// AudioLatency::LATENCY_COUNT by default, which means "not specified".
AudioLatency::LatencyType latency_tag_;
+
+ // Audio hardware specific parameters, these are treated as read-only and
+ // changing them has no effect.
+ base::Optional<HardwareCapabilities> hardware_capabilities_;
};
// Comparison is useful when AudioParameters is used with std structures.
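How the new HardwareCapabilities field is meant to be plumbed can be illustrated with a hedged sketch: an audio backend that knows its driver's minimum and maximum period sizes attaches them to the AudioParameters it reports, and a consumer pulls them back out when choosing an exact buffer size. The concrete numbers and the consumer code are assumptions, not taken from this commit:

    media::AudioParameters params(
        media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
        media::CHANNEL_LAYOUT_STEREO, 48000, /*frames_per_buffer=*/480,
        media::AudioParameters::HardwareCapabilities(
            /*min_frames_per_buffer=*/128, /*max_frames_per_buffer=*/4096));

    // Consumer side: fall back to 0/0 ("unknown") when no capabilities were
    // reported, which preserves the pre-change behaviour.
    auto caps = params.hardware_capabilities().value_or(
        media::AudioParameters::HardwareCapabilities());
    int buffer_size = media::AudioLatency::GetExactBufferSize(
        base::TimeDelta::FromMilliseconds(10), params.sample_rate(),
        params.frames_per_buffer(), caps.min_frames_per_buffer,
        caps.max_frames_per_buffer);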
diff --git a/chromium/media/base/audio_renderer_mixer.cc b/chromium/media/base/audio_renderer_mixer.cc
index 0741ca90d61..c8d8dd79ae1 100644
--- a/chromium/media/base/audio_renderer_mixer.cc
+++ b/chromium/media/base/audio_renderer_mixer.cc
@@ -12,6 +12,7 @@
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_macros.h"
#include "base/trace_event/trace_event.h"
+#include "media/base/audio_renderer_mixer_input.h"
#include "media/base/audio_timestamp_helper.h"
namespace media {
@@ -23,8 +24,8 @@ enum { kPauseDelaySeconds = 10 };
// lock.
class AudioRendererMixer::UMAMaxValueTracker {
public:
- UMAMaxValueTracker(const UmaLogCallback& log_callback)
- : log_callback_(log_callback), count_(0), max_count_(0) {}
+ UMAMaxValueTracker(UmaLogCallback log_callback)
+ : log_callback_(std::move(log_callback)), count_(0), max_count_(0) {}
~UMAMaxValueTracker() = default;
@@ -52,7 +53,7 @@ class AudioRendererMixer::UMAMaxValueTracker {
AudioRendererMixer::AudioRendererMixer(const AudioParameters& output_params,
scoped_refptr<AudioRendererSink> sink,
- const UmaLogCallback& log_callback)
+ UmaLogCallback log_callback)
: output_params_(output_params),
audio_sink_(std::move(sink)),
master_converter_(output_params, output_params, true),
@@ -60,7 +61,7 @@ AudioRendererMixer::AudioRendererMixer(const AudioParameters& output_params,
last_play_time_(base::TimeTicks::Now()),
// Initialize |playing_| to true since Start() results in an auto-play.
playing_(true),
- input_count_tracker_(new UMAMaxValueTracker(log_callback)) {
+ input_count_tracker_(new UMAMaxValueTracker(std::move(log_callback))) {
DCHECK(audio_sink_);
audio_sink_->Initialize(output_params, this);
audio_sink_->Start();
@@ -73,7 +74,7 @@ AudioRendererMixer::~AudioRendererMixer() {
// Ensure that all mixer inputs have removed themselves prior to destruction.
DCHECK(master_converter_.empty());
DCHECK(converters_.empty());
- DCHECK_EQ(error_callbacks_.size(), 0U);
+ DCHECK(error_callbacks_.empty());
}
void AudioRendererMixer::AddMixerInput(const AudioParameters& input_params,
@@ -93,12 +94,11 @@ void AudioRendererMixer::AddMixerInput(const AudioParameters& input_params,
if (converter == converters_.end()) {
std::pair<AudioConvertersMap::iterator, bool> result =
converters_.insert(std::make_pair(
- input_sample_rate, base::WrapUnique(
+ input_sample_rate, std::make_unique<LoopbackAudioConverter>(
// We expect all InputCallbacks to be
// capable of handling arbitrary buffer
// size requests, disabling FIFO.
- new LoopbackAudioConverter(
- input_params, output_params_, true))));
+ input_params, output_params_, true)));
converter = result.first;
// Add newly-created resampler as an input to the master mixer.
@@ -132,33 +132,25 @@ void AudioRendererMixer::RemoveMixerInput(
input_count_tracker_->Decrement();
}
-void AudioRendererMixer::AddErrorCallback(const base::Closure& error_cb) {
+void AudioRendererMixer::AddErrorCallback(AudioRendererMixerInput* input) {
base::AutoLock auto_lock(lock_);
- error_callbacks_.push_back(error_cb);
+ error_callbacks_.insert(input);
}
-void AudioRendererMixer::RemoveErrorCallback(const base::Closure& error_cb) {
+void AudioRendererMixer::RemoveErrorCallback(AudioRendererMixerInput* input) {
base::AutoLock auto_lock(lock_);
- for (auto it = error_callbacks_.begin(); it != error_callbacks_.end(); ++it) {
- if (it->Equals(error_cb)) {
- error_callbacks_.erase(it);
- return;
- }
- }
-
- // An error callback should always exist when called.
- NOTREACHED();
-}
-
-OutputDeviceInfo AudioRendererMixer::GetOutputDeviceInfo() {
- DVLOG(1) << __func__;
- return audio_sink_->GetOutputDeviceInfo();
+ error_callbacks_.erase(input);
}
bool AudioRendererMixer::CurrentThreadIsRenderingThread() {
return audio_sink_->CurrentThreadIsRenderingThread();
}
+void AudioRendererMixer::SetPauseDelayForTesting(base::TimeDelta delay) {
+ base::AutoLock auto_lock(lock_);
+ pause_delay_ = delay;
+}
+
int AudioRendererMixer::Render(base::TimeDelta delay,
base::TimeTicks delay_timestamp,
int prior_frames_skipped,
@@ -186,8 +178,8 @@ int AudioRendererMixer::Render(base::TimeDelta delay,
void AudioRendererMixer::OnRenderError() {
// Call each mixer input and signal an error.
base::AutoLock auto_lock(lock_);
- for (const auto& cb : error_callbacks_)
- cb.Run();
+ for (auto* input : error_callbacks_)
+ input->OnRenderError();
}
} // namespace media
diff --git a/chromium/media/base/audio_renderer_mixer.h b/chromium/media/base/audio_renderer_mixer.h
index f81e69a30e3..9c9325a2281 100644
--- a/chromium/media/base/audio_renderer_mixer.h
+++ b/chromium/media/base/audio_renderer_mixer.h
@@ -11,14 +11,18 @@
#include <memory>
#include <string>
+#include "base/containers/flat_map.h"
+#include "base/containers/flat_set.h"
#include "base/macros.h"
#include "base/synchronization/lock.h"
+#include "base/thread_annotations.h"
#include "base/time/time.h"
#include "media/base/audio_converter.h"
#include "media/base/audio_renderer_sink.h"
#include "media/base/loopback_audio_converter.h"
namespace media {
+class AudioRendererMixerInput;
// Mixes a set of AudioConverter::InputCallbacks into a single output stream
// which is funneled into a single shared AudioRendererSink; saving a bundle
@@ -26,11 +30,11 @@ namespace media {
class MEDIA_EXPORT AudioRendererMixer
: public AudioRendererSink::RenderCallback {
public:
- typedef base::Callback<void(int)> UmaLogCallback;
+ using UmaLogCallback = base::RepeatingCallback<void(int)>;
AudioRendererMixer(const AudioParameters& output_params,
scoped_refptr<AudioRendererSink> sink,
- const UmaLogCallback& log_callback);
+ UmaLogCallback log_callback);
~AudioRendererMixer() override;
// Add or remove a mixer input from mixing; called by AudioRendererMixerInput.
@@ -40,29 +44,21 @@ class MEDIA_EXPORT AudioRendererMixer
AudioConverter::InputCallback* input);
// Since errors may occur even when no inputs are playing, an error callback
- // must be registered separately from adding a mixer input. The same callback
- // must be given to both the functions.
- void AddErrorCallback(const base::Closure& error_cb);
- void RemoveErrorCallback(const base::Closure& error_cb);
-
- void set_pause_delay_for_testing(base::TimeDelta delay) {
- pause_delay_ = delay;
- }
-
- OutputDeviceInfo GetOutputDeviceInfo();
+ // must be registered separately from adding a mixer input.
+ void AddErrorCallback(AudioRendererMixerInput* input);
+ void RemoveErrorCallback(AudioRendererMixerInput* input);
// Returns true if called on rendering thread, otherwise false.
bool CurrentThreadIsRenderingThread();
- const AudioParameters& GetOutputParamsForTesting() { return output_params_; };
+ void SetPauseDelayForTesting(base::TimeDelta delay);
+ const AudioParameters& get_output_params_for_testing() const {
+ return output_params_;
+ }
private:
class UMAMaxValueTracker;
- // Maps input sample rate to the dedicated converter.
- using AudioConvertersMap =
- std::map<int, std::unique_ptr<LoopbackAudioConverter>>;
-
// AudioRendererSink::RenderCallback implementation.
int Render(base::TimeDelta delay,
base::TimeTicks delay_timestamp,
@@ -70,7 +66,7 @@ class MEDIA_EXPORT AudioRendererMixer
AudioBus* audio_bus) override;
void OnRenderError() override;
- bool is_master_sample_rate(int sample_rate) {
+ bool is_master_sample_rate(int sample_rate) const {
return sample_rate == output_params_.sample_rate();
}
@@ -84,27 +80,30 @@ class MEDIA_EXPORT AudioRendererMixer
base::Lock lock_;
// List of error callbacks used by this mixer.
- typedef std::list<base::Closure> ErrorCallbackList;
- ErrorCallbackList error_callbacks_;
+ base::flat_set<AudioRendererMixerInput*> error_callbacks_ GUARDED_BY(lock_);
+
+ // Maps input sample rate to the dedicated converter.
+ using AudioConvertersMap =
+ base::flat_map<int, std::unique_ptr<LoopbackAudioConverter>>;
// Each of these converters mixes inputs with a given sample rate and
- // resamples them to the output sample rate. Inputs not reqiuring resampling
+ // resamples them to the output sample rate. Inputs not requiring resampling
// go directly to |master_converter_|.
- AudioConvertersMap converters_;
+ AudioConvertersMap converters_ GUARDED_BY(lock_);
// Master converter which mixes all the outputs from |converters_| as well as
// mixer inputs that are in the output sample rate.
- AudioConverter master_converter_;
+ AudioConverter master_converter_ GUARDED_BY(lock_);
// Handles physical stream pause when no inputs are playing. For latency
// reasons we don't want to immediately pause the physical stream.
- base::TimeDelta pause_delay_;
- base::TimeTicks last_play_time_;
- bool playing_;
+ base::TimeDelta pause_delay_ GUARDED_BY(lock_);
+ base::TimeTicks last_play_time_ GUARDED_BY(lock_);
+ bool playing_ GUARDED_BY(lock_);
// Tracks the maximum number of simultaneous mixer inputs and logs it into
// UMA histogram upon the destruction.
- std::unique_ptr<UMAMaxValueTracker> input_count_tracker_;
+ std::unique_ptr<UMAMaxValueTracker> input_count_tracker_ GUARDED_BY(lock_);
DISALLOW_COPY_AND_ASSIGN(AudioRendererMixer);
};
diff --git a/chromium/media/base/audio_renderer_mixer_input.cc b/chromium/media/base/audio_renderer_mixer_input.cc
index 5d6164c9d67..f8444b49c8e 100644
--- a/chromium/media/base/audio_renderer_mixer_input.cc
+++ b/chromium/media/base/audio_renderer_mixer_input.cc
@@ -8,6 +8,7 @@
#include "base/bind.h"
#include "base/callback_helpers.h"
+#include "base/threading/sequenced_task_runner_handle.h"
#include "base/trace_event/trace_event.h"
#include "media/base/audio_renderer_mixer.h"
#include "media/base/audio_renderer_mixer_pool.h"
@@ -21,22 +22,21 @@ AudioRendererMixerInput::AudioRendererMixerInput(
const std::string& device_id,
AudioLatency::LatencyType latency)
: mixer_pool_(mixer_pool),
- started_(false),
- playing_(false),
- volume_(1.0f),
owner_id_(owner_id),
device_id_(device_id),
- latency_(latency),
- mixer_(nullptr),
- callback_(nullptr),
- error_cb_(base::Bind(&AudioRendererMixerInput::OnRenderError,
- base::Unretained(this))) {
+ latency_(latency) {
DCHECK(mixer_pool_);
}
AudioRendererMixerInput::~AudioRendererMixerInput() {
+ // Note: This may not happen on the thread the sink was used on. E.g., this
+ // may end up destroyed on the render thread despite being used on the media
+ // thread.
+
DCHECK(!started_);
DCHECK(!mixer_);
+ if (sink_)
+ sink_->Stop();
}
void AudioRendererMixerInput::Initialize(
@@ -46,6 +46,12 @@ void AudioRendererMixerInput::Initialize(
DCHECK(!mixer_);
DCHECK(callback);
+ // Current usage ensures we always call GetOutputDeviceInfoAsync() and wait
+ // for the result before calling this method. We could add support for doing
+ // otherwise here, but it's not needed for now, so for simplicity just DCHECK.
+ DCHECK(sink_);
+ DCHECK(device_info_);
+
params_ = params;
callback_ = callback;
}
@@ -55,16 +61,16 @@ void AudioRendererMixerInput::Start() {
DCHECK(!mixer_);
DCHECK(callback_); // Initialized.
+ DCHECK(sink_);
+ DCHECK(device_info_);
+ DCHECK_EQ(device_info_->device_status(), OUTPUT_DEVICE_STATUS_OK);
+
started_ = true;
- mixer_ =
- mixer_pool_->GetMixer(owner_id_, params_, latency_, device_id_, nullptr);
- if (!mixer_) {
- callback_->OnRenderError();
- return;
- }
+ mixer_ = mixer_pool_->GetMixer(owner_id_, params_, latency_, *device_info_,
+ std::move(sink_));
// Note: OnRenderError() may be called immediately after this call returns.
- mixer_->AddErrorCallback(error_cb_);
+ mixer_->AddErrorCallback(this);
}
void AudioRendererMixerInput::Stop() {
@@ -73,10 +79,7 @@ void AudioRendererMixerInput::Stop() {
Pause();
if (mixer_) {
- // TODO(dalecurtis): This is required so that |callback_| isn't called after
- // Stop() by an error event since it may outlive this ref-counted object. We
- // should instead have sane ownership semantics: http://crbug.com/151051
- mixer_->RemoveErrorCallback(error_cb_);
+ mixer_->RemoveErrorCallback(this);
mixer_pool_->ReturnMixer(mixer_);
mixer_ = nullptr;
}
@@ -107,9 +110,34 @@ bool AudioRendererMixerInput::SetVolume(double volume) {
}
OutputDeviceInfo AudioRendererMixerInput::GetOutputDeviceInfo() {
- return mixer_ ? mixer_->GetOutputDeviceInfo()
- : mixer_pool_->GetOutputDeviceInfo(
- owner_id_, 0 /* session_id */, device_id_);
+ NOTREACHED(); // The blocking API is intentionally not supported.
+ return OutputDeviceInfo();
+}
+
+void AudioRendererMixerInput::GetOutputDeviceInfoAsync(
+ OutputDeviceInfoCB info_cb) {
+ // If we have device information for a current sink or mixer, just return it
+ // immediately. Per the AudioRendererSink API contract, this must be posted.
+ if (device_info_.has_value() && (sink_ || mixer_)) {
+ base::SequencedTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE, base::BindOnce(std::move(info_cb), *device_info_));
+ return;
+ }
+
+ // We may still have |device_info_|, but if we don't have a |sink_| or a
+ // |mixer_|, a Stop() has been called since it was retrieved, so request the
+ // information again in case it has changed (which may occur due to
+ // browser-side device changes).
+ device_info_.reset();
+
+ // If we don't have a sink yet, start the process of getting one.
+ sink_ = mixer_pool_->GetSink(owner_id_, device_id_);
+
+ // Retain a ref to this sink to ensure it is not destructed while this occurs.
+ // The callback is guaranteed to execute on this thread, so there are no
+ // threading issues.
+ sink_->GetOutputDeviceInfoAsync(
+ base::BindOnce(&AudioRendererMixerInput::OnDeviceInfoReceived,
+ base::RetainedRef(this), std::move(info_cb)));
}
bool AudioRendererMixerInput::IsOptimizedForHardwareParameters() {
@@ -122,44 +150,23 @@ bool AudioRendererMixerInput::CurrentThreadIsRenderingThread() {
void AudioRendererMixerInput::SwitchOutputDevice(
const std::string& device_id,
- const OutputDeviceStatusCB& callback) {
+ OutputDeviceStatusCB callback) {
if (device_id == device_id_) {
- callback.Run(OUTPUT_DEVICE_STATUS_OK);
+ std::move(callback).Run(OUTPUT_DEVICE_STATUS_OK);
return;
}
- if (mixer_) {
- OutputDeviceStatus new_mixer_status = OUTPUT_DEVICE_STATUS_ERROR_INTERNAL;
- AudioRendererMixer* new_mixer = mixer_pool_->GetMixer(
- owner_id_, params_, latency_, device_id, &new_mixer_status);
- if (new_mixer_status != OUTPUT_DEVICE_STATUS_OK) {
- callback.Run(new_mixer_status);
- return;
- }
-
- bool was_playing = playing_;
- Stop();
- device_id_ = device_id;
- mixer_ = new_mixer;
- mixer_->AddErrorCallback(error_cb_);
- started_ = true;
-
- if (was_playing)
- Play();
-
- } else {
- OutputDeviceStatus new_mixer_status =
- mixer_pool_
- ->GetOutputDeviceInfo(owner_id_, 0 /* session_id */, device_id)
- .device_status();
- if (new_mixer_status != OUTPUT_DEVICE_STATUS_OK) {
- callback.Run(new_mixer_status);
- return;
- }
- device_id_ = device_id;
- }
-
- callback.Run(OUTPUT_DEVICE_STATUS_OK);
+ // Request a new sink using the new device id. This process may fail, so to
+ // avoid interrupting working audio, don't set any class variables until we
+ // know it's a success.
+ auto new_sink = mixer_pool_->GetSink(owner_id_, device_id);
+
+ // Retain a ref to this sink to ensure it is not destructed while this occurs.
+ // The callback is guaranteed to execute on this thread, so there are no
+ // threading issues.
+ new_sink->GetOutputDeviceInfoAsync(
+ base::BindOnce(&AudioRendererMixerInput::OnDeviceSwitchReady,
+ base::RetainedRef(this), std::move(callback), new_sink));
}
double AudioRendererMixerInput::ProvideInput(AudioBus* audio_bus,
@@ -173,8 +180,8 @@ double AudioRendererMixerInput::ProvideInput(AudioBus* audio_bus,
// AudioConverter expects unfilled frames to be zeroed.
if (frames_filled < audio_bus->frames()) {
- audio_bus->ZeroFramesPartial(
- frames_filled, audio_bus->frames() - frames_filled);
+ audio_bus->ZeroFramesPartial(frames_filled,
+ audio_bus->frames() - frames_filled);
}
// We're reading |volume_| from the audio device thread and must avoid racing
@@ -190,4 +197,42 @@ void AudioRendererMixerInput::OnRenderError() {
callback_->OnRenderError();
}
+void AudioRendererMixerInput::OnDeviceInfoReceived(
+ OutputDeviceInfoCB info_cb,
+ OutputDeviceInfo device_info) {
+ device_info_ = device_info;
+ std::move(info_cb).Run(*device_info_);
+}
+
+void AudioRendererMixerInput::OnDeviceSwitchReady(
+ OutputDeviceStatusCB switch_cb,
+ scoped_refptr<AudioRendererSink> sink,
+ OutputDeviceInfo device_info) {
+ if (device_info.device_status() != OUTPUT_DEVICE_STATUS_OK) {
+ sink->Stop();
+ std::move(switch_cb).Run(device_info.device_status());
+ return;
+ }
+
+ const bool has_mixer = !!mixer_;
+ const bool is_playing = playing_;
+
+ // This may occur if Start() hasn't yet been called.
+ if (sink_)
+ sink_->Stop();
+
+ sink_ = std::move(sink);
+ device_info_ = device_info;
+ device_id_ = device_info.device_id();
+
+ Stop();
+ if (has_mixer) {
+ Start();
+ if (is_playing)
+ Play();
+ }
+
+ std::move(switch_cb).Run(device_info.device_status());
+}
+
} // namespace media
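
The SwitchOutputDevice() path above now follows a prepare-then-commit pattern: a fresh sink is requested for the new device, its info is queried asynchronously, and the input's own state is only touched in OnDeviceSwitchReady() once the new device reports OUTPUT_DEVICE_STATUS_OK. Below is a minimal standalone sketch of that pattern; the Sink, DeviceInfo, and Input types are hypothetical stand-ins (the real code uses AudioRendererSink and OutputDeviceInfo), and the "async" call runs its callback inline for brevity.

#include <functional>
#include <iostream>
#include <memory>
#include <string>
#include <utility>

// Stand-ins for OutputDeviceInfo / AudioRendererSink; not the Chromium types.
enum class Status { kOk, kNotFound };
struct DeviceInfo {
  std::string device_id;
  Status status = Status::kOk;
};

struct Sink {
  DeviceInfo info;
  // In Chromium this is asynchronous; here the callback runs inline.
  void GetDeviceInfoAsync(std::function<void(DeviceInfo)> cb) { cb(info); }
  void Stop() { std::cout << "stopping sink for " << info.device_id << "\n"; }
};

class Input {
 public:
  using SwitchCallback = std::function<void(Status)>;

  void SwitchOutputDevice(const std::string& new_id, SwitchCallback cb) {
    // Prepare a new sink, but do not touch any current state yet.
    auto new_sink = std::make_shared<Sink>();
    new_sink->info = {new_id, new_id == "bad" ? Status::kNotFound : Status::kOk};
    new_sink->GetDeviceInfoAsync([this, cb, new_sink](DeviceInfo info) {
      if (info.status != Status::kOk) {
        new_sink->Stop();  // Discard the failed sink...
        cb(info.status);   // ...and leave the current audio path untouched.
        return;
      }
      if (sink_)           // Commit: replace the old sink.
        sink_->Stop();
      sink_ = new_sink;
      device_id_ = info.device_id;
      cb(Status::kOk);
    });
  }

 private:
  std::shared_ptr<Sink> sink_;
  std::string device_id_ = "default";
};

int main() {
  Input input;
  input.SwitchOutputDevice("bad", [](Status s) {
    std::cout << "switch to bad: " << (s == Status::kOk ? "ok" : "failed") << "\n";
  });
  input.SwitchOutputDevice("speakers", [](Status s) {
    std::cout << "switch to speakers: " << (s == Status::kOk ? "ok" : "failed") << "\n";
  });
}
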
diff --git a/chromium/media/base/audio_renderer_mixer_input.h b/chromium/media/base/audio_renderer_mixer_input.h
index 8109faca65a..bfbaa16e348 100644
--- a/chromium/media/base/audio_renderer_mixer_input.h
+++ b/chromium/media/base/audio_renderer_mixer_input.h
@@ -21,6 +21,7 @@
#include "base/callback.h"
#include "base/macros.h"
#include "base/synchronization/lock.h"
+#include "base/thread_annotations.h"
#include "media/base/audio_converter.h"
#include "media/base/audio_latency.h"
#include "media/base/audio_renderer_sink.h"
@@ -46,11 +47,13 @@ class MEDIA_EXPORT AudioRendererMixerInput
void Pause() override;
bool SetVolume(double volume) override;
OutputDeviceInfo GetOutputDeviceInfo() override;
+ void GetOutputDeviceInfoAsync(OutputDeviceInfoCB info_cb) override;
+
bool IsOptimizedForHardwareParameters() override;
void Initialize(const AudioParameters& params,
AudioRendererSink::RenderCallback* renderer) override;
void SwitchOutputDevice(const std::string& device_id,
- const OutputDeviceStatusCB& callback) override;
+ OutputDeviceStatusCB callback) override;
// This is expected to be called on the audio rendering thread. The caller
// must ensure that this input has been added to a mixer before calling the
// function, and that it is not removed from the mixer before this function
@@ -73,13 +76,31 @@ class MEDIA_EXPORT AudioRendererMixerInput
// SetVolume().
base::Lock volume_lock_;
- bool started_;
- bool playing_;
- double volume_;
+ bool started_ = false;
+ bool playing_ = false;
+ double volume_ GUARDED_BY(volume_lock_) = 1.0;
+
+ scoped_refptr<AudioRendererSink> sink_;
+ base::Optional<OutputDeviceInfo> device_info_;
// AudioConverter::InputCallback implementation.
double ProvideInput(AudioBus* audio_bus, uint32_t frames_delayed) override;
+ void OnDeviceInfoReceived(OutputDeviceInfoCB info_cb,
+ OutputDeviceInfo device_info);
+
+ // Method to help handle device changes. Must be static to ensure we can still
+ // execute the |switch_cb| even if the pipeline is destructed. Restarts (if
+ // necessary) Start() and Play() state with a new |sink| and |device_info|.
+ //
+ // |switch_cb| is the callback given to the SwitchOutputDevice() call.
+ // |sink| is a fresh sink which should be used if device info is good.
+ // |device_info| is the OutputDeviceInfo for |sink| after
+ // GetOutputDeviceInfoAsync() completes.
+ void OnDeviceSwitchReady(OutputDeviceStatusCB switch_cb,
+ scoped_refptr<AudioRendererSink> sink,
+ OutputDeviceInfo device_info);
+
// AudioParameters received during Initialize().
AudioParameters params_;
@@ -89,13 +110,10 @@ class MEDIA_EXPORT AudioRendererMixerInput
// AudioRendererMixer obtained from mixer pool during Initialize(),
// guaranteed to live (at least) until it is returned to the pool.
- AudioRendererMixer* mixer_;
+ AudioRendererMixer* mixer_ = nullptr;
// Source of audio data which is provided to the mixer.
- AudioRendererSink::RenderCallback* callback_;
-
- // Error callback for handing to AudioRendererMixer.
- const base::Closure error_cb_;
+ AudioRendererSink::RenderCallback* callback_ = nullptr;
DISALLOW_COPY_AND_ASSIGN(AudioRendererMixerInput);
};
diff --git a/chromium/media/base/audio_renderer_mixer_input_unittest.cc b/chromium/media/base/audio_renderer_mixer_input_unittest.cc
index 7b6013c84cf..f42decfd562 100644
--- a/chromium/media/base/audio_renderer_mixer_input_unittest.cc
+++ b/chromium/media/base/audio_renderer_mixer_input_unittest.cc
@@ -35,7 +35,7 @@ static const char kUnauthorizedDeviceId[] = "unauthorized";
static const char kNonexistentDeviceId[] = "nonexistent";
class AudioRendererMixerInputTest : public testing::Test,
- AudioRendererMixerPool {
+ public AudioRendererMixerPool {
public:
AudioRendererMixerInputTest() {
audio_parameters_ =
@@ -49,42 +49,26 @@ class AudioRendererMixerInputTest : public testing::Test,
void CreateMixerInput(const std::string& device_id) {
mixer_input_ = new AudioRendererMixerInput(this, kRenderFrameId, device_id,
-
AudioLatency::LATENCY_PLAYBACK);
+ mixer_input_->GetOutputDeviceInfoAsync(base::DoNothing());
+ scoped_task_environment_.RunUntilIdle();
}
AudioRendererMixer* GetMixer(int owner_id,
const AudioParameters& params,
AudioLatency::LatencyType latency,
- const std::string& device_id,
- OutputDeviceStatus* device_status) override {
+ const OutputDeviceInfo& sink_info,
+ scoped_refptr<AudioRendererSink> sink) override {
EXPECT_TRUE(params.IsValid());
- if (device_id == kNonexistentDeviceId) {
- if (device_status)
- *device_status = OUTPUT_DEVICE_STATUS_ERROR_NOT_FOUND;
- return nullptr;
- }
-
- if (device_id == kUnauthorizedDeviceId) {
- if (device_status)
- *device_status = OUTPUT_DEVICE_STATUS_ERROR_NOT_AUTHORIZED;
- return nullptr;
- }
-
- size_t idx = (device_id == kDefaultDeviceId) ? 0 : 1;
+ size_t idx = (sink_info.device_id() == kDefaultDeviceId) ? 0 : 1;
if (!mixers_[idx]) {
- sinks_[idx] =
- new MockAudioRendererSink(device_id, OUTPUT_DEVICE_STATUS_OK);
- EXPECT_CALL(*(sinks_[idx].get()), Start());
- EXPECT_CALL(*(sinks_[idx].get()), Stop());
+ EXPECT_CALL(*reinterpret_cast<MockAudioRendererSink*>(sink.get()),
+ Start());
mixers_[idx].reset(new AudioRendererMixer(
- audio_parameters_, sinks_[idx].get(), base::Bind(&LogUma)));
+ audio_parameters_, std::move(sink), base::BindRepeating(&LogUma)));
}
EXPECT_CALL(*this, ReturnMixer(mixers_[idx].get()));
-
- if (device_status)
- *device_status = OUTPUT_DEVICE_STATUS_OK;
return mixers_[idx].get();
}
@@ -92,22 +76,19 @@ class AudioRendererMixerInputTest : public testing::Test,
return mixer_input_->ProvideInput(audio_bus_.get(), 0);
}
- OutputDeviceInfo GetOutputDeviceInfo(int source_render_frame_id,
- int session_id,
- const std::string& device_id) override {
+ scoped_refptr<AudioRendererSink> GetSink(
+ int owner_id,
+ const std::string& device_id) override {
OutputDeviceStatus status = OUTPUT_DEVICE_STATUS_OK;
if (device_id == kNonexistentDeviceId)
status = OUTPUT_DEVICE_STATUS_ERROR_NOT_FOUND;
else if (device_id == kUnauthorizedDeviceId)
status = OUTPUT_DEVICE_STATUS_ERROR_NOT_AUTHORIZED;
-
- GetOutputDeviceInfoCalled(device_id);
- return OutputDeviceInfo(device_id, status,
- AudioParameters::UnavailableDeviceParams());
+ auto sink = base::MakeRefCounted<MockAudioRendererSink>(device_id, status);
+ EXPECT_CALL(*sink, Stop());
+ return sink;
}
- MOCK_METHOD1(GetOutputDeviceInfoCalled, void(const std::string&));
-
MOCK_METHOD1(ReturnMixer, void(AudioRendererMixer*));
MOCK_METHOD1(SwitchCallbackCalled, void(OutputDeviceStatus));
@@ -118,13 +99,15 @@ class AudioRendererMixerInputTest : public testing::Test,
}
AudioRendererMixer* GetInputMixer() { return mixer_input_->mixer_; }
+ MockAudioRendererSink* GetMockSink() const {
+ return reinterpret_cast<MockAudioRendererSink*>(mixer_input_->sink_.get());
+ }
protected:
~AudioRendererMixerInputTest() override = default;
base::test::ScopedTaskEnvironment scoped_task_environment_;
AudioParameters audio_parameters_;
- scoped_refptr<MockAudioRendererSink> sinks_[2];
std::unique_ptr<AudioRendererMixer> mixers_[2];
scoped_refptr<AudioRendererMixerInput> mixer_input_;
std::unique_ptr<FakeAudioRenderCallback> fake_callback_;
@@ -134,25 +117,6 @@ class AudioRendererMixerInputTest : public testing::Test,
DISALLOW_COPY_AND_ASSIGN(AudioRendererMixerInputTest);
};
-TEST_F(AudioRendererMixerInputTest, GetDeviceInfo) {
- EXPECT_CALL(*this, GetOutputDeviceInfoCalled(kDefaultDeviceId))
- .Times(testing::Exactly(1));
-
- mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
-
- // Calling GetOutputDeviceInfo() should result in the mock call, since there
- // is no mixer created yet for mixer input.
- mixer_input_->GetOutputDeviceInfo();
- mixer_input_->Start();
-
- // This call should be directed to the mixer and should not result in the mock
- // call.
- EXPECT_STREQ(kDefaultDeviceId,
- mixer_input_->GetOutputDeviceInfo().device_id().c_str());
-
- mixer_input_->Stop();
-}
-
// Test that getting and setting the volume work as expected. The volume is
// returned from ProvideInput() only when playing.
TEST_F(AudioRendererMixerInputTest, GetSetVolume) {
@@ -197,6 +161,9 @@ TEST_F(AudioRendererMixerInputTest, StopBeforeInitializeOrStart) {
TEST_F(AudioRendererMixerInputTest, StartAfterStop) {
mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
mixer_input_->Stop();
+
+ mixer_input_->GetOutputDeviceInfoAsync(base::DoNothing());
+ scoped_task_environment_.RunUntilIdle();
mixer_input_->Start();
mixer_input_->Stop();
}
@@ -206,14 +173,15 @@ TEST_F(AudioRendererMixerInputTest, InitializeAfterStop) {
mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
mixer_input_->Start();
mixer_input_->Stop();
+
+ mixer_input_->GetOutputDeviceInfoAsync(base::DoNothing());
+ scoped_task_environment_.RunUntilIdle();
mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
mixer_input_->Stop();
}
// Test SwitchOutputDevice().
TEST_F(AudioRendererMixerInputTest, SwitchOutputDevice) {
- EXPECT_CALL(*this, GetOutputDeviceInfoCalled(testing::_))
- .Times(testing::Exactly(0));
mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
mixer_input_->Start();
const std::string kDeviceId("mock-device-id");
@@ -233,8 +201,6 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDevice) {
// Test SwitchOutputDevice() to the same device as the current (default) device
TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceToSameDevice) {
- EXPECT_CALL(*this, GetOutputDeviceInfoCalled(testing::_))
- .Times(testing::Exactly(0));
mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
mixer_input_->Start();
EXPECT_CALL(*this, SwitchCallbackCalled(OUTPUT_DEVICE_STATUS_OK));
@@ -251,8 +217,6 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceToSameDevice) {
// Test SwitchOutputDevice() to the new device
TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceToAnotherDevice) {
- EXPECT_CALL(*this, GetOutputDeviceInfoCalled(testing::_))
- .Times(testing::Exactly(0));
mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
mixer_input_->Start();
EXPECT_CALL(*this, SwitchCallbackCalled(OUTPUT_DEVICE_STATUS_OK));
@@ -269,8 +233,6 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceToAnotherDevice) {
// Test that SwitchOutputDevice() to a nonexistent device fails.
TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceToNonexistentDevice) {
- EXPECT_CALL(*this, GetOutputDeviceInfoCalled(testing::_))
- .Times(testing::Exactly(0));
mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
mixer_input_->Start();
EXPECT_CALL(*this,
@@ -286,8 +248,6 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceToNonexistentDevice) {
// Test that SwitchOutputDevice() to an unauthorized device fails.
TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceToUnauthorizedDevice) {
- EXPECT_CALL(*this, GetOutputDeviceInfoCalled(testing::_))
- .Times(testing::Exactly(0));
mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
mixer_input_->Start();
EXPECT_CALL(*this,
@@ -303,8 +263,6 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceToUnauthorizedDevice) {
// Test that calling SwitchOutputDevice() before Start() succeeds.
TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceBeforeStart) {
- EXPECT_CALL(*this, GetOutputDeviceInfoCalled(kAnotherDeviceId))
- .Times(testing::Exactly(1));
mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
base::RunLoop run_loop;
EXPECT_CALL(*this, SwitchCallbackCalled(OUTPUT_DEVICE_STATUS_OK));
@@ -319,47 +277,19 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceBeforeStart) {
// Test that calling SwitchOutputDevice() succeeds even if Start() is never
// called.
TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceWithoutStart) {
- EXPECT_CALL(*this, GetOutputDeviceInfoCalled(kAnotherDeviceId))
- .Times(testing::Exactly(1));
mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
base::RunLoop run_loop;
EXPECT_CALL(*this, SwitchCallbackCalled(OUTPUT_DEVICE_STATUS_OK));
mixer_input_->SwitchOutputDevice(
kAnotherDeviceId, base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
base::Unretained(this), &run_loop));
- mixer_input_->Stop();
run_loop.Run();
-}
-
-// Test creation with an invalid device. OnRenderError() should be called.
-// Play(), Pause() and SwitchOutputDevice() should not cause crashes, even if
-// they have no effect.
-TEST_F(AudioRendererMixerInputTest, CreateWithInvalidDevice) {
- EXPECT_CALL(*this, GetOutputDeviceInfoCalled(kDefaultDeviceId))
- .Times(testing::Exactly(1));
- // |mixer_input_| was initialized during construction.
mixer_input_->Stop();
-
- CreateMixerInput(kNonexistentDeviceId);
- EXPECT_CALL(*fake_callback_, OnRenderError());
- mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
- mixer_input_->Start();
- mixer_input_->Play();
- mixer_input_->Pause();
- base::RunLoop run_loop;
- EXPECT_CALL(*this, SwitchCallbackCalled(testing::_));
- mixer_input_->SwitchOutputDevice(
- kDefaultDeviceId, base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
- base::Unretained(this), &run_loop));
- mixer_input_->Stop();
- run_loop.Run();
}
// Test that calling SwitchOutputDevice() works after calling Stop(), and that
// restarting works after the call to SwitchOutputDevice().
TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceAfterStopBeforeRestart) {
- EXPECT_CALL(*this, GetOutputDeviceInfoCalled(kAnotherDeviceId))
- .Times(testing::Exactly(1));
mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
mixer_input_->Start();
mixer_input_->Stop();
@@ -379,8 +309,6 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceAfterStopBeforeRestart) {
// and that initialization and restart work after the call to
// SwitchOutputDevice().
TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceBeforeInitialize) {
- EXPECT_CALL(*this, GetOutputDeviceInfoCalled(kAnotherDeviceId))
- .Times(testing::Exactly(1));
base::RunLoop run_loop;
EXPECT_CALL(*this, SwitchCallbackCalled(OUTPUT_DEVICE_STATUS_OK));
mixer_input_->SwitchOutputDevice(
diff --git a/chromium/media/base/audio_renderer_mixer_pool.h b/chromium/media/base/audio_renderer_mixer_pool.h
index 760f3eed032..461960d7ee4 100644
--- a/chromium/media/base/audio_renderer_mixer_pool.h
+++ b/chromium/media/base/audio_renderer_mixer_pool.h
@@ -13,31 +13,38 @@
namespace media {
class AudioParameters;
class AudioRendererMixer;
+class AudioRendererSink;
// Provides AudioRendererMixer instances for shared usage.
// Thread safe.
class MEDIA_EXPORT AudioRendererMixerPool {
public:
- AudioRendererMixerPool() {}
- virtual ~AudioRendererMixerPool() {}
+ AudioRendererMixerPool() = default;
+ virtual ~AudioRendererMixerPool() = default;
// Obtains a pointer to a mixer instance based on AudioParameters. The pointer
// is guaranteed to be valid (at least) until it's released by a call to
// ReturnMixer().
- virtual AudioRendererMixer* GetMixer(int owner_id,
- const AudioParameters& params,
- AudioLatency::LatencyType latency,
- const std::string& device_id,
- OutputDeviceStatus* device_status) = 0;
+ //
+ // Ownership of |sink| must be passed to GetMixer(); it will be stopped and
+ // discarded if an existing mixer can be reused. Clients must have called
+ // GetOutputDeviceInfoAsync() on |sink| to get |sink_info|, and it must have
+ // a device_status() == OUTPUT_DEVICE_STATUS_OK.
+ virtual AudioRendererMixer* GetMixer(
+ int owner_id,
+ const AudioParameters& input_params,
+ AudioLatency::LatencyType latency,
+ const OutputDeviceInfo& sink_info,
+ scoped_refptr<AudioRendererSink> sink) = 0;
// Returns the mixer back to the pool; must be called when the mixer is no
// longer needed to avoid memory leaks.
virtual void ReturnMixer(AudioRendererMixer* mixer) = 0;
- // Returns output device information
- virtual OutputDeviceInfo GetOutputDeviceInfo(
+ // Returns an AudioRendererSink for use with GetMixer(). Inputs must call this
+ // to get a sink to use with a subsequent GetMixer() call.
+ virtual scoped_refptr<AudioRendererSink> GetSink(
int owner_id,
- int session_id,
const std::string& device_id) = 0;
private:
diff --git a/chromium/media/base/audio_renderer_mixer_unittest.cc b/chromium/media/base/audio_renderer_mixer_unittest.cc
index 3e615bce905..1c00ec274b7 100644
--- a/chromium/media/base/audio_renderer_mixer_unittest.cc
+++ b/chromium/media/base/audio_renderer_mixer_unittest.cc
@@ -16,6 +16,7 @@
#include "base/bind_helpers.h"
#include "base/macros.h"
#include "base/synchronization/waitable_event.h"
+#include "base/test/scoped_task_environment.h"
#include "base/threading/platform_thread.h"
#include "media/base/audio_renderer_mixer_input.h"
#include "media/base/audio_renderer_mixer_pool.h"
@@ -55,7 +56,7 @@ using AudioRendererMixerTestData =
class AudioRendererMixerTest
: public testing::TestWithParam<AudioRendererMixerTestData>,
- AudioRendererMixerPool {
+ public AudioRendererMixerPool {
public:
AudioRendererMixerTest()
: epsilon_(std::get<3>(GetParam())), half_fill_(false) {
@@ -93,8 +94,8 @@ class AudioRendererMixerTest
AudioRendererMixer* GetMixer(int owner_id,
const AudioParameters& params,
AudioLatency::LatencyType latency,
- const std::string& device_id,
- OutputDeviceStatus* device_status) final {
+ const OutputDeviceInfo& sink_info,
+ scoped_refptr<AudioRendererSink> sink) final {
return mixer_.get();
};
@@ -102,8 +103,11 @@ class AudioRendererMixerTest
EXPECT_EQ(mixer_.get(), mixer);
}
- MOCK_METHOD3(GetOutputDeviceInfo,
- OutputDeviceInfo(int, int, const std::string&));
+ scoped_refptr<AudioRendererSink> GetSink(
+ int owner_id,
+ const std::string& device_id) override {
+ return sink_;
+ }
void InitializeInputs(int inputs_per_sample_rate) {
mixer_inputs_.reserve(inputs_per_sample_rate * input_parameters_.size());
@@ -334,15 +338,20 @@ class AudioRendererMixerTest
}
scoped_refptr<AudioRendererMixerInput> CreateMixerInput() {
- return new AudioRendererMixerInput(this,
- // Zero frame id, default device ID.
- 0, std::string(),
- AudioLatency::LATENCY_PLAYBACK);
+ auto input = base::MakeRefCounted<AudioRendererMixerInput>(
+ this,
+ // Zero frame id, default device ID.
+ 0, std::string(), AudioLatency::LATENCY_PLAYBACK);
+ input->GetOutputDeviceInfoAsync(
+ base::DoNothing()); // Primes input, needed for tests.
+ task_env_.RunUntilIdle();
+ return input;
}
protected:
virtual ~AudioRendererMixerTest() = default;
+ base::test::ScopedTaskEnvironment task_env_;
scoped_refptr<MockAudioRendererSink> sink_;
std::unique_ptr<AudioRendererMixer> mixer_;
AudioRendererSink::RenderCallback* mixer_callback_;
@@ -350,7 +359,7 @@ class AudioRendererMixerTest
AudioParameters output_parameters_;
std::unique_ptr<AudioBus> audio_bus_;
std::unique_ptr<AudioBus> expected_audio_bus_;
- std::vector< scoped_refptr<AudioRendererMixerInput> > mixer_inputs_;
+ std::vector<scoped_refptr<AudioRendererMixerInput>> mixer_inputs_;
std::vector<std::unique_ptr<FakeAudioRenderCallback>> fake_callbacks_;
std::unique_ptr<FakeAudioRenderCallback> expected_callback_;
double epsilon_;
@@ -478,7 +487,7 @@ TEST_P(AudioRendererMixerBehavioralTest, MixerPausesStream) {
const base::TimeDelta kPauseTime = base::TimeDelta::FromMilliseconds(500);
// This value can't be too low or valgrind, tsan will timeout on the bots.
const base::TimeDelta kTestTimeout = 10 * kPauseTime;
- mixer_->set_pause_delay_for_testing(kPauseTime);
+ mixer_->SetPauseDelayForTesting(kPauseTime);
base::WaitableEvent pause_event(
base::WaitableEvent::ResetPolicy::MANUAL,
diff --git a/chromium/media/base/audio_renderer_sink.h b/chromium/media/base/audio_renderer_sink.h
index 112bbc90891..fefc018b4b0 100644
--- a/chromium/media/base/audio_renderer_sink.h
+++ b/chromium/media/base/audio_renderer_sink.h
@@ -66,12 +66,29 @@ class AudioRendererSink
virtual bool SetVolume(double volume) = 0;
// Returns current output device information. If the information is not
- // available yet, this method may block until it becomes available.
- // If the sink is not associated with any output device, |device_status| of
- // OutputDeviceInfo should be set to OUTPUT_DEVICE_STATUS_ERROR_INTERNAL.
- // Must never be called on the IO thread.
+ // available yet, this method may block until it becomes available. If the
+ // sink is not associated with any output device, |device_status| of
+ // OutputDeviceInfo should be set to OUTPUT_DEVICE_STATUS_ERROR_INTERNAL. Must
+ // never be called on the IO thread.
+ //
+ // Note: Prefer to use GetOutputDeviceInfoAsync instead if possible.
virtual OutputDeviceInfo GetOutputDeviceInfo() = 0;
+ // Same as the above, but does not block and will execute |info_cb| when the
+ // OutputDeviceInfo is available. The callback will be executed on the calling
+ // thread. Prefer this function to the synchronous version; it does not have a
+ // timeout, so it results in fewer spurious timeout errors.
+ //
+ // |info_cb| will always be posted (i.e., executed after this function
+ // returns), even if the OutputDeviceInfo is already available.
+ //
+ // Upon destruction, if the OutputDeviceInfo is still not available, |info_cb|
+ // will be posted with OUTPUT_DEVICE_STATUS_ERROR_INTERNAL. Note: Because
+ // |info_cb| is posted, it will execute after destruction, so clients must
+ // handle cancellation of the callback if needed.
+ using OutputDeviceInfoCB = base::OnceCallback<void(OutputDeviceInfo)>;
+ virtual void GetOutputDeviceInfoAsync(OutputDeviceInfoCB info_cb) = 0;
+
// Returns |true| if a source with hardware parameters is preferable.
virtual bool IsOptimizedForHardwareParameters() = 0;
@@ -103,7 +120,7 @@ class SwitchableAudioRendererSink : public RestartableAudioRendererSink {
// the media::OutputDeviceStatus enum.
// There is no guarantee about the thread where |callback| will be invoked.
virtual void SwitchOutputDevice(const std::string& device_id,
- const OutputDeviceStatusCB& callback) = 0;
+ OutputDeviceStatusCB callback) = 0;
protected:
~SwitchableAudioRendererSink() override {}
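
GetOutputDeviceInfoAsync() above requires |info_cb| to be posted even when the answer is already cached, so callers always observe the callback strictly after the call returns. Below is a toy sketch of that always-post rule, assuming a simple TaskQueue in place of the base::SequencedTaskRunnerHandle::Get()->PostTask() calls used elsewhere in this patch.

#include <deque>
#include <functional>
#include <iostream>
#include <optional>
#include <string>

// Toy replacement for posting to the current sequence.
struct TaskQueue {
  std::deque<std::function<void()>> tasks;
  void Post(std::function<void()> task) { tasks.push_back(std::move(task)); }
  void RunUntilIdle() {
    while (!tasks.empty()) {
      auto task = std::move(tasks.front());
      tasks.pop_front();
      task();
    }
  }
};

struct DeviceInfoSource {
  TaskQueue* queue;
  std::optional<std::string> cached_info;

  void GetInfoAsync(std::function<void(std::string)> info_cb) {
    if (cached_info) {
      // Even though the answer is known, post it so it runs after we return.
      queue->Post([info_cb, info = *cached_info] { info_cb(info); });
      return;
    }
    // Otherwise fetch it, cache it, and deliver it from the posted task.
    queue->Post([this, info_cb] {
      cached_info = "default-device";
      info_cb(*cached_info);
    });
  }
};

int main() {
  TaskQueue queue;
  DeviceInfoSource source{&queue, std::nullopt};
  source.GetInfoAsync([](std::string info) {
    std::cout << "callback ran with: " << info << "\n";
  });
  std::cout << "GetInfoAsync() returned before the callback ran\n";
  queue.RunUntilIdle();
}
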
diff --git a/chromium/media/base/audio_shifter_unittest.cc b/chromium/media/base/audio_shifter_unittest.cc
index c6de5920765..823e64523dd 100644
--- a/chromium/media/base/audio_shifter_unittest.cc
+++ b/chromium/media/base/audio_shifter_unittest.cc
@@ -18,8 +18,6 @@ const int kSampleRate = 48000;
const int kInputPacketSize = 48;
const int kOutputPacketSize = 24;
-#if GTEST_HAS_COMBINE
-
class AudioShifterTest :
public ::testing::TestWithParam<::testing::tuple<int, int, int, bool> > {
public:
@@ -207,6 +205,4 @@ INSTANTIATE_TEST_CASE_P(
::testing::Range(0, 3),
::testing::Bool()));
-#endif
-
} // namespace media
diff --git a/chromium/media/base/callback_registry.h b/chromium/media/base/callback_registry.h
index f5c315e4ecc..5e4ea16d94d 100644
--- a/chromium/media/base/callback_registry.h
+++ b/chromium/media/base/callback_registry.h
@@ -14,6 +14,7 @@
#include "base/logging.h"
#include "base/macros.h"
#include "base/synchronization/lock.h"
+#include "base/thread_annotations.h"
#include "media/base/bind_to_current_loop.h"
namespace media {
@@ -92,8 +93,8 @@ class CallbackRegistry<void(Args...)> {
}
base::Lock lock_;
- uint32_t next_registration_id_ = 0;
- std::map<uint32_t, CallbackType> callbacks_;
+ uint32_t next_registration_id_ GUARDED_BY(lock_) = 0;
+ std::map<uint32_t, CallbackType> callbacks_ GUARDED_BY(lock_);
DISALLOW_COPY_AND_ASSIGN(CallbackRegistry);
};
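
The GUARDED_BY annotations added here (and in audio_renderer_mixer_input.h and fake_audio_worker.cc) feed Clang's -Wthread-safety analysis, which warns at compile time when an annotated member is accessed without holding its lock. The sketch below is a simplified stand-in for base/thread_annotations.h; whether the analysis recognizes std::mutex as a lockable capability depends on the standard library, whereas Chromium annotates base::Lock itself.

#include <cstdint>
#include <map>
#include <mutex>

// Simplified stand-ins for the macros in base/thread_annotations.h.
#if defined(__clang__)
#define GUARDED_BY(lock) __attribute__((guarded_by(lock)))
#else
#define GUARDED_BY(lock)
#endif

class Registry {
 public:
  uint32_t Register() {
    std::lock_guard<std::mutex> hold(lock_);
    uint32_t id = next_id_++;  // Fine: lock_ is held for the whole access.
    entries_[id] = true;
    return id;
  }

  // Under Clang's -Wthread-safety analysis (with an annotated lock type such
  // as Chromium's base::Lock), touching next_id_ or entries_ without holding
  // lock_ produces a "requires holding" warning at compile time.

 private:
  std::mutex lock_;
  uint32_t next_id_ GUARDED_BY(lock_) = 0;
  std::map<uint32_t, bool> entries_ GUARDED_BY(lock_);
};

int main() {
  Registry registry;
  return static_cast<int>(registry.Register());
}
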
diff --git a/chromium/media/base/cdm_promise_adapter.cc b/chromium/media/base/cdm_promise_adapter.cc
index 3ff34ee9984..6b9fe520fae 100644
--- a/chromium/media/base/cdm_promise_adapter.cc
+++ b/chromium/media/base/cdm_promise_adapter.cc
@@ -34,7 +34,7 @@ void CdmPromiseAdapter::ResolvePromise(uint32_t promise_id,
const T&... result) {
std::unique_ptr<CdmPromise> promise = TakePromise(promise_id);
if (!promise) {
- NOTREACHED() << "Promise not found for " << promise_id;
+ LOG(ERROR) << "Promise not found for " << promise_id;
return;
}
@@ -42,7 +42,7 @@ void CdmPromiseAdapter::ResolvePromise(uint32_t promise_id,
CdmPromise::ResolveParameterType type = promise->GetResolveParameterType();
CdmPromise::ResolveParameterType expected = CdmPromiseTraits<T...>::kType;
if (type != expected) {
- NOTREACHED() << "Promise type mismatch: " << type << " vs " << expected;
+ LOG(ERROR) << "Promise type mismatch: " << type << " vs " << expected;
return;
}
@@ -55,7 +55,7 @@ void CdmPromiseAdapter::RejectPromise(uint32_t promise_id,
const std::string& error_message) {
std::unique_ptr<CdmPromise> promise = TakePromise(promise_id);
if (!promise) {
- NOTREACHED() << "No promise found for promise_id " << promise_id;
+ LOG(ERROR) << "Promise not found for " << promise_id;
return;
}
@@ -77,6 +77,7 @@ std::unique_ptr<CdmPromise> CdmPromiseAdapter::TakePromise(
auto it = promises_.find(promise_id);
if (it == promises_.end())
return nullptr;
+
std::unique_ptr<CdmPromise> result = std::move(it->second);
promises_.erase(it);
return result;
diff --git a/chromium/media/base/decode_capabilities.cc b/chromium/media/base/decode_capabilities.cc
index 9d30a878c3f..9799a04bdb7 100644
--- a/chromium/media/base/decode_capabilities.cc
+++ b/chromium/media/base/decode_capabilities.cc
@@ -163,8 +163,7 @@ bool IsSupportedVideoConfig(const VideoConfig& config) {
switch (config.codec) {
case media::kCodecAV1:
#if BUILDFLAG(ENABLE_AV1_DECODER)
- return base::FeatureList::IsEnabled(kAv1Decoder) &&
- IsColorSpaceSupported(config.color_space);
+ return IsColorSpaceSupported(config.color_space);
#else
return false;
#endif
diff --git a/chromium/media/base/decoder_buffer.cc b/chromium/media/base/decoder_buffer.cc
index 3366d41608b..4cd4851a00c 100644
--- a/chromium/media/base/decoder_buffer.cc
+++ b/chromium/media/base/decoder_buffer.cc
@@ -70,6 +70,11 @@ DecoderBuffer::~DecoderBuffer() {
void* data_at_initialize = data_at_initialize_;
base::debug::Alias(&data_at_initialize);
+ uint32_t destruction = destruction_;
+ base::debug::Alias(&destruction);
+ CHECK_NE(destruction_, 0xAAAAAAAA);
+ destruction_ = 0xAAAAAAAA;
+
CHECK_EQ(!!side_data_size_, !!side_data_);
data_.reset();
side_data_.reset();
diff --git a/chromium/media/base/decoder_buffer.h b/chromium/media/base/decoder_buffer.h
index cb727c259e1..32392820e62 100644
--- a/chromium/media/base/decoder_buffer.h
+++ b/chromium/media/base/decoder_buffer.h
@@ -230,6 +230,10 @@ class MEDIA_EXPORT DecoderBuffer
// Whether the frame was marked as a keyframe in the container.
bool is_key_frame_;
+ // Check for double destruction. This field is not to be used.
+ // crbug.com/794740.
+ uint32_t destruction_ = 0x55555555;
+
// Constructor helper method for memory allocations.
void Initialize();
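
The destruction_ member added above is a double-destruction canary: construction writes one poison pattern, and the destructor first CHECKs that the "already destroyed" pattern is absent, then writes it, so a second destructor run on the same memory crashes immediately instead of corrupting the heap. Below is a minimal standalone illustration of the pattern; assert stands in for CHECK_NE (which also fires in release builds), and the class is hypothetical, not DecoderBuffer.

#include <cassert>
#include <cstdint>
#include <new>

class Canary {
 public:
  Canary() = default;

  ~Canary() {
    // If the destructor already ran on this memory, the field still holds the
    // "destroyed" pattern, and the check fires instead of silently corrupting
    // memory a second time.
    assert(destruction_ != 0xAAAAAAAA && "double destruction detected");
    destruction_ = 0xAAAAAAAA;
  }

 private:
  uint32_t destruction_ = 0x55555555;  // "Alive" pattern, set on construction.
};

int main() {
  // Placement new lets us run the destructor by hand without freeing memory.
  alignas(Canary) unsigned char storage[sizeof(Canary)];
  Canary* object = new (storage) Canary();
  object->~Canary();    // First destruction: passes the check.
  // object->~Canary(); // A second destruction would trip the assert.
  return 0;
}
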
diff --git a/chromium/media/base/decrypt_config.cc b/chromium/media/base/decrypt_config.cc
index eda225a1f7f..385e1597c3e 100644
--- a/chromium/media/base/decrypt_config.cc
+++ b/chromium/media/base/decrypt_config.cc
@@ -76,6 +76,13 @@ std::unique_ptr<DecryptConfig> DecryptConfig::Clone() const {
return base::WrapUnique(new DecryptConfig(*this));
}
+std::unique_ptr<DecryptConfig> DecryptConfig::CopyNewSubsamplesIV(
+ const std::vector<SubsampleEntry>& subsamples,
+ const std::string& iv) {
+ return std::make_unique<DecryptConfig>(encryption_mode_, key_id_, iv,
+ subsamples, encryption_pattern_);
+}
+
bool DecryptConfig::HasPattern() const {
return encryption_pattern_.has_value();
}
diff --git a/chromium/media/base/decrypt_config.h b/chromium/media/base/decrypt_config.h
index 111a8037cdf..a3525b7ebc3 100644
--- a/chromium/media/base/decrypt_config.h
+++ b/chromium/media/base/decrypt_config.h
@@ -74,6 +74,12 @@ class MEDIA_EXPORT DecryptConfig {
std::unique_ptr<DecryptConfig> Clone() const;
+ // Makes a new config which has the same configuration options (mode, pattern)
+ // while providing a new vector of subsamples and initialization vector.
+ std::unique_ptr<DecryptConfig> CopyNewSubsamplesIV(
+ const std::vector<SubsampleEntry>& subsamples,
+ const std::string& iv);
+
// Returns whether this config has EncryptionPattern set or not.
bool HasPattern() const;
diff --git a/chromium/media/base/demuxer_memory_limit_android.cc b/chromium/media/base/demuxer_memory_limit_android.cc
index 799e1b76eaa..b2f35ecd0d8 100644
--- a/chromium/media/base/demuxer_memory_limit_android.cc
+++ b/chromium/media/base/demuxer_memory_limit_android.cc
@@ -5,7 +5,7 @@
#include "media/base/demuxer_memory_limit.h"
#include "base/android/build_info.h"
-#include "base/sys_info.h"
+#include "base/system/sys_info.h"
namespace media {
diff --git a/chromium/media/base/eme_constants.h b/chromium/media/base/eme_constants.h
index 0da02beba1b..8fadd75933f 100644
--- a/chromium/media/base/eme_constants.h
+++ b/chromium/media/base/eme_constants.h
@@ -16,18 +16,21 @@ namespace media {
// http://w3c.github.io/encrypted-media/initdata-format-registry.html#registry
enum class EmeInitDataType { UNKNOWN, WEBM, CENC, KEYIDS, MAX = KEYIDS };
-// Defines bitmask values that specify codecs used in Encrypted Media Extension
-// (EME). Each value represents a codec within a specific container.
+// Defines bitmask values that specify codecs used in Encrypted Media Extensions
+// (EME). Generally codec profiles are not specified and it is assumed that the
+// profile support for encrypted playback is the same as for clear playback.
+// The only exception is VP9, where older CDMs may only support profile 0 while
+// newer CDMs could support profile 2. Profiles 1 and 3 are not supported by
+// EME; see https://crbug.com/898298.
enum EmeCodec : uint32_t {
EME_CODEC_NONE = 0,
EME_CODEC_OPUS = 1 << 0,
EME_CODEC_VORBIS = 1 << 1,
EME_CODEC_VP8 = 1 << 2,
- EME_CODEC_LEGACY_VP9 = 1 << 3,
+ EME_CODEC_VP9_PROFILE0 = 1 << 3,
EME_CODEC_AAC = 1 << 4,
- // AVC1 is shared by MP4 and MP2T.
EME_CODEC_AVC1 = 1 << 5,
- EME_CODEC_VP9 = 1 << 6, // New multi-part VP9 for WebM and MP4.
+ EME_CODEC_VP9_PROFILE2 = 1 << 6, // VP9 profile 2
EME_CODEC_HEVC = 1 << 7,
EME_CODEC_DOLBY_VISION_AVC = 1 << 8,
EME_CODEC_DOLBY_VISION_HEVC = 1 << 9,
@@ -35,6 +38,7 @@ enum EmeCodec : uint32_t {
EME_CODEC_EAC3 = 1 << 11,
EME_CODEC_MPEG_H_AUDIO = 1 << 12,
EME_CODEC_FLAC = 1 << 13,
+ EME_CODEC_AV1 = 1 << 14,
};
// *_ALL values should only be used for masking, do not use them to specify
@@ -42,6 +46,8 @@ enum EmeCodec : uint32_t {
using SupportedCodecs = uint32_t;
+namespace {
+
constexpr SupportedCodecs GetMp4AudioCodecs() {
SupportedCodecs codecs = EME_CODEC_FLAC;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
@@ -57,8 +63,10 @@ constexpr SupportedCodecs GetMp4AudioCodecs() {
}
constexpr SupportedCodecs GetMp4VideoCodecs() {
- // VP9 is supported in MP4, but legacy VP9 is not.
- SupportedCodecs codecs = EME_CODEC_VP9;
+ // The VP9 codec can be in MP4. Legacy VP9 codec strings ("vp9" and "vp9.0")
+ // cannot be in the "video/mp4" mime type, but that is enforced by
+ // media::MimeUtil.
+ SupportedCodecs codecs = EME_CODEC_VP9_PROFILE0 | EME_CODEC_VP9_PROFILE2;
+ codecs |= EME_CODEC_AV1;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
codecs |= EME_CODEC_AVC1;
#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
@@ -74,12 +82,14 @@ constexpr SupportedCodecs GetMp4VideoCodecs() {
return codecs;
}
+} // namespace
+
constexpr SupportedCodecs EME_CODEC_WEBM_AUDIO_ALL =
EME_CODEC_OPUS | EME_CODEC_VORBIS;
-// Both VP9 and legacy VP9 are supported in WebM.
constexpr SupportedCodecs EME_CODEC_WEBM_VIDEO_ALL =
- EME_CODEC_VP8 | EME_CODEC_LEGACY_VP9 | EME_CODEC_VP9;
+ EME_CODEC_VP8 | EME_CODEC_VP9_PROFILE0 | EME_CODEC_VP9_PROFILE2 |
+ EME_CODEC_AV1;
constexpr SupportedCodecs EME_CODEC_WEBM_ALL =
EME_CODEC_WEBM_AUDIO_ALL | EME_CODEC_WEBM_VIDEO_ALL;
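
Each EmeCodec above is a single bit, so supported-codec sets are plain bitmasks, and key_systems.cc later in this patch checks containment with (codec & mask) == codec rather than != 0 so that a hypothetical multi-bit codec value would need all of its bits supported. Below is a small self-contained sketch of that check, with illustrative values rather than the real EME_CODEC_* constants.

#include <cstdint>
#include <iostream>

enum Codec : uint32_t {
  CODEC_NONE = 0,
  CODEC_OPUS = 1 << 0,
  CODEC_VP8 = 1 << 1,
  CODEC_VP9_PROFILE0 = 1 << 2,
  CODEC_VP9_PROFILE2 = 1 << 3,
  // Hypothetical multi-bit value covering both VP9 profiles.
  CODEC_VP9_ANY = CODEC_VP9_PROFILE0 | CODEC_VP9_PROFILE2,
};

using SupportedCodecs = uint32_t;

// True only if every bit of |codec| is present in |mask|.
bool IsSupported(uint32_t codec, SupportedCodecs mask) {
  return codec != CODEC_NONE && (codec & mask) == codec;
}

int main() {
  SupportedCodecs old_cdm = CODEC_OPUS | CODEC_VP8 | CODEC_VP9_PROFILE0;

  std::cout << IsSupported(CODEC_VP9_PROFILE0, old_cdm) << "\n";  // 1
  std::cout << IsSupported(CODEC_VP9_PROFILE2, old_cdm) << "\n";  // 0
  // With a simple != 0 check this would wrongly pass, because profile 0 is
  // supported even though profile 2 is not.
  std::cout << IsSupported(CODEC_VP9_ANY, old_cdm) << "\n";       // 0
}
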
diff --git a/chromium/media/base/fake_audio_renderer_sink.cc b/chromium/media/base/fake_audio_renderer_sink.cc
index e80a0f817d9..924691ead1b 100644
--- a/chromium/media/base/fake_audio_renderer_sink.cc
+++ b/chromium/media/base/fake_audio_renderer_sink.cc
@@ -7,6 +7,7 @@
#include "base/bind.h"
#include "base/location.h"
#include "base/logging.h"
+#include "base/threading/sequenced_task_runner_handle.h"
namespace media {
@@ -69,6 +70,12 @@ OutputDeviceInfo FakeAudioRendererSink::GetOutputDeviceInfo() {
return output_device_info_;
}
+void FakeAudioRendererSink::GetOutputDeviceInfoAsync(
+ OutputDeviceInfoCB info_cb) {
+ base::SequencedTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE, base::BindOnce(std::move(info_cb), output_device_info_));
+}
+
bool FakeAudioRendererSink::IsOptimizedForHardwareParameters() {
return is_optimized_for_hw_params_;
}
diff --git a/chromium/media/base/fake_audio_renderer_sink.h b/chromium/media/base/fake_audio_renderer_sink.h
index 7e87a960ee2..de599d2652b 100644
--- a/chromium/media/base/fake_audio_renderer_sink.h
+++ b/chromium/media/base/fake_audio_renderer_sink.h
@@ -39,6 +39,7 @@ class FakeAudioRendererSink : public AudioRendererSink {
void Play() override;
bool SetVolume(double volume) override;
OutputDeviceInfo GetOutputDeviceInfo() override;
+ void GetOutputDeviceInfoAsync(OutputDeviceInfoCB info_cb) override;
bool IsOptimizedForHardwareParameters() override;
bool CurrentThreadIsRenderingThread() override;
diff --git a/chromium/media/base/fake_audio_worker.cc b/chromium/media/base/fake_audio_worker.cc
index 2d274ede27c..09b4ea4319a 100644
--- a/chromium/media/base/fake_audio_worker.cc
+++ b/chromium/media/base/fake_audio_worker.cc
@@ -12,6 +12,7 @@
#include "base/macros.h"
#include "base/single_thread_task_runner.h"
#include "base/synchronization/lock.h"
+#include "base/thread_annotations.h"
#include "base/threading/thread_checker.h"
#include "base/time/time.h"
#include "media/base/audio_parameters.h"
@@ -47,7 +48,7 @@ class FakeAudioWorker::Worker
const base::TimeDelta buffer_duration_;
base::Lock worker_cb_lock_; // Held while mutating or running |worker_cb_|.
- base::Closure worker_cb_;
+ base::Closure worker_cb_ GUARDED_BY(worker_cb_lock_);
base::TimeTicks next_read_time_;
// Used to cancel any delayed tasks still inside the worker loop's queue.
diff --git a/chromium/media/base/fake_demuxer_stream.cc b/chromium/media/base/fake_demuxer_stream.cc
index a327a38ac3f..0345ce1fcab 100644
--- a/chromium/media/base/fake_demuxer_stream.cc
+++ b/chromium/media/base/fake_demuxer_stream.cc
@@ -158,7 +158,7 @@ void FakeDemuxerStream::UpdateVideoDecoderConfig() {
const gfx::Rect kVisibleRect(kStartWidth, kStartHeight);
video_decoder_config_.Initialize(
kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN, PIXEL_FORMAT_I420,
- COLOR_SPACE_UNSPECIFIED, VIDEO_ROTATION_0, next_coded_size_, kVisibleRect,
+ VideoColorSpace(), VIDEO_ROTATION_0, next_coded_size_, kVisibleRect,
next_coded_size_, EmptyExtraData(),
is_encrypted_ ? AesCtrEncryptionScheme() : Unencrypted());
next_coded_size_.Enlarge(kWidthDelta, kHeightDelta);
diff --git a/chromium/media/base/fallback_video_decoder.cc b/chromium/media/base/fallback_video_decoder.cc
index 50568e53141..6d73bce38b1 100644
--- a/chromium/media/base/fallback_video_decoder.cc
+++ b/chromium/media/base/fallback_video_decoder.cc
@@ -5,6 +5,8 @@
#include <utility>
#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/threading/sequenced_task_runner_handle.h"
#include "media/base/decoder_buffer.h"
#include "media/base/fallback_video_decoder.h"
#include "media/base/video_decoder_config.h"
@@ -58,7 +60,13 @@ void FallbackVideoDecoder::FallbackInitialize(
}
did_fallback_ = true;
- preferred_decoder_.reset();
+ // Post destruction of |preferred_decoder_| so that we don't destroy the
+ // object during the callback. DeleteSoon doesn't handle custom deleters, so
+ // we post a do-nothing task instead.
+ base::SequencedTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE,
+ base::BindOnce(base::DoNothing::Once<std::unique_ptr<VideoDecoder>>(),
+ std::move(preferred_decoder_)));
selected_decoder_ = fallback_decoder_.get();
fallback_decoder_->Initialize(config, low_delay, cdm_context, init_cb,
output_cb, waiting_for_decryption_key_cb);
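
The FallbackVideoDecoder change above avoids destroying |preferred_decoder_| from inside one of its own callbacks by moving the pointer into a posted do-nothing task and letting it die when that task is destroyed. Below is a standalone sketch of the idiom; the TaskQueue is a stand-in for the sequenced task runner, and a shared_ptr replaces the bound unique_ptr because std::function requires a copyable callable.

#include <deque>
#include <functional>
#include <iostream>
#include <memory>
#include <utility>

// Toy stand-in for posting a task to the current sequence.
struct TaskQueue {
  std::deque<std::function<void()>> tasks;
  void Post(std::function<void()> task) { tasks.push_back(std::move(task)); }
  void RunUntilIdle() {
    while (!tasks.empty()) {
      auto task = std::move(tasks.front());
      tasks.pop_front();
      task();
    }  // |task| is destroyed here, releasing anything it captured.
  }
};

struct Decoder {
  ~Decoder() { std::cout << "decoder destroyed\n"; }
};

int main() {
  TaskQueue queue;
  auto decoder = std::make_unique<Decoder>();

  // Hand ownership to a do-nothing task. Chromium binds the unique_ptr into a
  // base::OnceCallback; a shared_ptr stands in here. Either way, destruction
  // happens when the posted task is destroyed, not at this call site.
  std::shared_ptr<Decoder> doomed(std::move(decoder));
  queue.Post([doomed = std::move(doomed)] {});

  std::cout << "cleanup task posted, decoder still alive\n";
  queue.RunUntilIdle();  // "decoder destroyed" prints while draining the queue.
  std::cout << "queue drained\n";
}
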
diff --git a/chromium/media/base/fallback_video_decoder_unittest.cc b/chromium/media/base/fallback_video_decoder_unittest.cc
index ca4bcbce478..0c8a231b8c5 100644
--- a/chromium/media/base/fallback_video_decoder_unittest.cc
+++ b/chromium/media/base/fallback_video_decoder_unittest.cc
@@ -7,6 +7,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/run_loop.h"
+#include "base/test/scoped_task_environment.h"
#include "media/base/decoder_buffer.h"
#include "media/base/fallback_video_decoder.h"
#include "media/base/gmock_callback_support.h"
@@ -70,6 +71,8 @@ class FallbackVideoDecoderUnittest : public ::testing::TestWithParam<bool> {
bool PreferredShouldSucceed() { return GetParam(); }
+ base::test::ScopedTaskEnvironment scoped_task_environment_;
+
StrictMock<MockVideoDecoder>* backup_decoder_;
StrictMock<MockVideoDecoder>* preferred_decoder_;
VideoDecoder* fallback_decoder_;
diff --git a/chromium/media/base/ipc/media_param_traits.cc b/chromium/media/base/ipc/media_param_traits.cc
index 9967314bd6b..64db44e75c1 100644
--- a/chromium/media/base/ipc/media_param_traits.cc
+++ b/chromium/media/base/ipc/media_param_traits.cc
@@ -31,6 +31,7 @@ void ParamTraits<AudioParameters>::Write(base::Pickle* m,
WriteParam(m, p.effects());
WriteParam(m, p.mic_positions());
WriteParam(m, p.latency_tag());
+ WriteParam(m, p.hardware_capabilities());
}
bool ParamTraits<AudioParameters>::Read(const base::Pickle* m,
@@ -41,24 +42,32 @@ bool ParamTraits<AudioParameters>::Read(const base::Pickle* m,
int sample_rate, frames_per_buffer, channels, effects;
std::vector<media::Point> mic_positions;
AudioLatency::LatencyType latency_tag;
+ base::Optional<media::AudioParameters::HardwareCapabilities>
+ hardware_capabilities;
if (!ReadParam(m, iter, &format) || !ReadParam(m, iter, &channel_layout) ||
!ReadParam(m, iter, &sample_rate) ||
!ReadParam(m, iter, &frames_per_buffer) ||
!ReadParam(m, iter, &channels) || !ReadParam(m, iter, &effects) ||
!ReadParam(m, iter, &mic_positions) ||
- !ReadParam(m, iter, &latency_tag)) {
+ !ReadParam(m, iter, &latency_tag) ||
+ !ReadParam(m, iter, &hardware_capabilities)) {
return false;
}
- AudioParameters params(format, channel_layout, sample_rate,
- frames_per_buffer);
- params.set_channels_for_discrete(channels);
- params.set_effects(effects);
- params.set_mic_positions(mic_positions);
- params.set_latency_tag(latency_tag);
+ if (hardware_capabilities) {
+ *r = AudioParameters(format, channel_layout, sample_rate, frames_per_buffer,
+ *hardware_capabilities);
+ } else {
+ *r =
+ AudioParameters(format, channel_layout, sample_rate, frames_per_buffer);
+ }
+
+ r->set_channels_for_discrete(channels);
+ r->set_effects(effects);
+ r->set_mic_positions(mic_positions);
+ r->set_latency_tag(latency_tag);
- *r = params;
return r->IsValid();
}
@@ -67,6 +76,33 @@ void ParamTraits<AudioParameters>::Log(const AudioParameters& p,
l->append(base::StringPrintf("<AudioParameters>"));
}
+void ParamTraits<AudioParameters::HardwareCapabilities>::Write(
+ base::Pickle* m,
+ const param_type& p) {
+ WriteParam(m, p.min_frames_per_buffer);
+ WriteParam(m, p.max_frames_per_buffer);
+}
+
+bool ParamTraits<AudioParameters::HardwareCapabilities>::Read(
+ const base::Pickle* m,
+ base::PickleIterator* iter,
+ param_type* r) {
+ int max_frames_per_buffer, min_frames_per_buffer;
+ if (!ReadParam(m, iter, &min_frames_per_buffer) ||
+ !ReadParam(m, iter, &max_frames_per_buffer)) {
+ return false;
+ }
+ r->min_frames_per_buffer = min_frames_per_buffer;
+ r->max_frames_per_buffer = max_frames_per_buffer;
+ return true;
+}
+
+void ParamTraits<AudioParameters::HardwareCapabilities>::Log(
+ const param_type& p,
+ std::string* l) {
+ l->append(base::StringPrintf("<AudioParameters::HardwareCapabilities>"));
+}
+
template <>
struct ParamTraits<media::EncryptionPattern> {
typedef media::EncryptionPattern param_type;
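
The HardwareCapabilities traits above follow the usual ParamTraits discipline: Write() and Read() must visit fields in exactly the same order, and the new optional field is appended after the existing AudioParameters fields. Below is a toy illustration of that write/read symmetry, with a plain byte buffer standing in for base::Pickle.

#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

// Minimal stand-in for base::Pickle: append-only buffer plus a read cursor.
struct Buffer {
  std::vector<uint8_t> bytes;
  size_t cursor = 0;

  void WriteInt(int32_t value) {
    const uint8_t* p = reinterpret_cast<const uint8_t*>(&value);
    bytes.insert(bytes.end(), p, p + sizeof(value));
  }
  bool ReadInt(int32_t* value) {
    if (cursor + sizeof(*value) > bytes.size())
      return false;  // Truncated payload; reject it like ParamTraits does.
    std::memcpy(value, bytes.data() + cursor, sizeof(*value));
    cursor += sizeof(*value);
    return true;
  }
};

struct HardwareCapabilities {
  int32_t min_frames_per_buffer = 0;
  int32_t max_frames_per_buffer = 0;
};

void Write(Buffer* m, const HardwareCapabilities& p) {
  m->WriteInt(p.min_frames_per_buffer);  // The order here...
  m->WriteInt(p.max_frames_per_buffer);
}

bool Read(Buffer* m, HardwareCapabilities* r) {
  return m->ReadInt(&r->min_frames_per_buffer) &&  // ...must match the order here.
         m->ReadInt(&r->max_frames_per_buffer);
}

int main() {
  Buffer buffer;
  Write(&buffer, {128, 4096});

  HardwareCapabilities round_tripped;
  if (Read(&buffer, &round_tripped)) {
    std::cout << round_tripped.min_frames_per_buffer << " "
              << round_tripped.max_frames_per_buffer << "\n";  // 128 4096
  }
}
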
diff --git a/chromium/media/base/ipc/media_param_traits.h b/chromium/media/base/ipc/media_param_traits.h
index 73275423552..b4205f5d22c 100644
--- a/chromium/media/base/ipc/media_param_traits.h
+++ b/chromium/media/base/ipc/media_param_traits.h
@@ -27,6 +27,16 @@ struct ParamTraits<media::AudioParameters> {
};
template <>
+struct ParamTraits<media::AudioParameters::HardwareCapabilities> {
+ typedef media::AudioParameters::HardwareCapabilities param_type;
+ static void Write(base::Pickle* m, const param_type& p);
+ static bool Read(const base::Pickle* m,
+ base::PickleIterator* iter,
+ param_type* r);
+ static void Log(const param_type& p, std::string* l);
+};
+
+template <>
struct ParamTraits<media::EncryptionScheme> {
typedef media::EncryptionScheme param_type;
static void Write(base::Pickle* m, const param_type& p);
diff --git a/chromium/media/base/ipc/media_param_traits_macros.h b/chromium/media/base/ipc/media_param_traits_macros.h
index 3507072c46d..26115f13b9d 100644
--- a/chromium/media/base/ipc/media_param_traits_macros.h
+++ b/chromium/media/base/ipc/media_param_traits_macros.h
@@ -5,6 +5,7 @@
#ifndef MEDIA_BASE_IPC_MEDIA_PARAM_TRAITS_MACROS_H_
#define MEDIA_BASE_IPC_MEDIA_PARAM_TRAITS_MACROS_H_
+#include "build/build_config.h"
#include "ipc/ipc_message_macros.h"
#include "media/base/audio_codecs.h"
#include "media/base/audio_parameters.h"
@@ -39,6 +40,10 @@
#include "media/media_buildflags.h"
#include "ui/gfx/ipc/color/gfx_param_traits_macros.h"
+#if defined(OS_ANDROID)
+#include "media/base/android/media_drm_key_type.h"
+#endif // defined(OS_ANDROID)
+
// Enum traits.
IPC_ENUM_TRAITS_MAX_VALUE(media::AudioCodec, media::AudioCodec::kAudioCodecMax)
@@ -78,8 +83,6 @@ IPC_ENUM_TRAITS_MAX_VALUE(media::CdmSessionType,
IPC_ENUM_TRAITS_MAX_VALUE(media::ChannelLayout, media::CHANNEL_LAYOUT_MAX)
-IPC_ENUM_TRAITS_MAX_VALUE(media::ColorSpace, media::COLOR_SPACE_MAX)
-
IPC_ENUM_TRAITS_MAX_VALUE(media::DecodeStatus,
media::DecodeStatus::DECODE_STATUS_MAX)
@@ -133,6 +136,12 @@ IPC_ENUM_TRAITS_MAX_VALUE(media::VideoRotation, media::VIDEO_ROTATION_MAX)
IPC_ENUM_TRAITS_MAX_VALUE(media::container_names::MediaContainerName,
media::container_names::CONTAINER_MAX);
+#if defined(OS_ANDROID)
+IPC_ENUM_TRAITS_MIN_MAX_VALUE(media::MediaDrmKeyType,
+ media::MediaDrmKeyType::MIN,
+ media::MediaDrmKeyType::MAX);
+#endif // defined(OS_ANDROID)
+
IPC_ENUM_TRAITS_VALIDATE(
media::VideoColorSpace::PrimaryID,
static_cast<int>(value) ==
diff --git a/chromium/media/base/key_systems.cc b/chromium/media/base/key_systems.cc
index b1657e57767..46cac7e4ff2 100644
--- a/chromium/media/base/key_systems.cc
+++ b/chromium/media/base/key_systems.cc
@@ -20,6 +20,7 @@
#include "media/base/media.h"
#include "media/base/media_client.h"
#include "media/base/media_switches.h"
+#include "media/base/mime_util.h"
#include "media/media_buildflags.h"
#include "third_party/widevine/cdm/widevine_cdm_common.h"
@@ -51,46 +52,68 @@ static const MimeTypeToCodecs kMimeTypeToCodecsMap[] = {
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
}; // namespace media
-struct NameToCodec {
- const char* name;
- EmeCodec codec;
-};
+EmeCodec ToAudioEmeCodec(AudioCodec codec) {
+ switch (codec) {
+ case kCodecAAC:
+ return EME_CODEC_AAC;
+ case kCodecVorbis:
+ return EME_CODEC_VORBIS;
+ case kCodecFLAC:
+ return EME_CODEC_FLAC;
+ case kCodecOpus:
+ return EME_CODEC_OPUS;
+ case kCodecEAC3:
+ return EME_CODEC_EAC3;
+ case kCodecAC3:
+ return EME_CODEC_AC3;
+ case kCodecMpegHAudio:
+ return EME_CODEC_MPEG_H_AUDIO;
+ default:
+ DVLOG(1) << "Unsupported AudioCodec " << codec;
+ return EME_CODEC_NONE;
+ }
+}
-// Mapping between codec names and enum values.
-static const NameToCodec kCodecMap[] = {
- {"opus", EME_CODEC_OPUS}, // Opus.
- {"vorbis", EME_CODEC_VORBIS}, // Vorbis.
- {"vp8", EME_CODEC_VP8}, // VP8.
- {"vp8.0", EME_CODEC_VP8}, // VP8.
- {"vp9", EME_CODEC_LEGACY_VP9}, // VP9.
- {"vp9.0", EME_CODEC_LEGACY_VP9}, // VP9.
- {"vp09", EME_CODEC_VP9}, // New multi-part VP9 for WebM and MP4.
- {"flac", EME_CODEC_FLAC}, // FLAC.
-#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- {"mp4a", EME_CODEC_AAC}, // AAC.
-#if BUILDFLAG(ENABLE_AC3_EAC3_AUDIO_DEMUXING)
- {"ac-3", EME_CODEC_AC3}, // AC3.
- {"ec-3", EME_CODEC_EAC3}, // EAC3.
-#endif
-#if BUILDFLAG(ENABLE_MPEG_H_AUDIO_DEMUXING)
- {"mhm1", EME_CODEC_MPEG_H_AUDIO}, // MPEG-H Audio.
-#endif
- {"avc1", EME_CODEC_AVC1}, // AVC1 for MP4 and MP2T
- {"avc3", EME_CODEC_AVC1}, // AVC3 for MP4 and MP2T
-#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
- {"hev1", EME_CODEC_HEVC}, // HEV1.
- {"hvc1", EME_CODEC_HEVC}, // HVC1.
-#endif
-#if BUILDFLAG(ENABLE_DOLBY_VISION_DEMUXING)
- {"dva1", EME_CODEC_DOLBY_VISION_AVC}, // DolbyVision AVC
- {"dvav", EME_CODEC_DOLBY_VISION_AVC}, // DolbyVision AVC
-#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
- {"dvh1", EME_CODEC_DOLBY_VISION_HEVC}, // DolbyVision HEVC
- {"dvhe", EME_CODEC_DOLBY_VISION_HEVC}, // DolbyVision HEVC
-#endif
-#endif
-#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
-};
+EmeCodec ToVideoEmeCodec(VideoCodec codec, VideoCodecProfile profile) {
+ switch (codec) {
+ case kCodecH264:
+ return EME_CODEC_AVC1;
+ case kCodecVP8:
+ return EME_CODEC_VP8;
+ case kCodecVP9:
+ // ParseVideoCodecString() returns VIDEO_CODEC_PROFILE_UNKNOWN for "vp9"
+ // and "vp9.0". Since these codecs are essentially the same as profile 0,
+ // return EME_CODEC_VP9_PROFILE0.
+ if (profile == VP9PROFILE_PROFILE0 ||
+ profile == VIDEO_CODEC_PROFILE_UNKNOWN) {
+ return EME_CODEC_VP9_PROFILE0;
+ } else if (profile == VP9PROFILE_PROFILE2) {
+ return EME_CODEC_VP9_PROFILE2;
+ } else {
+ // Profile 1 and 3 not supported by EME. See https://crbug.com/898298.
+ return EME_CODEC_NONE;
+ }
+ case kCodecHEVC:
+ return EME_CODEC_HEVC;
+ case kCodecDolbyVision:
+ // Only profiles 0, 4, 5 and 7 are valid. Profile 0 is encoded based on
+ // AVC, while profiles 4, 5 and 7 are based on HEVC.
+ if (profile == DOLBYVISION_PROFILE0) {
+ return EME_CODEC_DOLBY_VISION_AVC;
+ } else if (profile == DOLBYVISION_PROFILE4 ||
+ profile == DOLBYVISION_PROFILE5 ||
+ profile == DOLBYVISION_PROFILE7) {
+ return EME_CODEC_DOLBY_VISION_HEVC;
+ } else {
+ return EME_CODEC_NONE;
+ }
+ case kCodecAV1:
+ return EME_CODEC_AV1;
+ default:
+ DVLOG(1) << "Unsupported VideoCodec " << codec;
+ return EME_CODEC_NONE;
+ }
+}
class ClearKeyProperties : public KeySystemProperties {
public:
@@ -206,10 +229,11 @@ class KeySystemsImpl : public KeySystems {
std::string GetKeySystemNameForUMA(const std::string& key_system) const;
// These two functions are for testing purpose only.
- void AddCodecMask(EmeMediaType media_type,
- const std::string& codec,
- uint32_t mask);
- void AddMimeTypeCodecMask(const std::string& mime_type, uint32_t mask);
+ void AddCodecMaskForTesting(EmeMediaType media_type,
+ const std::string& codec,
+ uint32_t mask);
+ void AddMimeTypeCodecMaskForTesting(const std::string& mime_type,
+ uint32_t mask);
// Implementation of KeySystems interface.
bool IsSupportedKeySystem(const std::string& key_system) const override;
@@ -272,14 +296,21 @@ class KeySystemsImpl : public KeySystems {
// Potentially pass EmeMediaType and a container enum.
SupportedCodecs GetCodecMaskForMimeType(
const std::string& container_mime_type) const;
- EmeCodec GetCodecForString(const std::string& codec) const;
+
+ // Converts a full |codec_string| (e.g. vp09.02.10.10) to an EmeCodec. Returns
+ // EME_CODEC_NONE if the |codec_string| is invalid or not supported by EME.
+ EmeCodec GetEmeCodecForString(EmeMediaType media_type,
+ const std::string& container_mime_type,
+ const std::string& codec_string) const;
// Map from key system string to KeySystemProperties instance.
KeySystemPropertiesMap key_system_properties_map_;
// This member should only be modified by RegisterMimeType().
MimeTypeToCodecsMap mime_type_to_codecs_map_;
- CodecMap codec_map_;
+
+ // For unit test only.
+ CodecMap codec_map_for_testing_;
SupportedCodecs audio_codec_mask_;
SupportedCodecs video_codec_mask_;
@@ -301,11 +332,6 @@ KeySystemsImpl* KeySystemsImpl::GetInstance() {
KeySystemsImpl::KeySystemsImpl()
: audio_codec_mask_(EME_CODEC_AUDIO_ALL),
video_codec_mask_(EME_CODEC_VIDEO_ALL) {
- for (size_t i = 0; i < arraysize(kCodecMap); ++i) {
- const std::string& name = kCodecMap[i].name;
- DCHECK(!codec_map_.count(name));
- codec_map_[name] = kCodecMap[i].codec;
- }
for (size_t i = 0; i < arraysize(kMimeTypeToCodecsMap); ++i) {
RegisterMimeType(kMimeTypeToCodecsMap[i].mime_type,
kMimeTypeToCodecsMap[i].codecs);
@@ -327,11 +353,44 @@ SupportedCodecs KeySystemsImpl::GetCodecMaskForMimeType(
return iter->second;
}
-EmeCodec KeySystemsImpl::GetCodecForString(const std::string& codec) const {
- auto iter = codec_map_.find(codec);
- if (iter != codec_map_.end())
+EmeCodec KeySystemsImpl::GetEmeCodecForString(
+ EmeMediaType media_type,
+ const std::string& container_mime_type,
+ const std::string& codec_string) const {
+ // |is_ambiguous| is not checked below because MimeUtil declares "vp9" and
+ // "vp9.0" as ambiguous, but they have always been supported by EME.
+ // TODO(xhwang): Find out whether we should fix MimeUtil about these cases.
+ bool is_ambiguous = true;
+
+ // For testing only.
+ auto iter = codec_map_for_testing_.find(codec_string);
+ if (iter != codec_map_for_testing_.end())
return iter->second;
- return EME_CODEC_NONE;
+
+ if (media_type == EmeMediaType::AUDIO) {
+ AudioCodec audio_codec = kUnknownAudioCodec;
+ ParseAudioCodecString(container_mime_type, codec_string, &is_ambiguous,
+ &audio_codec);
+ DVLOG(3) << "Audio codec = " << audio_codec;
+ return ToAudioEmeCodec(audio_codec);
+ }
+
+ DCHECK_EQ(media_type, EmeMediaType::VIDEO);
+
+ // In general EmeCodec doesn't care about codec profiles and assumes the same
+ // level of profile support as Chromium, which is checked in
+ // KeySystemConfigSelector::IsSupportedContentType(). However, there are a few
+ // exceptions where we need to know the profile. For example, for VP9, there
+ // are older CDMs only supporting profile 0, hence EmeCodec differentiates
+ // between VP9 profile 0 and higher profiles.
+ VideoCodec video_codec = kUnknownVideoCodec;
+ VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
+ uint8_t level = 0;
+ VideoColorSpace color_space;
+ ParseVideoCodecString(container_mime_type, codec_string, &is_ambiguous,
+ &video_codec, &profile, &level, &color_space);
+ DVLOG(3) << "Video codec = " << video_codec << ", profile = " << profile;
+ return ToVideoEmeCodec(video_codec, profile);
}
void KeySystemsImpl::UpdateIfNeeded() {
@@ -528,12 +587,12 @@ std::string KeySystemsImpl::GetKeySystemNameForUMA(
return kUnknownKeySystemNameForUMA;
}
-void KeySystemsImpl::AddCodecMask(EmeMediaType media_type,
- const std::string& codec,
- uint32_t mask) {
+void KeySystemsImpl::AddCodecMaskForTesting(EmeMediaType media_type,
+ const std::string& codec,
+ uint32_t mask) {
DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK(!codec_map_.count(codec));
- codec_map_[codec] = static_cast<EmeCodec>(mask);
+ DCHECK(!codec_map_for_testing_.count(codec));
+ codec_map_for_testing_[codec] = static_cast<EmeCodec>(mask);
if (media_type == EmeMediaType::AUDIO) {
audio_codec_mask_ |= mask;
} else {
@@ -541,8 +600,9 @@ void KeySystemsImpl::AddCodecMask(EmeMediaType media_type,
}
}
-void KeySystemsImpl::AddMimeTypeCodecMask(const std::string& mime_type,
- uint32_t codecs_mask) {
+void KeySystemsImpl::AddMimeTypeCodecMaskForTesting(
+ const std::string& mime_type,
+ uint32_t codecs_mask) {
RegisterMimeType(mime_type, static_cast<EmeCodec>(codecs_mask));
}
@@ -606,7 +666,7 @@ EmeConfigRule KeySystemsImpl::GetContentTypeConfigRule(
SupportedCodecs mime_type_codec_mask =
GetCodecMaskForMimeType(container_mime_type);
if ((key_system_codec_mask & mime_type_codec_mask) == 0) {
- DVLOG(2) << " Container " << container_mime_type << " not supported by "
+ DVLOG(2) << "Container " << container_mime_type << " not supported by "
<< key_system;
return EmeConfigRule::NOT_SUPPORTED;
}
@@ -619,9 +679,19 @@ EmeConfigRule KeySystemsImpl::GetContentTypeConfigRule(
// no | any | NOT_SUPPORTED
EmeConfigRule support = EmeConfigRule::SUPPORTED;
for (size_t i = 0; i < codecs.size(); i++) {
- SupportedCodecs codec = GetCodecForString(codecs[i]);
- if ((codec & key_system_codec_mask & mime_type_codec_mask) == 0) {
- DVLOG(2) << " Container/codec pair (" << container_mime_type << " / "
+ EmeCodec codec =
+ GetEmeCodecForString(media_type, container_mime_type, codecs[i]);
+ if (codec == EME_CODEC_NONE) {
+ DVLOG(2) << "Unsupported codec string \"" << codecs[i] << "\"";
+ return EmeConfigRule::NOT_SUPPORTED;
+ }
+
+ // Currently all EmeCodecs only have one bit set. In case there could be
+ // codecs with multiple bits set, e.g. to cover multiple profiles, we check
+ // (codec & mask) == codec instead of (codec & mask) != 0 to make sure all
+ // bits are set. Same below.
+ if ((codec & key_system_codec_mask & mime_type_codec_mask) != codec) {
+ DVLOG(2) << "Container/codec pair (" << container_mime_type << " / "
<< codecs[i] << ") not supported by " << key_system;
return EmeConfigRule::NOT_SUPPORTED;
}
@@ -634,7 +704,7 @@ EmeConfigRule KeySystemsImpl::GetContentTypeConfigRule(
// to consider codecs that are only supported in hardware-secure mode. We
// could do so, and make use of HW_SECURE_CODECS_REQUIRED, if it turns out
// that hardware-secure-only codecs actually exist and are useful.
- if ((codec & key_system_hw_secure_codec_mask) == 0)
+ if ((codec & key_system_hw_secure_codec_mask) != codec)
support = EmeConfigRule::HW_SECURE_CODECS_NOT_ALLOWED;
}
@@ -730,15 +800,17 @@ bool CanUseAesDecryptor(const std::string& key_system) {
// "media" where "UNIT_TEST" is not defined. So we need to specify
// "MEDIA_EXPORT" here again so that they are visible to tests.
-MEDIA_EXPORT void AddCodecMask(EmeMediaType media_type,
- const std::string& codec,
- uint32_t mask) {
- KeySystemsImpl::GetInstance()->AddCodecMask(media_type, codec, mask);
+MEDIA_EXPORT void AddCodecMaskForTesting(EmeMediaType media_type,
+ const std::string& codec,
+ uint32_t mask) {
+ KeySystemsImpl::GetInstance()->AddCodecMaskForTesting(media_type, codec,
+ mask);
}
-MEDIA_EXPORT void AddMimeTypeCodecMask(const std::string& mime_type,
- uint32_t mask) {
- KeySystemsImpl::GetInstance()->AddMimeTypeCodecMask(mime_type, mask);
+MEDIA_EXPORT void AddMimeTypeCodecMaskForTesting(const std::string& mime_type,
+ uint32_t mask) {
+ KeySystemsImpl::GetInstance()->AddMimeTypeCodecMaskForTesting(mime_type,
+ mask);
}
} // namespace media
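
The stricter mask check above can be illustrated with a small standalone sketch; the bit values below are hypothetical and are not the real EmeCodec assignments.

#include <cstdint>

// Hypothetical bit assignments, for illustration only.
constexpr uint32_t kVp9Profile0Bit = 1u << 0;
constexpr uint32_t kVp9Profile2Bit = 1u << 1;

// A future codec entry covering several profiles could set multiple bits.
constexpr uint32_t kVp9AllProfiles = kVp9Profile0Bit | kVp9Profile2Bit;

// A key system that only supports profile 0.
constexpr uint32_t kKeySystemMask = kVp9Profile0Bit;

// (codec & mask) != 0 would wrongly treat the multi-profile entry as
// supported; (codec & mask) == codec correctly rejects it.
static_assert((kVp9AllProfiles & kKeySystemMask) != 0, "loose check passes");
static_assert((kVp9AllProfiles & kKeySystemMask) != kVp9AllProfiles,
              "strict check fails, as intended");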
diff --git a/chromium/media/base/key_systems.h b/chromium/media/base/key_systems.h
index 8dd05fbe165..d9e0205e384 100644
--- a/chromium/media/base/key_systems.h
+++ b/chromium/media/base/key_systems.h
@@ -44,7 +44,7 @@ class MEDIA_EXPORT KeySystems {
const std::string& key_system,
EncryptionMode encryption_scheme) const = 0;
- // Returns the configuration rule for supporting a container and list of
+ // Returns the configuration rule for supporting a container and a list of
// codecs.
virtual EmeConfigRule GetContentTypeConfigRule(
const std::string& key_system,
@@ -94,13 +94,13 @@ MEDIA_EXPORT bool CanUseAesDecryptor(const std::string& key_system);
#if defined(UNIT_TEST)
// Helper functions to add container/codec types for testing purposes.
-// Call AddCodecMask() first to ensure the mask values passed to
-// AddMimeTypeCodecMask() already exist.
-MEDIA_EXPORT void AddCodecMask(EmeMediaType media_type,
- const std::string& codec,
- uint32_t mask);
-MEDIA_EXPORT void AddMimeTypeCodecMask(const std::string& mime_type,
- uint32_t mask);
+// Call AddCodecMaskForTesting() first to ensure the mask values passed to
+// AddMimeTypeCodecMaskForTesting() already exist.
+MEDIA_EXPORT void AddCodecMaskForTesting(EmeMediaType media_type,
+ const std::string& codec,
+ uint32_t mask);
+MEDIA_EXPORT void AddMimeTypeCodecMaskForTesting(const std::string& mime_type,
+ uint32_t mask);
#endif // defined(UNIT_TEST)
} // namespace media
diff --git a/chromium/media/base/key_systems_unittest.cc b/chromium/media/base/key_systems_unittest.cc
index dc6f2ff72a4..cbfc657bf76 100644
--- a/chromium/media/base/key_systems_unittest.cc
+++ b/chromium/media/base/key_systems_unittest.cc
@@ -228,12 +228,12 @@ void AddContainerAndCodecMasksForTest() {
if (is_test_masks_added)
return;
- AddCodecMask(EmeMediaType::AUDIO, "fooaudio", TEST_CODEC_FOO_AUDIO);
- AddCodecMask(EmeMediaType::VIDEO, "foovideo", TEST_CODEC_FOO_VIDEO);
- AddCodecMask(EmeMediaType::VIDEO, "securefoovideo",
- TEST_CODEC_FOO_SECURE_VIDEO);
- AddMimeTypeCodecMask("audio/foo", TEST_CODEC_FOO_AUDIO_ALL);
- AddMimeTypeCodecMask("video/foo", TEST_CODEC_FOO_VIDEO_ALL);
+ AddCodecMaskForTesting(EmeMediaType::AUDIO, "fooaudio", TEST_CODEC_FOO_AUDIO);
+ AddCodecMaskForTesting(EmeMediaType::VIDEO, "foovideo", TEST_CODEC_FOO_VIDEO);
+ AddCodecMaskForTesting(EmeMediaType::VIDEO, "securefoovideo",
+ TEST_CODEC_FOO_SECURE_VIDEO);
+ AddMimeTypeCodecMaskForTesting("audio/foo", TEST_CODEC_FOO_AUDIO_ALL);
+ AddMimeTypeCodecMaskForTesting("video/foo", TEST_CODEC_FOO_VIDEO_ALL);
is_test_masks_added = true;
}
diff --git a/chromium/media/base/limits.h b/chromium/media/base/limits.h
index 5b2cdadebaa..2b3e81196cd 100644
--- a/chromium/media/base/limits.h
+++ b/chromium/media/base/limits.h
@@ -74,6 +74,10 @@ enum {
// Maximum buffer size supported by Web Audio.
kMaxWebAudioBufferSize = 8192,
+
+ // Bounds for the number of threads used for software video decoding.
+ kMinVideoDecodeThreads = 2,
+ kMaxVideoDecodeThreads = 32,
};
} // namespace limits
diff --git a/chromium/media/base/media_client.h b/chromium/media/base/media_client.h
index 0b1bd6b5c1c..da43b06b059 100644
--- a/chromium/media/base/media_client.h
+++ b/chromium/media/base/media_client.h
@@ -27,7 +27,7 @@ class MediaClient;
MEDIA_EXPORT void SetMediaClient(MediaClient* media_client);
// Media's embedder API should only be used by media.
-#if defined(MEDIA_IMPLEMENTATION) || defined(MEDIA_BLINK_IMPLEMENTATION)
+#if defined(IS_MEDIA_IMPL) || defined(MEDIA_BLINK_IMPLEMENTATION)
// Getter for the client. Returns NULL if no customized client is needed.
MEDIA_EXPORT MediaClient* GetMediaClient();
#endif
diff --git a/chromium/media/base/media_export.h b/chromium/media/base/media_export.h
index 48f6f45e59a..08091844ada 100644
--- a/chromium/media/base/media_export.h
+++ b/chromium/media/base/media_export.h
@@ -5,28 +5,8 @@
#ifndef MEDIA_BASE_MEDIA_EXPORT_H_
#define MEDIA_BASE_MEDIA_EXPORT_H_
-// Define MEDIA_EXPORT so that functionality implemented by the Media module
-// can be exported to consumers.
+#include "base/component_export.h"
-#if defined(COMPONENT_BUILD)
-#if defined(WIN32)
-
-#if defined(MEDIA_IMPLEMENTATION)
-#define MEDIA_EXPORT __declspec(dllexport)
-#else
-#define MEDIA_EXPORT __declspec(dllimport)
-#endif // defined(MEDIA_IMPLEMENTATION)
-
-#else // defined(WIN32)
-#if defined(MEDIA_IMPLEMENTATION)
-#define MEDIA_EXPORT __attribute__((visibility("default")))
-#else
-#define MEDIA_EXPORT
-#endif
-#endif
-
-#else // defined(COMPONENT_BUILD)
-#define MEDIA_EXPORT
-#endif
+#define MEDIA_EXPORT COMPONENT_EXPORT(MEDIA)
#endif // MEDIA_BASE_MEDIA_EXPORT_H_
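
For context, this is the usual shape of a component export header built on base/component_export.h; the FOO names below are illustrative and not part of this patch.

// foo_export.h (illustrative)
#ifndef FOO_EXPORT_H_
#define FOO_EXPORT_H_

#include "base/component_export.h"

// The component's own build target defines IS_FOO_IMPL (analogous to
// IS_MEDIA_IMPL seen in media_client.h above), and COMPONENT_EXPORT() expands
// to the right dllexport/dllimport or visibility attribute per platform and
// build type.
#define FOO_EXPORT COMPONENT_EXPORT(FOO)

#endif  // FOO_EXPORT_H_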
diff --git a/chromium/media/base/media_log.cc b/chromium/media/base/media_log.cc
index 9d7da2e1ccd..c3990189754 100644
--- a/chromium/media/base/media_log.cc
+++ b/chromium/media/base/media_log.cc
@@ -192,7 +192,7 @@ MediaLog::~MediaLog() {
}
void MediaLog::AddEvent(std::unique_ptr<MediaLogEvent> event) {
- base::AutoLock auto_lock(lock());
+ base::AutoLock auto_lock(parent_log_record_->lock);
// Forward to the parent log's implementation.
if (parent_log_record_->media_log)
parent_log_record_->media_log->AddEventLocked(std::move(event));
@@ -201,7 +201,7 @@ void MediaLog::AddEvent(std::unique_ptr<MediaLogEvent> event) {
void MediaLog::AddEventLocked(std::unique_ptr<MediaLogEvent> event) {}
std::string MediaLog::GetErrorMessage() {
- base::AutoLock auto_lock(lock());
+ base::AutoLock auto_lock(parent_log_record_->lock);
// Forward to the parent log's implementation.
if (parent_log_record_->media_log)
return parent_log_record_->media_log->GetErrorMessageLocked();
@@ -214,7 +214,7 @@ std::string MediaLog::GetErrorMessageLocked() {
}
void MediaLog::RecordRapporWithSecurityOrigin(const std::string& metric) {
- base::AutoLock auto_lock(lock());
+ base::AutoLock auto_lock(parent_log_record_->lock);
// Forward to the parent log's implementation.
if (parent_log_record_->media_log)
parent_log_record_->media_log->RecordRapporWithSecurityOriginLocked(metric);
@@ -370,7 +370,7 @@ MediaLog::ParentLogRecord::ParentLogRecord(MediaLog* log) : media_log(log) {}
MediaLog::ParentLogRecord::~ParentLogRecord() = default;
void MediaLog::InvalidateLog() {
- base::AutoLock auto_lock(lock());
+ base::AutoLock auto_lock(parent_log_record_->lock);
// It's almost certainly unintentional to invalidate a parent log.
DCHECK(parent_log_record_->media_log == nullptr ||
parent_log_record_->media_log == this);
diff --git a/chromium/media/base/media_log.h b/chromium/media/base/media_log.h
index f59819f6455..3c529ed38d0 100644
--- a/chromium/media/base/media_log.h
+++ b/chromium/media/base/media_log.h
@@ -15,6 +15,7 @@
#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
+#include "base/thread_annotations.h"
#include "media/base/buffering_state.h"
#include "media/base/media_export.h"
#include "media/base/media_log_event.h"
@@ -161,7 +162,7 @@ class MEDIA_EXPORT MediaLog {
base::Lock lock;
// Original media log, or null.
- MediaLog* media_log = nullptr;
+ MediaLog* media_log GUARDED_BY(lock) = nullptr;
protected:
friend class base::RefCountedThreadSafe<ParentLogRecord>;
@@ -174,10 +175,6 @@ class MEDIA_EXPORT MediaLog {
MediaLog(scoped_refptr<ParentLogRecord> parent_log_record);
private:
- // Return a lock that will be taken during InvalidateLog on the parent log,
- // and before calls to the *Locked methods.
- base::Lock& lock() { return parent_log_record_->lock; }
-
// The underlying media log.
scoped_refptr<ParentLogRecord> parent_log_record_;
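
As a reminder of what the new GUARDED_BY annotation buys, a minimal sketch (the class below is illustrative): when clang's thread-safety analysis is enabled, accesses to the annotated member are flagged unless the named lock is held.

#include "base/synchronization/lock.h"
#include "base/thread_annotations.h"

class Counter {
 public:
  void Increment() {
    base::AutoLock auto_lock(lock_);
    ++value_;  // OK: |lock_| is held.
  }

  int UnsafeRead() {
    return value_;  // Warning from -Wthread-safety: |lock_| is not held.
  }

 private:
  base::Lock lock_;
  int value_ GUARDED_BY(lock_) = 0;
};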
diff --git a/chromium/media/base/media_switches.cc b/chromium/media/base/media_switches.cc
index 41bfa91724b..b0c5e587faf 100644
--- a/chromium/media/base/media_switches.cc
+++ b/chromium/media/base/media_switches.cc
@@ -13,7 +13,7 @@ namespace switches {
const char kAudioBufferSize[] = "audio-buffer-size";
// Set a timeout (in milliseconds) for the audio service to quit if there are no
-// client connections to it. If the value is zero the service never quits.
+// client connections to it. If the value is negative the service never quits.
const char kAudioServiceQuitTimeoutMs[] = "audio-service-quit-timeout-ms";
// Command line flag name to set the autoplay policy.
@@ -195,10 +195,6 @@ const char kUserGestureRequiredForCrossOriginPolicy[] =
namespace media {
-// Use new audio rendering mixer.
-const base::Feature kNewAudioRenderingMixingStrategy{
- "NewAudioRenderingMixingStrategy", base::FEATURE_DISABLED_BY_DEFAULT};
-
// Only used for disabling overlay fullscreen (aka SurfaceView) in Clank.
const base::Feature kOverlayFullscreenVideo{"overlay-fullscreen-video",
base::FEATURE_ENABLED_BY_DEFAULT};
@@ -228,6 +224,10 @@ const base::Feature kResumeBackgroundVideo {
#endif
};
+// Enable Media Capabilities with finch-parameters.
+const base::Feature kMediaCapabilitiesWithParameters{
+ "MediaCapabilitiesWithParameters", base::FEATURE_ENABLED_BY_DEFAULT};
+
// Display the Cast overlay button on the media controls.
const base::Feature kMediaCastOverlayButton{"MediaCastOverlayButton",
base::FEATURE_ENABLED_BY_DEFAULT};
@@ -241,10 +241,6 @@ const base::Feature kUseAndroidOverlay{"UseAndroidOverlay",
const base::Feature kUseAndroidOverlayAggressively{
"UseAndroidOverlayAggressively", base::FEATURE_ENABLED_BY_DEFAULT};
-// Enables playback of AV1 video files.
-const base::Feature kAv1Decoder{"Av1Decoder",
- base::FEATURE_ENABLED_BY_DEFAULT};
-
// Let video track be unselected when video is playing in the background.
const base::Feature kBackgroundSrcVideoTrackOptimization{
"BackgroundSrcVideoTrackOptimization", base::FEATURE_DISABLED_BY_DEFAULT};
@@ -259,14 +255,15 @@ const base::Feature kBackgroundVideoPauseOptimization{
const base::Feature kMemoryPressureBasedSourceBufferGC{
"MemoryPressureBasedSourceBufferGC", base::FEATURE_DISABLED_BY_DEFAULT};
-// Enable MojoVideoDecoder. On Android, we use this by default. Elsewhere,
-// it's experimental.
+// Enable MojoVideoDecoder, replacing GpuVideoDecoder.
const base::Feature kMojoVideoDecoder {
"MojoVideoDecoder",
-#if defined(OS_ANDROID)
- base::FEATURE_ENABLED_BY_DEFAULT
-#else
+#if defined(OS_CHROMEOS)
+ // TODO(posciak): Re-enable once the feature is verified on CrOS.
+ // https://crbug.com/902968.
base::FEATURE_DISABLED_BY_DEFAULT
+#else
+ base::FEATURE_ENABLED_BY_DEFAULT
#endif
};
@@ -341,7 +338,7 @@ const base::Feature kUseSurfaceLayerForVideoPIP{
// Enable VA-API hardware encode acceleration for VP8.
const base::Feature kVaapiVP8Encoder{"VaapiVP8Encoder",
- base::FEATURE_DISABLED_BY_DEFAULT};
+ base::FEATURE_ENABLED_BY_DEFAULT};
// Inform video blitter of video color space.
const base::Feature kVideoBlitColorAccuracy{"video-blit-color-accuracy",
@@ -360,6 +357,10 @@ const base::Feature kExternalClearKeyForTesting{
const base::Feature kHardwareSecureDecryption{
"HardwareSecureDecryption", base::FEATURE_DISABLED_BY_DEFAULT};
+// Enables handling of hardware media keys for controlling media.
+const base::Feature kHardwareMediaKeyHandling{
+ "HardwareMediaKeyHandling", base::FEATURE_DISABLED_BY_DEFAULT};
+
// Limits number of media tags loading in parallel to 6. This speeds up
// preloading of any media that requires multiple requests to preload.
const base::Feature kLimitParallelMediaPreloading{
@@ -374,7 +375,7 @@ const base::Feature kLowDelayVideoRenderingOnLiveStream{
// autoplay policy will be hardcoded to be the legacy one based on the
// platform
const base::Feature kAutoplayIgnoreWebAudio{"AutoplayIgnoreWebAudio",
- base::FEATURE_ENABLED_BY_DEFAULT};
+ base::FEATURE_DISABLED_BY_DEFAULT};
// Whether we should show a setting to disable autoplay policy.
const base::Feature kAutoplayDisableSettings{"AutoplayDisableSettings",
@@ -382,7 +383,7 @@ const base::Feature kAutoplayDisableSettings{"AutoplayDisableSettings",
// Whether we should allow autoplay whitelisting via sounds settings.
const base::Feature kAutoplayWhitelistSettings{
- "AutoplayWhitelistSettings", base::FEATURE_DISABLED_BY_DEFAULT};
+ "AutoplayWhitelistSettings", base::FEATURE_ENABLED_BY_DEFAULT};
#if defined(OS_ANDROID)
// Enable a gesture to make the media controls expand into the display cutout.
diff --git a/chromium/media/base/media_switches.h b/chromium/media/base/media_switches.h
index 5bb7c850fd9..0a0655ea600 100644
--- a/chromium/media/base/media_switches.h
+++ b/chromium/media/base/media_switches.h
@@ -101,7 +101,6 @@ namespace media {
MEDIA_EXPORT extern const base::Feature kAutoplayIgnoreWebAudio;
MEDIA_EXPORT extern const base::Feature kAutoplayDisableSettings;
MEDIA_EXPORT extern const base::Feature kAutoplayWhitelistSettings;
-MEDIA_EXPORT extern const base::Feature kAv1Decoder;
MEDIA_EXPORT extern const base::Feature kBackgroundSrcVideoTrackOptimization;
MEDIA_EXPORT extern const base::Feature kBackgroundVideoPauseOptimization;
MEDIA_EXPORT extern const base::Feature kD3D11EncryptedMedia;
@@ -109,15 +108,16 @@ MEDIA_EXPORT extern const base::Feature kD3D11VP9Decoder;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoder;
MEDIA_EXPORT extern const base::Feature kExternalClearKeyForTesting;
MEDIA_EXPORT extern const base::Feature kFallbackAfterDecodeError;
+MEDIA_EXPORT extern const base::Feature kHardwareMediaKeyHandling;
MEDIA_EXPORT extern const base::Feature kHardwareSecureDecryption;
MEDIA_EXPORT extern const base::Feature kLimitParallelMediaPreloading;
MEDIA_EXPORT extern const base::Feature kLowDelayVideoRenderingOnLiveStream;
+MEDIA_EXPORT extern const base::Feature kMediaCapabilitiesWithParameters;
MEDIA_EXPORT extern const base::Feature kMediaCastOverlayButton;
MEDIA_EXPORT extern const base::Feature kMediaEngagementBypassAutoplayPolicies;
MEDIA_EXPORT extern const base::Feature kMemoryPressureBasedSourceBufferGC;
MEDIA_EXPORT extern const base::Feature kMojoVideoDecoder;
MEDIA_EXPORT extern const base::Feature kMseBufferByPts;
-MEDIA_EXPORT extern const base::Feature kNewAudioRenderingMixingStrategy;
MEDIA_EXPORT extern const base::Feature kNewEncodeCpuLoadEstimator;
MEDIA_EXPORT extern const base::Feature kNewRemotePlaybackPipeline;
MEDIA_EXPORT extern const base::Feature kOverflowIconsForMediaControls;
diff --git a/chromium/media/base/media_util.cc b/chromium/media/base/media_util.cc
index 2d2003cbe0e..501574d3301 100644
--- a/chromium/media/base/media_util.cc
+++ b/chromium/media/base/media_util.cc
@@ -4,10 +4,40 @@
#include "media/base/media_util.h"
+#include "base/metrics/histogram_macros.h"
#include "media/base/encryption_pattern.h"
namespace media {
+namespace {
+
+// Reported to UMA server. Do not renumber or reuse values.
+enum class MediaVideoHeight {
+ k360_OrLower,
+ k480,
+ k720,
+ k1080,
+ k1440,
+ k2160_OrHigher,
+ kMaxValue = k2160_OrHigher,
+};
+
+MediaVideoHeight GetMediaVideoHeight(int height) {
+ if (height <= 400)
+ return MediaVideoHeight::k360_OrLower;
+ if (height <= 600)
+ return MediaVideoHeight::k480;
+ if (height <= 900)
+ return MediaVideoHeight::k720;
+ if (height <= 1260)
+ return MediaVideoHeight::k1080;
+ if (height <= 1800)
+ return MediaVideoHeight::k1440;
+ return MediaVideoHeight::k2160_OrHigher;
+}
+
+} // namespace
+
std::vector<uint8_t> EmptyExtraData() {
return std::vector<uint8_t>();
}
@@ -21,4 +51,14 @@ EncryptionScheme AesCtrEncryptionScheme() {
EncryptionPattern());
}
+void ReportPepperVideoDecoderOutputPictureCountHW(int height) {
+ UMA_HISTOGRAM_ENUMERATION("Media.PepperVideoDecoderOutputPictureCount.HW",
+ GetMediaVideoHeight(height));
+}
+
+void ReportPepperVideoDecoderOutputPictureCountSW(int height) {
+ UMA_HISTOGRAM_ENUMERATION("Media.PepperVideoDecoderOutputPictureCount.SW",
+ GetMediaVideoHeight(height));
+}
+
} // namespace media
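
A quick worked illustration of the bucketing above (heights chosen arbitrarily); each threshold leaves headroom over the nominal resolution, so mildly non-standard frame heights still land in the nearest bucket.

ReportPepperVideoDecoderOutputPictureCountHW(360);   // -> k360_OrLower
ReportPepperVideoDecoderOutputPictureCountHW(768);   // -> k720   (768 <= 900)
ReportPepperVideoDecoderOutputPictureCountSW(1088);  // -> k1080  (1088 <= 1260)
ReportPepperVideoDecoderOutputPictureCountSW(2304);  // -> k2160_OrHigher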
diff --git a/chromium/media/base/media_util.h b/chromium/media/base/media_util.h
index 89ee54934ed..d5f26003256 100644
--- a/chromium/media/base/media_util.h
+++ b/chromium/media/base/media_util.h
@@ -24,6 +24,11 @@ MEDIA_EXPORT std::vector<uint8_t> EmptyExtraData();
MEDIA_EXPORT EncryptionScheme Unencrypted();
MEDIA_EXPORT EncryptionScheme AesCtrEncryptionScheme();
+// Helpers for PPAPI UMAs. There wasn't an obvious place to put them in
+// //content/renderer/pepper.
+MEDIA_EXPORT void ReportPepperVideoDecoderOutputPictureCountHW(int height);
+MEDIA_EXPORT void ReportPepperVideoDecoderOutputPictureCountSW(int height);
+
class MEDIA_EXPORT NullMediaLog : public media::MediaLog {
public:
NullMediaLog() = default;
diff --git a/chromium/media/base/mime_util_internal.cc b/chromium/media/base/mime_util_internal.cc
index e95d82a4a08..424e101d326 100644
--- a/chromium/media/base/mime_util_internal.cc
+++ b/chromium/media/base/mime_util_internal.cc
@@ -30,58 +30,64 @@
namespace media {
namespace internal {
+// A map from codec string to MimeUtil::Codec.
+using StringToCodecMap = base::flat_map<std::string, MimeUtil::Codec>;
+
// Wrapped to avoid static initializer startup cost.
-const base::flat_map<std::string, MimeUtil::Codec>& GetStringToCodecMap() {
- static const base::NoDestructor<base::flat_map<std::string, MimeUtil::Codec>>
- kStringToCodecMap(base::flat_map<std::string, MimeUtil::Codec>(
- {
- // We only allow this for WAV so it isn't ambiguous.
- {"1", MimeUtil::PCM},
- // avc1/avc3.XXXXXX may be unambiguous; handled by
- // ParseAVCCodecId(). hev1/hvc1.XXXXXX may be unambiguous;
- // handled by ParseHEVCCodecID(). vp9, vp9.0,
- // vp09.xx.xx.xx.xx.xx.xx.xx may be unambiguous; handled by
- // ParseVp9CodecID().
- {"mp3", MimeUtil::MP3},
- // Following is the list of RFC 6381 compliant audio codec
- // strings:
- // mp4a.66 - MPEG-2 AAC MAIN
- // mp4a.67 - MPEG-2 AAC LC
- // mp4a.68 - MPEG-2 AAC SSR
- // mp4a.69 - MPEG-2 extension to MPEG-1 (MP3)
- // mp4a.6B - MPEG-1 audio (MP3)
- // mp4a.40.2 - MPEG-4 AAC LC
- // mp4a.40.02 - MPEG-4 AAC LC (leading 0 in aud-oti for
- // compatibility)
- // mp4a.40.5 - MPEG-4 HE-AAC v1 (AAC LC + SBR)
- // mp4a.40.05 - MPEG-4 HE-AAC v1 (AAC LC + SBR) (leading 0
- // in aud-oti for compatibility)
- // mp4a.40.29 - MPEG-4 HE-AAC v2 (AAC LC + SBR + PS)
- {"mp4a.66", MimeUtil::MPEG2_AAC},
- {"mp4a.67", MimeUtil::MPEG2_AAC},
- {"mp4a.68", MimeUtil::MPEG2_AAC}, {"mp4a.69", MimeUtil::MP3},
- {"mp4a.6B", MimeUtil::MP3}, {"mp4a.40.2", MimeUtil::MPEG4_AAC},
- {"mp4a.40.02", MimeUtil::MPEG4_AAC},
- {"mp4a.40.5", MimeUtil::MPEG4_AAC},
- {"mp4a.40.05", MimeUtil::MPEG4_AAC},
- {"mp4a.40.29", MimeUtil::MPEG4_AAC},
-#if BUILDFLAG(ENABLE_AC3_EAC3_AUDIO_DEMUXING)
- // TODO(servolk): Strictly speaking only mp4a.A5 and mp4a.A6
- // codec ids are valid according to RFC 6381 section 3.3, 3.4.
- // Lower-case oti (mp4a.a5 and mp4a.a6) should be rejected. But
- // we used to allow those in older versions of Chromecast
- // firmware and some apps (notably MPL) depend on those codec
- // types being supported, so they should be allowed for now
- // (crbug.com/564960).
- {"ac-3", MimeUtil::AC3}, {"mp4a.a5", MimeUtil::AC3},
- {"mp4a.A5", MimeUtil::AC3}, {"ec-3", MimeUtil::EAC3},
- {"mp4a.a6", MimeUtil::EAC3}, {"mp4a.A6", MimeUtil::EAC3},
-#endif
- {"vorbis", MimeUtil::VORBIS}, {"opus", MimeUtil::OPUS},
- {"flac", MimeUtil::FLAC}, {"vp8", MimeUtil::VP8},
- {"vp8.0", MimeUtil::VP8}, {"theora", MimeUtil::THEORA},
- },
- base::KEEP_FIRST_OF_DUPES));
+const StringToCodecMap& GetStringToCodecMap() {
+ static const base::NoDestructor<StringToCodecMap> kStringToCodecMap({
+ // We only allow this for WAV so it isn't ambiguous.
+ {"1", MimeUtil::PCM},
+ // avc1/avc3.XXXXXX may be unambiguous; handled by
+ // ParseAVCCodecId(). hev1/hvc1.XXXXXX may be unambiguous;
+ // handled by ParseHEVCCodecID(). vp9, vp9.0,
+ // vp09.xx.xx.xx.xx.xx.xx.xx may be unambiguous; handled by
+ // ParseVp9CodecID().
+ {"mp3", MimeUtil::MP3},
+ // Following is the list of RFC 6381 compliant audio codec
+ // strings:
+ // mp4a.66 - MPEG-2 AAC MAIN
+ // mp4a.67 - MPEG-2 AAC LC
+ // mp4a.68 - MPEG-2 AAC SSR
+ // mp4a.69 - MPEG-2 extension to MPEG-1 (MP3)
+ // mp4a.6B - MPEG-1 audio (MP3)
+ // mp4a.40.2 - MPEG-4 AAC LC
+ // mp4a.40.02 - MPEG-4 AAC LC (leading 0 in aud-oti for
+ // compatibility)
+ // mp4a.40.5 - MPEG-4 HE-AAC v1 (AAC LC + SBR)
+ // mp4a.40.05 - MPEG-4 HE-AAC v1 (AAC LC + SBR) (leading 0
+ // in aud-oti for compatibility)
+ // mp4a.40.29 - MPEG-4 HE-AAC v2 (AAC LC + SBR + PS)
+ {"mp4a.66", MimeUtil::MPEG2_AAC},
+ {"mp4a.67", MimeUtil::MPEG2_AAC},
+ {"mp4a.68", MimeUtil::MPEG2_AAC},
+ {"mp4a.69", MimeUtil::MP3},
+ {"mp4a.6B", MimeUtil::MP3},
+ {"mp4a.40.2", MimeUtil::MPEG4_AAC},
+ {"mp4a.40.02", MimeUtil::MPEG4_AAC},
+ {"mp4a.40.5", MimeUtil::MPEG4_AAC},
+ {"mp4a.40.05", MimeUtil::MPEG4_AAC},
+ {"mp4a.40.29", MimeUtil::MPEG4_AAC},
+ // TODO(servolk): Strictly speaking only mp4a.A5 and mp4a.A6
+ // codec ids are valid according to RFC 6381 section 3.3, 3.4.
+ // Lower-case oti (mp4a.a5 and mp4a.a6) should be rejected. But
+ // we used to allow those in older versions of Chromecast
+ // firmware and some apps (notably MPL) depend on those codec
+ // types being supported, so they should be allowed for now
+ // (crbug.com/564960).
+ {"ac-3", MimeUtil::AC3},
+ {"mp4a.a5", MimeUtil::AC3},
+ {"mp4a.A5", MimeUtil::AC3},
+ {"ec-3", MimeUtil::EAC3},
+ {"mp4a.a6", MimeUtil::EAC3},
+ {"mp4a.A6", MimeUtil::EAC3},
+ {"vorbis", MimeUtil::VORBIS},
+ {"opus", MimeUtil::OPUS},
+ {"flac", MimeUtil::FLAC},
+ {"vp8", MimeUtil::VP8},
+ {"vp8.0", MimeUtil::VP8},
+ {"theora", MimeUtil::THEORA},
+ });
return *kStringToCodecMap;
}
@@ -114,7 +120,7 @@ static bool IsValidH264Level(uint8_t level_idc) {
(level_idc >= 20 && level_idc <= 22) ||
(level_idc >= 30 && level_idc <= 32) ||
(level_idc >= 40 && level_idc <= 42) ||
- (level_idc >= 50 && level_idc <= 51));
+ (level_idc >= 50 && level_idc <= 52));
}
MimeUtil::MimeUtil() : allow_proprietary_codecs_(false) {
@@ -275,8 +281,7 @@ void MimeUtil::AddSupportedMediaFormats() {
const CodecSet webm_audio_codecs{OPUS, VORBIS};
CodecSet webm_video_codecs{VP8, VP9};
#if BUILDFLAG(ENABLE_AV1_DECODER)
- if (base::FeatureList::IsEnabled(kAv1Decoder))
- webm_video_codecs.emplace(AV1);
+ webm_video_codecs.emplace(AV1);
#endif
CodecSet webm_codecs(webm_audio_codecs);
@@ -317,8 +322,7 @@ void MimeUtil::AddSupportedMediaFormats() {
#endif // BUILDFLAG(ENABLE_DOLBY_VISION_DEMUXING)
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
#if BUILDFLAG(ENABLE_AV1_DECODER)
- if (base::FeatureList::IsEnabled(kAv1Decoder))
- mp4_video_codecs.emplace(AV1);
+ mp4_video_codecs.emplace(AV1);
#endif
CodecSet mp4_codecs(mp4_audio_codecs);
@@ -815,8 +819,7 @@ bool MimeUtil::ParseCodecHelper(const std::string& mime_type_lower_case,
}
#if BUILDFLAG(ENABLE_AV1_DECODER)
- if (base::FeatureList::IsEnabled(kAv1Decoder) &&
- ParseAv1CodecId(codec_id, out_profile, out_level, out_color_space)) {
+ if (ParseAv1CodecId(codec_id, out_profile, out_level, out_color_space)) {
out_result->codec = MimeUtil::AV1;
return true;
}
@@ -904,13 +907,6 @@ SupportsType MimeUtil::IsCodecSupported(const std::string& mime_type_lower_case,
default:
ambiguous_platform_support = true;
}
- } else if (codec == MimeUtil::VP9 && video_profile != VP9PROFILE_PROFILE0 &&
- is_encrypted) {
- // LibVPX is not generally used for encrypted videos, so we do not know
- // whether higher profiles are supported.
- // TODO(chcunningham/xhwang): Add details to indicate which key system will
- // be used and check support by querying the matching KeySystemProperties.
- ambiguous_platform_support = true;
}
AudioCodec audio_codec = MimeUtilToAudioCodec(codec);
diff --git a/chromium/media/base/mock_audio_renderer_sink.cc b/chromium/media/base/mock_audio_renderer_sink.cc
index b35968196e8..4c7c656d135 100644
--- a/chromium/media/base/mock_audio_renderer_sink.cc
+++ b/chromium/media/base/mock_audio_renderer_sink.cc
@@ -4,6 +4,9 @@
#include "media/base/mock_audio_renderer_sink.h"
+#include "base/bind.h"
+#include "base/threading/sequenced_task_runner_handle.h"
+
namespace media {
MockAudioRendererSink::MockAudioRendererSink()
: MockAudioRendererSink(OUTPUT_DEVICE_STATUS_OK) {}
@@ -29,12 +32,11 @@ MockAudioRendererSink::MockAudioRendererSink(
MockAudioRendererSink::~MockAudioRendererSink() = default;
-void MockAudioRendererSink::SwitchOutputDevice(
- const std::string& device_id,
- const OutputDeviceStatusCB& callback) {
+void MockAudioRendererSink::SwitchOutputDevice(const std::string& device_id,
+ OutputDeviceStatusCB callback) {
// NB: output device won't be changed, since it's not required by any tests
// now.
- callback.Run(output_device_info_.device_status());
+ std::move(callback).Run(output_device_info_.device_status());
}
void MockAudioRendererSink::Initialize(const AudioParameters& params,
@@ -46,6 +48,12 @@ OutputDeviceInfo MockAudioRendererSink::GetOutputDeviceInfo() {
return output_device_info_;
}
+void MockAudioRendererSink::GetOutputDeviceInfoAsync(
+ OutputDeviceInfoCB info_cb) {
+ base::SequencedTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE, base::BindOnce(std::move(info_cb), output_device_info_));
+}
+
bool MockAudioRendererSink::IsOptimizedForHardwareParameters() {
return false;
}
diff --git a/chromium/media/base/mock_audio_renderer_sink.h b/chromium/media/base/mock_audio_renderer_sink.h
index 4551a2e76bc..3c84881284b 100644
--- a/chromium/media/base/mock_audio_renderer_sink.h
+++ b/chromium/media/base/mock_audio_renderer_sink.h
@@ -32,11 +32,12 @@ class MockAudioRendererSink : public SwitchableAudioRendererSink {
MOCK_METHOD0(CurrentThreadIsRenderingThread, bool());
OutputDeviceInfo GetOutputDeviceInfo() override;
+ void GetOutputDeviceInfoAsync(OutputDeviceInfoCB info_cb) override;
bool IsOptimizedForHardwareParameters() override;
void SwitchOutputDevice(const std::string& device_id,
- const OutputDeviceStatusCB& callback) override;
+ OutputDeviceStatusCB callback) override;
void Initialize(const AudioParameters& params,
RenderCallback* renderer) override;
AudioRendererSink::RenderCallback* callback() { return callback_; }
diff --git a/chromium/media/base/output_device_info.h b/chromium/media/base/output_device_info.h
index 96469d556bb..02bc309b64c 100644
--- a/chromium/media/base/output_device_info.h
+++ b/chromium/media/base/output_device_info.h
@@ -26,7 +26,7 @@ enum OutputDeviceStatus {
OUTPUT_DEVICE_STATUS_MAX = OUTPUT_DEVICE_STATUS_ERROR_INTERNAL
};
-using OutputDeviceStatusCB = base::Callback<void(OutputDeviceStatus)>;
+using OutputDeviceStatusCB = base::OnceCallback<void(OutputDeviceStatus)>;
// Output device information returned by
// AudioRendererSink::GetOutputDeviceInfo()
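
A minimal sketch of the updated contract (ExampleSink is illustrative): because OutputDeviceStatusCB is now a base::OnceCallback, implementations consume it with std::move() and run it at most once.

void ExampleSink::SwitchOutputDevice(const std::string& device_id,
                                     OutputDeviceStatusCB callback) {
  // ... attempt to switch to |device_id| ...
  std::move(callback).Run(OUTPUT_DEVICE_STATUS_OK);
  // |callback| is now null; invoking it again would be a bug.
}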
diff --git a/chromium/media/base/pipeline_impl.cc b/chromium/media/base/pipeline_impl.cc
index 55a0ca72bf6..2739aa7c1af 100644
--- a/chromium/media/base/pipeline_impl.cc
+++ b/chromium/media/base/pipeline_impl.cc
@@ -80,9 +80,10 @@ class PipelineImpl::RendererWrapper : public DemuxerHost,
base::OnceClosure change_completed_cb);
private:
- // Contains state shared between main and media thread.
- // Main thread can only read. Media thread can both - read and write.
- // So it is not necessary to lock when reading from the media thread.
+ // Contains state shared between the main and media threads. On the media
+ // thread each member can be read without locking, but writing requires
+ // locking. On the main thread reading requires a lock and writing is
+ // prohibited.
+ //
// This struct should only contain state that is not immediately needed by
// PipelineClient and can be cached on the media thread until queried.
// Alternatively we could cache it on the main thread by posting the
@@ -93,6 +94,10 @@ class PipelineImpl::RendererWrapper : public DemuxerHost,
struct SharedState {
// TODO(scherkus): Enforce that Renderer is only called on a single thread,
// even for accessing media time http://crbug.com/370634
+ //
+ // Note: Renderer implementations must support GetMediaTime() being called
+ // on both the main and media threads. RendererWrapper::GetMediaTime() calls
+ // it from the main thread (locked).
std::unique_ptr<Renderer> renderer;
// True when OnBufferedTimeRangesChanged() has been called more recently
@@ -161,6 +166,7 @@ class PipelineImpl::RendererWrapper : public DemuxerHost,
CdmContext* cdm_context_;
// Lock used to serialize |shared_state_|.
+ // TODO(crbug.com/893739): Add GUARDED_BY annotations.
mutable base::Lock shared_state_lock_;
// State shared between main and media thread.
@@ -232,12 +238,12 @@ void PipelineImpl::RendererWrapper::Start(
SetState(kStarting);
DCHECK(!demuxer_);
- DCHECK(!shared_state_.renderer);
DCHECK(!renderer_ended_);
DCHECK(!text_renderer_ended_);
demuxer_ = demuxer;
{
base::AutoLock auto_lock(shared_state_lock_);
+ DCHECK(!shared_state_.renderer);
shared_state_.renderer = std::move(renderer);
}
weak_pipeline_ = weak_pipeline;
@@ -353,7 +359,6 @@ void PipelineImpl::RendererWrapper::Suspend() {
OnPipelineError(PIPELINE_ERROR_INVALID_STATE);
return;
}
- DCHECK(shared_state_.renderer);
DCHECK(!pending_callbacks_.get());
SetState(kSuspending);
@@ -362,6 +367,7 @@ void PipelineImpl::RendererWrapper::Suspend() {
shared_state_.renderer->SetPlaybackRate(0.0);
{
base::AutoLock auto_lock(shared_state_lock_);
+ DCHECK(shared_state_.renderer);
shared_state_.suspend_timestamp = shared_state_.renderer->GetMediaTime();
DCHECK(shared_state_.suspend_timestamp != kNoTimestamp);
}
@@ -388,13 +394,13 @@ void PipelineImpl::RendererWrapper::Resume(std::unique_ptr<Renderer> renderer,
OnPipelineError(PIPELINE_ERROR_INVALID_STATE);
return;
}
- DCHECK(!shared_state_.renderer);
DCHECK(!pending_callbacks_.get());
SetState(kResuming);
{
base::AutoLock auto_lock(shared_state_lock_);
+ DCHECK(!shared_state_.renderer);
shared_state_.renderer = std::move(renderer);
}
@@ -535,6 +541,7 @@ void PipelineImpl::RendererWrapper::OnEnded() {
// TODO(crbug/817089): Combine this functionality into renderer->GetMediaTime().
base::TimeDelta PipelineImpl::RendererWrapper::GetCurrentTimestamp() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK(demuxer_);
DCHECK(shared_state_.renderer || state_ != kPlaying);
@@ -904,9 +911,10 @@ void PipelineImpl::RendererWrapper::InitializeRenderer(
break;
}
- if (cdm_context_)
+ if (cdm_context_) {
shared_state_.renderer->SetCdm(cdm_context_,
base::Bind(&IgnoreCdmAttached));
+ }
shared_state_.renderer->Initialize(demuxer_, this, done_cb);
}
diff --git a/chromium/media/base/renderer.h b/chromium/media/base/renderer.h
index 075ae48ff90..07e772bdb5e 100644
--- a/chromium/media/base/renderer.h
+++ b/chromium/media/base/renderer.h
@@ -56,6 +56,8 @@ class MEDIA_EXPORT Renderer {
virtual void SetVolume(float volume) = 0;
// Returns the current media time.
+ //
+ // This method must be safe to call from any thread.
virtual base::TimeDelta GetMediaTime() = 0;
// Provides a list of DemuxerStreams correlating to the tracks which should
diff --git a/chromium/media/base/scoped_async_trace.cc b/chromium/media/base/scoped_async_trace.cc
new file mode 100644
index 00000000000..cb525c79306
--- /dev/null
+++ b/chromium/media/base/scoped_async_trace.cc
@@ -0,0 +1,32 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/scoped_async_trace.h"
+
+#include "base/memory/ptr_util.h"
+#include "base/trace_event/trace_event.h"
+
+namespace media {
+
+namespace {
+constexpr const char kCategory[] = "media";
+} // namespace
+
+// static
+std::unique_ptr<ScopedAsyncTrace> ScopedAsyncTrace::CreateIfEnabled(
+ const char* name) {
+ bool enabled = false;
+ TRACE_EVENT_CATEGORY_GROUP_ENABLED(kCategory, &enabled);
+ return enabled ? base::WrapUnique(new ScopedAsyncTrace(name)) : nullptr;
+}
+
+ScopedAsyncTrace::ScopedAsyncTrace(const char* name) : name_(name) {
+ TRACE_EVENT_ASYNC_BEGIN0(kCategory, name_, this);
+}
+
+ScopedAsyncTrace::~ScopedAsyncTrace() {
+ TRACE_EVENT_ASYNC_END0(kCategory, name_, this);
+}
+
+} // namespace media
diff --git a/chromium/media/base/scoped_async_trace.h b/chromium/media/base/scoped_async_trace.h
new file mode 100644
index 00000000000..b82df2eabd8
--- /dev/null
+++ b/chromium/media/base/scoped_async_trace.h
@@ -0,0 +1,43 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_SCOPED_ASYNC_TRACE_H_
+#define MEDIA_BASE_SCOPED_ASYNC_TRACE_H_
+
+#include <memory>
+#include <string>
+
+#include "base/macros.h"
+#include "media/base/media_export.h"
+
+namespace media {
+
+// Utility class that starts and stops an async trace event. The intention is
+// that it will be created somewhere to start the trace event, passed around
+// (e.g. as a unique_ptr argument in a callback), and eventually freed to end
+// the trace event. This guarantees that it'll be closed, even if the callback
+// is destroyed without being run.
+class MEDIA_EXPORT ScopedAsyncTrace {
+ public:
+ // Create a ScopedAsyncTrace if tracing for "media" is enabled, else return
+ // nullptr. |name| is used as the name of the trace event.
+ // IMPORTANT: |name| must outlive |this|, since tracing needs it. In
+ // other words, use literal strings only. See trace_event_common.h.
+ static std::unique_ptr<ScopedAsyncTrace> CreateIfEnabled(const char* name);
+
+ ~ScopedAsyncTrace();
+
+ // TODO(liberato): Add StepInto / StepPast.
+
+ private:
+ explicit ScopedAsyncTrace(const char* name);
+
+ const char* name_ = nullptr;
+
+ DISALLOW_COPY_AND_ASSIGN(ScopedAsyncTrace);
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_SCOPED_ASYNC_TRACE_H_
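
A sketch of the usage pattern the class comment describes; ExamplePlayer, its decoder_ and weak_factory_ members, and the DecodeStatus parameter are hypothetical.

void ExamplePlayer::StartDecode(scoped_refptr<DecoderBuffer> buffer) {
  auto trace = ScopedAsyncTrace::CreateIfEnabled("ExamplePlayer::Decode");
  decoder_->Decode(buffer,
                   base::BindOnce(&ExamplePlayer::OnDecodeDone,
                                  weak_factory_.GetWeakPtr(),
                                  std::move(trace)));
}

// The async trace ends when |trace| is destroyed, whether or not this
// callback ever runs.
void ExamplePlayer::OnDecodeDone(std::unique_ptr<ScopedAsyncTrace> trace,
                                 DecodeStatus status) {}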
diff --git a/chromium/media/base/silent_sink_suspender.cc b/chromium/media/base/silent_sink_suspender.cc
index 5d5fc42b13f..618f8efd878 100644
--- a/chromium/media/base/silent_sink_suspender.cc
+++ b/chromium/media/base/silent_sink_suspender.cc
@@ -114,13 +114,21 @@ void SilentSinkSuspender::OnRenderError() {
callback_->OnRenderError();
}
+bool SilentSinkSuspender::IsUsingFakeSinkForTesting() {
+ base::AutoLock al(transition_lock_);
+ return is_using_fake_sink_;
+}
+
void SilentSinkSuspender::TransitionSinks(bool use_fake_sink) {
DCHECK(task_runner_->BelongsToCurrentThread());
// Ignore duplicate requests which can occur if the transition takes too long
// and multiple Render() events occur.
- if (use_fake_sink == is_using_fake_sink_)
- return;
+ {
+ base::AutoLock al(transition_lock_);
+ if (use_fake_sink == is_using_fake_sink_)
+ return;
+ }
if (use_fake_sink) {
sink_->Pause();
diff --git a/chromium/media/base/silent_sink_suspender.h b/chromium/media/base/silent_sink_suspender.h
index fa5cdba4d77..d9638651159 100644
--- a/chromium/media/base/silent_sink_suspender.h
+++ b/chromium/media/base/silent_sink_suspender.h
@@ -13,6 +13,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/synchronization/lock.h"
+#include "base/thread_annotations.h"
#include "base/time/time.h"
#include "media/base/audio_parameters.h"
#include "media/base/audio_renderer_sink.h"
@@ -52,7 +53,7 @@ class MEDIA_EXPORT SilentSinkSuspender
AudioBus* dest) override;
void OnRenderError() override;
- bool is_using_fake_sink_for_testing() const { return is_using_fake_sink_; }
+ bool IsUsingFakeSinkForTesting();
private:
// If |use_fake_sink| is true, pauses |sink_| and plays |fake_sink_|; if
@@ -88,11 +89,11 @@ class MEDIA_EXPORT SilentSinkSuspender
// Whether audio output is directed to |fake_sink_|. Must only be used when
// |transition_lock_| is held or both sinks are stopped.
- bool is_using_fake_sink_ = false;
+ bool is_using_fake_sink_ GUARDED_BY(transition_lock_) = false;
// Whether we're in the middle of a transition to or from |fake_sink_|. Must
// only be used when |transition_lock_| is held or both sinks are stopped.
- bool is_transition_pending_ = false;
+ bool is_transition_pending_ GUARDED_BY(transition_lock_) = false;
// Buffers accumulated during the transition from |fake_sink_| to |sink_|.
base::circular_deque<std::unique_ptr<AudioBus>> buffers_after_silence_;
diff --git a/chromium/media/base/silent_sink_suspender_unittest.cc b/chromium/media/base/silent_sink_suspender_unittest.cc
index 1e0c2c9e783..e6b2c851c47 100644
--- a/chromium/media/base/silent_sink_suspender_unittest.cc
+++ b/chromium/media/base/silent_sink_suspender_unittest.cc
@@ -59,14 +59,14 @@ TEST_F(SilentSinkSuspenderTest, BasicPassthough) {
TEST_F(SilentSinkSuspenderTest, SuspendResumeTriggered) {
// Verify a normal Render() doesn't invoke suspend.
- EXPECT_FALSE(suspender_.is_using_fake_sink_for_testing());
+ EXPECT_FALSE(suspender_.IsUsingFakeSinkForTesting());
temp_bus_->Zero();
EXPECT_EQ(temp_bus_->frames(),
suspender_.Render(base::TimeDelta(), base::TimeTicks(), 0,
temp_bus_.get()));
EXPECT_FALSE(temp_bus_->AreFramesZero());
base::RunLoop().RunUntilIdle();
- EXPECT_FALSE(suspender_.is_using_fake_sink_for_testing());
+ EXPECT_FALSE(suspender_.IsUsingFakeSinkForTesting());
// Mute all audio generated by the callback, this should suspend immediately.
fake_callback_.set_volume(0);
@@ -80,7 +80,7 @@ TEST_F(SilentSinkSuspenderTest, SuspendResumeTriggered) {
EXPECT_CALL(*mock_sink_, Pause())
.WillOnce(RunClosure(run_loop.QuitClosure()));
run_loop.Run();
- EXPECT_TRUE(suspender_.is_using_fake_sink_for_testing());
+ EXPECT_TRUE(suspender_.IsUsingFakeSinkForTesting());
}
// Unmute the audio, the FakeWorker inside |suspender_| should be running now,
@@ -92,7 +92,7 @@ TEST_F(SilentSinkSuspenderTest, SuspendResumeTriggered) {
EXPECT_CALL(*mock_sink_, Play())
.WillOnce(RunClosure(run_loop.QuitClosure()));
run_loop.Run();
- EXPECT_FALSE(suspender_.is_using_fake_sink_for_testing());
+ EXPECT_FALSE(suspender_.IsUsingFakeSinkForTesting());
}
// The first Render() after resume should return the first buffer which was
@@ -126,7 +126,7 @@ TEST_F(SilentSinkSuspenderTest, MultipleSuspend) {
EXPECT_CALL(*mock_sink_, Pause());
base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(suspender_.is_using_fake_sink_for_testing());
+ EXPECT_TRUE(suspender_.IsUsingFakeSinkForTesting());
}
TEST_F(SilentSinkSuspenderTest, MultipleResume) {
@@ -139,7 +139,7 @@ TEST_F(SilentSinkSuspenderTest, MultipleResume) {
EXPECT_TRUE(temp_bus_->AreFramesZero());
EXPECT_CALL(*mock_sink_, Pause());
base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(suspender_.is_using_fake_sink_for_testing());
+ EXPECT_TRUE(suspender_.IsUsingFakeSinkForTesting());
// Unmute the data.
fake_callback_.set_volume(1);
@@ -167,7 +167,7 @@ TEST_F(SilentSinkSuspenderTest, MultipleResume) {
suspender_.Render(base::TimeDelta(), base::TimeTicks(), 0, nullptr));
EXPECT_CALL(*mock_sink_, Play());
base::RunLoop().RunUntilIdle();
- EXPECT_FALSE(suspender_.is_using_fake_sink_for_testing());
+ EXPECT_FALSE(suspender_.IsUsingFakeSinkForTesting());
// Each render after resuming should return one of the non-silent bus.
EXPECT_EQ(temp_bus_->frames(),
diff --git a/chromium/media/base/test_data_util.cc b/chromium/media/base/test_data_util.cc
index 2a53b136eb5..567029400b4 100644
--- a/chromium/media/base/test_data_util.cc
+++ b/chromium/media/base/test_data_util.cc
@@ -6,8 +6,10 @@
#include <stdint.h>
+#include "base/containers/flat_map.h"
#include "base/files/file_util.h"
#include "base/logging.h"
+#include "base/no_destructor.h"
#include "base/numerics/safe_conversions.h"
#include "base/path_service.h"
#include "media/base/decoder_buffer.h"
@@ -16,6 +18,135 @@ namespace media {
namespace {
+// Mime types for test files. Sorted in the ASCII code order of the variable
+// names.
+const char kAacAdtsAudio[] = "audio/aac";
+const char kMp2AudioSBR[] = "video/mp2t; codecs=\"avc1.4D4041,mp4a.40.5\"";
+const char kMp2tAudioVideo[] = "video/mp2t; codecs=\"mp4a.40.2, avc1.42E01E\"";
+const char kMp3Audio[] = "audio/mpeg";
+// MP4
+const char kMp4AacAudio[] = "audio/mp4; codecs=\"mp4a.40.2\"";
+const char kMp4Av110bitVideo[] = "video/mp4; codecs=\"av01.0.04M.10\"";
+const char kMp4Av1Video[] = "video/mp4; codecs=\"av01.0.04M.08\"";
+const char kMp4Avc1Video[] = "video/mp4; codecs=\"avc1.64001E\"";
+const char kMp4AacAudioAvc1Video[] =
+ "video/mp4; codecs=\"mp4a.40.2, avc1.64001E\"";
+const char kMp4Avc3Video[] = "video/mp4; codecs=\"avc3.64001f\"";
+const char kMp4FlacAudio[] = "audio/mp4; codecs=\"flac\"";
+const char kMp4OpusAudio[] = "audio/mp4; codecs=\"opus\"";
+const char kMp4Vp9Profile2Video[] =
+ "video/mp4; codecs=\"vp09.02.10.10.01.02.02.02.00\"";
+const char kMp4Vp9Video[] =
+ "video/mp4; codecs=\"vp09.00.10.08.01.02.02.02.00\"";
+// WebM
+const char kWebMAv110bitVideo[] = "video/webm; codecs=\"av01.0.04M.10\"";
+const char kWebMAv1Video[] = "video/webm; codecs=\"av01.0.04M.08\"";
+const char kWebMOpusAudio[] = "audio/webm; codecs=\"opus\"";
+const char kWebMOpusAudioVp9Video[] = "video/webm; codecs=\"opus, vp9\"";
+const char kWebMVorbisAudio[] = "audio/webm; codecs=\"vorbis\"";
+const char kWebMVorbisAudioVp8Video[] = "video/webm; codecs=\"vorbis, vp8\"";
+const char kWebMVp8Video[] = "video/webm; codecs=\"vp8\"";
+const char kWebMVp9Profile2Video[] =
+ "video/webm; codecs=\"vp09.02.10.10.01.02.02.02.00\"";
+const char kWebMVp9Video[] = "video/webm; codecs=\"vp9\"";
+
+// A map from a test file name to its mime type. The file is located at
+// media/test/data.
+using FileToMimeTypeMap = base::flat_map<std::string, std::string>;
+
+// Wrapped to avoid static initializer startup cost. The list is sorted in
+// the ASCII code order of file names.
+// Note: Some files are old and the codec string in the mime type may not be
+// accurate.
+// Warning: When adding new files, make sure the codec string is accurate. For
+// example, kMp4Avc1Video is for H.264 High profile. If you add a file that
+// uses Main profile, a new mime type should be added.
+const FileToMimeTypeMap& GetFileToMimeTypeMap() {
+ static const base::NoDestructor<FileToMimeTypeMap> kFileToMimeTypeMap({
+ {"bear-1280x720-a_frag-cenc-key_rotation.mp4", kMp4AacAudio},
+ {"bear-1280x720-a_frag-cenc.mp4", kMp4AacAudio},
+ {"bear-1280x720-a_frag-cenc_clear-all.mp4", kMp4AacAudio},
+ {"bear-1280x720-aac_he.ts", kMp2AudioSBR},
+ {"bear-1280x720-v_frag-avc3.mp4", kMp4Avc3Video},
+ {"bear-1280x720-v_frag-cenc-key_rotation.mp4", kMp4Avc1Video},
+ {"bear-1280x720-v_frag-cenc.mp4", kMp4Avc1Video},
+ {"bear-1280x720-v_frag-cenc_clear-all.mp4", kMp4Avc1Video},
+ {"bear-1280x720.ts", kMp2tAudioVideo},
+ {"bear-320x240-16x9-aspect-av_enc-av.webm", kWebMVorbisAudioVp8Video},
+ {"bear-320x240-16x9-aspect.webm", kWebMVorbisAudioVp8Video},
+ {"bear-320x240-audio-only.webm", kWebMVorbisAudio},
+ {"bear-320x240-av_enc-a.webm", kWebMVorbisAudioVp8Video},
+ {"bear-320x240-av_enc-av.webm", kWebMVorbisAudioVp8Video},
+ {"bear-320x240-av_enc-av_clear-1s.webm", kWebMVorbisAudioVp8Video},
+ {"bear-320x240-av_enc-av_clear-all.webm", kWebMVorbisAudioVp8Video},
+ {"bear-320x240-av_enc-v.webm", kWebMVorbisAudioVp8Video},
+ {"bear-320x240-live.webm", kWebMVorbisAudioVp8Video},
+ {"bear-320x240-opus-a_enc-a.webm", kWebMOpusAudio},
+ {"bear-320x240-opus-av_enc-av.webm", kWebMOpusAudioVp9Video},
+ {"bear-320x240-opus-av_enc-v.webm", kWebMOpusAudioVp9Video},
+ {"bear-320x240-v-vp9_fullsample_enc-v.webm", kWebMVp9Video},
+ {"bear-320x240-v-vp9_profile2_subsample_cenc-v.mp4",
+ kMp4Vp9Profile2Video},
+ {"bear-320x240-v-vp9_profile2_subsample_cenc-v.webm",
+ kWebMVp9Profile2Video},
+ {"bear-320x240-v-vp9_subsample_enc-v.webm", kWebMVp9Video},
+ {"bear-320x240-v_enc-v.webm", kWebMVp8Video},
+ {"bear-320x240-v_frag-vp9-cenc.mp4", kMp4Vp9Video},
+ {"bear-320x240-v_frag-vp9.mp4", kMp4Vp9Video},
+ {"bear-320x240-video-only.webm", kWebMVp8Video},
+ {"bear-320x240.webm", kWebMVorbisAudioVp8Video},
+ {"bear-320x240_corrupted_after_init_segment.webm",
+ kWebMVorbisAudioVp8Video},
+ {"bear-640x360-a_frag-cbcs.mp4", kMp4AacAudio},
+ {"bear-640x360-a_frag-cenc.mp4", kMp4AacAudio},
+ {"bear-640x360-a_frag.mp4", kMp4AacAudio},
+ {"bear-640x360-av_frag.mp4", kMp4AacAudioAvc1Video},
+ {"bear-640x360-v_frag-cbc1.mp4", kMp4Avc1Video},
+ {"bear-640x360-v_frag-cbcs.mp4", kMp4Avc1Video},
+ {"bear-640x360-v_frag-cenc-key_rotation.mp4", kMp4Avc1Video},
+ {"bear-640x360-v_frag-cenc-mdat.mp4", kMp4Avc1Video},
+ {"bear-640x360-v_frag-cenc-senc-no-saiz-saio.mp4", kMp4Avc1Video},
+ {"bear-640x360-v_frag-cenc-senc.mp4", kMp4Avc1Video},
+ {"bear-640x360-v_frag-cenc.mp4", kMp4Avc1Video},
+ {"bear-640x360-v_frag-cens.mp4", kMp4Avc1Video},
+ {"bear-640x360-v_frag.mp4", kMp4Avc1Video},
+ {"bear-a_enc-a.webm", kWebMVorbisAudio},
+ {"bear-audio-implicit-he-aac-v1.aac", kAacAdtsAudio},
+ {"bear-audio-implicit-he-aac-v2.aac", kAacAdtsAudio},
+ {"bear-audio-lc-aac.aac", kAacAdtsAudio},
+ {"bear-audio-main-aac.aac", kAacAdtsAudio},
+ {"bear-audio-mp4a.69.ts", "video/mp2t; codecs=\"mp4a.69\""},
+ {"bear-audio-mp4a.6B.ts", "video/mp2t; codecs=\"mp4a.6B\""},
+ {"bear-av1-320x180-10bit-cenc.mp4", kMp4Av110bitVideo},
+ {"bear-av1-320x180-10bit-cenc.webm", kWebMAv110bitVideo},
+ {"bear-av1-320x180-10bit.mp4", kMp4Av110bitVideo},
+ {"bear-av1-320x180-10bit.webm", kWebMAv110bitVideo},
+ {"bear-av1-480x360.webm", kWebMAv1Video},
+ {"bear-av1-cenc.mp4", kMp4Av1Video},
+ {"bear-av1-cenc.webm", kWebMAv1Video},
+ {"bear-av1.mp4", kMp4Av1Video},
+ {"bear-av1.webm", kWebMAv1Video},
+ {"bear-flac-cenc.mp4", kMp4FlacAudio},
+ {"bear-flac_frag.mp4", kMp4FlacAudio},
+ {"bear-opus.mp4", kMp4OpusAudio},
+ {"bear-opus.webm", kWebMOpusAudio},
+ {"bear-vp8a.webm", kWebMVp8Video},
+ {"bear-vp9-blockgroup.webm", kWebMVp9Video},
+ {"bear-vp9.webm", kWebMVp9Video},
+ {"frame_size_change-av_enc-v.webm", kWebMVorbisAudioVp8Video},
+ {"icy_sfx.mp3", kMp3Audio},
+ {"opus-trimming-test.mp4", kMp4OpusAudio},
+ {"opus-trimming-test.webm", kWebMOpusAudio},
+ {"sfx-flac_frag.mp4", kMp4FlacAudio},
+ {"sfx-opus-441.webm", kWebMOpusAudio},
+ {"sfx-opus_frag.mp4", kMp4OpusAudio},
+ {"sfx.adts", kAacAdtsAudio},
+ {"sfx.mp3", kMp3Audio},
+ });
+
+ return *kFileToMimeTypeMap;
+}
+
// Key used to encrypt test files.
const uint8_t kSecretKey[] = {0xeb, 0xdd, 0x62, 0xf1, 0x68, 0x14, 0xd2, 0x7b,
0x68, 0xef, 0x12, 0x2a, 0xfc, 0xe4, 0xae, 0x3c};
@@ -23,7 +154,8 @@ const uint8_t kSecretKey[] = {0xeb, 0xdd, 0x62, 0xf1, 0x68, 0x14, 0xd2, 0x7b,
// The key ID for all encrypted files.
const uint8_t kKeyId[] = {0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
0x38, 0x39, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35};
-}
+
+} // namespace
// TODO(sandersd): Change the tests to use a more unique message.
// See http://crbug.com/592067
@@ -51,6 +183,13 @@ base::FilePath GetTestDataPath() {
return base::FilePath(kTestDataPath);
}
+std::string GetMimeTypeForFile(const std::string& file_name) {
+ const auto& map = GetFileToMimeTypeMap();
+ auto itr = map.find(file_name);
+ CHECK(itr != map.end()) << ": file_name = " << file_name;
+ return itr->second;
+}
+
std::string GetURLQueryString(const base::StringPairs& query_params) {
std::string query = "";
auto itr = query_params.begin();
@@ -72,10 +211,9 @@ scoped_refptr<DecoderBuffer> ReadTestDataFile(const std::string& name) {
int file_size = base::checked_cast<int>(tmp);
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(file_size));
- CHECK_EQ(file_size,
- base::ReadFile(
- file_path, reinterpret_cast<char*>(buffer->writable_data()),
- file_size)) << "Failed to read '" << name << "'";
+ auto* data = reinterpret_cast<char*>(buffer->writable_data());
+ CHECK_EQ(file_size, base::ReadFile(file_path, data, file_size))
+ << "Failed to read '" << name << "'";
return buffer;
}
diff --git a/chromium/media/base/test_data_util.h b/chromium/media/base/test_data_util.h
index 5924aaa964e..ba6a3427ba7 100644
--- a/chromium/media/base/test_data_util.h
+++ b/chromium/media/base/test_data_util.h
@@ -29,6 +29,9 @@ base::FilePath GetTestDataFilePath(const std::string& name);
// Returns relative path for test data folder: media/test/data.
base::FilePath GetTestDataPath();
+// Returns the mime type for media/test/data/<file_name>.
+std::string GetMimeTypeForFile(const std::string& file_name);
+
// Returns a string containing key value query params in the form of:
// "key_1=value_1&key_2=value2"
std::string GetURLQueryString(const base::StringPairs& query_params);
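
For example, a test that uses one of the files above can look up its mime type instead of hard-coding it:

const std::string mime_type = GetMimeTypeForFile("bear-vp9.webm");
// mime_type == "video/webm; codecs=\"vp9\""
// An unknown file name CHECK-fails rather than returning an empty string.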
diff --git a/chromium/media/base/test_helpers.cc b/chromium/media/base/test_helpers.cc
index 3757bc79df8..8a1f79eb415 100644
--- a/chromium/media/base/test_helpers.cc
+++ b/chromium/media/base/test_helpers.cc
@@ -127,6 +127,7 @@ void WaitableMessageLoopEvent::OnTimeout() {
static VideoDecoderConfig GetTestConfig(VideoCodec codec,
VideoCodecProfile config,
+ const VideoColorSpace& color_space,
VideoRotation rotation,
gfx::Size coded_size,
bool is_encrypted) {
@@ -134,7 +135,7 @@ static VideoDecoderConfig GetTestConfig(VideoCodec codec,
gfx::Size natural_size = coded_size;
return VideoDecoderConfig(
- codec, config, PIXEL_FORMAT_I420, COLOR_SPACE_JPEG, rotation, coded_size,
+ codec, config, PIXEL_FORMAT_I420, color_space, rotation, coded_size,
visible_rect, natural_size, EmptyExtraData(),
is_encrypted ? AesCtrEncryptionScheme() : Unencrypted());
}
@@ -145,50 +146,64 @@ static const gfx::Size kLargeSize(640, 480);
// static
VideoDecoderConfig TestVideoConfig::Invalid() {
return GetTestConfig(kUnknownVideoCodec, VIDEO_CODEC_PROFILE_UNKNOWN,
- VIDEO_ROTATION_0, kNormalSize, false);
+ VideoColorSpace::JPEG(), VIDEO_ROTATION_0, kNormalSize,
+ false);
}
// static
VideoDecoderConfig TestVideoConfig::Normal(VideoCodec codec) {
- return GetTestConfig(codec, VIDEO_CODEC_PROFILE_UNKNOWN, VIDEO_ROTATION_0,
- kNormalSize, false);
+ return GetTestConfig(codec, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoColorSpace::JPEG(), VIDEO_ROTATION_0, kNormalSize,
+ false);
+}
+
+// static
+VideoDecoderConfig TestVideoConfig::NormalWithColorSpace(
+ VideoCodec codec,
+ const VideoColorSpace& color_space) {
+ return GetTestConfig(codec, VIDEO_CODEC_PROFILE_UNKNOWN, color_space,
+ VIDEO_ROTATION_0, kNormalSize, false);
}
// static
VideoDecoderConfig TestVideoConfig::NormalH264(VideoCodecProfile config) {
- return GetTestConfig(kCodecH264, config, VIDEO_ROTATION_0, kNormalSize,
- false);
+ return GetTestConfig(kCodecH264, config, VideoColorSpace::JPEG(),
+ VIDEO_ROTATION_0, kNormalSize, false);
}
// static
VideoDecoderConfig TestVideoConfig::NormalCodecProfile(
VideoCodec codec,
VideoCodecProfile profile) {
- return GetTestConfig(codec, profile, VIDEO_ROTATION_0, kNormalSize, false);
+ return GetTestConfig(codec, profile, VideoColorSpace::JPEG(),
+ VIDEO_ROTATION_0, kNormalSize, false);
}
// static
VideoDecoderConfig TestVideoConfig::NormalEncrypted(VideoCodec codec) {
- return GetTestConfig(codec, VIDEO_CODEC_PROFILE_UNKNOWN, VIDEO_ROTATION_0,
- kNormalSize, true);
+ return GetTestConfig(codec, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoColorSpace::JPEG(), VIDEO_ROTATION_0, kNormalSize,
+ true);
}
// static
VideoDecoderConfig TestVideoConfig::NormalRotated(VideoRotation rotation) {
- return GetTestConfig(kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN, rotation,
- kNormalSize, false);
+ return GetTestConfig(kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoColorSpace::JPEG(), rotation, kNormalSize, false);
}
// static
VideoDecoderConfig TestVideoConfig::Large(VideoCodec codec) {
- return GetTestConfig(codec, VIDEO_CODEC_PROFILE_UNKNOWN, VIDEO_ROTATION_0,
- kLargeSize, false);
+ return GetTestConfig(codec, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoColorSpace::JPEG(), VIDEO_ROTATION_0, kLargeSize,
+ false);
}
// static
VideoDecoderConfig TestVideoConfig::LargeEncrypted(VideoCodec codec) {
- return GetTestConfig(codec, VIDEO_CODEC_PROFILE_UNKNOWN, VIDEO_ROTATION_0,
- kLargeSize, true);
+ return GetTestConfig(codec, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoColorSpace::JPEG(), VIDEO_ROTATION_0, kLargeSize,
+ true);
}
// static
diff --git a/chromium/media/base/test_helpers.h b/chromium/media/base/test_helpers.h
index ea54a5b3285..e16c6e1398c 100644
--- a/chromium/media/base/test_helpers.h
+++ b/chromium/media/base/test_helpers.h
@@ -88,6 +88,9 @@ class TestVideoConfig {
static VideoDecoderConfig Invalid();
static VideoDecoderConfig Normal(VideoCodec codec = kCodecVP8);
+ static VideoDecoderConfig NormalWithColorSpace(
+ VideoCodec codec,
+ const VideoColorSpace& color_space);
static VideoDecoderConfig NormalH264(
VideoCodecProfile = VIDEO_CODEC_PROFILE_UNKNOWN);
static VideoDecoderConfig NormalCodecProfile(
diff --git a/chromium/media/base/unaligned_shared_memory.cc b/chromium/media/base/unaligned_shared_memory.cc
index 315e209c7e8..208b72a1318 100644
--- a/chromium/media/base/unaligned_shared_memory.cc
+++ b/chromium/media/base/unaligned_shared_memory.cc
@@ -7,7 +7,7 @@
#include <limits>
#include "base/logging.h"
-#include "base/sys_info.h"
+#include "base/system/sys_info.h"
#include "mojo/public/cpp/system/platform_handle.h"
namespace media {
diff --git a/chromium/media/base/user_input_monitor_unittest.cc b/chromium/media/base/user_input_monitor_unittest.cc
index e6b7e7b685d..f174daffefc 100644
--- a/chromium/media/base/user_input_monitor_unittest.cc
+++ b/chromium/media/base/user_input_monitor_unittest.cc
@@ -7,8 +7,9 @@
#include <memory>
#include <utility>
-#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
+#include "base/test/scoped_task_environment.h"
+#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -20,14 +21,15 @@ namespace media {
TEST(UserInputMonitorTest, CreatePlatformSpecific) {
#if defined(OS_LINUX)
- base::MessageLoopForIO message_loop;
- base::FileDescriptorWatcher file_descriptor_watcher(&message_loop);
+ base::test::ScopedTaskEnvironment task_environment(
+ base::test::ScopedTaskEnvironment::MainThreadType::IO);
#else
- base::MessageLoopForUI message_loop;
+ base::test::ScopedTaskEnvironment task_environment(
+ base::test::ScopedTaskEnvironment::MainThreadType::UI);
#endif // defined(OS_LINUX)
std::unique_ptr<UserInputMonitor> monitor = UserInputMonitor::Create(
- message_loop.task_runner(), message_loop.task_runner());
+ base::ThreadTaskRunnerHandle::Get(), base::ThreadTaskRunnerHandle::Get());
if (!monitor)
return;
@@ -41,14 +43,15 @@ TEST(UserInputMonitorTest, CreatePlatformSpecific) {
TEST(UserInputMonitorTest, CreatePlatformSpecificWithMapping) {
#if defined(OS_LINUX)
- base::MessageLoopForIO message_loop;
- base::FileDescriptorWatcher file_descriptor_watcher(&message_loop);
+ base::test::ScopedTaskEnvironment task_environment(
+ base::test::ScopedTaskEnvironment::MainThreadType::IO);
#else
- base::MessageLoopForUI message_loop;
+ base::test::ScopedTaskEnvironment task_environment(
+ base::test::ScopedTaskEnvironment::MainThreadType::UI);
#endif // defined(OS_LINUX)
std::unique_ptr<UserInputMonitor> monitor = UserInputMonitor::Create(
- message_loop.task_runner(), message_loop.task_runner());
+ base::ThreadTaskRunnerHandle::Get(), base::ThreadTaskRunnerHandle::Get());
if (!monitor)
return;
diff --git a/chromium/media/base/video_codecs.cc b/chromium/media/base/video_codecs.cc
index 5d9655365b2..c0f18356686 100644
--- a/chromium/media/base/video_codecs.cc
+++ b/chromium/media/base/video_codecs.cc
@@ -264,11 +264,11 @@ bool ParseAv1CodecId(const std::string& codec_id,
uint8_t* level_idc,
VideoColorSpace* color_space) {
// The codecs parameter string for the AOM AV1 codec is as follows:
+ // See https://aomediacodec.github.io/av1-isobmff/#codecsparam.
//
// <sample entry4CC>.<profile>.<level><tier>.<bitDepth>.<monochrome>.
// <chromaSubsampling>.<colorPrimaries>.<transferCharacteristics>.
// <matrixCoefficients>.<videoFullRangeFlag>
- //
std::vector<std::string> fields = base::SplitString(
codec_id, ".", base::KEEP_WHITESPACE, base::SPLIT_WANT_ALL);
diff --git a/chromium/media/base/video_color_space.cc b/chromium/media/base/video_color_space.cc
index c56304dffaa..59f22f0e8b0 100644
--- a/chromium/media/base/video_color_space.cc
+++ b/chromium/media/base/video_color_space.cc
@@ -188,7 +188,9 @@ gfx::ColorSpace VideoColorSpace::ToGfxColorSpace() const {
switch (matrix) {
case MatrixID::RGB:
- matrix_id = gfx::ColorSpace::MatrixID::RGB;
+ // RGB-encoded video actually puts the green in the Y channel,
+ // the blue in the Cb (U) channel and the red in the Cr (V) channel.
+ matrix_id = gfx::ColorSpace::MatrixID::GBR;
break;
case MatrixID::BT709:
matrix_id = gfx::ColorSpace::MatrixID::BT709;
diff --git a/chromium/media/base/video_decoder.cc b/chromium/media/base/video_decoder.cc
index 8eeb18eb449..3a7647313fb 100644
--- a/chromium/media/base/video_decoder.cc
+++ b/chromium/media/base/video_decoder.cc
@@ -4,6 +4,11 @@
#include "media/base/video_decoder.h"
+#include "base/command_line.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/system/sys_info.h"
+#include "media/base/limits.h"
+#include "media/base/media_switches.h"
#include "media/base/video_frame.h"
namespace media {
@@ -32,6 +37,34 @@ int VideoDecoder::GetMaxDecodeRequests() const {
return 1;
}
+// static
+int VideoDecoder::GetRecommendedThreadCount(int desired_threads) {
+ // If the thread count is specified on the command line, respect it so long as
+ // it's greater than zero.
+ const auto threads =
+ base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(
+ switches::kVideoThreads);
+ int decode_threads;
+ if (base::StringToInt(threads, &decode_threads) && decode_threads > 0)
+ return decode_threads;
+
+ // Clamp to the number of available logical processors/cores.
+ desired_threads =
+ std::min(desired_threads, base::SysInfo::NumberOfProcessors());
+
+ // Always try to use at least two threads for video decoding. There is little
+ // reason not to since current day CPUs tend to be multi-core and we measured
+ // performance benefits on older machines such as P4s with hyperthreading.
+ //
+ // All our software video decoders treat having one thread the same as having
+ // zero threads; I.e., decoding will execute on the calling thread. Therefore,
+ // at least two threads are required to allow decoding to progress outside of
+ // each Decode() call.
+ return std::min(std::max(desired_threads,
+ static_cast<int>(limits::kMinVideoDecodeThreads)),
+ static_cast<int>(limits::kMaxVideoDecodeThreads));
+}
+
} // namespace media
namespace std {
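The clamping order in GetRecommendedThreadCount() above is: command-line override first, then the logical-core cap, then the [min, max] limits. A minimal standalone sketch of that arithmetic, with stand-in values for limits::kMinVideoDecodeThreads and limits::kMaxVideoDecodeThreads (the real constants live in media/base/limits.h and are not reproduced in this patch):

    #include <algorithm>

    int RecommendedThreadCountSketch(int desired_threads, int logical_cores) {
      constexpr int kMinVideoDecodeThreads = 2;   // assumed for illustration
      constexpr int kMaxVideoDecodeThreads = 16;  // assumed for illustration
      // Never ask for more threads than there are logical cores...
      desired_threads = std::min(desired_threads, logical_cores);
      // ...but always use at least two, and never exceed the upper limit.
      return std::min(std::max(desired_threads, kMinVideoDecodeThreads),
                      kMaxVideoDecodeThreads);
    }
    // e.g. RecommendedThreadCountSketch(8, 4) == 4 and
    //      RecommendedThreadCountSketch(1, 4) == 2.

Software decoders would call the real static as VideoDecoder::GetRecommendedThreadCount(desired_threads).
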
diff --git a/chromium/media/base/video_decoder.h b/chromium/media/base/video_decoder.h
index e8226079ca1..f9deaf3bc13 100644
--- a/chromium/media/base/video_decoder.h
+++ b/chromium/media/base/video_decoder.h
@@ -128,6 +128,13 @@ class MEDIA_EXPORT VideoDecoder {
// Returns maximum number of parallel decode requests.
virtual int GetMaxDecodeRequests() const;
+ // Returns the recommended number of threads for software video decoding. If
+ // the --video-threads command line option is specified and is valid, that
+ // value is returned. Otherwise |desired_threads| is clamped to the number of
+ // logical processors and then further clamped to
+ // [|limits::kMinVideoDecodeThreads|, |limits::kMaxVideoDecodeThreads|].
+ static int GetRecommendedThreadCount(int desired_threads);
+
protected:
// Deletion is only allowed via Destroy().
virtual ~VideoDecoder();
diff --git a/chromium/media/base/video_decoder_config.cc b/chromium/media/base/video_decoder_config.cc
index f3b95eb05ee..aff9a78bbde 100644
--- a/chromium/media/base/video_decoder_config.cc
+++ b/chromium/media/base/video_decoder_config.cc
@@ -61,14 +61,13 @@ VideoDecoderConfig::VideoDecoderConfig()
: codec_(kUnknownVideoCodec),
profile_(VIDEO_CODEC_PROFILE_UNKNOWN),
format_(PIXEL_FORMAT_UNKNOWN),
- color_space_(COLOR_SPACE_UNSPECIFIED),
rotation_(VIDEO_ROTATION_0) {}
VideoDecoderConfig::VideoDecoderConfig(
VideoCodec codec,
VideoCodecProfile profile,
VideoPixelFormat format,
- ColorSpace color_space,
+ const VideoColorSpace& color_space,
VideoRotation rotation,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
@@ -84,11 +83,6 @@ VideoDecoderConfig::VideoDecoderConfig(const VideoDecoderConfig& other) =
VideoDecoderConfig::~VideoDecoderConfig() = default;
-void VideoDecoderConfig::set_color_space_info(
- const VideoColorSpace& color_space_info) {
- color_space_info_ = color_space_info;
-}
-
const VideoColorSpace& VideoDecoderConfig::color_space_info() const {
return color_space_info_;
}
@@ -104,7 +98,7 @@ const base::Optional<HDRMetadata>& VideoDecoderConfig::hdr_metadata() const {
void VideoDecoderConfig::Initialize(VideoCodec codec,
VideoCodecProfile profile,
VideoPixelFormat format,
- ColorSpace color_space,
+ const VideoColorSpace& color_space,
VideoRotation rotation,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
@@ -114,28 +108,13 @@ void VideoDecoderConfig::Initialize(VideoCodec codec,
codec_ = codec;
profile_ = profile;
format_ = format;
- color_space_ = color_space;
rotation_ = rotation;
coded_size_ = coded_size;
visible_rect_ = visible_rect;
natural_size_ = natural_size;
extra_data_ = extra_data;
encryption_scheme_ = encryption_scheme;
-
- switch (color_space) {
- case ColorSpace::COLOR_SPACE_JPEG:
- color_space_info_ = VideoColorSpace::JPEG();
- break;
- case ColorSpace::COLOR_SPACE_HD_REC709:
- color_space_info_ = VideoColorSpace::REC709();
- break;
- case ColorSpace::COLOR_SPACE_SD_REC601:
- color_space_info_ = VideoColorSpace::REC601();
- break;
- case ColorSpace::COLOR_SPACE_UNSPECIFIED:
- default:
- break;
- }
+ color_space_info_ = color_space;
}
bool VideoDecoderConfig::IsValidConfig() const {
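With the ColorSpace enum parameter gone, call sites now pass a VideoColorSpace value directly; the removed switch shows the intended replacements (JPEG(), REC709(), REC601()). A test-style sketch of the new signature, reusing the EmptyExtraData() and Unencrypted() helpers that appear in the unittest below; the function name and sizes are illustrative only:

    VideoDecoderConfig MakeRec709ConfigSketch() {
      return VideoDecoderConfig(kCodecVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420,
                                VideoColorSpace::REC709(),  // was COLOR_SPACE_HD_REC709
                                VIDEO_ROTATION_0, gfx::Size(1280, 720),
                                gfx::Rect(1280, 720), gfx::Size(1280, 720),
                                EmptyExtraData(), Unencrypted());
    }
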
diff --git a/chromium/media/base/video_decoder_config.h b/chromium/media/base/video_decoder_config.h
index 26f6f7864f6..0ef659e1da0 100644
--- a/chromium/media/base/video_decoder_config.h
+++ b/chromium/media/base/video_decoder_config.h
@@ -38,7 +38,7 @@ class MEDIA_EXPORT VideoDecoderConfig {
VideoDecoderConfig(VideoCodec codec,
VideoCodecProfile profile,
VideoPixelFormat format,
- ColorSpace color_space,
+ const VideoColorSpace& color_space,
VideoRotation rotation,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
@@ -54,7 +54,7 @@ class MEDIA_EXPORT VideoDecoderConfig {
void Initialize(VideoCodec codec,
VideoCodecProfile profile,
VideoPixelFormat format,
- ColorSpace color_space,
+ const VideoColorSpace& color_space,
VideoRotation rotation,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
@@ -83,11 +83,6 @@ class MEDIA_EXPORT VideoDecoderConfig {
// Video format used to determine YUV buffer sizes.
VideoPixelFormat format() const { return format_; }
- // The default color space of the decoded frames. Decoders should output
- // frames tagged with this color space unless they find a different value in
- // the bitstream.
- ColorSpace color_space() const { return color_space_; }
-
// Default is VIDEO_ROTATION_0.
VideoRotation video_rotation() const { return rotation_; }
@@ -123,7 +118,6 @@ class MEDIA_EXPORT VideoDecoderConfig {
return encryption_scheme_;
}
- void set_color_space_info(const VideoColorSpace& color_space_info);
const VideoColorSpace& color_space_info() const;
void set_hdr_metadata(const HDRMetadata& hdr_metadata);
@@ -139,9 +133,6 @@ class MEDIA_EXPORT VideoDecoderConfig {
VideoPixelFormat format_;
- // TODO(servolk): Deprecated, use color_space_info_ instead.
- ColorSpace color_space_;
-
VideoRotation rotation_;
// Deprecated. TODO(wolenetz): Remove. See https://crbug.com/665539.
diff --git a/chromium/media/base/video_decoder_config_unittest.cc b/chromium/media/base/video_decoder_config_unittest.cc
index 4697c769a4e..33d849e4ceb 100644
--- a/chromium/media/base/video_decoder_config_unittest.cc
+++ b/chromium/media/base/video_decoder_config_unittest.cc
@@ -17,7 +17,7 @@ static const gfx::Size kNaturalSize(320, 240);
TEST(VideoDecoderConfigTest, Invalid_UnsupportedPixelFormat) {
VideoDecoderConfig config(kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
- PIXEL_FORMAT_UNKNOWN, COLOR_SPACE_UNSPECIFIED,
+ PIXEL_FORMAT_UNKNOWN, VideoColorSpace(),
VIDEO_ROTATION_0, kCodedSize, kVisibleRect,
kNaturalSize, EmptyExtraData(), Unencrypted());
EXPECT_FALSE(config.IsValidConfig());
@@ -26,36 +26,36 @@ TEST(VideoDecoderConfigTest, Invalid_UnsupportedPixelFormat) {
TEST(VideoDecoderConfigTest, Invalid_AspectRatioNumeratorZero) {
gfx::Size natural_size = GetNaturalSize(kVisibleRect.size(), 0, 1);
VideoDecoderConfig config(kCodecVP8, VP8PROFILE_ANY, kVideoFormat,
- COLOR_SPACE_UNSPECIFIED, VIDEO_ROTATION_0,
- kCodedSize, kVisibleRect, natural_size,
- EmptyExtraData(), Unencrypted());
+ VideoColorSpace(), VIDEO_ROTATION_0, kCodedSize,
+ kVisibleRect, natural_size, EmptyExtraData(),
+ Unencrypted());
EXPECT_FALSE(config.IsValidConfig());
}
TEST(VideoDecoderConfigTest, Invalid_AspectRatioDenominatorZero) {
gfx::Size natural_size = GetNaturalSize(kVisibleRect.size(), 1, 0);
VideoDecoderConfig config(kCodecVP8, VP8PROFILE_ANY, kVideoFormat,
- COLOR_SPACE_UNSPECIFIED, VIDEO_ROTATION_0,
- kCodedSize, kVisibleRect, natural_size,
- EmptyExtraData(), Unencrypted());
+ VideoColorSpace(), VIDEO_ROTATION_0, kCodedSize,
+ kVisibleRect, natural_size, EmptyExtraData(),
+ Unencrypted());
EXPECT_FALSE(config.IsValidConfig());
}
TEST(VideoDecoderConfigTest, Invalid_AspectRatioNumeratorNegative) {
gfx::Size natural_size = GetNaturalSize(kVisibleRect.size(), -1, 1);
VideoDecoderConfig config(kCodecVP8, VP8PROFILE_ANY, kVideoFormat,
- COLOR_SPACE_UNSPECIFIED, VIDEO_ROTATION_0,
- kCodedSize, kVisibleRect, natural_size,
- EmptyExtraData(), Unencrypted());
+ VideoColorSpace(), VIDEO_ROTATION_0, kCodedSize,
+ kVisibleRect, natural_size, EmptyExtraData(),
+ Unencrypted());
EXPECT_FALSE(config.IsValidConfig());
}
TEST(VideoDecoderConfigTest, Invalid_AspectRatioDenominatorNegative) {
gfx::Size natural_size = GetNaturalSize(kVisibleRect.size(), 1, -1);
VideoDecoderConfig config(kCodecVP8, VP8PROFILE_ANY, kVideoFormat,
- COLOR_SPACE_UNSPECIFIED, VIDEO_ROTATION_0,
- kCodedSize, kVisibleRect, natural_size,
- EmptyExtraData(), Unencrypted());
+ VideoColorSpace(), VIDEO_ROTATION_0, kCodedSize,
+ kVisibleRect, natural_size, EmptyExtraData(),
+ Unencrypted());
EXPECT_FALSE(config.IsValidConfig());
}
@@ -64,9 +64,9 @@ TEST(VideoDecoderConfigTest, Invalid_AspectRatioNumeratorTooLarge) {
int num = ceil(static_cast<double>(limits::kMaxDimension + 1) / width);
gfx::Size natural_size = GetNaturalSize(kVisibleRect.size(), num, 1);
VideoDecoderConfig config(kCodecVP8, VP8PROFILE_ANY, kVideoFormat,
- COLOR_SPACE_UNSPECIFIED, VIDEO_ROTATION_0,
- kCodedSize, kVisibleRect, natural_size,
- EmptyExtraData(), Unencrypted());
+ VideoColorSpace(), VIDEO_ROTATION_0, kCodedSize,
+ kVisibleRect, natural_size, EmptyExtraData(),
+ Unencrypted());
EXPECT_FALSE(config.IsValidConfig());
}
@@ -78,9 +78,9 @@ TEST(VideoDecoderConfigTest, Invalid_AspectRatioDenominatorVeryLarge) {
EXPECT_EQ(320, natural_size.width());
EXPECT_EQ(240 * 641, natural_size.height());
VideoDecoderConfig config(kCodecVP8, VP8PROFILE_ANY, kVideoFormat,
- COLOR_SPACE_UNSPECIFIED, VIDEO_ROTATION_0,
- kCodedSize, kVisibleRect, natural_size,
- EmptyExtraData(), Unencrypted());
+ VideoColorSpace(), VIDEO_ROTATION_0, kCodedSize,
+ kVisibleRect, natural_size, EmptyExtraData(),
+ Unencrypted());
EXPECT_FALSE(config.IsValidConfig());
}
diff --git a/chromium/media/base/video_frame.cc b/chromium/media/base/video_frame.cc
index 6d238b8d40b..9cf68b397d6 100644
--- a/chromium/media/base/video_frame.cc
+++ b/chromium/media/base/video_frame.cc
@@ -107,13 +107,15 @@ static bool AreValidPixelFormatsForWrap(VideoPixelFormat source_format,
// If it is required to allocate aligned to multiple-of-two size overall for the
// frame of pixel |format|.
-bool RequiresEvenSizeAllocation(VideoPixelFormat format) {
+static bool RequiresEvenSizeAllocation(VideoPixelFormat format) {
switch (format) {
case PIXEL_FORMAT_ARGB:
case PIXEL_FORMAT_XRGB:
case PIXEL_FORMAT_RGB24:
case PIXEL_FORMAT_RGB32:
case PIXEL_FORMAT_Y16:
+ case PIXEL_FORMAT_ABGR:
+ case PIXEL_FORMAT_XBGR:
return false;
case PIXEL_FORMAT_NV12:
case PIXEL_FORMAT_NV21:
@@ -143,6 +145,59 @@ bool RequiresEvenSizeAllocation(VideoPixelFormat format) {
return false;
}
+// Creates VideoFrameLayout for tightly packed frame.
+static base::Optional<VideoFrameLayout> GetDefaultLayout(
+ VideoPixelFormat format,
+ const gfx::Size& coded_size) {
+ std::vector<VideoFrameLayout::Plane> planes;
+
+ switch (format) {
+ case PIXEL_FORMAT_I420: {
+ int uv_width = (coded_size.width() + 1) / 2;
+ int uv_height = (coded_size.height() + 1) / 2;
+ int uv_stride = uv_width;
+ int uv_size = uv_width * uv_height;
+ planes = std::vector<VideoFrameLayout::Plane>{
+ VideoFrameLayout::Plane(coded_size.width(), 0),
+ VideoFrameLayout::Plane(uv_stride, coded_size.GetArea()),
+ VideoFrameLayout::Plane(uv_stride, coded_size.GetArea() + uv_size),
+ };
+ break;
+ }
+
+ case PIXEL_FORMAT_Y16:
+ planes = std::vector<VideoFrameLayout::Plane>{
+ VideoFrameLayout::Plane(coded_size.width() * 2, 0)};
+ break;
+
+ case PIXEL_FORMAT_ARGB:
+ planes = std::vector<VideoFrameLayout::Plane>{
+ VideoFrameLayout::Plane(coded_size.width() * 4, 0)};
+ break;
+
+ case PIXEL_FORMAT_NV12: {
+ int uv_width = (coded_size.width() + 1) / 2;
+ int uv_stride = uv_width * 2;
+ planes = std::vector<VideoFrameLayout::Plane>{
+ VideoFrameLayout::Plane(coded_size.width(), 0),
+ VideoFrameLayout::Plane(uv_stride, coded_size.GetArea()),
+ };
+ break;
+ }
+
+ default:
+ // TODO(miu): This function should support any pixel format.
+ // http://crbug.com/555909 .
+ DLOG(ERROR)
+ << "Only PIXEL_FORMAT_I420, PIXEL_FORMAT_Y16, PIXEL_FORMAT_NV12, "
+ "and PIXEL_FORMAT_ARGB formats are supported: "
+ << VideoPixelFormatToString(format);
+ return base::nullopt;
+ }
+
+ return VideoFrameLayout::CreateWithPlanes(format, coded_size, planes);
+}
+
// static
bool VideoFrame::IsValidConfig(VideoPixelFormat format,
StorageType storage_type,
@@ -160,8 +215,9 @@ bool VideoFrame::IsValidConfig(VideoPixelFormat format,
visible_rect.bottom() > coded_size.height() ||
natural_size_area > limits::kMaxCanvas ||
natural_size.width() > limits::kMaxDimension ||
- natural_size.height() > limits::kMaxDimension)
+ natural_size.height() > limits::kMaxDimension) {
return false;
+ }
// TODO(mcasas): Remove parameter |storage_type| when the opaque storage types
// comply with the checks below. Right now we skip them.
@@ -169,7 +225,7 @@ bool VideoFrame::IsValidConfig(VideoPixelFormat format,
return true;
// Make sure new formats are properly accounted for in the method.
- static_assert(PIXEL_FORMAT_MAX == 26,
+ static_assert(PIXEL_FORMAT_MAX == 28,
"Added pixel format, please review IsValidConfig()");
if (format == PIXEL_FORMAT_UNKNOWN) {
@@ -227,8 +283,14 @@ scoped_refptr<VideoFrame> VideoFrame::WrapNativeTextures(
return nullptr;
}
- scoped_refptr<VideoFrame> frame = new VideoFrame(
- format, storage, coded_size, visible_rect, natural_size, timestamp);
+ auto layout = VideoFrameLayout::Create(format, coded_size);
+ if (!layout) {
+ DLOG(ERROR) << "Invalid layout.";
+ return nullptr;
+ }
+
+ scoped_refptr<VideoFrame> frame =
+ new VideoFrame(*layout, storage, visible_rect, natural_size, timestamp);
memcpy(&frame->mailbox_holders_, mailbox_holders,
sizeof(frame->mailbox_holders_));
frame->mailbox_holders_release_cb_ = std::move(mailbox_holder_release_cb);
@@ -248,10 +310,24 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalData(
uint8_t* data,
size_t data_size,
base::TimeDelta timestamp) {
- return WrapExternalStorage(format, STORAGE_UNOWNED_MEMORY, coded_size,
- visible_rect, natural_size, data, data_size,
- timestamp, nullptr, nullptr,
- base::SharedMemoryHandle(), 0);
+ auto layout = GetDefaultLayout(format, coded_size);
+ if (!layout)
+ return nullptr;
+ return WrapExternalDataWithLayout(*layout, visible_rect, natural_size, data,
+ data_size, timestamp);
+}
+
+// static
+scoped_refptr<VideoFrame> VideoFrame::WrapExternalDataWithLayout(
+ const VideoFrameLayout& layout,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ uint8_t* data,
+ size_t data_size,
+ base::TimeDelta timestamp) {
+ return WrapExternalStorage(STORAGE_UNOWNED_MEMORY, layout, visible_rect,
+ natural_size, data, data_size, timestamp, nullptr,
+ nullptr, base::SharedMemoryHandle(), 0);
}
// static
@@ -265,9 +341,12 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalReadOnlySharedMemory(
base::ReadOnlySharedMemoryRegion* region,
size_t data_offset,
base::TimeDelta timestamp) {
- return WrapExternalStorage(format, STORAGE_SHMEM, coded_size, visible_rect,
- natural_size, data, data_size, timestamp, region,
- nullptr, base::SharedMemoryHandle(), data_offset);
+ auto layout = GetDefaultLayout(format, coded_size);
+ if (!layout)
+ return nullptr;
+ return WrapExternalStorage(STORAGE_SHMEM, *layout, visible_rect, natural_size,
+ data, data_size, timestamp, region, nullptr,
+ base::SharedMemoryHandle(), data_offset);
}
// static
@@ -281,9 +360,12 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalUnsafeSharedMemory(
base::UnsafeSharedMemoryRegion* region,
size_t data_offset,
base::TimeDelta timestamp) {
- return WrapExternalStorage(format, STORAGE_SHMEM, coded_size, visible_rect,
- natural_size, data, data_size, timestamp, nullptr,
- region, base::SharedMemoryHandle(), data_offset);
+ auto layout = GetDefaultLayout(format, coded_size);
+ if (!layout)
+ return nullptr;
+ return WrapExternalStorage(STORAGE_SHMEM, *layout, visible_rect, natural_size,
+ data, data_size, timestamp, nullptr, region,
+ base::SharedMemoryHandle(), data_offset);
}
// static
@@ -297,9 +379,12 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalSharedMemory(
base::SharedMemoryHandle handle,
size_t data_offset,
base::TimeDelta timestamp) {
- return WrapExternalStorage(format, STORAGE_SHMEM, coded_size, visible_rect,
- natural_size, data, data_size, timestamp, nullptr,
- nullptr, handle, data_offset);
+ auto layout = GetDefaultLayout(format, coded_size);
+ if (!layout)
+ return nullptr;
+ return WrapExternalStorage(STORAGE_SHMEM, *layout, visible_rect, natural_size,
+ data, data_size, timestamp, nullptr, nullptr,
+ handle, data_offset);
}
// static
@@ -324,12 +409,17 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalYuvData(
}
const size_t height = coded_size.height();
- scoped_refptr<VideoFrame> frame(new VideoFrame(
- VideoFrameLayout(
- format, coded_size, {y_stride, u_stride, v_stride},
- {std::abs(y_stride) * height, std::abs(u_stride) * height,
- std::abs(v_stride) * height}),
- storage, visible_rect, natural_size, timestamp));
+ auto layout = VideoFrameLayout::CreateWithStrides(
+ format, coded_size, {y_stride, u_stride, v_stride},
+ {std::abs(y_stride) * height, std::abs(u_stride) * height,
+ std::abs(v_stride) * height});
+ if (!layout) {
+ DLOG(ERROR) << "Invalid layout.";
+ return nullptr;
+ }
+
+ scoped_refptr<VideoFrame> frame(
+ new VideoFrame(*layout, storage, visible_rect, natural_size, timestamp));
frame->data_[kYPlane] = y_data;
frame->data_[kUPlane] = u_data;
frame->data_[kVPlane] = v_data;
@@ -366,12 +456,17 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalYuvaData(
}
const size_t height = coded_size.height();
- scoped_refptr<VideoFrame> frame(new VideoFrame(
- VideoFrameLayout(format, coded_size,
- {y_stride, u_stride, v_stride, a_stride},
- {abs(y_stride) * height, abs(u_stride) * height,
- abs(v_stride) * height, abs(a_stride) * height}),
- storage, visible_rect, natural_size, timestamp));
+ auto layout = VideoFrameLayout::CreateWithStrides(
+ format, coded_size, {y_stride, u_stride, v_stride, a_stride},
+ {abs(y_stride) * height, abs(u_stride) * height, abs(v_stride) * height,
+ abs(a_stride) * height});
+ if (!layout) {
+ DLOG(ERROR) << "Invalid layout";
+ return nullptr;
+ }
+
+ scoped_refptr<VideoFrame> frame(
+ new VideoFrame(*layout, storage, visible_rect, natural_size, timestamp));
frame->data_[kYPlane] = y_data;
frame->data_[kUPlane] = u_data;
frame->data_[kVPlane] = v_data;
@@ -456,8 +551,14 @@ scoped_refptr<VideoFrame> VideoFrame::WrapCVPixelBuffer(
return nullptr;
}
- scoped_refptr<VideoFrame> frame(new VideoFrame(
- format, storage, coded_size, visible_rect, natural_size, timestamp));
+ auto layout = VideoFrameLayout::Create(format, coded_size);
+ if (!layout) {
+ DLOG(ERROR) << "Invalid layout.";
+ return nullptr;
+ }
+
+ scoped_refptr<VideoFrame> frame(
+ new VideoFrame(*layout, storage, visible_rect, natural_size, timestamp));
frame->cv_pixel_buffer_.reset(cv_pixel_buffer, base::scoped_policy::RETAIN);
return frame;
@@ -498,8 +599,10 @@ scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame(
// Copy all metadata to the wrapped frame.
wrapping_frame->metadata()->MergeMetadataFrom(frame->metadata());
- for (size_t i = 0; i < NumPlanes(format); ++i) {
- wrapping_frame->data_[i] = frame->data(i);
+ if (frame->IsMappable()) {
+ for (size_t i = 0; i < NumPlanes(format); ++i) {
+ wrapping_frame->data_[i] = frame->data(i);
+ }
}
#if defined(OS_LINUX)
@@ -533,9 +636,13 @@ scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame(
// static
scoped_refptr<VideoFrame> VideoFrame::CreateEOSFrame() {
- scoped_refptr<VideoFrame> frame =
- new VideoFrame(PIXEL_FORMAT_UNKNOWN, STORAGE_UNKNOWN, gfx::Size(),
- gfx::Rect(), gfx::Size(), kNoTimestamp);
+ auto layout = VideoFrameLayout::Create(PIXEL_FORMAT_UNKNOWN, gfx::Size());
+ if (!layout) {
+ DLOG(ERROR) << "Invalid layout.";
+ return nullptr;
+ }
+ scoped_refptr<VideoFrame> frame = new VideoFrame(
+ *layout, STORAGE_UNKNOWN, gfx::Rect(), gfx::Size(), kNoTimestamp);
frame->metadata()->SetBoolean(VideoFrameMetadata::END_OF_STREAM, true);
return frame;
}
@@ -576,43 +683,7 @@ scoped_refptr<VideoFrame> VideoFrame::CreateTransparentFrame(
// static
size_t VideoFrame::NumPlanes(VideoPixelFormat format) {
- switch (format) {
- case PIXEL_FORMAT_UYVY:
- case PIXEL_FORMAT_YUY2:
- case PIXEL_FORMAT_ARGB:
- case PIXEL_FORMAT_XRGB:
- case PIXEL_FORMAT_RGB24:
- case PIXEL_FORMAT_RGB32:
- case PIXEL_FORMAT_MJPEG:
- case PIXEL_FORMAT_Y16:
- return 1;
- case PIXEL_FORMAT_NV12:
- case PIXEL_FORMAT_NV21:
- case PIXEL_FORMAT_MT21:
- return 2;
- case PIXEL_FORMAT_I420:
- case PIXEL_FORMAT_YV12:
- case PIXEL_FORMAT_I422:
- case PIXEL_FORMAT_I444:
- case PIXEL_FORMAT_YUV420P9:
- case PIXEL_FORMAT_YUV422P9:
- case PIXEL_FORMAT_YUV444P9:
- case PIXEL_FORMAT_YUV420P10:
- case PIXEL_FORMAT_YUV422P10:
- case PIXEL_FORMAT_YUV444P10:
- case PIXEL_FORMAT_YUV420P12:
- case PIXEL_FORMAT_YUV422P12:
- case PIXEL_FORMAT_YUV444P12:
- return 3;
- case PIXEL_FORMAT_I420A:
- return 4;
- case PIXEL_FORMAT_UNKNOWN:
- // Note: PIXEL_FORMAT_UNKNOWN is used for end-of-stream frame.
- // Set its NumPlanes() to zero to avoid NOTREACHED().
- return 0;
- }
- NOTREACHED() << "Unsupported video frame format: " << format;
- return 0;
+ return VideoFrameLayout::NumPlanes(format);
}
// static
@@ -677,6 +748,8 @@ int VideoFrame::BytesPerElement(VideoPixelFormat format, size_t plane) {
case PIXEL_FORMAT_ARGB:
case PIXEL_FORMAT_XRGB:
case PIXEL_FORMAT_RGB32:
+ case PIXEL_FORMAT_ABGR:
+ case PIXEL_FORMAT_XBGR:
return 4;
case PIXEL_FORMAT_RGB24:
return 3;
@@ -923,9 +996,8 @@ size_t VideoFrame::BitDepth() const {
// static
scoped_refptr<VideoFrame> VideoFrame::WrapExternalStorage(
- VideoPixelFormat format,
StorageType storage_type,
- const gfx::Size& coded_size,
+ const VideoFrameLayout& layout,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
uint8_t* data,
@@ -937,54 +1009,20 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalStorage(
size_t data_offset) {
DCHECK(IsStorageTypeMappable(storage_type));
- // TODO(miu): This function should support any pixel format.
- // http://crbug.com/555909
- if (format != PIXEL_FORMAT_I420 && format != PIXEL_FORMAT_Y16 &&
- format != PIXEL_FORMAT_ARGB) {
- DLOG(ERROR) << "Only PIXEL_FORMAT_I420, PIXEL_FORMAT_Y16, and "
- "PIXEL_FORMAT_ARGB formats are supported: "
- << VideoPixelFormatToString(format);
- return nullptr;
- }
-
- if (!IsValidConfig(format, storage_type, coded_size, visible_rect,
- natural_size)) {
+ if (!IsValidConfig(layout.format(), storage_type, layout.coded_size(),
+ visible_rect, natural_size)) {
DLOG(ERROR) << __func__ << " Invalid config."
- << ConfigToString(format, storage_type, coded_size,
- visible_rect, natural_size);
+ << ConfigToString(layout.format(), storage_type,
+ layout.coded_size(), visible_rect,
+ natural_size);
return nullptr;
}
- scoped_refptr<VideoFrame> frame;
- switch (NumPlanes(format)) {
- case 1:
- frame = new VideoFrame(
- VideoFrameLayout(format, coded_size,
- std::vector<int>({RowBytes(kYPlane, format,
- coded_size.width())})),
- storage_type, visible_rect, natural_size, timestamp);
- frame->data_[kYPlane] = data;
- break;
- case 3:
- DCHECK_EQ(format, PIXEL_FORMAT_I420);
- // TODO(miu): This always rounds widths down, whereas
- // VideoFrame::RowBytes() always rounds up. This inconsistency must be
- // resolved. Perhaps a CommonAlignment() check should be made in
- // IsValidConfig()?
- // http://crbug.com/555909
- frame = new VideoFrame(
- VideoFrameLayout(format, coded_size,
- {RowBytes(kYPlane, format, coded_size.width()),
- coded_size.width() / 2, coded_size.width() / 2}),
- storage_type, visible_rect, natural_size, timestamp);
- frame->data_[kYPlane] = data;
- frame->data_[kVPlane] = data + (coded_size.GetArea() * 5 / 4);
- frame->data_[kUPlane] = data + coded_size.GetArea();
- break;
- default:
- DLOG(ERROR) << "Invalid number of planes: " << NumPlanes(format)
- << " in format: " << VideoPixelFormatToString(format);
- return nullptr;
+ scoped_refptr<VideoFrame> frame = new VideoFrame(
+ layout, storage_type, visible_rect, natural_size, timestamp);
+
+ for (size_t i = 0; i < layout.planes().size(); ++i) {
+ frame->data_[i] = data + layout.planes()[i].offset;
}
if (storage_type == STORAGE_SHMEM) {
@@ -1006,6 +1044,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalStorage(
frame->shared_memory_offset_ = data_offset;
}
}
+
return frame;
}
@@ -1030,18 +1069,6 @@ VideoFrame::VideoFrame(const VideoFrameLayout& layout,
memset(&data_, 0, sizeof(data_));
}
-VideoFrame::VideoFrame(VideoPixelFormat format,
- StorageType storage_type,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- base::TimeDelta timestamp)
- : VideoFrame(VideoFrameLayout(format, coded_size),
- storage_type,
- visible_rect,
- natural_size,
- timestamp) {}
-
VideoFrame::~VideoFrame() {
if (mailbox_holders_release_cb_) {
gpu::SyncToken release_sync_token;
@@ -1101,10 +1128,15 @@ scoped_refptr<VideoFrame> VideoFrame::CreateFrameInternal(
// we can pad the requested |coded_size| if necessary if the request does not
// line up on sample boundaries. See discussion at http://crrev.com/1240833003
const gfx::Size new_coded_size = DetermineAlignedSize(format, coded_size);
- return CreateFrameWithLayout(
- VideoFrameLayout(format, new_coded_size,
- ComputeStrides(format, coded_size)),
- visible_rect, natural_size, timestamp, zero_initialize_memory);
+ auto layout = VideoFrameLayout::CreateWithStrides(
+ format, new_coded_size, ComputeStrides(format, coded_size));
+ if (!layout) {
+ DLOG(ERROR) << "Invalid layout.";
+ return nullptr;
+ }
+
+ return CreateFrameWithLayout(*layout, visible_rect, natural_size, timestamp,
+ zero_initialize_memory);
}
scoped_refptr<VideoFrame> VideoFrame::CreateFrameWithLayout(
@@ -1177,6 +1209,8 @@ gfx::Size VideoFrame::SampleSize(VideoPixelFormat format, size_t plane) {
case PIXEL_FORMAT_RGB24:
case PIXEL_FORMAT_RGB32:
case PIXEL_FORMAT_MJPEG:
+ case PIXEL_FORMAT_ABGR:
+ case PIXEL_FORMAT_XBGR:
break;
}
}
@@ -1210,7 +1244,7 @@ void VideoFrame::AllocateMemory(bool zero_initialize_memory) {
}
uint8_t* data = reinterpret_cast<uint8_t*>(
- base::AlignedAlloc(total_buffer_size, kFrameAddressAlignment));
+ base::AlignedAlloc(total_buffer_size, layout_.buffer_addr_align()));
if (zero_initialize_memory) {
memset(data, 0, total_buffer_size);
}
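A worked example of what the new GetDefaultLayout() produces, and how WrapExternalStorage() consumes it, for a 320x180 frame (arithmetic only, not part of the patch):

    PIXEL_FORMAT_I420, coded_size 320x180:
      Y plane:  stride 320, offset 0
      U plane:  stride 160, offset 320*180        = 57600
      V plane:  stride 160, offset 57600 + 160*90 = 72000

    PIXEL_FORMAT_NV12, coded_size 320x180:
      Y  plane: stride 320, offset 0
      UV plane: stride 320, offset 320*180        = 57600

WrapExternalStorage() then sets data_[i] = data + planes[i].offset, so a single tightly packed buffer covers all planes of these formats.
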
diff --git a/chromium/media/base/video_frame.h b/chromium/media/base/video_frame.h
index 21bd15ee7e6..596eb136fcd 100644
--- a/chromium/media/base/video_frame.h
+++ b/chromium/media/base/video_frame.h
@@ -23,6 +23,7 @@
#include "base/memory/shared_memory_handle.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/synchronization/lock.h"
+#include "base/thread_annotations.h"
#include "build/build_config.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/video_frame_layout.h"
@@ -45,9 +46,7 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
kFrameSizeAlignment = 16,
kFrameSizePadding = 16,
- // Note: This value is dependent on what's used by ffmpeg, do not change
- // without inspecting av_frame_get_buffer() first.
- kFrameAddressAlignment = 32
+ kFrameAddressAlignment = VideoFrameLayout::kBufferAddressAlignment
};
enum {
@@ -163,6 +162,14 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
size_t data_size,
base::TimeDelta timestamp);
+ static scoped_refptr<VideoFrame> WrapExternalDataWithLayout(
+ const VideoFrameLayout& layout,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ uint8_t* data,
+ size_t data_size,
+ base::TimeDelta timestamp);
+
// Same as WrapExternalData() with a ReadOnlySharedMemoryRegion and its
// offset. Neither |region| nor |data| are owned by this VideoFrame. The
// region and mapping which back |data| must outlive this instance; a
@@ -500,14 +507,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// Clients must use the static factory/wrapping methods to create a new frame.
// Derived classes should create their own factory/wrapping methods, and use
// this constructor to do basic initialization.
- VideoFrame(VideoPixelFormat format,
- StorageType storage_type,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- base::TimeDelta timestamp);
-
- // VideoFrameLayout is initialized at caller side.
VideoFrame(const VideoFrameLayout& layout,
StorageType storage_type,
const gfx::Rect& visible_rect,
@@ -538,9 +537,8 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
private:
static scoped_refptr<VideoFrame> WrapExternalStorage(
- VideoPixelFormat format,
StorageType storage_type,
- const gfx::Size& coded_size,
+ const VideoFrameLayout& layout,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
uint8_t* data,
@@ -642,7 +640,7 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
base::TimeDelta timestamp_;
base::Lock release_sync_token_lock_;
- gpu::SyncToken release_sync_token_;
+ gpu::SyncToken release_sync_token_ GUARDED_BY(release_sync_token_lock_);
VideoFrameMetadata metadata_;
diff --git a/chromium/media/base/video_frame_layout.cc b/chromium/media/base/video_frame_layout.cc
index 668e69d77d2..d541d96b8ea 100644
--- a/chromium/media/base/video_frame_layout.cc
+++ b/chromium/media/base/video_frame_layout.cc
@@ -7,6 +7,8 @@
#include <numeric>
#include <sstream>
+#include "base/logging.h"
+
namespace media {
namespace {
@@ -37,28 +39,95 @@ std::vector<VideoFrameLayout::Plane> PlanesFromStrides(
} // namespace
-VideoFrameLayout::VideoFrameLayout(VideoPixelFormat format,
- const gfx::Size& coded_size,
- std::vector<int32_t> strides,
- std::vector<size_t> buffer_sizes)
- : format_(format),
- coded_size_(coded_size),
- planes_(PlanesFromStrides(strides)),
- buffer_sizes_(std::move(buffer_sizes)) {}
+// static
+size_t VideoFrameLayout::NumPlanes(VideoPixelFormat format) {
+ switch (format) {
+ case PIXEL_FORMAT_UYVY:
+ case PIXEL_FORMAT_YUY2:
+ case PIXEL_FORMAT_ARGB:
+ case PIXEL_FORMAT_XRGB:
+ case PIXEL_FORMAT_RGB24:
+ case PIXEL_FORMAT_RGB32:
+ case PIXEL_FORMAT_MJPEG:
+ case PIXEL_FORMAT_Y16:
+ case PIXEL_FORMAT_ABGR:
+ case PIXEL_FORMAT_XBGR:
+ return 1;
+ case PIXEL_FORMAT_NV12:
+ case PIXEL_FORMAT_NV21:
+ case PIXEL_FORMAT_MT21:
+ return 2;
+ case PIXEL_FORMAT_I420:
+ case PIXEL_FORMAT_YV12:
+ case PIXEL_FORMAT_I422:
+ case PIXEL_FORMAT_I444:
+ case PIXEL_FORMAT_YUV420P9:
+ case PIXEL_FORMAT_YUV422P9:
+ case PIXEL_FORMAT_YUV444P9:
+ case PIXEL_FORMAT_YUV420P10:
+ case PIXEL_FORMAT_YUV422P10:
+ case PIXEL_FORMAT_YUV444P10:
+ case PIXEL_FORMAT_YUV420P12:
+ case PIXEL_FORMAT_YUV422P12:
+ case PIXEL_FORMAT_YUV444P12:
+ return 3;
+ case PIXEL_FORMAT_I420A:
+ return 4;
+ case PIXEL_FORMAT_UNKNOWN:
+ // Note: PIXEL_FORMAT_UNKNOWN is used for end-of-stream frame.
+ // Set its NumPlanes() to zero to avoid NOTREACHED().
+ return 0;
+ }
+ NOTREACHED() << "Unsupported video frame format: " << format;
+ return 0;
+}
+
+// static
+base::Optional<VideoFrameLayout> VideoFrameLayout::Create(
+ VideoPixelFormat format,
+ const gfx::Size& coded_size) {
+ return CreateWithStrides(format, coded_size,
+ std::vector<int32_t>(NumPlanes(format), 0));
+}
+
+// static
+base::Optional<VideoFrameLayout> VideoFrameLayout::CreateWithStrides(
+ VideoPixelFormat format,
+ const gfx::Size& coded_size,
+ std::vector<int32_t> strides,
+ std::vector<size_t> buffer_sizes) {
+ return CreateWithPlanes(format, coded_size, PlanesFromStrides(strides),
+ std::move(buffer_sizes));
+}
+
+// static
+base::Optional<VideoFrameLayout> VideoFrameLayout::CreateWithPlanes(
+ VideoPixelFormat format,
+ const gfx::Size& coded_size,
+ std::vector<Plane> planes,
+ std::vector<size_t> buffer_sizes,
+ size_t buffer_addr_align) {
+ // NOTE: Even if format is UNKNOWN, it is valid if |coded_size| is not
+ // empty.
+ // TODO(crbug.com/896135): Return base::nullopt,
+ // if (format != PIXEL_FORMAT_UNKNOWN || !coded_size.IsEmpty())
+ // TODO(crbug.com/896135): Return base::nullopt,
+ // if (planes.size() != NumPlanes(format))
+ // TODO(crbug.com/896135): Return base::nullopt,
+ // if (buffer_sizes.size() > planes.size())
+ return VideoFrameLayout(format, coded_size, std::move(planes),
+ std::move(buffer_sizes), buffer_addr_align);
+}
VideoFrameLayout::VideoFrameLayout(VideoPixelFormat format,
const gfx::Size& coded_size,
std::vector<Plane> planes,
- std::vector<size_t> buffer_sizes)
+ std::vector<size_t> buffer_sizes,
+ size_t buffer_addr_align)
: format_(format),
coded_size_(coded_size),
planes_(std::move(planes)),
- buffer_sizes_(std::move(buffer_sizes)) {}
-
-VideoFrameLayout::VideoFrameLayout()
- : format_(PIXEL_FORMAT_UNKNOWN),
- planes_(kDefaultPlaneCount),
- buffer_sizes_(kDefaultBufferCount, 0) {}
+ buffer_sizes_(std::move(buffer_sizes)),
+ buffer_addr_align_(buffer_addr_align) {}
VideoFrameLayout::~VideoFrameLayout() = default;
VideoFrameLayout::VideoFrameLayout(const VideoFrameLayout&) = default;
@@ -70,21 +139,38 @@ size_t VideoFrameLayout::GetTotalBufferSize() const {
return std::accumulate(buffer_sizes_.begin(), buffer_sizes_.end(), 0u);
}
-std::string VideoFrameLayout::ToString() const {
- std::ostringstream s;
- s << "VideoFrameLayout format: " << VideoPixelFormatToString(format_)
- << ", coded_size: " << coded_size_.ToString()
- << ", num_buffers: " << num_buffers()
- << ", buffer_sizes: " << VectorToString(buffer_sizes_)
- << ", num_planes: " << num_planes()
- << ", planes (stride, offset): " << VectorToString(planes_);
- return s.str();
-}
-
std::ostream& operator<<(std::ostream& ostream,
const VideoFrameLayout::Plane& plane) {
ostream << "(" << plane.stride << ", " << plane.offset << ")";
return ostream;
}
+bool VideoFrameLayout::Plane::operator==(
+ const VideoFrameLayout::Plane& rhs) const {
+ return stride == rhs.stride && offset == rhs.offset;
+}
+
+bool VideoFrameLayout::Plane::operator!=(
+ const VideoFrameLayout::Plane& rhs) const {
+ return !(*this == rhs);
+}
+
+bool VideoFrameLayout::operator==(const VideoFrameLayout& rhs) const {
+ return format_ == rhs.format_ && coded_size_ == rhs.coded_size_ &&
+ planes_ == rhs.planes_ && buffer_sizes_ == rhs.buffer_sizes_;
+}
+
+bool VideoFrameLayout::operator!=(const VideoFrameLayout& rhs) const {
+ return !(*this == rhs);
+}
+
+std::ostream& operator<<(std::ostream& ostream,
+ const VideoFrameLayout& layout) {
+ ostream << "VideoFrameLayout(format: " << layout.format()
+ << ", coded_size: " << layout.coded_size().ToString()
+ << ", planes (stride, offset): " << VectorToString(layout.planes())
+ << ", buffer_sizes: " << VectorToString(layout.buffer_sizes()) << ")";
+ return ostream;
+}
+
} // namespace media
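Caller-side sketch of the Optional-returning factories, mirroring the call sites patched in video_frame.cc above; the function name, stride choice, and header list are assumptions about what a caller would write:

    #include "base/logging.h"
    #include "base/optional.h"
    #include "media/base/video_frame_layout.h"
    #include "ui/gfx/geometry/size.h"

    base::Optional<media::VideoFrameLayout> MakeI420LayoutSketch(
        const gfx::Size& coded_size) {
      auto layout = media::VideoFrameLayout::CreateWithStrides(
          media::PIXEL_FORMAT_I420, coded_size,
          {coded_size.width(), coded_size.width() / 2, coded_size.width() / 2});
      if (!layout) {
        DLOG(ERROR) << "Invalid layout.";  // same handling as the patched callers
        return base::nullopt;
      }
      DVLOG(1) << *layout;  // operator<< replaces the old ToString()
      return layout;
    }
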
diff --git a/chromium/media/base/video_frame_layout.h b/chromium/media/base/video_frame_layout.h
index 97d1d6c78b3..d69d0bf3776 100644
--- a/chromium/media/base/video_frame_layout.h
+++ b/chromium/media/base/video_frame_layout.h
@@ -13,6 +13,7 @@
#include <utility>
#include <vector>
+#include "base/optional.h"
#include "media/base/media_export.h"
#include "media/base/video_types.h"
#include "ui/gfx/geometry/size.h"
@@ -29,13 +30,17 @@ namespace media {
// Note that it is copyable.
class MEDIA_EXPORT VideoFrameLayout {
public:
+ // Default alignment for buffers.
+ // Note: This value is dependent on what's used by ffmpeg, do not change
+ // without inspecting av_frame_get_buffer() first.
+ static constexpr size_t kBufferAddressAlignment = 32;
+
struct Plane {
Plane() = default;
Plane(int32_t stride, size_t offset) : stride(stride), offset(offset) {}
- bool operator==(const Plane& rhs) const {
- return stride == rhs.stride && offset == rhs.offset;
- }
+ bool operator==(const Plane& rhs) const;
+ bool operator!=(const Plane& rhs) const;
// Strides of a plane, typically greater or equal to the
// width of the surface divided by the horizontal sampling period. Note that
@@ -47,35 +52,44 @@ class MEDIA_EXPORT VideoFrameLayout {
size_t offset = 0;
};
- enum {
- kDefaultPlaneCount = 4,
- kDefaultBufferCount = 4,
- };
-
- // Constructor with strides and buffers' size.
- // If strides and buffer_sizes are not assigned, strides, offsets and
- // buffer_sizes are {0, 0, 0, 0}.
- VideoFrameLayout(VideoPixelFormat format,
- const gfx::Size& coded_size,
- std::vector<int32_t> strides =
- std::vector<int32_t>(kDefaultPlaneCount, 0),
- std::vector<size_t> buffer_sizes =
- std::vector<size_t>(kDefaultBufferCount, 0));
-
- // Constructor with plane's stride/offset, and buffers' size.
- // If buffer_sizes are not assigned, it is {0, 0, 0, 0}.
- VideoFrameLayout(VideoPixelFormat format,
- const gfx::Size& coded_size,
- std::vector<Plane> planes,
- std::vector<size_t> buffer_sizes =
- std::vector<size_t>(kDefaultBufferCount, 0));
-
- VideoFrameLayout();
+ // Factory functions.
+ // |format| and |coded_size| must be specified.
+ // |strides|, |planes| and |buffer_sizes| are optional, but |strides| or
+ // |planes| should be specified when |buffer_sizes| is given.
+ // The size of |buffer_sizes| must be less than or equal to the number of
+ // planes.
+ // Unless they are specified, num_planes() is NumPlanes(|format|) and
+ // num_buffers() is 0.
+ // |buffer_addr_align| can be specified to request a specific buffer memory
+ // alignment.
+ // The returned base::Optional will be base::nullopt if the configured values
+ // are invalid.
+ static base::Optional<VideoFrameLayout> Create(VideoPixelFormat format,
+ const gfx::Size& coded_size);
+
+ // The size of |strides| must be NumPlanes(|format|). Planes' offset will be
+ // 0.
+ static base::Optional<VideoFrameLayout> CreateWithStrides(
+ VideoPixelFormat format,
+ const gfx::Size& coded_size,
+ std::vector<int32_t> strides,
+ std::vector<size_t> buffer_sizes = {});
+
+ // The size of |planes| must be NumPlanes(|format|).
+ static base::Optional<VideoFrameLayout> CreateWithPlanes(
+ VideoPixelFormat format,
+ const gfx::Size& coded_size,
+ std::vector<Plane> planes,
+ std::vector<size_t> buffer_sizes = {},
+ size_t buffer_addr_align = kBufferAddressAlignment);
+
+ VideoFrameLayout() = delete;
VideoFrameLayout(const VideoFrameLayout&);
VideoFrameLayout(VideoFrameLayout&&);
VideoFrameLayout& operator=(const VideoFrameLayout&);
~VideoFrameLayout();
+ static size_t NumPlanes(VideoPixelFormat format);
+
VideoPixelFormat format() const { return format_; }
const gfx::Size& coded_size() const { return coded_size_; }
@@ -91,13 +105,21 @@ class MEDIA_EXPORT VideoFrameLayout {
// Returns sum of bytes of all buffers.
size_t GetTotalBufferSize() const;
- // Composes VideoFrameLayout as human readable string.
- std::string ToString() const;
+ bool operator==(const VideoFrameLayout& rhs) const;
+ bool operator!=(const VideoFrameLayout& rhs) const;
- // Returns false if it is invalid.
- bool IsValid() const { return format_ != PIXEL_FORMAT_UNKNOWN; }
+ // Returns the required memory alignment for buffers.
+ size_t buffer_addr_align() const {
+ return buffer_addr_align_;
+ }
private:
+ VideoFrameLayout(VideoPixelFormat format,
+ const gfx::Size& coded_size,
+ std::vector<Plane> planes,
+ std::vector<size_t> buffer_sizes,
+ size_t buffer_addr_align);
+
VideoPixelFormat format_;
// Width and height of the video frame in pixels. This must include pixel
@@ -113,11 +135,20 @@ class MEDIA_EXPORT VideoFrameLayout {
// Vector of sizes for each buffer, typically greater or equal to the area of
// |coded_size_|.
std::vector<size_t> buffer_sizes_;
+
+ // Memory address alignment of the buffers. This is only relevant when
+ // allocating physical memory for the buffer, so it doesn't need to be
+ // serialized when frames are passed through Mojo.
+ size_t buffer_addr_align_;
};
// Outputs VideoFrameLayout::Plane to stream.
-std::ostream& operator<<(std::ostream& ostream,
- const VideoFrameLayout::Plane& plane);
+MEDIA_EXPORT std::ostream& operator<<(std::ostream& ostream,
+ const VideoFrameLayout::Plane& plane);
+
+// Outputs VideoFrameLayout to stream.
+MEDIA_EXPORT std::ostream& operator<<(std::ostream& ostream,
+ const VideoFrameLayout& layout);
} // namespace media
diff --git a/chromium/media/base/video_frame_layout_unittest.cc b/chromium/media/base/video_frame_layout_unittest.cc
index 5a46d08311e..a1d2b2b3866 100644
--- a/chromium/media/base/video_frame_layout_unittest.cc
+++ b/chromium/media/base/video_frame_layout_unittest.cc
@@ -7,10 +7,12 @@
#include <stddef.h>
#include <stdint.h>
+#include <sstream>
#include <string>
#include <utility>
#include "base/logging.h"
+#include "media/base/video_frame.h"
#include "media/base/video_types.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/gfx/geometry/size.h"
@@ -33,57 +35,116 @@ std::vector<VideoFrameLayout::Plane> CreatePlanes(
} // namespace
-TEST(VideoFrameLayout, Constructor) {
+TEST(VideoFrameLayout, CreateI420) {
+ gfx::Size coded_size = gfx::Size(320, 180);
+ auto layout = VideoFrameLayout::Create(PIXEL_FORMAT_I420, coded_size);
+ ASSERT_TRUE(layout.has_value());
+
+ auto num_of_planes = VideoFrame::NumPlanes(PIXEL_FORMAT_I420);
+ EXPECT_EQ(layout->format(), PIXEL_FORMAT_I420);
+ EXPECT_EQ(layout->coded_size(), coded_size);
+ EXPECT_EQ(layout->GetTotalBufferSize(), 0u);
+ EXPECT_EQ(layout->num_planes(), num_of_planes);
+ EXPECT_EQ(layout->num_buffers(), 0u);
+ for (size_t i = 0; i < num_of_planes; ++i) {
+ EXPECT_EQ(layout->planes()[i].stride, 0);
+ EXPECT_EQ(layout->planes()[i].offset, 0u);
+ }
+}
+
+TEST(VideoFrameLayout, CreateNV12) {
+ gfx::Size coded_size = gfx::Size(320, 180);
+ auto layout = VideoFrameLayout::Create(PIXEL_FORMAT_NV12, coded_size);
+ ASSERT_TRUE(layout.has_value());
+
+ auto num_of_planes = VideoFrame::NumPlanes(PIXEL_FORMAT_NV12);
+ EXPECT_EQ(layout->format(), PIXEL_FORMAT_NV12);
+ EXPECT_EQ(layout->coded_size(), coded_size);
+ EXPECT_EQ(layout->GetTotalBufferSize(), 0u);
+ EXPECT_EQ(layout->num_planes(), num_of_planes);
+ EXPECT_EQ(layout->num_buffers(), 0u);
+ for (size_t i = 0; i < num_of_planes; ++i) {
+ EXPECT_EQ(layout->planes()[i].stride, 0);
+ EXPECT_EQ(layout->planes()[i].offset, 0u);
+ }
+}
+
+TEST(VideoFrameLayout, CreateWithStrides) {
gfx::Size coded_size = gfx::Size(320, 180);
std::vector<int32_t> strides = {384, 192, 192};
std::vector<size_t> buffer_sizes = {73728, 18432, 18432};
- VideoFrameLayout layout(PIXEL_FORMAT_I420, coded_size, strides, buffer_sizes);
+ auto layout = VideoFrameLayout::CreateWithStrides(
+ PIXEL_FORMAT_I420, coded_size, strides, buffer_sizes);
+ ASSERT_TRUE(layout.has_value());
+
+ EXPECT_EQ(layout->format(), PIXEL_FORMAT_I420);
+ EXPECT_EQ(layout->coded_size(), coded_size);
+ EXPECT_EQ(layout->num_planes(), 3u);
+ EXPECT_EQ(layout->num_buffers(), 3u);
+ EXPECT_EQ(layout->GetTotalBufferSize(), 110592u);
+ for (size_t i = 0; i < 3; ++i) {
+ EXPECT_EQ(layout->planes()[i].stride, strides[i]);
+ EXPECT_EQ(layout->planes()[i].offset, 0u);
+ EXPECT_EQ(layout->buffer_sizes()[i], buffer_sizes[i]);
+ }
+}
+
+TEST(VideoFrameLayout, CreateWithStridesNoBufferSizes) {
+ gfx::Size coded_size = gfx::Size(320, 180);
+ std::vector<int32_t> strides = {384, 192, 192};
+ auto layout = VideoFrameLayout::CreateWithStrides(PIXEL_FORMAT_I420,
+ coded_size, strides);
+ ASSERT_TRUE(layout.has_value());
- EXPECT_EQ(layout.format(), PIXEL_FORMAT_I420);
- EXPECT_EQ(layout.coded_size(), coded_size);
- EXPECT_EQ(layout.num_planes(), 3u);
- EXPECT_EQ(layout.num_buffers(), 3u);
- EXPECT_EQ(layout.GetTotalBufferSize(), 110592u);
+ EXPECT_EQ(layout->format(), PIXEL_FORMAT_I420);
+ EXPECT_EQ(layout->coded_size(), coded_size);
+ EXPECT_EQ(layout->num_planes(), 3u);
+ EXPECT_EQ(layout->num_buffers(), 0u);
+ EXPECT_EQ(layout->GetTotalBufferSize(), 0u);
for (size_t i = 0; i < 3; ++i) {
- EXPECT_EQ(layout.planes()[i].stride, strides[i]);
- EXPECT_EQ(layout.planes()[i].offset, 0u);
- EXPECT_EQ(layout.buffer_sizes()[i], buffer_sizes[i]);
+ EXPECT_EQ(layout->planes()[i].stride, strides[i]);
+ EXPECT_EQ(layout->planes()[i].offset, 0u);
}
}
-TEST(VideoFrameLayout, ConstructorWithPlanes) {
+TEST(VideoFrameLayout, CreateWithPlanes) {
gfx::Size coded_size = gfx::Size(320, 180);
std::vector<int32_t> strides = {384, 192, 192};
std::vector<size_t> offsets = {0, 100, 200};
std::vector<size_t> buffer_sizes = {73728, 18432, 18432};
- VideoFrameLayout layout(PIXEL_FORMAT_I420, coded_size,
- CreatePlanes(strides, offsets), buffer_sizes);
-
- EXPECT_EQ(layout.format(), PIXEL_FORMAT_I420);
- EXPECT_EQ(layout.coded_size(), coded_size);
- EXPECT_EQ(layout.num_planes(), 3u);
- EXPECT_EQ(layout.num_buffers(), 3u);
- EXPECT_EQ(layout.GetTotalBufferSize(), 110592u);
+ auto layout = VideoFrameLayout::CreateWithPlanes(
+ PIXEL_FORMAT_I420, coded_size, CreatePlanes(strides, offsets),
+ buffer_sizes);
+ ASSERT_TRUE(layout.has_value());
+
+ EXPECT_EQ(layout->format(), PIXEL_FORMAT_I420);
+ EXPECT_EQ(layout->coded_size(), coded_size);
+ EXPECT_EQ(layout->num_planes(), 3u);
+ EXPECT_EQ(layout->num_buffers(), 3u);
+ EXPECT_EQ(layout->GetTotalBufferSize(), 110592u);
for (size_t i = 0; i < 3; ++i) {
- EXPECT_EQ(layout.planes()[i].stride, strides[i]);
- EXPECT_EQ(layout.planes()[i].offset, offsets[i]);
- EXPECT_EQ(layout.buffer_sizes()[i], buffer_sizes[i]);
+ EXPECT_EQ(layout->planes()[i].stride, strides[i]);
+ EXPECT_EQ(layout->planes()[i].offset, offsets[i]);
+ EXPECT_EQ(layout->buffer_sizes()[i], buffer_sizes[i]);
}
}
-TEST(VideoFrameLayout, ConstructorNoStrideBufferSize) {
+TEST(VideoFrameLayout, CreateWithPlanesNoBufferSizes) {
gfx::Size coded_size = gfx::Size(320, 180);
- VideoFrameLayout layout(PIXEL_FORMAT_I420, coded_size);
-
- EXPECT_EQ(layout.format(), PIXEL_FORMAT_I420);
- EXPECT_EQ(layout.coded_size(), coded_size);
- EXPECT_EQ(layout.GetTotalBufferSize(), 0u);
- EXPECT_EQ(layout.num_planes(), 4u);
- EXPECT_EQ(layout.num_buffers(), 4u);
- for (size_t i = 0; i < 4u; ++i) {
- EXPECT_EQ(layout.planes()[i].stride, 0);
- EXPECT_EQ(layout.planes()[i].offset, 0u);
- EXPECT_EQ(layout.buffer_sizes()[i], 0u);
+ std::vector<int32_t> strides = {384, 192, 192};
+ std::vector<size_t> offsets = {0, 100, 200};
+ auto layout = VideoFrameLayout::CreateWithPlanes(
+ PIXEL_FORMAT_I420, coded_size, CreatePlanes(strides, offsets));
+ ASSERT_TRUE(layout.has_value());
+
+ EXPECT_EQ(layout->format(), PIXEL_FORMAT_I420);
+ EXPECT_EQ(layout->coded_size(), coded_size);
+ EXPECT_EQ(layout->num_planes(), 3u);
+ EXPECT_EQ(layout->num_buffers(), 0u);
+ EXPECT_EQ(layout->GetTotalBufferSize(), 0u);
+ for (size_t i = 0; i < 3; ++i) {
+ EXPECT_EQ(layout->planes()[i].stride, strides[i]);
+ EXPECT_EQ(layout->planes()[i].offset, offsets[i]);
}
}
@@ -92,33 +153,35 @@ TEST(VideoFrameLayout, CopyConstructor) {
std::vector<int32_t> strides = {384, 192, 192};
std::vector<size_t> offsets = {0, 100, 200};
std::vector<size_t> buffer_sizes = {73728, 18432, 18432};
- VideoFrameLayout layout(PIXEL_FORMAT_I420, coded_size,
- CreatePlanes(strides, offsets), buffer_sizes);
-
- VideoFrameLayout layout_clone(layout);
+ auto layout = VideoFrameLayout::CreateWithPlanes(
+ PIXEL_FORMAT_I420, coded_size, CreatePlanes(strides, offsets),
+ buffer_sizes);
+ ASSERT_TRUE(layout.has_value());
+ VideoFrameLayout layout_clone(*layout);
EXPECT_EQ(layout_clone.format(), PIXEL_FORMAT_I420);
EXPECT_EQ(layout_clone.coded_size(), coded_size);
EXPECT_EQ(layout_clone.num_planes(), 3u);
EXPECT_EQ(layout_clone.num_buffers(), 3u);
EXPECT_EQ(layout_clone.GetTotalBufferSize(), 110592u);
for (size_t i = 0; i < 3; ++i) {
- EXPECT_EQ(layout.planes()[i].stride, strides[i]);
- EXPECT_EQ(layout.planes()[i].offset, offsets[i]);
+ EXPECT_EQ(layout->planes()[i].stride, strides[i]);
+ EXPECT_EQ(layout->planes()[i].offset, offsets[i]);
EXPECT_EQ(layout_clone.buffer_sizes()[i], buffer_sizes[i]);
}
}
-TEST(VideoFrameLayout, AssignmentOperator) {
+TEST(VideoFrameLayout, CopyAssignmentOperator) {
gfx::Size coded_size = gfx::Size(320, 180);
std::vector<int32_t> strides = {384, 192, 192};
std::vector<size_t> offsets = {0, 100, 200};
std::vector<size_t> buffer_sizes = {73728, 18432, 18432};
- VideoFrameLayout layout(PIXEL_FORMAT_I420, coded_size,
- CreatePlanes(strides, offsets), buffer_sizes);
-
- VideoFrameLayout layout_clone = layout;
+ auto layout = VideoFrameLayout::CreateWithPlanes(
+ PIXEL_FORMAT_I420, coded_size, CreatePlanes(strides, offsets),
+ buffer_sizes);
+ ASSERT_TRUE(layout.has_value());
+ VideoFrameLayout layout_clone = *layout;
EXPECT_EQ(layout_clone.format(), PIXEL_FORMAT_I420);
EXPECT_EQ(layout_clone.coded_size(), coded_size);
EXPECT_EQ(layout_clone.num_planes(), 3u);
@@ -136,10 +199,12 @@ TEST(VideoFrameLayout, MoveConstructor) {
std::vector<int32_t> strides = {384, 192, 192};
std::vector<size_t> offsets = {0, 100, 200};
std::vector<size_t> buffer_sizes = {73728, 18432, 18432};
- VideoFrameLayout layout(PIXEL_FORMAT_I420, coded_size,
- CreatePlanes(strides, offsets), buffer_sizes);
+ auto layout = VideoFrameLayout::CreateWithPlanes(
+ PIXEL_FORMAT_I420, coded_size, CreatePlanes(strides, offsets),
+ buffer_sizes);
+ ASSERT_TRUE(layout.has_value());
- VideoFrameLayout layout_move(std::move(layout));
+ VideoFrameLayout layout_move(std::move(*layout));
EXPECT_EQ(layout_move.format(), PIXEL_FORMAT_I420);
EXPECT_EQ(layout_move.coded_size(), coded_size);
@@ -153,24 +218,27 @@ TEST(VideoFrameLayout, MoveConstructor) {
}
// Members in object being moved are cleared except const members.
- EXPECT_EQ(layout.format(), PIXEL_FORMAT_I420);
- EXPECT_EQ(layout.coded_size(), coded_size);
- EXPECT_EQ(layout.num_planes(), 0u);
- EXPECT_EQ(layout.num_buffers(), 0u);
- EXPECT_EQ(layout.GetTotalBufferSize(), 0u);
+ EXPECT_EQ(layout->format(), PIXEL_FORMAT_I420);
+ EXPECT_EQ(layout->coded_size(), coded_size);
+ EXPECT_EQ(layout->num_planes(), 0u);
+ EXPECT_EQ(layout->num_buffers(), 0u);
+ EXPECT_EQ(layout->GetTotalBufferSize(), 0u);
}
TEST(VideoFrameLayout, ToString) {
gfx::Size coded_size = gfx::Size(320, 180);
std::vector<int32_t> strides = {384, 192, 192};
std::vector<size_t> buffer_sizes = {73728, 18432, 18432};
- VideoFrameLayout layout(PIXEL_FORMAT_I420, coded_size, strides, buffer_sizes);
+ auto layout = VideoFrameLayout::CreateWithStrides(
+ PIXEL_FORMAT_I420, coded_size, strides, buffer_sizes);
+ ASSERT_TRUE(layout.has_value());
- EXPECT_EQ(layout.ToString(),
- "VideoFrameLayout format: PIXEL_FORMAT_I420, coded_size: 320x180, "
- "num_buffers: 3, buffer_sizes: [73728, 18432, 18432], "
- "num_planes: 3, "
- "planes (stride, offset): [(384, 0), (192, 0), (192, 0)]");
+ std::ostringstream ostream;
+ ostream << *layout;
+ EXPECT_EQ(ostream.str(),
+ "VideoFrameLayout(format: PIXEL_FORMAT_I420, coded_size: 320x180, "
+ "planes (stride, offset): [(384, 0), (192, 0), (192, 0)], "
+ "buffer_sizes: [73728, 18432, 18432])");
}
TEST(VideoFrameLayout, ToStringOneBuffer) {
@@ -178,23 +246,52 @@ TEST(VideoFrameLayout, ToStringOneBuffer) {
std::vector<int32_t> strides = {384};
std::vector<size_t> offsets = {100};
std::vector<size_t> buffer_sizes = {122880};
- VideoFrameLayout layout(PIXEL_FORMAT_NV12, coded_size,
- CreatePlanes(strides, offsets), buffer_sizes);
+ auto layout = VideoFrameLayout::CreateWithPlanes(
+ PIXEL_FORMAT_NV12, coded_size, CreatePlanes(strides, offsets),
+ buffer_sizes);
+ ASSERT_TRUE(layout.has_value());
- EXPECT_EQ(layout.ToString(),
- "VideoFrameLayout format: PIXEL_FORMAT_NV12, coded_size: 320x180, "
- "num_buffers: 1, buffer_sizes: [122880], "
- "num_planes: 1, planes (stride, offset): [(384, 100)]");
+ std::ostringstream ostream;
+ ostream << *layout;
+ EXPECT_EQ(ostream.str(),
+ "VideoFrameLayout(format: PIXEL_FORMAT_NV12, coded_size: 320x180, "
+ "planes (stride, offset): [(384, 100)], buffer_sizes: [122880])");
}
TEST(VideoFrameLayout, ToStringNoBufferInfo) {
gfx::Size coded_size = gfx::Size(320, 180);
- VideoFrameLayout layout(PIXEL_FORMAT_NV12, coded_size);
+ auto layout = VideoFrameLayout::Create(PIXEL_FORMAT_NV12, coded_size);
+ ASSERT_TRUE(layout.has_value());
+
+ std::ostringstream ostream;
+ ostream << *layout;
+ EXPECT_EQ(ostream.str(),
+ "VideoFrameLayout(format: PIXEL_FORMAT_NV12, coded_size: 320x180, "
+ "planes (stride, offset): [(0, 0), (0, 0)], buffer_sizes: [])");
+}
+
+TEST(VideoFrameLayout, EqualOperator) {
+ gfx::Size coded_size = gfx::Size(320, 180);
+ std::vector<int32_t> strides = {384, 192, 192};
+ std::vector<size_t> offsets = {0, 100, 200};
+ std::vector<size_t> buffer_sizes = {73728, 18432, 18432};
+ auto layout = VideoFrameLayout::CreateWithPlanes(
+ PIXEL_FORMAT_I420, coded_size, CreatePlanes(strides, offsets),
+ buffer_sizes);
+ ASSERT_TRUE(layout.has_value());
+
+ auto same_layout = VideoFrameLayout::CreateWithPlanes(
+ PIXEL_FORMAT_I420, coded_size, CreatePlanes(strides, offsets),
+ buffer_sizes);
+ ASSERT_TRUE(same_layout.has_value());
+ EXPECT_EQ(*layout, *same_layout);
- EXPECT_EQ(layout.ToString(),
- "VideoFrameLayout format: PIXEL_FORMAT_NV12, coded_size: 320x180, "
- "num_buffers: 4, buffer_sizes: [0, 0, 0, 0], num_planes: 4, "
- "planes (stride, offset): [(0, 0), (0, 0), (0, 0), (0, 0)]");
+ std::vector<size_t> another_buffer_sizes = {73728};
+ auto different_layout = VideoFrameLayout::CreateWithPlanes(
+ PIXEL_FORMAT_I420, coded_size, CreatePlanes(strides, offsets),
+ another_buffer_sizes);
+ ASSERT_TRUE(different_layout.has_value());
+ EXPECT_NE(*layout, *different_layout);
}
} // namespace media
diff --git a/chromium/media/base/video_frame_metadata.h b/chromium/media/base/video_frame_metadata.h
index 92741c3f410..c04d06df087 100644
--- a/chromium/media/base/video_frame_metadata.h
+++ b/chromium/media/base/video_frame_metadata.h
@@ -111,13 +111,13 @@ class MEDIA_EXPORT VideoFrameMetadata {
// notified about its promotability to an overlay.
WANTS_PROMOTION_HINT,
- // Windows only: if set, then this frame must be displayed in an overlay
- // rather than being composited into the framebuffer.
- REQUIRE_OVERLAY,
-
- // Windows only: this video has protected content.
+ // This video frame comes from protected content.
PROTECTED_VIDEO,
+ // This video frame is protected by hardware. This option is valid only if
+ // PROTECTED_VIDEO is also set to true.
+ HW_PROTECTED,
+
// Whether this frame was decoded in a power efficient way.
POWER_EFFICIENT,
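The new HW_PROTECTED key is only meaningful alongside PROTECTED_VIDEO. A sketch of tagging a frame, following the SetBoolean() usage visible in video_frame.cc above; the function name is illustrative:

    #include "media/base/video_frame.h"
    #include "media/base/video_frame_metadata.h"

    void MarkHardwareProtectedSketch(media::VideoFrame* frame) {
      frame->metadata()->SetBoolean(media::VideoFrameMetadata::PROTECTED_VIDEO,
                                    true);
      frame->metadata()->SetBoolean(media::VideoFrameMetadata::HW_PROTECTED,
                                    true);
    }
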
diff --git a/chromium/media/base/video_frame_pool.cc b/chromium/media/base/video_frame_pool.cc
index a58fcf6fd0e..b70a1f3a962 100644
--- a/chromium/media/base/video_frame_pool.cc
+++ b/chromium/media/base/video_frame_pool.cc
@@ -9,6 +9,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/synchronization/lock.h"
+#include "base/thread_annotations.h"
#include "base/time/default_tick_clock.h"
namespace media {
@@ -31,7 +32,10 @@ class VideoFramePool::PoolImpl
// |frames_|.
void Shutdown();
- size_t get_pool_size_for_testing() const { return frames_.size(); }
+ size_t get_pool_size_for_testing() {
+ base::AutoLock auto_lock(lock_);
+ return frames_.size();
+ }
void set_tick_clock_for_testing(const base::TickClock* tick_clock) {
tick_clock_ = tick_clock;
@@ -49,14 +53,14 @@ class VideoFramePool::PoolImpl
void FrameReleased(scoped_refptr<VideoFrame> frame);
base::Lock lock_;
- bool is_shutdown_ = false;
+ bool is_shutdown_ GUARDED_BY(lock_) = false;
struct FrameEntry {
base::TimeTicks last_use_time;
scoped_refptr<VideoFrame> frame;
};
- base::circular_deque<FrameEntry> frames_;
+ base::circular_deque<FrameEntry> frames_ GUARDED_BY(lock_);
// |tick_clock_| is always a DefaultTickClock outside of testing.
const base::TickClock* tick_clock_;
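The GUARDED_BY annotations added above let clang's thread-safety analysis verify that |is_shutdown_| and |frames_| are only touched with |lock_| held, which is also why get_pool_size_for_testing() now takes the lock. The general pattern, as a self-contained sketch with an illustrative class name:

    #include "base/synchronization/lock.h"
    #include "base/thread_annotations.h"

    class GuardedCounterSketch {
     public:
      void Increment() {
        base::AutoLock auto_lock(lock_);
        ++value_;
      }
      int value() {
        base::AutoLock auto_lock(lock_);
        return value_;
      }

     private:
      base::Lock lock_;
      int value_ GUARDED_BY(lock_) = 0;  // analysis errors if accessed unlocked
    };
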
diff --git a/chromium/media/base/video_frame_unittest.cc b/chromium/media/base/video_frame_unittest.cc
index ba3e99b3177..56576fc933c 100644
--- a/chromium/media/base/video_frame_unittest.cc
+++ b/chromium/media/base/video_frame_unittest.cc
@@ -402,11 +402,13 @@ TEST(VideoFrame, WrapExternalDmabufs) {
planes[i].offset = offsets[i];
}
auto timestamp = base::TimeDelta::FromMilliseconds(1);
- VideoFrameLayout layout(PIXEL_FORMAT_I420, coded_size, planes, buffer_sizes);
+ auto layout = VideoFrameLayout::CreateWithPlanes(
+ PIXEL_FORMAT_I420, coded_size, planes, buffer_sizes);
+ ASSERT_TRUE(layout);
std::vector<base::ScopedFD> dmabuf_fds(3u);
- auto frame =
- VideoFrame::WrapExternalDmabufs(layout, visible_rect, visible_rect.size(),
- std::move(dmabuf_fds), timestamp);
+ auto frame = VideoFrame::WrapExternalDmabufs(
+ *layout, visible_rect, visible_rect.size(), std::move(dmabuf_fds),
+ timestamp);
EXPECT_EQ(frame->layout().format(), PIXEL_FORMAT_I420);
EXPECT_EQ(frame->layout().coded_size(), coded_size);
@@ -625,6 +627,8 @@ TEST(VideoFrame, AllocationSize_OddSize) {
case PIXEL_FORMAT_XRGB:
case PIXEL_FORMAT_I420A:
case PIXEL_FORMAT_RGB32:
+ case PIXEL_FORMAT_ABGR:
+ case PIXEL_FORMAT_XBGR:
EXPECT_EQ(60u, VideoFrame::AllocationSize(format, size))
<< VideoPixelFormatToString(format);
break;
diff --git a/chromium/media/base/video_rotation.h b/chromium/media/base/video_rotation.h
index aaa354ca024..05690aef559 100644
--- a/chromium/media/base/video_rotation.h
+++ b/chromium/media/base/video_rotation.h
@@ -11,7 +11,7 @@ namespace media {
// Enumeration to represent 90 degree video rotation for MP4 videos
// where it can be rotated by 90 degree intervals.
-enum VideoRotation {
+enum VideoRotation : int {
VIDEO_ROTATION_0 = 0,
VIDEO_ROTATION_90,
VIDEO_ROTATION_180,
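
Fixing the underlying type with ": int" above gives VideoRotation a guaranteed size and, as of C++11, makes an opaque forward declaration legal. A hedged sketch of what that enables; the consumer struct is hypothetical:

  // Opaque declaration: legal only because the underlying type is fixed.
  // It yields a complete type, so members can be declared without
  // including video_rotation.h.
  enum VideoRotation : int;

  struct PendingFrameInfo {
    VideoRotation rotation;
    int frame_id;
  };
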
diff --git a/chromium/media/base/video_thumbnail_decoder_unittest.cc b/chromium/media/base/video_thumbnail_decoder_unittest.cc
index 8edfd5abf47..63d54bd494d 100644
--- a/chromium/media/base/video_thumbnail_decoder_unittest.cc
+++ b/chromium/media/base/video_thumbnail_decoder_unittest.cc
@@ -33,7 +33,7 @@ class VideoThumbnailDecoderTest : public testing::Test {
auto mock_video_decoder = std::make_unique<MockVideoDecoder>();
mock_video_decoder_ = mock_video_decoder.get();
VideoDecoderConfig valid_config(
- kCodecVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420, COLOR_SPACE_UNSPECIFIED,
+ kCodecVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420, VideoColorSpace(),
VIDEO_ROTATION_0, gfx::Size(1, 1), gfx::Rect(1, 1), gfx::Size(1, 1),
EmptyExtraData(), Unencrypted());
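
The change above swaps the deprecated ColorSpace enum for a VideoColorSpace value in VideoDecoderConfig; a default-constructed VideoColorSpace() means "unspecified". A sketch of naming a color space explicitly instead, assuming the VideoColorSpace::REC709() helper of this revision:

  // Sketch: same test config as above, but with BT.709 spelled out rather
  // than left unspecified.
  VideoDecoderConfig hd_config(
      kCodecVP8, VP8PROFILE_ANY, PIXEL_FORMAT_I420, VideoColorSpace::REC709(),
      VIDEO_ROTATION_0, gfx::Size(1, 1), gfx::Rect(1, 1), gfx::Size(1, 1),
      EmptyExtraData(), Unencrypted());
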
diff --git a/chromium/media/base/video_types.cc b/chromium/media/base/video_types.cc
index 6eb554ea9e3..815fb5dca05 100644
--- a/chromium/media/base/video_types.cc
+++ b/chromium/media/base/video_types.cc
@@ -63,6 +63,10 @@ std::string VideoPixelFormatToString(VideoPixelFormat format) {
return "PIXEL_FORMAT_YUV444P12";
case PIXEL_FORMAT_Y16:
return "PIXEL_FORMAT_Y16";
+ case PIXEL_FORMAT_ABGR:
+ return "PIXEL_FORMAT_ABGR";
+ case PIXEL_FORMAT_XBGR:
+ return "PIXEL_FORMAT_XBGR";
}
NOTREACHED() << "Invalid VideoPixelFormat provided: " << format;
return "";
@@ -114,6 +118,8 @@ bool IsYuvPlanar(VideoPixelFormat format) {
case PIXEL_FORMAT_RGB32:
case PIXEL_FORMAT_MJPEG:
case PIXEL_FORMAT_Y16:
+ case PIXEL_FORMAT_ABGR:
+ case PIXEL_FORMAT_XBGR:
return false;
}
return false;
@@ -144,10 +150,12 @@ bool IsOpaque(VideoPixelFormat format) {
case PIXEL_FORMAT_YUV422P12:
case PIXEL_FORMAT_YUV444P12:
case PIXEL_FORMAT_Y16:
+ case PIXEL_FORMAT_XBGR:
return true;
case PIXEL_FORMAT_I420A:
case PIXEL_FORMAT_ARGB:
case PIXEL_FORMAT_RGB32:
+ case PIXEL_FORMAT_ABGR:
break;
}
return false;
@@ -173,6 +181,8 @@ size_t BitDepth(VideoPixelFormat format) {
case PIXEL_FORMAT_RGB32:
case PIXEL_FORMAT_MJPEG:
case PIXEL_FORMAT_MT21:
+ case PIXEL_FORMAT_ABGR:
+ case PIXEL_FORMAT_XBGR:
return 8;
case PIXEL_FORMAT_YUV420P9:
case PIXEL_FORMAT_YUV422P9:
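
The helpers in video_types.cc switch exhaustively over VideoPixelFormat with no default case, which is why PIXEL_FORMAT_ABGR and PIXEL_FORMAT_XBGR have to be added to every switch above: a missed case becomes a compiler diagnostic rather than a silent misclassification. A trimmed, hypothetical helper showing the shape of these additions (it uses a default purely for brevity, unlike the real helpers):

  // Hypothetical helper, not part of the patch: classify the RGB-family
  // formats, including the newly added ABGR/XBGR entries.
  bool IsRGBFamily(VideoPixelFormat format) {
    switch (format) {
      case PIXEL_FORMAT_ARGB:
      case PIXEL_FORMAT_XRGB:
      case PIXEL_FORMAT_RGB32:
      case PIXEL_FORMAT_ABGR:
      case PIXEL_FORMAT_XBGR:
        return true;
      default:
        return false;
    }
  }
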
diff --git a/chromium/media/base/video_types.h b/chromium/media/base/video_types.h
index 0ce0d3ffc3a..edf07692dd0 100644
--- a/chromium/media/base/video_types.h
+++ b/chromium/media/base/video_types.h
@@ -53,8 +53,9 @@ enum VideoPixelFormat {
// Plane size = Row pitch * (((height+31)/32)*32)
PIXEL_FORMAT_MT21 = 15,
- // The P* in the formats below designates the number of bits per pixel. I.e.
- // P9 is 9-bits per pixel, P10 is 10-bits per pixel, etc.
+ // The P* in the formats below designates the number of bits per pixel
+ // component. I.e. P9 is 9-bits per pixel component, P10 is 10-bits per pixel
+ // component, etc.
PIXEL_FORMAT_YUV420P9 = 16,
PIXEL_FORMAT_YUV420P10 = 17,
PIXEL_FORMAT_YUV422P9 = 18,
@@ -68,22 +69,12 @@ enum VideoPixelFormat {
/* PIXEL_FORMAT_Y8 = 25, Deprecated */
PIXEL_FORMAT_Y16 = 26, // single 16bpp plane.
+ PIXEL_FORMAT_ABGR = 27, // 32bpp RGBA, 1 plane.
+ PIXEL_FORMAT_XBGR = 28, // 24bpp RGB, 1 plane.
+
// Please update UMA histogram enumeration when adding new formats here.
PIXEL_FORMAT_MAX =
- PIXEL_FORMAT_Y16, // Must always be equal to largest entry logged.
-};
-
-// Color space or color range used for the pixels.
-// Logged to UMA, so never reuse values. Leave gaps if necessary.
-// This enum is deprecated, use VideoColorSpace or gfx::ColorSpace instead.
-enum ColorSpace {
- COLOR_SPACE_UNSPECIFIED = 0, // In general this is Rec601.
- // The JPEG color space is the combination of Rec.601 and full range colors
- // (aka pc range colors).
- COLOR_SPACE_JPEG = 1,
- COLOR_SPACE_HD_REC709 = 2, // Rec709 "HD" color space.
- COLOR_SPACE_SD_REC601 = 3, // Rec601 "SD" color space.
- COLOR_SPACE_MAX = COLOR_SPACE_SD_REC601,
+ PIXEL_FORMAT_XBGR, // Must always be equal to largest entry logged.
};
// Returns the name of a Format as a string.
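
PIXEL_FORMAT_MAX above is advanced to the new largest entry because, as the in-code comment says, the enum feeds UMA enumeration histograms. A hedged sketch of that dependency; the histogram name is illustrative, not taken from the patch:

  // Sketch: enumeration histograms take an exclusive upper bound, so a stale
  // PIXEL_FORMAT_MAX would silently drop samples for the new formats.
  UMA_HISTOGRAM_ENUMERATION("Media.VideoPixelFormat", format,
                            PIXEL_FORMAT_MAX + 1);
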
diff --git a/chromium/media/base/wall_clock_time_source.cc b/chromium/media/base/wall_clock_time_source.cc
index 9ce5b4d8201..9ddf51eb1af 100644
--- a/chromium/media/base/wall_clock_time_source.cc
+++ b/chromium/media/base/wall_clock_time_source.cc
@@ -81,6 +81,12 @@ bool WallClockTimeSource::GetWallClockTimes(
return playback_rate_ && ticking_;
}
+void WallClockTimeSource::SetTickClockForTesting(
+ const base::TickClock* tick_clock) {
+ base::AutoLock auto_lock(lock_);
+ tick_clock_ = tick_clock;
+}
+
base::TimeDelta WallClockTimeSource::CurrentMediaTime_Locked() {
lock_.AssertAcquired();
if (!ticking_ || !playback_rate_)
diff --git a/chromium/media/base/wall_clock_time_source.h b/chromium/media/base/wall_clock_time_source.h
index 1e4ddaa3d70..028c3522138 100644
--- a/chromium/media/base/wall_clock_time_source.h
+++ b/chromium/media/base/wall_clock_time_source.h
@@ -7,6 +7,7 @@
#include "base/macros.h"
#include "base/synchronization/lock.h"
+#include "base/thread_annotations.h"
#include "base/time/default_tick_clock.h"
#include "media/base/media_export.h"
#include "media/base/time_source.h"
@@ -29,24 +30,22 @@ class MEDIA_EXPORT WallClockTimeSource : public TimeSource {
const std::vector<base::TimeDelta>& media_timestamps,
std::vector<base::TimeTicks>* wall_clock_times) override;
- void set_tick_clock_for_testing(const base::TickClock* tick_clock) {
- tick_clock_ = tick_clock;
- }
+ void SetTickClockForTesting(const base::TickClock* tick_clock);
private:
- base::TimeDelta CurrentMediaTime_Locked();
+ base::TimeDelta CurrentMediaTime_Locked() EXCLUSIVE_LOCKS_REQUIRED(lock_);
// Allow for an injectable tick clock for testing.
- const base::TickClock* tick_clock_;
+ const base::TickClock* tick_clock_ GUARDED_BY(lock_);
- bool ticking_;
+ bool ticking_ GUARDED_BY(lock_);
// While ticking we can interpolate the current media time by measuring the
// delta between our reference ticks and the current system ticks and scaling
// that time by the playback rate.
- double playback_rate_;
- base::TimeDelta base_timestamp_;
- base::TimeTicks reference_time_;
+ double playback_rate_ GUARDED_BY(lock_);
+ base::TimeDelta base_timestamp_ GUARDED_BY(lock_);
+ base::TimeTicks reference_time_ GUARDED_BY(lock_);
// TODO(scherkus): Remove internal locking from this class after access to
// Renderer::CurrentMediaTime() is single threaded http://crbug.com/370634
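
EXCLUSIVE_LOCKS_REQUIRED(lock_) above documents, and lets the analyzer verify, that CurrentMediaTime_Locked() is only ever called with the lock held. A minimal sketch of the pattern; the Clock class and its methods are hypothetical:

  class Clock {
   public:
    base::TimeDelta Now() {
      base::AutoLock auto_lock(lock_);
      return NowLocked();  // OK: |lock_| is held at the call site.
    }

   private:
    base::TimeDelta NowLocked() EXCLUSIVE_LOCKS_REQUIRED(lock_) {
      lock_.AssertAcquired();  // Runtime counterpart of the static annotation.
      return current_;
    }

    base::Lock lock_;
    base::TimeDelta current_ GUARDED_BY(lock_);
  };
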
diff --git a/chromium/media/base/wall_clock_time_source_unittest.cc b/chromium/media/base/wall_clock_time_source_unittest.cc
index 9468eb4d8ca..49b6134c5a7 100644
--- a/chromium/media/base/wall_clock_time_source_unittest.cc
+++ b/chromium/media/base/wall_clock_time_source_unittest.cc
@@ -14,7 +14,7 @@ namespace media {
class WallClockTimeSourceTest : public testing::Test {
public:
WallClockTimeSourceTest() : tick_clock_(new base::SimpleTestTickClock()) {
- time_source_.set_tick_clock_for_testing(tick_clock_.get());
+ time_source_.SetTickClockForTesting(tick_clock_.get());
AdvanceTimeInSeconds(1);
}
~WallClockTimeSourceTest() override = default;
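
To round out the renamed SetTickClockForTesting() call above, a sketch of the injectable-clock pattern in a test, assuming base::SimpleTestTickClock from this revision:

  base::SimpleTestTickClock tick_clock;
  WallClockTimeSource time_source;
  time_source.SetTickClockForTesting(&tick_clock);

  // Advancing the fake clock is observed by the time source on its next
  // query, without any real wall-clock time passing.
  tick_clock.Advance(base::TimeDelta::FromSeconds(1));
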