summaryrefslogtreecommitdiff
path: root/chromium/media/capture/video
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@qt.io>2021-05-20 09:47:09 +0200
committerAllan Sandfeld Jensen <allan.jensen@qt.io>2021-06-07 11:15:42 +0000
commit189d4fd8fad9e3c776873be51938cd31a42b6177 (patch)
tree6497caeff5e383937996768766ab3bb2081a40b2 /chromium/media/capture/video
parent8bc75099d364490b22f43a7ce366b366c08f4164 (diff)
downloadqtwebengine-chromium-189d4fd8fad9e3c776873be51938cd31a42b6177.tar.gz
BASELINE: Update Chromium to 90.0.4430.221
Change-Id: Iff4d9d18d2fcf1a576f3b1f453010f744a232920 Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/media/capture/video')
-rw-r--r--chromium/media/capture/video/DEPS1
-rw-r--r--chromium/media/capture/video/android/BUILD.gn2
-rw-r--r--chromium/media/capture/video/android/video_capture_device_factory_android.cc11
-rw-r--r--chromium/media/capture/video/chromeos/DEPS2
-rw-r--r--chromium/media/capture/video/chromeos/ash/DEPS3
-rw-r--r--chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.cc (renamed from chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc)302
-rw-r--r--chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h283
-rw-r--r--chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl_unittest.cc423
-rw-r--r--chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.cc84
-rw-r--r--chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.h66
-rw-r--r--chromium/media/capture/video/chromeos/camera_3a_controller.cc63
-rw-r--r--chromium/media/capture/video/chromeos/camera_3a_controller.h12
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc141
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h53
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_impl.cc87
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_impl.h34
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_provider_impl.cc25
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h11
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_context.cc5
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.cc380
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.h57
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc43
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.cc170
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.h45
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc17
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h146
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc227
-rw-r--r--chromium/media/capture/video/chromeos/camera_metadata_utils.cc7
-rw-r--r--chromium/media/capture/video/chromeos/camera_metadata_utils.h5
-rw-r--r--chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc3
-rw-r--r--chromium/media/capture/video/chromeos/mock_camera_module.cc11
-rw-r--r--chromium/media/capture/video/chromeos/mock_camera_module.h12
-rw-r--r--chromium/media/capture/video/chromeos/mock_video_capture_client.cc5
-rw-r--r--chromium/media/capture/video/chromeos/mock_video_capture_client.h5
-rw-r--r--chromium/media/capture/video/chromeos/mojom/camera3.mojom20
-rw-r--r--chromium/media/capture/video/chromeos/mojom/camera_app.mojom10
-rw-r--r--chromium/media/capture/video/chromeos/mojom/camera_common.mojom13
-rw-r--r--chromium/media/capture/video/chromeos/mojom/cros_camera_service.mojom79
-rw-r--r--chromium/media/capture/video/chromeos/request_manager.cc133
-rw-r--r--chromium/media/capture/video/chromeos/request_manager.h27
-rw-r--r--chromium/media/capture/video/chromeos/request_manager_unittest.cc27
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.cc31
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.h10
-rw-r--r--chromium/media/capture/video/chromeos/token_manager.cc157
-rw-r--r--chromium/media/capture/video/chromeos/token_manager.h72
-rw-r--r--chromium/media/capture/video/chromeos/token_manager_unittest.cc97
-rw-r--r--chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.cc34
-rw-r--r--chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h5
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.cc282
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.h133
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc288
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h80
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc50
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h9
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_jpeg_decoder.h9
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc19
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h9
-rw-r--r--chromium/media/capture/video/create_video_capture_device_factory.cc37
-rw-r--r--chromium/media/capture/video/create_video_capture_device_factory.h9
-rw-r--r--chromium/media/capture/video/fake_video_capture_device.cc1
-rw-r--r--chromium/media/capture/video/file_video_capture_device_factory.cc1
-rw-r--r--chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc7
-rw-r--r--chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc8
-rw-r--r--chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc5
-rw-r--r--chromium/media/capture/video/gpu_memory_buffer_utils.cc2
-rw-r--r--chromium/media/capture/video/linux/v4l2_capture_delegate.cc13
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_factory_linux.cc34
-rw-r--r--chromium/media/capture/video/mac/DEPS6
-rw-r--r--chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm30
-rw-r--r--chromium/media/capture/video/mac/pixel_buffer_transferer_mac_unittest.mm30
-rw-r--r--chromium/media/capture/video/mac/sample_buffer_transformer_mac.cc385
-rw-r--r--chromium/media/capture/video/mac/sample_buffer_transformer_mac.h67
-rw-r--r--chromium/media/capture/video/mac/sample_buffer_transformer_mac_unittest.mm607
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_legacy_mac.mm4
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h36
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm290
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_mac_unittest.mm424
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h15
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h3
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm17
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_mac.h5
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_mac.mm15
-rw-r--r--chromium/media/capture/video/mac/video_capture_metrics_mac.h23
-rw-r--r--chromium/media/capture/video/mac/video_capture_metrics_mac.mm88
-rw-r--r--chromium/media/capture/video/mac/video_capture_metrics_mac_unittest.mm87
-rw-r--r--chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc4
-rw-r--r--chromium/media/capture/video/mock_gpu_memory_buffer_manager.h9
-rw-r--r--chromium/media/capture/video/mock_video_capture_device_client.h7
-rw-r--r--chromium/media/capture/video/mock_video_frame_receiver.h12
-rw-r--r--chromium/media/capture/video/video_capture_buffer_pool_impl.cc12
-rw-r--r--chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc4
-rw-r--r--chromium/media/capture/video/video_capture_device.cc24
-rw-r--r--chromium/media/capture/video/video_capture_device.h19
-rw-r--r--chromium/media/capture/video/video_capture_device_client.cc91
-rw-r--r--chromium/media/capture/video/video_capture_device_client.h19
-rw-r--r--chromium/media/capture/video/video_capture_device_client_unittest.cc14
-rw-r--r--chromium/media/capture/video/video_capture_device_factory.cc6
-rw-r--r--chromium/media/capture/video/video_capture_device_factory.h4
-rw-r--r--chromium/media/capture/video/video_capture_device_unittest.cc38
-rw-r--r--chromium/media/capture/video/video_capture_metrics.cc180
-rw-r--r--chromium/media/capture/video/video_capture_metrics.h19
-rw-r--r--chromium/media/capture/video/video_capture_metrics_unittest.cc59
-rw-r--r--chromium/media/capture/video/video_capture_system_impl.cc6
-rw-r--r--chromium/media/capture/video/video_frame_receiver.cc36
-rw-r--r--chromium/media/capture/video/video_frame_receiver.h28
-rw-r--r--chromium/media/capture/video/video_frame_receiver_on_task_runner.cc10
-rw-r--r--chromium/media/capture/video/video_frame_receiver_on_task_runner.h8
-rw-r--r--chromium/media/capture/video/win/OWNERS6
-rw-r--r--chromium/media/capture/video/win/d3d_capture_test_utils.cc918
-rw-r--r--chromium/media/capture/video/win/d3d_capture_test_utils.h731
-rw-r--r--chromium/media/capture/video/win/gpu_memory_buffer_tracker.cc151
-rw-r--r--chromium/media/capture/video/win/gpu_memory_buffer_tracker.h54
-rw-r--r--chromium/media/capture/video/win/gpu_memory_buffer_tracker_unittest.cc167
-rw-r--r--chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.cc37
-rw-r--r--chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.h35
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.cc36
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.h9
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc100
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win.cc231
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win.h21
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc153
-rw-r--r--chromium/media/capture/video/win/video_capture_dxgi_device_manager.cc92
-rw-r--r--chromium/media/capture/video/win/video_capture_dxgi_device_manager.h50
123 files changed, 8048 insertions, 1936 deletions
diff --git a/chromium/media/capture/video/DEPS b/chromium/media/capture/video/DEPS
index 1ddde61f105..aa779c908c9 100644
--- a/chromium/media/capture/video/DEPS
+++ b/chromium/media/capture/video/DEPS
@@ -1,5 +1,6 @@
include_rules = [
"+chromeos/dbus",
+ "+components/device_event_log",
"+mojo/public/cpp",
"+third_party/libyuv",
]
diff --git a/chromium/media/capture/video/android/BUILD.gn b/chromium/media/capture/video/android/BUILD.gn
index d2c29492525..038445788ef 100644
--- a/chromium/media/capture/video/android/BUILD.gn
+++ b/chromium/media/capture/video/android/BUILD.gn
@@ -52,7 +52,7 @@ android_library("capture_java") {
deps = [
"//base:base_java",
"//base:jni_java",
- "//third_party/android_deps:androidx_annotation_annotation_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
]
annotation_processor_deps = [ "//base/android/jni_generator:jni_processor" ]
diff --git a/chromium/media/capture/video/android/video_capture_device_factory_android.cc b/chromium/media/capture/video/android/video_capture_device_factory_android.cc
index ff95a22a29d..0708e1ecdf4 100644
--- a/chromium/media/capture/video/android/video_capture_device_factory_android.cc
+++ b/chromium/media/capture/video/android/video_capture_device_factory_android.cc
@@ -82,8 +82,10 @@ void VideoCaptureDeviceFactoryAndroid::GetDevicesInfo(
const std::string device_id =
base::android::ConvertJavaStringToUTF8(device_id_jstring);
- const int capture_api_type =
- Java_VideoCaptureFactory_getCaptureApiType(env, camera_index);
+ const VideoCaptureApi capture_api_type = static_cast<VideoCaptureApi>(
+ Java_VideoCaptureFactory_getCaptureApiType(env, camera_index));
+ if (capture_api_type == VideoCaptureApi::UNKNOWN)
+ continue;
VideoCaptureControlSupport control_support;
const int facing_mode =
Java_VideoCaptureFactory_getFacingMode(env, camera_index);
@@ -101,9 +103,8 @@ void VideoCaptureDeviceFactoryAndroid::GetDevicesInfo(
// currently only used for USB model identifiers, so this implementation
// just indicates an unknown device model (by not providing one).
VideoCaptureDeviceInfo device_info(VideoCaptureDeviceDescriptor(
- display_name, device_id, "" /*model_id*/,
- static_cast<VideoCaptureApi>(capture_api_type), control_support,
- VideoCaptureTransportType::OTHER_TRANSPORT,
+ display_name, device_id, "" /*model_id*/, capture_api_type,
+ control_support, VideoCaptureTransportType::OTHER_TRANSPORT,
static_cast<VideoFacingMode>(facing_mode)));
auto it = supported_formats_cache_.find(device_id);
diff --git a/chromium/media/capture/video/chromeos/DEPS b/chromium/media/capture/video/chromeos/DEPS
index 156f915bf69..1d9cd2a7d89 100644
--- a/chromium/media/capture/video/chromeos/DEPS
+++ b/chromium/media/capture/video/chromeos/DEPS
@@ -1,5 +1,5 @@
include_rules = [
- "+chromeos/dbus",
+ "+ash/constants/ash_features.h",
"+components/chromeos_camera",
"+third_party/libsync",
]
diff --git a/chromium/media/capture/video/chromeos/ash/DEPS b/chromium/media/capture/video/chromeos/ash/DEPS
new file mode 100644
index 00000000000..09b7125849a
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/ash/DEPS
@@ -0,0 +1,3 @@
+include_rules = [
+ "+chromeos/dbus",
+]
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc b/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.cc
index 39fa08efe27..fe9613b55d1 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
+++ b/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.cc
@@ -2,8 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
-
+#include "media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h"
#include <fcntl.h>
#include <grp.h>
#include <poll.h>
@@ -16,6 +15,7 @@
#include "base/files/file.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
+#include "base/notreached.h"
#include "base/posix/eintr_wrapper.h"
#include "base/rand_util.h"
#include "base/single_thread_task_runner.h"
@@ -23,6 +23,8 @@
#include "base/strings/string_number_conversions.h"
#include "base/synchronization/waitable_event.h"
#include "base/trace_event/trace_event.h"
+#include "components/device_event_log/device_event_log.h"
+#include "media/base/bind_to_current_loop.h"
#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/platform/named_platform_channel.h"
@@ -50,7 +52,8 @@ std::string GenerateRandomToken() {
// to here, and the write side will be closed in such a case.
bool WaitForSocketReadable(int raw_socket_fd, int raw_cancel_fd) {
struct pollfd fds[2] = {
- {raw_socket_fd, POLLIN, 0}, {raw_cancel_fd, POLLIN, 0},
+ {raw_socket_fd, POLLIN, 0},
+ {raw_cancel_fd, POLLIN, 0},
};
if (HANDLE_EINTR(poll(fds, base::size(fds), -1)) <= 0) {
@@ -67,11 +70,22 @@ bool WaitForSocketReadable(int raw_socket_fd, int raw_cancel_fd) {
return true;
}
+bool HasCrosCameraTest() {
+ static constexpr char kCrosCameraTestPath[] =
+ "/usr/local/bin/cros_camera_test";
+
+ base::FilePath path(kCrosCameraTestPath);
+ return base::PathExists(path);
+}
+
class MojoCameraClientObserver : public CameraClientObserver {
public:
explicit MojoCameraClientObserver(
- mojo::PendingRemote<cros::mojom::CameraHalClient> client)
- : client_(std::move(client)) {}
+ mojo::PendingRemote<cros::mojom::CameraHalClient> client,
+ cros::mojom::CameraClientType type,
+ base::UnguessableToken auth_token)
+ : CameraClientObserver(type, std::move(auth_token)),
+ client_(std::move(client)) {}
void OnChannelCreated(
mojo::PendingRemote<cros::mojom::CameraModule> camera_module) override {
@@ -89,6 +103,33 @@ class MojoCameraClientObserver : public CameraClientObserver {
CameraClientObserver::~CameraClientObserver() = default;
+bool CameraClientObserver::Authenticate(TokenManager* token_manager) {
+ auto authenticated_type =
+ token_manager->AuthenticateClient(type_, auth_token_);
+ if (!authenticated_type) {
+ return false;
+ }
+ type_ = authenticated_type.value();
+ return true;
+}
+
+FailedCameraHalServerCallbacks::FailedCameraHalServerCallbacks()
+ : callbacks_(this) {}
+FailedCameraHalServerCallbacks::~FailedCameraHalServerCallbacks() = default;
+
+mojo::PendingRemote<cros::mojom::CameraHalServerCallbacks>
+FailedCameraHalServerCallbacks::GetRemote() {
+ return callbacks_.BindNewPipeAndPassRemote();
+}
+
+void FailedCameraHalServerCallbacks::CameraDeviceActivityChange(
+ int32_t camera_id,
+ bool opened,
+ cros::mojom::CameraClientType type) {}
+
+void FailedCameraHalServerCallbacks::CameraPrivacySwitchStateChange(
+ cros::mojom::CameraPrivacySwitchState state) {}
+
// static
CameraHalDispatcherImpl* CameraHalDispatcherImpl::GetInstance() {
return base::Singleton<CameraHalDispatcherImpl>::get();
@@ -119,10 +160,25 @@ bool CameraHalDispatcherImpl::Start(
if (!StartThreads()) {
return false;
}
+ // This event is for adding camera category to categories list.
+ TRACE_EVENT0("camera", "CameraHalDispatcherImpl");
+ base::trace_event::TraceLog::GetInstance()->AddEnabledStateObserver(this);
+
jda_factory_ = std::move(jda_factory);
jea_factory_ = std::move(jea_factory);
base::WaitableEvent started(base::WaitableEvent::ResetPolicy::MANUAL,
base::WaitableEvent::InitialState::NOT_SIGNALED);
+ // It's important we generate tokens before creating the socket, because once
+ // it is available, everyone connecting to socket would start fetching
+ // tokens.
+ if (!token_manager_.GenerateServerToken()) {
+ LOG(ERROR) << "Failed to generate authentication token for server";
+ return false;
+ }
+ if (HasCrosCameraTest() && !token_manager_.GenerateTestClientToken()) {
+ LOG(ERROR) << "Failed to generate token for test client";
+ return false;
+ }
blocking_io_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&CameraHalDispatcherImpl::CreateSocket,
@@ -132,14 +188,16 @@ bool CameraHalDispatcherImpl::Start(
}
void CameraHalDispatcherImpl::AddClientObserver(
- std::unique_ptr<CameraClientObserver> observer) {
+ std::unique_ptr<CameraClientObserver> observer,
+ base::OnceCallback<void(int32_t)> result_callback) {
// If |proxy_thread_| fails to start in Start() then CameraHalDelegate will
// not be created, and this function will not be called.
DCHECK(proxy_thread_.IsRunning());
proxy_thread_.task_runner()->PostTask(
FROM_HERE,
base::BindOnce(&CameraHalDispatcherImpl::AddClientObserverOnProxyThread,
- base::Unretained(this), std::move(observer)));
+ base::Unretained(this), std::move(observer),
+ std::move(result_callback)));
}
bool CameraHalDispatcherImpl::IsStarted() {
@@ -147,13 +205,58 @@ bool CameraHalDispatcherImpl::IsStarted() {
proxy_fd_.is_valid();
}
+void CameraHalDispatcherImpl::AddActiveClientObserver(
+ CameraActiveClientObserver* observer) {
+ base::AutoLock lock(opened_camera_id_map_lock_);
+ for (auto& opened_camera_id_pair : opened_camera_id_map_) {
+ const auto& camera_client_type = opened_camera_id_pair.first;
+ const auto& camera_id_set = opened_camera_id_pair.second;
+ if (!camera_id_set.empty()) {
+ observer->OnActiveClientChange(camera_client_type, /*is_active=*/true);
+ }
+ }
+ active_client_observers_->AddObserver(observer);
+}
+
+void CameraHalDispatcherImpl::RemoveActiveClientObserver(
+ CameraActiveClientObserver* observer) {
+ active_client_observers_->RemoveObserver(observer);
+}
+
+cros::mojom::CameraPrivacySwitchState
+CameraHalDispatcherImpl::AddCameraPrivacySwitchObserver(
+ CameraPrivacySwitchObserver* observer) {
+ privacy_switch_observers_->AddObserver(observer);
+
+ base::AutoLock lock(privacy_switch_state_lock_);
+ return current_privacy_switch_state_;
+}
+
+void CameraHalDispatcherImpl::RemoveCameraPrivacySwitchObserver(
+ CameraPrivacySwitchObserver* observer) {
+ privacy_switch_observers_->RemoveObserver(observer);
+}
+
+void CameraHalDispatcherImpl::RegisterPluginVmToken(
+ const base::UnguessableToken& token) {
+ token_manager_.RegisterPluginVmToken(token);
+}
+
+void CameraHalDispatcherImpl::UnregisterPluginVmToken(
+ const base::UnguessableToken& token) {
+ token_manager_.UnregisterPluginVmToken(token);
+}
+
CameraHalDispatcherImpl::CameraHalDispatcherImpl()
: proxy_thread_("CameraProxyThread"),
- blocking_io_thread_("CameraBlockingIOThread") {
- // This event is for adding camera category to categories list.
- TRACE_EVENT0("camera", "CameraHalDispatcherImpl");
- base::trace_event::TraceLog::GetInstance()->AddEnabledStateObserver(this);
-}
+ blocking_io_thread_("CameraBlockingIOThread"),
+ camera_hal_server_callbacks_(this),
+ active_client_observers_(
+ new base::ObserverListThreadSafe<CameraActiveClientObserver>()),
+ current_privacy_switch_state_(
+ cros::mojom::CameraPrivacySwitchState::UNKNOWN),
+ privacy_switch_observers_(
+ new base::ObserverListThreadSafe<CameraPrivacySwitchObserver>()) {}
CameraHalDispatcherImpl::~CameraHalDispatcherImpl() {
VLOG(1) << "Stopping CameraHalDispatcherImpl...";
@@ -164,23 +267,41 @@ CameraHalDispatcherImpl::~CameraHalDispatcherImpl() {
proxy_thread_.Stop();
}
blocking_io_thread_.Stop();
- base::trace_event::TraceLog::GetInstance()->RemoveEnabledStateObserver(this);
- VLOG(1) << "CameraHalDispatcherImpl stopped";
+ CAMERA_LOG(EVENT) << "CameraHalDispatcherImpl stopped";
}
void CameraHalDispatcherImpl::RegisterServer(
mojo::PendingRemote<cros::mojom::CameraHalServer> camera_hal_server) {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
+ LOG(ERROR) << "CameraHalDispatcher::RegisterServer is deprecated. "
+ "CameraHalServer will not be registered.";
+}
+
+void CameraHalDispatcherImpl::RegisterServerWithToken(
+ mojo::PendingRemote<cros::mojom::CameraHalServer> camera_hal_server,
+ const base::UnguessableToken& token,
+ RegisterServerWithTokenCallback callback) {
+ DCHECK(proxy_task_runner_->BelongsToCurrentThread());
if (camera_hal_server_) {
LOG(ERROR) << "Camera HAL server is already registered";
+ std::move(callback).Run(-EALREADY,
+ failed_camera_hal_server_callbacks_.GetRemote());
+ return;
+ }
+ if (!token_manager_.AuthenticateServer(token)) {
+ LOG(ERROR) << "Failed to authenticate server";
+ std::move(callback).Run(-EPERM,
+ failed_camera_hal_server_callbacks_.GetRemote());
return;
}
camera_hal_server_.Bind(std::move(camera_hal_server));
camera_hal_server_.set_disconnect_handler(
base::BindOnce(&CameraHalDispatcherImpl::OnCameraHalServerConnectionError,
base::Unretained(this)));
- VLOG(1) << "Camera HAL server registered";
+ CAMERA_LOG(EVENT) << "Camera HAL server registered";
+ std::move(callback).Run(
+ 0, camera_hal_server_callbacks_.BindNewPipeAndPassRemote());
// Set up the Mojo channels for clients which registered before the server
// registers.
@@ -191,13 +312,24 @@ void CameraHalDispatcherImpl::RegisterServer(
void CameraHalDispatcherImpl::RegisterClient(
mojo::PendingRemote<cros::mojom::CameraHalClient> client) {
- // RegisterClient can be called locally by ArcCameraBridge. Unretained
- // reference is safe here because CameraHalDispatcherImpl owns
+ NOTREACHED() << "RegisterClient() is disabled";
+}
+
+void CameraHalDispatcherImpl::RegisterClientWithToken(
+ mojo::PendingRemote<cros::mojom::CameraHalClient> client,
+ cros::mojom::CameraClientType type,
+ const base::UnguessableToken& auth_token,
+ RegisterClientWithTokenCallback callback) {
+ base::UnguessableToken client_auth_token = auth_token;
+ // Unretained reference is safe here because CameraHalDispatcherImpl owns
// |proxy_thread_|.
proxy_task_runner_->PostTask(
FROM_HERE,
- base::BindOnce(&CameraHalDispatcherImpl::RegisterClientOnProxyThread,
- base::Unretained(this), std::move(client)));
+ base::BindOnce(
+ &CameraHalDispatcherImpl::RegisterClientWithTokenOnProxyThread,
+ base::Unretained(this), std::move(client), type,
+ std::move(client_auth_token),
+ media::BindToCurrentLoop(std::move(callback))));
}
void CameraHalDispatcherImpl::GetJpegDecodeAccelerator(
@@ -212,6 +344,64 @@ void CameraHalDispatcherImpl::GetJpegEncodeAccelerator(
jea_factory_.Run(std::move(jea_receiver));
}
+void CameraHalDispatcherImpl::CameraDeviceActivityChange(
+ int32_t camera_id,
+ bool opened,
+ cros::mojom::CameraClientType type) {
+ VLOG(1) << type << (opened ? " opened " : " closed ") << "camera "
+ << camera_id;
+ base::AutoLock lock(opened_camera_id_map_lock_);
+ auto& camera_id_set = opened_camera_id_map_[type];
+ if (opened) {
+ auto result = camera_id_set.insert(camera_id);
+ if (!result.second) { // No element inserted.
+ LOG(WARNING) << "Received duplicated open notification for camera "
+ << camera_id;
+ return;
+ }
+ if (camera_id_set.size() == 1) {
+ VLOG(1) << type << " is active";
+ active_client_observers_->Notify(
+ FROM_HERE, &CameraActiveClientObserver::OnActiveClientChange, type,
+ /*is_active=*/true);
+ }
+ } else {
+ auto it = camera_id_set.find(camera_id);
+ if (it == camera_id_set.end()) {
+ // This can happen if something happened to the client process and it
+ // simultaneous lost connections to both CameraHalDispatcher and
+ // CameraHalServer.
+ LOG(WARNING) << "Received close notification for camera " << camera_id
+ << " which is not opened";
+ return;
+ }
+ camera_id_set.erase(it);
+ if (camera_id_set.empty()) {
+ VLOG(1) << type << " is inactive";
+ active_client_observers_->Notify(
+ FROM_HERE, &CameraActiveClientObserver::OnActiveClientChange, type,
+ /*is_active=*/false);
+ }
+ }
+}
+
+void CameraHalDispatcherImpl::CameraPrivacySwitchStateChange(
+ cros::mojom::CameraPrivacySwitchState state) {
+ DCHECK(proxy_task_runner_->BelongsToCurrentThread());
+
+ base::AutoLock lock(privacy_switch_state_lock_);
+ current_privacy_switch_state_ = state;
+ privacy_switch_observers_->Notify(
+ FROM_HERE,
+ &CameraPrivacySwitchObserver::OnCameraPrivacySwitchStatusChanged,
+ current_privacy_switch_state_);
+}
+
+base::UnguessableToken CameraHalDispatcherImpl::GetTokenForTrustedClient(
+ cros::mojom::CameraClientType type) {
+ return token_manager_.GetTokenForTrustedClient(type);
+}
+
void CameraHalDispatcherImpl::OnTraceLogEnabled() {
proxy_task_runner_->PostTask(
FROM_HERE,
@@ -347,33 +537,46 @@ void CameraHalDispatcherImpl::StartServiceLoop(base::ScopedFD socket_fd,
}
}
-void CameraHalDispatcherImpl::RegisterClientOnProxyThread(
- mojo::PendingRemote<cros::mojom::CameraHalClient> client) {
+void CameraHalDispatcherImpl::RegisterClientWithTokenOnProxyThread(
+ mojo::PendingRemote<cros::mojom::CameraHalClient> client,
+ cros::mojom::CameraClientType type,
+ base::UnguessableToken auth_token,
+ RegisterClientWithTokenCallback callback) {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
- auto client_observer =
- std::make_unique<MojoCameraClientObserver>(std::move(client));
+ auto client_observer = std::make_unique<MojoCameraClientObserver>(
+ std::move(client), type, std::move(auth_token));
client_observer->client().set_disconnect_handler(base::BindOnce(
&CameraHalDispatcherImpl::OnCameraHalClientConnectionError,
base::Unretained(this), base::Unretained(client_observer.get())));
- AddClientObserver(std::move(client_observer));
- VLOG(1) << "Camera HAL client registered";
+ AddClientObserverOnProxyThread(std::move(client_observer),
+ std::move(callback));
}
void CameraHalDispatcherImpl::AddClientObserverOnProxyThread(
- std::unique_ptr<CameraClientObserver> observer) {
+ std::unique_ptr<CameraClientObserver> observer,
+ base::OnceCallback<void(int32_t)> result_callback) {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
+ if (!observer->Authenticate(&token_manager_)) {
+ LOG(ERROR) << "Failed to authenticate camera client observer";
+ std::move(result_callback).Run(-EPERM);
+ return;
+ }
if (camera_hal_server_) {
EstablishMojoChannel(observer.get());
}
client_observers_.insert(std::move(observer));
+ std::move(result_callback).Run(0);
+ CAMERA_LOG(EVENT) << "Camera HAL client registered";
}
void CameraHalDispatcherImpl::EstablishMojoChannel(
CameraClientObserver* client_observer) {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
mojo::PendingRemote<cros::mojom::CameraModule> camera_module;
+ const auto& type = client_observer->GetType();
+ CAMERA_LOG(EVENT) << "Establishing server channel for " << type;
camera_hal_server_->CreateChannel(
- camera_module.InitWithNewPipeAndPassReceiver());
+ camera_module.InitWithNewPipeAndPassReceiver(), type);
client_observer->OnChannelCreated(std::move(camera_module));
}
@@ -388,22 +591,58 @@ void CameraHalDispatcherImpl::OnPeerConnected(
void CameraHalDispatcherImpl::OnCameraHalServerConnectionError() {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
- VLOG(1) << "Camera HAL server connection lost";
+ base::AutoLock lock(opened_camera_id_map_lock_);
+ CAMERA_LOG(EVENT) << "Camera HAL server connection lost";
camera_hal_server_.reset();
+ camera_hal_server_callbacks_.reset();
+ for (auto& opened_camera_id_pair : opened_camera_id_map_) {
+ auto camera_client_type = opened_camera_id_pair.first;
+ const auto& camera_id_set = opened_camera_id_pair.second;
+ if (!camera_id_set.empty()) {
+ active_client_observers_->Notify(
+ FROM_HERE, &CameraActiveClientObserver::OnActiveClientChange,
+ camera_client_type, /*is_active=*/false);
+ }
+ }
+ opened_camera_id_map_.clear();
+
+ base::AutoLock privacy_lock(privacy_switch_state_lock_);
+ current_privacy_switch_state_ =
+ cros::mojom::CameraPrivacySwitchState::UNKNOWN;
+ privacy_switch_observers_->Notify(
+ FROM_HERE,
+ &CameraPrivacySwitchObserver::OnCameraPrivacySwitchStatusChanged,
+ current_privacy_switch_state_);
}
void CameraHalDispatcherImpl::OnCameraHalClientConnectionError(
CameraClientObserver* client_observer) {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
+ base::AutoLock lock(opened_camera_id_map_lock_);
+ auto camera_client_type = client_observer->GetType();
+ auto opened_it = opened_camera_id_map_.find(camera_client_type);
+ if (opened_it == opened_camera_id_map_.end()) {
+ // This can happen if this camera client never opened a camera.
+ return;
+ }
+ const auto& camera_id_set = opened_it->second;
+ if (!camera_id_set.empty()) {
+ active_client_observers_->Notify(
+ FROM_HERE, &CameraActiveClientObserver::OnActiveClientChange,
+ camera_client_type, /*is_active=*/false);
+ }
+ opened_camera_id_map_.erase(opened_it);
+
auto it = client_observers_.find(client_observer);
if (it != client_observers_.end()) {
client_observers_.erase(it);
- VLOG(1) << "Camera HAL client connection lost";
+ CAMERA_LOG(EVENT) << "Camera HAL client connection lost";
}
}
void CameraHalDispatcherImpl::StopOnProxyThread() {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
+ base::trace_event::TraceLog::GetInstance()->RemoveEnabledStateObserver(this);
// TODO(crbug.com/1053569): Remove these lines once the issue is solved.
base::File::Info info;
@@ -422,6 +661,7 @@ void CameraHalDispatcherImpl::StopOnProxyThread() {
// Close |cancel_pipe_| to quit the loop in WaitForIncomingConnection.
cancel_pipe_.reset();
client_observers_.clear();
+ camera_hal_server_callbacks_.reset();
camera_hal_server_.reset();
receiver_set_.Clear();
}
@@ -446,4 +686,8 @@ void CameraHalDispatcherImpl::OnTraceLogDisabledOnProxyThread() {
camera_hal_server_->SetTracingEnabled(false);
}
+TokenManager* CameraHalDispatcherImpl::GetTokenManagerForTesting() {
+ return &token_manager_;
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h b/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h
new file mode 100644
index 00000000000..47f56d92dfb
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h
@@ -0,0 +1,283 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_ASH_CAMERA_HAL_DISPATCHER_IMPL_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_ASH_CAMERA_HAL_DISPATCHER_IMPL_H_
+
+#include <memory>
+#include <set>
+
+#include "base/containers/flat_map.h"
+#include "base/containers/flat_set.h"
+#include "base/containers/unique_ptr_adapters.h"
+#include "base/files/scoped_file.h"
+#include "base/memory/scoped_refptr.h"
+#include "base/memory/singleton.h"
+#include "base/observer_list_threadsafe.h"
+#include "base/observer_list_types.h"
+#include "base/synchronization/lock.h"
+#include "base/synchronization/waitable_event.h"
+#include "base/thread_annotations.h"
+#include "base/threading/thread.h"
+#include "base/unguessable_token.h"
+#include "components/chromeos_camera/common/jpeg_encode_accelerator.mojom.h"
+#include "components/chromeos_camera/common/mjpeg_decode_accelerator.mojom.h"
+#include "media/capture/capture_export.h"
+#include "media/capture/video/chromeos/mojom/cros_camera_service.mojom.h"
+#include "media/capture/video/chromeos/token_manager.h"
+#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
+#include "media/capture/video/video_capture_device_factory.h"
+#include "mojo/public/cpp/bindings/pending_receiver.h"
+#include "mojo/public/cpp/bindings/pending_remote.h"
+#include "mojo/public/cpp/bindings/receiver_set.h"
+#include "mojo/public/cpp/bindings/remote.h"
+#include "mojo/public/cpp/platform/platform_channel_server_endpoint.h"
+
+namespace base {
+
+class SingleThreadTaskRunner;
+class WaitableEvent;
+
+} // namespace base
+
+namespace media {
+
+using MojoJpegEncodeAcceleratorFactoryCB = base::RepeatingCallback<void(
+ mojo::PendingReceiver<chromeos_camera::mojom::JpegEncodeAccelerator>)>;
+
+class CAPTURE_EXPORT CameraClientObserver {
+ public:
+ CameraClientObserver(cros::mojom::CameraClientType type,
+ base::UnguessableToken auth_token)
+ : type_(type), auth_token_(auth_token) {}
+ virtual ~CameraClientObserver();
+ virtual void OnChannelCreated(
+ mojo::PendingRemote<cros::mojom::CameraModule> camera_module) = 0;
+
+ cros::mojom::CameraClientType GetType() { return type_; }
+ const base::UnguessableToken GetAuthToken() { return auth_token_; }
+
+ bool Authenticate(TokenManager* token_manager);
+
+ private:
+ cros::mojom::CameraClientType type_;
+ base::UnguessableToken auth_token_;
+};
+
+class CAPTURE_EXPORT CameraActiveClientObserver : public base::CheckedObserver {
+ public:
+ virtual void OnActiveClientChange(cros::mojom::CameraClientType type,
+ bool is_active) = 0;
+};
+
+// A class to provide a no-op remote to CameraHalServer that failed
+// registration. When CameraHalServer calls
+// CameraHalDispatcher::RegisterServerWithToken to register itself, a
+// PendingRemote<CameraHalServerCallbacks> is returned. Returning an unbound
+// pending remote would crash CameraHalServer immediately, and thus disallows
+// it from handling authentication failures.
+// TODO(b/170075468): Modify RegisterServerWithToken to return an optional
+// CameraHalServerCallbacks instead.
+class FailedCameraHalServerCallbacks
+ : public cros::mojom::CameraHalServerCallbacks {
+ private:
+ friend class CameraHalDispatcherImpl;
+
+ FailedCameraHalServerCallbacks();
+ ~FailedCameraHalServerCallbacks() final;
+
+ mojo::PendingRemote<cros::mojom::CameraHalServerCallbacks> GetRemote();
+
+ // CameraHalServerCallbacks implementations.
+ void CameraDeviceActivityChange(int32_t camera_id,
+ bool opened,
+ cros::mojom::CameraClientType type) final;
+ void CameraPrivacySwitchStateChange(
+ cros::mojom::CameraPrivacySwitchState state) final;
+
+ mojo::Receiver<cros::mojom::CameraHalServerCallbacks> callbacks_;
+};
+
+class CAPTURE_EXPORT CameraPrivacySwitchObserver
+ : public base::CheckedObserver {
+ public:
+ virtual void OnCameraPrivacySwitchStatusChanged(
+ cros::mojom::CameraPrivacySwitchState state) = 0;
+
+ protected:
+ ~CameraPrivacySwitchObserver() override = default;
+};
+
+// The CameraHalDispatcherImpl hosts and waits on the unix domain socket
+// /var/run/camera3.sock. CameraHalServer and CameraHalClients connect to the
+// unix domain socket to create the initial Mojo connections with the
+// CameraHalDisptcherImpl, and CameraHalDispatcherImpl then creates and
+// dispaches the Mojo channels between CameraHalServer and CameraHalClients to
+// establish direct Mojo connections between the CameraHalServer and the
+// CameraHalClients.
+//
+// For general documentation about the CameraHalDispater Mojo interface see the
+// comments in mojo/cros_camera_service.mojom.
+class CAPTURE_EXPORT CameraHalDispatcherImpl final
+ : public cros::mojom::CameraHalDispatcher,
+ public cros::mojom::CameraHalServerCallbacks,
+ public base::trace_event::TraceLog::EnabledStateObserver {
+ public:
+ static CameraHalDispatcherImpl* GetInstance();
+
+ bool Start(MojoMjpegDecodeAcceleratorFactoryCB jda_factory,
+ MojoJpegEncodeAcceleratorFactoryCB jea_factory);
+
+ void AddClientObserver(std::unique_ptr<CameraClientObserver> observer,
+ base::OnceCallback<void(int32_t)> result_callback);
+
+ bool IsStarted();
+
+ // Adds an observer that watches for active camera client changes. Observer
+ // would be immediately notified of the current list of active clients.
+ void AddActiveClientObserver(CameraActiveClientObserver* observer);
+
+ // Removes the observer. A previously-added observer must be removed before
+ // being destroyed.
+ void RemoveActiveClientObserver(CameraActiveClientObserver* observer);
+
+ // Adds an observer to get notified when the camera privacy switch status
+ // changed. Please note that for some devices, the signal will only be
+ // detectable when the camera is currently on due to hardware limitations.
+ // Returns the current state of the camera privacy switch.
+ cros::mojom::CameraPrivacySwitchState AddCameraPrivacySwitchObserver(
+ CameraPrivacySwitchObserver* observer);
+
+ // Removes the observer. A previously-added observer must be removed before
+ // being destroyed.
+ void RemoveCameraPrivacySwitchObserver(CameraPrivacySwitchObserver* observer);
+
+ // Called by vm_permission_service to register the token used for pluginvm.
+ void RegisterPluginVmToken(const base::UnguessableToken& token);
+ void UnregisterPluginVmToken(const base::UnguessableToken& token);
+
+ // CameraHalDispatcher implementations.
+ void RegisterServer(
+ mojo::PendingRemote<cros::mojom::CameraHalServer> server) final;
+ void RegisterServerWithToken(
+ mojo::PendingRemote<cros::mojom::CameraHalServer> server,
+ const base::UnguessableToken& token,
+ RegisterServerWithTokenCallback callback) final;
+ void RegisterClient(
+ mojo::PendingRemote<cros::mojom::CameraHalClient> client) final;
+ void RegisterClientWithToken(
+ mojo::PendingRemote<cros::mojom::CameraHalClient> client,
+ cros::mojom::CameraClientType type,
+ const base::UnguessableToken& auth_token,
+ RegisterClientWithTokenCallback callback) final;
+ void GetJpegDecodeAccelerator(
+ mojo::PendingReceiver<chromeos_camera::mojom::MjpegDecodeAccelerator>
+ jda_receiver) final;
+ void GetJpegEncodeAccelerator(
+ mojo::PendingReceiver<chromeos_camera::mojom::JpegEncodeAccelerator>
+ jea_receiver) final;
+
+ // CameraHalServerCallbacks implementations.
+ void CameraDeviceActivityChange(int32_t camera_id,
+ bool opened,
+ cros::mojom::CameraClientType type) final;
+ void CameraPrivacySwitchStateChange(
+ cros::mojom::CameraPrivacySwitchState state) final;
+
+ base::UnguessableToken GetTokenForTrustedClient(
+ cros::mojom::CameraClientType type);
+
+ // base::trace_event::TraceLog::EnabledStateObserver implementation.
+ void OnTraceLogEnabled() final;
+ void OnTraceLogDisabled() final;
+
+ private:
+ friend struct base::DefaultSingletonTraits<CameraHalDispatcherImpl>;
+ // Allow the test to construct the class directly.
+ friend class CameraHalDispatcherImplTest;
+
+ CameraHalDispatcherImpl();
+ ~CameraHalDispatcherImpl() final;
+
+ bool StartThreads();
+
+ // Creates the unix domain socket for the camera client processes and the
+ // camera HALv3 adapter process to connect.
+ void CreateSocket(base::WaitableEvent* started);
+
+ // Waits for incoming connections (from HAL process or from client processes).
+ // Runs on |blocking_io_thread_|.
+ void StartServiceLoop(base::ScopedFD socket_fd, base::WaitableEvent* started);
+
+ void RegisterClientWithTokenOnProxyThread(
+ mojo::PendingRemote<cros::mojom::CameraHalClient> client,
+ cros::mojom::CameraClientType type,
+ base::UnguessableToken token,
+ RegisterClientWithTokenCallback callback);
+
+ void AddClientObserverOnProxyThread(
+ std::unique_ptr<CameraClientObserver> observer,
+ base::OnceCallback<void(int32_t)> result_callback);
+
+ void EstablishMojoChannel(CameraClientObserver* client_observer);
+
+ // Handler for incoming Mojo connection on the unix domain socket.
+ void OnPeerConnected(mojo::ScopedMessagePipeHandle message_pipe);
+
+ // Mojo connection error handlers.
+ void OnCameraHalServerConnectionError();
+ void OnCameraHalClientConnectionError(CameraClientObserver* client);
+
+ void StopOnProxyThread();
+
+ void OnTraceLogEnabledOnProxyThread();
+ void OnTraceLogDisabledOnProxyThread();
+
+ TokenManager* GetTokenManagerForTesting();
+
+ base::ScopedFD proxy_fd_;
+ base::ScopedFD cancel_pipe_;
+
+ base::Thread proxy_thread_;
+ base::Thread blocking_io_thread_;
+ scoped_refptr<base::SingleThreadTaskRunner> proxy_task_runner_;
+ scoped_refptr<base::SingleThreadTaskRunner> blocking_io_task_runner_;
+
+ mojo::ReceiverSet<cros::mojom::CameraHalDispatcher> receiver_set_;
+
+ mojo::Remote<cros::mojom::CameraHalServer> camera_hal_server_;
+
+ mojo::Receiver<cros::mojom::CameraHalServerCallbacks>
+ camera_hal_server_callbacks_;
+ FailedCameraHalServerCallbacks failed_camera_hal_server_callbacks_;
+
+ std::set<std::unique_ptr<CameraClientObserver>, base::UniquePtrComparator>
+ client_observers_;
+
+ MojoMjpegDecodeAcceleratorFactoryCB jda_factory_;
+
+ MojoJpegEncodeAcceleratorFactoryCB jea_factory_;
+
+ TokenManager token_manager_;
+
+ base::Lock opened_camera_id_map_lock_;
+ base::flat_map<cros::mojom::CameraClientType, base::flat_set<int32_t>>
+ opened_camera_id_map_ GUARDED_BY(opened_camera_id_map_lock_);
+
+ scoped_refptr<base::ObserverListThreadSafe<CameraActiveClientObserver>>
+ active_client_observers_;
+
+ base::Lock privacy_switch_state_lock_;
+ cros::mojom::CameraPrivacySwitchState current_privacy_switch_state_
+ GUARDED_BY(privacy_switch_state_lock_);
+
+ scoped_refptr<base::ObserverListThreadSafe<CameraPrivacySwitchObserver>>
+ privacy_switch_observers_;
+
+ DISALLOW_COPY_AND_ASSIGN(CameraHalDispatcherImpl);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_ASH_CAMERA_HAL_DISPATCHER_IMPL_H_
diff --git a/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl_unittest.cc b/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl_unittest.cc
new file mode 100644
index 00000000000..933d0e45440
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl_unittest.cc
@@ -0,0 +1,423 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h"
+
+#include <memory>
+#include <utility>
+
+#include "base/bind.h"
+#include "base/posix/safe_strerror.h"
+#include "base/run_loop.h"
+#include "base/single_thread_task_runner.h"
+#include "base/synchronization/waitable_event.h"
+#include "base/test/task_environment.h"
+#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
+#include "media/capture/video/chromeos/mojom/cros_camera_service.mojom.h"
+#include "mojo/public/cpp/bindings/pending_receiver.h"
+#include "mojo/public/cpp/bindings/pending_remote.h"
+#include "mojo/public/cpp/bindings/receiver.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using testing::_;
+using testing::InvokeWithoutArgs;
+
+namespace media {
+namespace {
+
+class MockCameraHalServer : public cros::mojom::CameraHalServer {
+ public:
+ MockCameraHalServer() = default;
+
+ ~MockCameraHalServer() = default;
+
+ void CreateChannel(
+ mojo::PendingReceiver<cros::mojom::CameraModule> camera_module_receiver,
+ cros::mojom::CameraClientType camera_client_type) override {
+ DoCreateChannel(std::move(camera_module_receiver), camera_client_type);
+ }
+ MOCK_METHOD2(DoCreateChannel,
+ void(mojo::PendingReceiver<cros::mojom::CameraModule>
+ camera_module_receiver,
+ cros::mojom::CameraClientType camera_client_type));
+
+ MOCK_METHOD1(SetTracingEnabled, void(bool enabled));
+
+ mojo::PendingRemote<cros::mojom::CameraHalServer> GetPendingRemote() {
+ return receiver_.BindNewPipeAndPassRemote();
+ }
+
+ private:
+ mojo::Receiver<cros::mojom::CameraHalServer> receiver_{this};
+ DISALLOW_COPY_AND_ASSIGN(MockCameraHalServer);
+};
+
+class MockCameraHalClient : public cros::mojom::CameraHalClient {
+ public:
+ MockCameraHalClient() = default;
+
+ ~MockCameraHalClient() = default;
+
+ void SetUpChannel(
+ mojo::PendingRemote<cros::mojom::CameraModule> camera_module) override {
+ DoSetUpChannel(std::move(camera_module));
+ }
+ MOCK_METHOD1(
+ DoSetUpChannel,
+ void(mojo::PendingRemote<cros::mojom::CameraModule> camera_module));
+
+ mojo::PendingRemote<cros::mojom::CameraHalClient> GetPendingRemote() {
+ return receiver_.BindNewPipeAndPassRemote();
+ }
+
+ private:
+ mojo::Receiver<cros::mojom::CameraHalClient> receiver_{this};
+ DISALLOW_COPY_AND_ASSIGN(MockCameraHalClient);
+};
+
+class MockCameraActiveClientObserver : public CameraActiveClientObserver {
+ public:
+ void OnActiveClientChange(cros::mojom::CameraClientType type,
+ bool is_active) override {
+ DoOnActiveClientChange(type, is_active);
+ }
+ MOCK_METHOD2(DoOnActiveClientChange,
+ void(cros::mojom::CameraClientType, bool));
+};
+
+} // namespace
+
+class CameraHalDispatcherImplTest : public ::testing::Test {
+ public:
+ CameraHalDispatcherImplTest()
+ : register_client_event_(base::WaitableEvent::ResetPolicy::AUTOMATIC) {}
+
+ ~CameraHalDispatcherImplTest() override = default;
+
+ void SetUp() override {
+ dispatcher_ = new CameraHalDispatcherImpl();
+ EXPECT_TRUE(dispatcher_->StartThreads());
+ }
+
+ void TearDown() override { delete dispatcher_; }
+
+ scoped_refptr<base::SingleThreadTaskRunner> GetProxyTaskRunner() {
+ return dispatcher_->proxy_task_runner_;
+ }
+
+ void DoLoop() {
+ run_loop_.reset(new base::RunLoop());
+ run_loop_->Run();
+ }
+
+ void QuitRunLoop() {
+ if (run_loop_) {
+ run_loop_->Quit();
+ }
+ }
+
+ static void RegisterServer(
+ CameraHalDispatcherImpl* dispatcher,
+ mojo::PendingRemote<cros::mojom::CameraHalServer> server,
+ cros::mojom::CameraHalDispatcher::RegisterServerWithTokenCallback
+ callback) {
+ auto token = base::UnguessableToken::Create();
+ dispatcher->GetTokenManagerForTesting()->AssignServerTokenForTesting(token);
+ dispatcher->RegisterServerWithToken(std::move(server), std::move(token),
+ std::move(callback));
+ }
+
+ static void RegisterClientWithToken(
+ CameraHalDispatcherImpl* dispatcher,
+ mojo::PendingRemote<cros::mojom::CameraHalClient> client,
+ cros::mojom::CameraClientType type,
+ const base::UnguessableToken& token,
+ cros::mojom::CameraHalDispatcher::RegisterClientWithTokenCallback
+ callback) {
+ dispatcher->RegisterClientWithToken(std::move(client), type, token,
+ std::move(callback));
+ }
+
+ void OnRegisteredServer(
+ int32_t result,
+ mojo::PendingRemote<cros::mojom::CameraHalServerCallbacks> callbacks) {
+ if (result != 0) {
+ ADD_FAILURE() << "Failed to register server: "
+ << base::safe_strerror(-result);
+ QuitRunLoop();
+ }
+ }
+
+ void OnRegisteredClient(int32_t result) {
+ last_register_client_result_ = result;
+ if (result != 0) {
+ // If registration fails, CameraHalClient::SetUpChannel() will not be
+ // called, and we need to quit the run loop here.
+ QuitRunLoop();
+ }
+ register_client_event_.Signal();
+ }
+
+ protected:
+ // We can't use std::unique_ptr here because the constructor and destructor of
+ // CameraHalDispatcherImpl are private.
+ CameraHalDispatcherImpl* dispatcher_;
+ base::WaitableEvent register_client_event_;
+ int32_t last_register_client_result_;
+
+ private:
+ base::test::TaskEnvironment task_environment_;
+ std::unique_ptr<base::RunLoop> run_loop_;
+ DISALLOW_COPY_AND_ASSIGN(CameraHalDispatcherImplTest);
+};
+
+// Test that the CameraHalDisptcherImpl correctly re-establishes a Mojo channel
+// for the client when the server crashes.
+TEST_F(CameraHalDispatcherImplTest, ServerConnectionError) {
+ // First verify that a the CameraHalDispatcherImpl establishes a Mojo channel
+ // between the server and the client.
+ auto mock_server = std::make_unique<MockCameraHalServer>();
+ auto mock_client = std::make_unique<MockCameraHalClient>();
+
+ EXPECT_CALL(*mock_server, DoCreateChannel(_, _)).Times(1);
+ EXPECT_CALL(*mock_client, DoSetUpChannel(_))
+ .Times(1)
+ .WillOnce(
+ InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
+
+ auto server = mock_server->GetPendingRemote();
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterServer,
+ base::Unretained(dispatcher_), std::move(server),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredServer,
+ base::Unretained(this))));
+ auto client = mock_client->GetPendingRemote();
+ auto type = cros::mojom::CameraClientType::TESTING;
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterClientWithToken,
+ base::Unretained(dispatcher_), std::move(client), type,
+ dispatcher_->GetTokenForTrustedClient(type),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredClient,
+ base::Unretained(this))));
+
+ // Wait until the client gets the established Mojo channel.
+ DoLoop();
+
+ // The client registration callback may be called after
+ // CameraHalClient::SetUpChannel(). Use a waitable event to make sure we have
+ // the result.
+ register_client_event_.Wait();
+ ASSERT_EQ(last_register_client_result_, 0);
+
+ // Re-create a new server to simulate a server crash.
+ mock_server = std::make_unique<MockCameraHalServer>();
+
+ // Make sure we creates a new Mojo channel from the new server to the same
+ // client.
+ EXPECT_CALL(*mock_server, DoCreateChannel(_, _)).Times(1);
+ EXPECT_CALL(*mock_client, DoSetUpChannel(_))
+ .Times(1)
+ .WillOnce(
+ InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
+
+ server = mock_server->GetPendingRemote();
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterServer,
+ base::Unretained(dispatcher_), std::move(server),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredServer,
+ base::Unretained(this))));
+
+ // Wait until the clients gets the newly established Mojo channel.
+ DoLoop();
+}
+
+// Test that the CameraHalDisptcherImpl correctly re-establishes a Mojo channel
+// for the client when the client reconnects after crash.
+TEST_F(CameraHalDispatcherImplTest, ClientConnectionError) {
+ // First verify that a the CameraHalDispatcherImpl establishes a Mojo channel
+ // between the server and the client.
+ auto mock_server = std::make_unique<MockCameraHalServer>();
+ auto mock_client = std::make_unique<MockCameraHalClient>();
+
+ EXPECT_CALL(*mock_server, DoCreateChannel(_, _)).Times(1);
+ EXPECT_CALL(*mock_client, DoSetUpChannel(_))
+ .Times(1)
+ .WillOnce(
+ InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
+
+ auto server = mock_server->GetPendingRemote();
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterServer,
+ base::Unretained(dispatcher_), std::move(server),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredServer,
+ base::Unretained(this))));
+ auto client = mock_client->GetPendingRemote();
+ auto type = cros::mojom::CameraClientType::TESTING;
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterClientWithToken,
+ base::Unretained(dispatcher_), std::move(client), type,
+ dispatcher_->GetTokenForTrustedClient(type),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredClient,
+ base::Unretained(this))));
+
+ // Wait until the client gets the established Mojo channel.
+ DoLoop();
+
+ // The client registration callback may be called after
+ // CameraHalClient::SetUpChannel(). Use a waitable event to make sure we have
+ // the result.
+ register_client_event_.Wait();
+ ASSERT_EQ(last_register_client_result_, 0);
+
+ // Re-create a new client to simulate a client crash.
+ mock_client = std::make_unique<MockCameraHalClient>();
+
+ // Make sure we re-create the Mojo channel from the same server to the new
+ // client.
+ EXPECT_CALL(*mock_server, DoCreateChannel(_, _)).Times(1);
+ EXPECT_CALL(*mock_client, DoSetUpChannel(_))
+ .Times(1)
+ .WillOnce(
+ InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
+
+ client = mock_client->GetPendingRemote();
+ type = cros::mojom::CameraClientType::TESTING;
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterClientWithToken,
+ base::Unretained(dispatcher_), std::move(client), type,
+ dispatcher_->GetTokenForTrustedClient(type),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredClient,
+ base::Unretained(this))));
+
+ // Wait until the clients gets the newly established Mojo channel.
+ DoLoop();
+
+ // Make sure the client is still successfully registered.
+ register_client_event_.Wait();
+ ASSERT_EQ(last_register_client_result_, 0);
+}
+
+// Test that trusted camera HAL clients (e.g., Chrome, Android, Testing) can be
+// registered successfully.
+TEST_F(CameraHalDispatcherImplTest, RegisterClientSuccess) {
+ // First verify that a the CameraHalDispatcherImpl establishes a Mojo channel
+ // between the server and the client.
+ auto mock_server = std::make_unique<MockCameraHalServer>();
+
+ auto server = mock_server->GetPendingRemote();
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterServer,
+ base::Unretained(dispatcher_), std::move(server),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredServer,
+ base::Unretained(this))));
+
+ for (auto type : TokenManager::kTrustedClientTypes) {
+ auto mock_client = std::make_unique<MockCameraHalClient>();
+ EXPECT_CALL(*mock_server, DoCreateChannel(_, _)).Times(1);
+ EXPECT_CALL(*mock_client, DoSetUpChannel(_))
+ .Times(1)
+ .WillOnce(
+ InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
+
+ auto client = mock_client->GetPendingRemote();
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterClientWithToken,
+ base::Unretained(dispatcher_), std::move(client), type,
+ dispatcher_->GetTokenForTrustedClient(type),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredClient,
+ base::Unretained(this))));
+
+ // Wait until the client gets the established Mojo channel.
+ DoLoop();
+
+ // The client registration callback may be called after
+ // CameraHalClient::SetUpChannel(). Use a waitable event to make sure we
+ // have the result.
+ register_client_event_.Wait();
+ ASSERT_EQ(last_register_client_result_, 0);
+ }
+}
+
+// Test that CameraHalClient registration fails when a wrong (empty) token is
+// provided.
+TEST_F(CameraHalDispatcherImplTest, RegisterClientFail) {
+ // First verify that a the CameraHalDispatcherImpl establishes a Mojo channel
+ // between the server and the client.
+ auto mock_server = std::make_unique<MockCameraHalServer>();
+ auto mock_client = std::make_unique<MockCameraHalClient>();
+
+ auto server = mock_server->GetPendingRemote();
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterServer,
+ base::Unretained(dispatcher_), std::move(server),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredServer,
+ base::Unretained(this))));
+
+ // Use an empty token to make sure authentication fails.
+ base::UnguessableToken empty_token;
+ auto client = mock_client->GetPendingRemote();
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterClientWithToken,
+ base::Unretained(dispatcher_), std::move(client),
+ cros::mojom::CameraClientType::TESTING, empty_token,
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredClient,
+ base::Unretained(this))));
+
+ // We do not need to enter a run loop here because
+ // CameraHalClient::SetUpChannel() isn't expected to called, and we only need
+ // to wait for the callback from CameraHalDispatcher::RegisterClientWithToken.
+ register_client_event_.Wait();
+ ASSERT_EQ(last_register_client_result_, -EPERM);
+}
+
+// Test that CameraHalDispatcherImpl correctly fires CameraActiveClientObserver
+// when a camera device is opened or closed by a client.
+TEST_F(CameraHalDispatcherImplTest, CameraActiveClientObserverTest) {
+ MockCameraActiveClientObserver observer;
+ dispatcher_->AddActiveClientObserver(&observer);
+
+ EXPECT_CALL(observer, DoOnActiveClientChange(
+ cros::mojom::CameraClientType::TESTING, true))
+ .Times(1)
+ .WillOnce(
+ InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
+ dispatcher_->CameraDeviceActivityChange(
+ /*camera_id=*/0, /*opened=*/true, cros::mojom::CameraClientType::TESTING);
+
+ DoLoop();
+
+ EXPECT_CALL(observer, DoOnActiveClientChange(
+ cros::mojom::CameraClientType::TESTING, false))
+ .Times(1)
+ .WillOnce(
+ InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
+ dispatcher_->CameraDeviceActivityChange(
+ /*camera_id=*/0, /*opened=*/false,
+ cros::mojom::CameraClientType::TESTING);
+
+ DoLoop();
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.cc b/chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.cc
new file mode 100644
index 00000000000..321b9b3e82c
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.cc
@@ -0,0 +1,84 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/ash/power_manager_client_proxy.h"
+
+namespace media {
+
+PowerManagerClientProxy::PowerManagerClientProxy() = default;
+
+void PowerManagerClientProxy::Init(
+ base::WeakPtr<Observer> observer,
+ const std::string& debug_info,
+ scoped_refptr<base::SingleThreadTaskRunner> observer_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> dbus_task_runner) {
+ observer_ = std::move(observer);
+ debug_info_ = debug_info;
+ observer_task_runner_ = std::move(observer_task_runner);
+ dbus_task_runner_ = std::move(dbus_task_runner);
+
+ dbus_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&PowerManagerClientProxy::InitOnDBusThread, this));
+}
+
+void PowerManagerClientProxy::Shutdown() {
+ dbus_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&PowerManagerClientProxy::ShutdownOnDBusThread, this));
+}
+
+void PowerManagerClientProxy::UnblockSuspend(
+ const base::UnguessableToken& unblock_suspend_token) {
+ dbus_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&PowerManagerClientProxy::UnblockSuspendOnDBusThread, this,
+ unblock_suspend_token));
+}
+
+PowerManagerClientProxy::~PowerManagerClientProxy() = default;
+
+void PowerManagerClientProxy::InitOnDBusThread() {
+ DCHECK(dbus_task_runner_->RunsTasksInCurrentSequence());
+ chromeos::PowerManagerClient::Get()->AddObserver(this);
+}
+
+void PowerManagerClientProxy::ShutdownOnDBusThread() {
+ DCHECK(dbus_task_runner_->RunsTasksInCurrentSequence());
+ chromeos::PowerManagerClient::Get()->RemoveObserver(this);
+}
+
+void PowerManagerClientProxy::UnblockSuspendOnDBusThread(
+ const base::UnguessableToken& unblock_suspend_token) {
+ DCHECK(dbus_task_runner_->RunsTasksInCurrentSequence());
+ chromeos::PowerManagerClient::Get()->UnblockSuspend(unblock_suspend_token);
+}
+
+void PowerManagerClientProxy::SuspendImminentOnObserverThread(
+ base::UnguessableToken unblock_suspend_token) {
+ DCHECK(observer_task_runner_->RunsTasksInCurrentSequence());
+ // TODO(b/175168296): Ensure that the weak pointer |observer| is dereferenced
+ // and invalidated on the same thread.
+ if (observer_) {
+ observer_->SuspendImminent();
+ }
+ UnblockSuspend(std::move(unblock_suspend_token));
+}
+
+void PowerManagerClientProxy::SuspendImminent(
+ power_manager::SuspendImminent::Reason reason) {
+ auto token = base::UnguessableToken::Create();
+ chromeos::PowerManagerClient::Get()->BlockSuspend(token, debug_info_);
+ observer_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&PowerManagerClientProxy::SuspendImminentOnObserverThread,
+ this, std::move(token)));
+}
+
+void PowerManagerClientProxy::SuspendDone(base::TimeDelta sleep_duration) {
+ observer_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&Observer::SuspendDone, observer_));
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.h b/chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.h
new file mode 100644
index 00000000000..ced96772f45
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.h
@@ -0,0 +1,66 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_ASH_POWER_MANAGER_CLIENT_PROXY_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_ASH_POWER_MANAGER_CLIENT_PROXY_H_
+
+#include "base/memory/weak_ptr.h"
+#include "base/single_thread_task_runner.h"
+#include "base/unguessable_token.h"
+#include "chromeos/dbus/power/power_manager_client.h"
+
+namespace media {
+
+class PowerManagerClientProxy
+ : public base::RefCountedThreadSafe<PowerManagerClientProxy>,
+ public chromeos::PowerManagerClient::Observer {
+ public:
+ class Observer {
+ public:
+ virtual void SuspendDone() = 0;
+ virtual void SuspendImminent() = 0;
+ };
+
+ PowerManagerClientProxy();
+ PowerManagerClientProxy(const PowerManagerClientProxy&) = delete;
+ PowerManagerClientProxy& operator=(const PowerManagerClientProxy&) = delete;
+
+ void Init(base::WeakPtr<Observer> observer,
+ const std::string& debug_info,
+ scoped_refptr<base::SingleThreadTaskRunner> observer_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> dbus_task_runner);
+
+ void Shutdown();
+
+ void UnblockSuspend(const base::UnguessableToken& unblock_suspend_token);
+
+ private:
+ friend class base::RefCountedThreadSafe<PowerManagerClientProxy>;
+
+ ~PowerManagerClientProxy() override;
+
+ void InitOnDBusThread();
+
+ void ShutdownOnDBusThread();
+
+ void UnblockSuspendOnDBusThread(
+ const base::UnguessableToken& unblock_suspend_token);
+
+ void SuspendImminentOnObserverThread(
+ base::UnguessableToken unblock_suspend_token);
+
+ // chromeos::PowerManagerClient::Observer:
+ void SuspendImminent(power_manager::SuspendImminent::Reason reason) final;
+
+ void SuspendDone(base::TimeDelta sleep_duration) final;
+
+ base::WeakPtr<Observer> observer_;
+ std::string debug_info_;
+ scoped_refptr<base::SingleThreadTaskRunner> observer_task_runner_;
+ scoped_refptr<base::SingleThreadTaskRunner> dbus_task_runner_;
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_ASH_POWER_MANAGER_CLIENT_PROXY_H_
diff --git a/chromium/media/capture/video/chromeos/camera_3a_controller.cc b/chromium/media/capture/video/chromeos/camera_3a_controller.cc
index 16079425ae8..4edbf5153f7 100644
--- a/chromium/media/capture/video/chromeos/camera_3a_controller.cc
+++ b/chromium/media/capture/video/chromeos/camera_3a_controller.cc
@@ -7,6 +7,7 @@
#include <utility>
#include "base/bind.h"
+#include "base/containers/contains.h"
#include "base/numerics/ranges.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
@@ -51,8 +52,7 @@ Camera3AController::Camera3AController(
ANDROID_CONTROL_AWB_STATE_INACTIVE),
awb_mode_set_(false),
set_point_of_interest_running_(false),
- ae_locked_for_point_of_interest_(false),
- zero_shutter_lag_enabled_(false) {
+ ae_locked_for_point_of_interest_(false) {
DCHECK(task_runner_->BelongsToCurrentThread());
capture_metadata_dispatcher_->AddResultMetadataObserver(this);
@@ -157,6 +157,29 @@ Camera3AController::Camera3AController(
base::checked_cast<uint8_t>(ae_mode_));
Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE,
base::checked_cast<uint8_t>(awb_mode_));
+
+ // Enable face detection if it's available.
+ auto face_modes = GetMetadataEntryAsSpan<uint8_t>(
+ static_metadata, cros::mojom::CameraMetadataTag::
+ ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
+ // We don't need face landmarks and ids, so using SIMPLE mode instead of FULL
+ // mode should be enough.
+ const auto face_mode_simple = cros::mojom::AndroidStatisticsFaceDetectMode::
+ ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE;
+ if (base::Contains(face_modes,
+ base::checked_cast<uint8_t>(face_mode_simple))) {
+ SetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_STATISTICS_FACE_DETECT_MODE,
+ face_mode_simple);
+ }
+
+ auto request_keys = GetMetadataEntryAsSpan<int32_t>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+ zero_shutter_lag_supported_ = base::Contains(
+ request_keys,
+ static_cast<int32_t>(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_ENABLE_ZSL));
}
Camera3AController::~Camera3AController() {
@@ -184,7 +207,7 @@ void Camera3AController::Stabilize3AForStillCapture(
return;
}
- if (Is3AStabilized() || zero_shutter_lag_enabled_) {
+ if (Is3AStabilized() || zero_shutter_lag_supported_) {
std::move(on_3a_stabilized_callback).Run();
return;
}
@@ -427,6 +450,36 @@ void Camera3AController::SetExposureTime(bool enable_auto,
DVLOG(1) << "Setting AE mode to: " << ae_mode_;
}
+void Camera3AController::SetFocusDistance(bool enable_auto,
+ float focus_distance_diopters) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (enable_auto) {
+ if (!available_af_modes_.count(
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO)) {
+ LOG(WARNING) << "Don't support ANDROID_CONTROL_AF_MODE_AUTO";
+ return;
+ }
+ af_mode_ = cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO;
+ capture_metadata_dispatcher_->UnsetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_LENS_FOCUS_DISTANCE);
+ } else {
+ if (!available_af_modes_.count(
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF)) {
+ LOG(WARNING) << "Don't support ANDROID_CONTROL_AE_MODE_OFF";
+ return;
+ }
+ af_mode_ = cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF;
+ SetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_LENS_FOCUS_DISTANCE,
+ focus_distance_diopters);
+ }
+
+ Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ base::checked_cast<uint8_t>(af_mode_));
+ DVLOG(1) << "Setting AF mode to: " << af_mode_;
+}
+
bool Camera3AController::IsPointOfInterestSupported() {
return point_of_interest_supported_;
}
@@ -553,10 +606,6 @@ void Camera3AController::SetPointOfInterestUnlockAe() {
ClearRepeatingCaptureMetadata();
}
-void Camera3AController::UpdateZeroShutterLagAvailability(bool enabled) {
- zero_shutter_lag_enabled_ = enabled;
-}
-
base::WeakPtr<Camera3AController> Camera3AController::GetWeakPtr() {
DCHECK(task_runner_->BelongsToCurrentThread());
diff --git a/chromium/media/capture/video/chromeos/camera_3a_controller.h b/chromium/media/capture/video/chromeos/camera_3a_controller.h
index 03ff69669f1..e87c8c2264c 100644
--- a/chromium/media/capture/video/chromeos/camera_3a_controller.h
+++ b/chromium/media/capture/video/chromeos/camera_3a_controller.h
@@ -50,16 +50,17 @@ class CAPTURE_EXPORT Camera3AController final
// only effective if |enable_auto| is set to false
void SetExposureTime(bool enable_auto, int64_t exposure_time_nanoseconds);
+ // Set focus distance.
+ // |enable_auto| enables auto focus mode. |focus_distance_diopters| is only
+ // effective if |enable_auto| is set to false
+ void SetFocusDistance(bool enable_auto, float focus_distance_diopters);
+
bool IsPointOfInterestSupported();
// Set point of interest. The coordinate system is based on the active
// pixel array.
void SetPointOfInterest(gfx::Point point);
- // Updates the availability of Zero-Shutter Lag (ZSL). We skip 3A (AE, AF,
- // AWB) if ZSL is enabled.
- void UpdateZeroShutterLagAvailability(bool enabled);
-
base::WeakPtr<Camera3AController> GetWeakPtr();
private:
@@ -98,6 +99,7 @@ class CAPTURE_EXPORT Camera3AController final
bool ae_region_supported_;
bool af_region_supported_;
bool point_of_interest_supported_;
+ bool zero_shutter_lag_supported_;
CaptureMetadataDispatcher* capture_metadata_dispatcher_;
const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
@@ -127,8 +129,6 @@ class CAPTURE_EXPORT Camera3AController final
bool ae_locked_for_point_of_interest_;
- bool zero_shutter_lag_enabled_;
-
base::TimeDelta latest_sensor_timestamp_;
std::unordered_set<cros::mojom::CameraMetadataTag> repeating_metadata_tags_;
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc
index b2fa774a4af..d9ddaf31832 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc
+++ b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc
@@ -6,19 +6,43 @@
#include <string>
+#include "base/callback_helpers.h"
#include "base/command_line.h"
+#include "media/base/bind_to_current_loop.h"
#include "media/base/media_switches.h"
#include "media/capture/video/chromeos/public/cros_features.h"
#include "media/capture/video/chromeos/video_capture_device_chromeos_halv3.h"
namespace media {
-CameraAppDeviceBridgeImpl::CameraAppDeviceBridgeImpl() {}
+namespace {
+
+void InvalidateDevicePtrsOnDeviceIpcThread(
+ base::WeakPtr<CameraAppDeviceImpl> device,
+ base::OnceClosure callback) {
+ if (device) {
+ device->InvalidatePtrs(std::move(callback));
+ }
+}
+
+} // namespace
+
+CameraAppDeviceBridgeImpl::CameraAppDeviceBridgeImpl() {
+ const base::CommandLine* command_line =
+ base::CommandLine::ForCurrentProcess();
+ bool use_fake_camera =
+ command_line->HasSwitch(switches::kUseFakeDeviceForMediaStream);
+ bool use_file_camera =
+ command_line->HasSwitch(switches::kUseFileForFakeVideoCapture);
+ is_supported_ =
+ ShouldUseCrosCameraService() && !use_fake_camera && !use_file_camera;
+}
CameraAppDeviceBridgeImpl::~CameraAppDeviceBridgeImpl() = default;
-void CameraAppDeviceBridgeImpl::SetIsSupported(bool is_supported) {
- is_supported_ = is_supported;
+// static
+CameraAppDeviceBridgeImpl* CameraAppDeviceBridgeImpl::GetInstance() {
+ return base::Singleton<CameraAppDeviceBridgeImpl>::get();
}
void CameraAppDeviceBridgeImpl::BindReceiver(
@@ -26,29 +50,85 @@ void CameraAppDeviceBridgeImpl::BindReceiver(
receivers_.Add(this, std::move(receiver));
}
-void CameraAppDeviceBridgeImpl::OnDeviceClosed(const std::string& device_id) {
- auto it = camera_app_devices_.find(device_id);
- if (it != camera_app_devices_.end()) {
- camera_app_devices_.erase(it);
+void CameraAppDeviceBridgeImpl::OnVideoCaptureDeviceCreated(
+ const std::string& device_id,
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner) {
+ base::AutoLock lock(task_runner_map_lock_);
+ DCHECK_EQ(ipc_task_runners_.count(device_id), 0u);
+ ipc_task_runners_.emplace(device_id, ipc_task_runner);
+}
+
+void CameraAppDeviceBridgeImpl::OnVideoCaptureDeviceClosing(
+ const std::string& device_id) {
+ base::AutoLock lock(task_runner_map_lock_);
+ DCHECK_EQ(ipc_task_runners_.count(device_id), 1u);
+ ipc_task_runners_[device_id]->PostTask(
+ FROM_HERE, base::BindOnce(&InvalidateDevicePtrsOnDeviceIpcThread,
+ GetWeakCameraAppDevice(device_id),
+ base::DoNothing::Once()));
+ ipc_task_runners_.erase(device_id);
+}
+
+void CameraAppDeviceBridgeImpl::OnDeviceMojoDisconnected(
+ const std::string& device_id) {
+ auto remove_device = media::BindToCurrentLoop(
+ base::BindOnce(&CameraAppDeviceBridgeImpl::RemoveCameraAppDevice,
+ base::Unretained(this), device_id));
+ {
+ base::AutoLock lock(task_runner_map_lock_);
+ auto it = ipc_task_runners_.find(device_id);
+ if (it != ipc_task_runners_.end()) {
+ it->second->PostTask(
+ FROM_HERE, base::BindOnce(&InvalidateDevicePtrsOnDeviceIpcThread,
+ GetWeakCameraAppDevice(device_id),
+ std::move(remove_device)));
+ return;
+ }
}
+ std::move(remove_device).Run();
}
void CameraAppDeviceBridgeImpl::SetCameraInfoGetter(
CameraInfoGetter camera_info_getter) {
+ base::AutoLock lock(camera_info_getter_lock_);
camera_info_getter_ = std::move(camera_info_getter);
}
void CameraAppDeviceBridgeImpl::UnsetCameraInfoGetter() {
+ base::AutoLock lock(camera_info_getter_lock_);
camera_info_getter_ = {};
}
-CameraAppDeviceImpl* CameraAppDeviceBridgeImpl::GetCameraAppDevice(
+void CameraAppDeviceBridgeImpl::SetVirtualDeviceController(
+ VirtualDeviceController virtual_device_controller) {
+ base::AutoLock lock(virtual_device_controller_lock_);
+ virtual_device_controller_ = std::move(virtual_device_controller);
+}
+
+void CameraAppDeviceBridgeImpl::UnsetVirtualDeviceController() {
+ base::AutoLock lock(virtual_device_controller_lock_);
+ virtual_device_controller_ = {};
+}
+
+base::WeakPtr<CameraAppDeviceImpl>
+CameraAppDeviceBridgeImpl::GetWeakCameraAppDevice(
const std::string& device_id) {
+ base::AutoLock lock(device_map_lock_);
auto it = camera_app_devices_.find(device_id);
- if (it != camera_app_devices_.end()) {
- return it->second.get();
+ if (it == camera_app_devices_.end()) {
+ return nullptr;
}
- return CreateCameraAppDevice(device_id);
+ return it->second->GetWeakPtr();
+}
+
+void CameraAppDeviceBridgeImpl::RemoveCameraAppDevice(
+ const std::string& device_id) {
+ base::AutoLock lock(device_map_lock_);
+ auto it = camera_app_devices_.find(device_id);
+ if (it == camera_app_devices_.end()) {
+ return;
+ }
+ camera_app_devices_.erase(it);
}
void CameraAppDeviceBridgeImpl::GetCameraAppDevice(
@@ -56,16 +136,31 @@ void CameraAppDeviceBridgeImpl::GetCameraAppDevice(
GetCameraAppDeviceCallback callback) {
DCHECK(is_supported_);
- mojo::PendingRemote<cros::mojom::CameraAppDevice> device;
- GetCameraAppDevice(device_id)->BindReceiver(
- device.InitWithNewPipeAndPassReceiver());
+ mojo::PendingRemote<cros::mojom::CameraAppDevice> device_remote;
+ auto* device = GetOrCreateCameraAppDevice(device_id);
+ DCHECK(device);
+
+ device->BindReceiver(device_remote.InitWithNewPipeAndPassReceiver());
std::move(callback).Run(cros::mojom::GetCameraAppDeviceStatus::SUCCESS,
- std::move(device));
+ std::move(device_remote));
}
-media::CameraAppDeviceImpl* CameraAppDeviceBridgeImpl::CreateCameraAppDevice(
+media::CameraAppDeviceImpl*
+CameraAppDeviceBridgeImpl::GetOrCreateCameraAppDevice(
const std::string& device_id) {
+ base::AutoLock lock(device_map_lock_);
+ auto it = camera_app_devices_.find(device_id);
+ if (it != camera_app_devices_.end()) {
+ return it->second.get();
+ }
+
+ base::AutoLock camera_info_lock(camera_info_getter_lock_);
+ // Since we ensure that VideoCaptureDeviceFactory is created before binding
+ // CameraAppDeviceBridge and VideoCaptureDeviceFactory is only destroyed when
+ // the video capture service dies, we can guarantee that |camera_info_getter_|
+ // is always valid here.
DCHECK(camera_info_getter_);
+
auto device_info = camera_info_getter_.Run(device_id);
auto device_impl = std::make_unique<media::CameraAppDeviceImpl>(
device_id, std::move(device_info));
@@ -77,4 +172,18 @@ void CameraAppDeviceBridgeImpl::IsSupported(IsSupportedCallback callback) {
std::move(callback).Run(is_supported_);
}
+void CameraAppDeviceBridgeImpl::SetMultipleStreamsEnabled(
+ const std::string& device_id,
+ bool enabled,
+ SetMultipleStreamsEnabledCallback callback) {
+ base::AutoLock lock(virtual_device_controller_lock_);
+ if (!virtual_device_controller_) {
+ std::move(callback).Run(false);
+ return;
+ }
+
+ virtual_device_controller_.Run(device_id, enabled);
+ std::move(callback).Run(true);
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h
index 42a1972d1d9..10e2646f738 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h
+++ b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h
@@ -7,6 +7,7 @@
#include <string>
+#include "base/memory/singleton.h"
#include "media/capture/capture_export.h"
#include "media/capture/video/chromeos/camera_app_device_impl.h"
#include "media/capture/video/chromeos/mojom/camera_app.mojom.h"
@@ -14,30 +15,47 @@
namespace media {
-// A bridge class which helps to construct the connection of CameraAppDevice
-// between remote side (Chrome) and receiver side (Video Capture Service).
+// A singleton bridge class between Chrome Camera App and Video Capture Service
+// which helps to construct CameraAppDevice for communication between these two
+// components.
class CAPTURE_EXPORT CameraAppDeviceBridgeImpl
: public cros::mojom::CameraAppDeviceBridge {
public:
using CameraInfoGetter =
base::RepeatingCallback<cros::mojom::CameraInfoPtr(const std::string&)>;
+ using VirtualDeviceController =
+ base::RepeatingCallback<void(const std::string&, bool)>;
CameraAppDeviceBridgeImpl();
~CameraAppDeviceBridgeImpl() override;
- void SetIsSupported(bool is_supported);
+ static CameraAppDeviceBridgeImpl* GetInstance();
void BindReceiver(
mojo::PendingReceiver<cros::mojom::CameraAppDeviceBridge> receiver);
- void OnDeviceClosed(const std::string& device_id);
+ void OnVideoCaptureDeviceCreated(
+ const std::string& device_id,
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner);
+
+ void OnVideoCaptureDeviceClosing(const std::string& device_id);
+
+ void OnDeviceMojoDisconnected(const std::string& device_id);
void SetCameraInfoGetter(CameraInfoGetter camera_info_getter);
void UnsetCameraInfoGetter();
- CameraAppDeviceImpl* GetCameraAppDevice(const std::string& device_id);
+ void SetVirtualDeviceController(
+ VirtualDeviceController virtual_device_controller);
+
+ void UnsetVirtualDeviceController();
+
+ base::WeakPtr<CameraAppDeviceImpl> GetWeakCameraAppDevice(
+ const std::string& device_id);
+
+ void RemoveCameraAppDevice(const std::string& device_id);
// cros::mojom::CameraAppDeviceBridge implementations.
void GetCameraAppDevice(const std::string& device_id,
@@ -45,21 +63,38 @@ class CAPTURE_EXPORT CameraAppDeviceBridgeImpl
void IsSupported(IsSupportedCallback callback) override;
+ void SetMultipleStreamsEnabled(
+ const std::string& device_id,
+ bool enabled,
+ SetMultipleStreamsEnabledCallback callback) override;
+
private:
- CameraAppDeviceImpl* CreateCameraAppDevice(const std::string& device_id);
+ friend struct base::DefaultSingletonTraits<CameraAppDeviceBridgeImpl>;
+
+ CameraAppDeviceImpl* GetOrCreateCameraAppDevice(const std::string& device_id);
bool is_supported_;
- CameraInfoGetter camera_info_getter_;
+ base::Lock camera_info_getter_lock_;
+ CameraInfoGetter camera_info_getter_ GUARDED_BY(camera_info_getter_lock_);
+
+ base::Lock virtual_device_controller_lock_;
+ VirtualDeviceController virtual_device_controller_
+ GUARDED_BY(virtual_device_controller_lock_);
mojo::ReceiverSet<cros::mojom::CameraAppDeviceBridge> receivers_;
+ base::Lock device_map_lock_;
base::flat_map<std::string, std::unique_ptr<media::CameraAppDeviceImpl>>
- camera_app_devices_;
+ camera_app_devices_ GUARDED_BY(device_map_lock_);
+
+ base::Lock task_runner_map_lock_;
+ base::flat_map<std::string, scoped_refptr<base::SingleThreadTaskRunner>>
+ ipc_task_runners_ GUARDED_BY(task_runner_map_lock_);
DISALLOW_COPY_AND_ASSIGN(CameraAppDeviceBridgeImpl);
};
} // namespace media
-#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_BRIDGE_IMPL_H_ \ No newline at end of file
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_BRIDGE_IMPL_H_
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_impl.cc b/chromium/media/capture/video/chromeos/camera_app_device_impl.cc
index d81afe5b3e5..3aba77a914c 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_impl.cc
+++ b/chromium/media/capture/video/chromeos/camera_app_device_impl.cc
@@ -4,6 +4,8 @@
#include "media/capture/video/chromeos/camera_app_device_impl.h"
+#include "media/base/bind_to_current_loop.h"
+#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
namespace media {
@@ -61,20 +63,36 @@ CameraAppDeviceImpl::CameraAppDeviceImpl(const std::string& device_id,
cros::mojom::CameraInfoPtr camera_info)
: device_id_(device_id),
camera_info_(std::move(camera_info)),
- task_runner_(base::ThreadTaskRunnerHandle::Get()),
capture_intent_(cros::mojom::CaptureIntent::DEFAULT),
next_metadata_observer_id_(0),
- next_camera_event_observer_id_(0),
- weak_ptr_factory_(
- std::make_unique<base::WeakPtrFactory<CameraAppDeviceImpl>>(this)) {}
+ next_camera_event_observer_id_(0) {}
CameraAppDeviceImpl::~CameraAppDeviceImpl() {
- task_runner_->DeleteSoon(FROM_HERE, std::move(weak_ptr_factory_));
+ // If the instance is bound, then this instance should only be destroyed when
+ // the mojo connection is dropped, which also happens on the mojo thread.
+ DCHECK(!mojo_task_runner_ || mojo_task_runner_->BelongsToCurrentThread());
+
+ // All the weak pointers of |weak_ptr_factory_| should be invalidated on
+ // camera device IPC thread before destroying CameraAppDeviceImpl.
+ DCHECK(!weak_ptr_factory_.HasWeakPtrs());
}
void CameraAppDeviceImpl::BindReceiver(
mojo::PendingReceiver<cros::mojom::CameraAppDevice> receiver) {
receivers_.Add(this, std::move(receiver));
+ receivers_.set_disconnect_handler(
+ base::BindRepeating(&CameraAppDeviceImpl::OnMojoConnectionError,
+ weak_ptr_factory_for_mojo_.GetWeakPtr()));
+ mojo_task_runner_ = base::ThreadTaskRunnerHandle::Get();
+}
+
+base::WeakPtr<CameraAppDeviceImpl> CameraAppDeviceImpl::GetWeakPtr() {
+ return weak_ptr_factory_.GetWeakPtr();
+}
+
+void CameraAppDeviceImpl::InvalidatePtrs(base::OnceClosure callback) {
+ weak_ptr_factory_.InvalidateWeakPtrs();
+ std::move(callback).Run();
}
void CameraAppDeviceImpl::ConsumeReprocessOptions(
@@ -130,26 +148,30 @@ void CameraAppDeviceImpl::OnResultMetadataAvailable(
}
void CameraAppDeviceImpl::OnShutterDone() {
- base::AutoLock lock(camera_event_observers_lock_);
-
- for (auto& observer : camera_event_observers_) {
- observer.second->OnShutterDone();
- }
+ mojo_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&CameraAppDeviceImpl::NotifyShutterDoneOnMojoThread,
+ weak_ptr_factory_for_mojo_.GetWeakPtr()));
}
void CameraAppDeviceImpl::GetCameraInfo(GetCameraInfoCallback callback) {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
DCHECK(camera_info_);
+
std::move(callback).Run(camera_info_.Clone());
}
void CameraAppDeviceImpl::SetReprocessOption(
cros::mojom::Effect effect,
SetReprocessOptionCallback reprocess_result_callback) {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
ReprocessTask task;
task.effect = effect;
- task.callback = base::BindOnce(&CameraAppDeviceImpl::SetReprocessResult,
- weak_ptr_factory_->GetWeakPtr(),
- std::move(reprocess_result_callback));
+ task.callback = media::BindToCurrentLoop(
+ base::BindOnce(&CameraAppDeviceImpl::SetReprocessResultOnMojoThread,
+ weak_ptr_factory_for_mojo_.GetWeakPtr(),
+ std::move(reprocess_result_callback)));
if (effect == cros::mojom::Effect::PORTRAIT_MODE) {
auto e = BuildMetadataEntry(
@@ -165,6 +187,8 @@ void CameraAppDeviceImpl::SetReprocessOption(
void CameraAppDeviceImpl::SetFpsRange(const gfx::Range& fps_range,
SetFpsRangeCallback callback) {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
const int entry_length = 2;
auto& static_metadata = camera_info_->static_camera_characteristics;
@@ -198,6 +222,8 @@ void CameraAppDeviceImpl::SetFpsRange(const gfx::Range& fps_range,
void CameraAppDeviceImpl::SetStillCaptureResolution(
const gfx::Size& resolution,
SetStillCaptureResolutionCallback callback) {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
base::AutoLock lock(still_capture_resolution_lock_);
still_capture_resolution_ = resolution;
std::move(callback).Run();
@@ -206,6 +232,8 @@ void CameraAppDeviceImpl::SetStillCaptureResolution(
void CameraAppDeviceImpl::SetCaptureIntent(
cros::mojom::CaptureIntent capture_intent,
SetCaptureIntentCallback callback) {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
base::AutoLock lock(capture_intent_lock_);
capture_intent_ = capture_intent;
std::move(callback).Run();
@@ -215,6 +243,8 @@ void CameraAppDeviceImpl::AddResultMetadataObserver(
mojo::PendingRemote<cros::mojom::ResultMetadataObserver> observer,
cros::mojom::StreamType stream_type,
AddResultMetadataObserverCallback callback) {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
base::AutoLock lock(metadata_observers_lock_);
uint32_t id = next_metadata_observer_id_++;
@@ -228,6 +258,8 @@ void CameraAppDeviceImpl::AddResultMetadataObserver(
void CameraAppDeviceImpl::RemoveResultMetadataObserver(
uint32_t id,
RemoveResultMetadataObserverCallback callback) {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
base::AutoLock lock(metadata_observers_lock_);
if (metadata_observers_.erase(id) == 0) {
@@ -245,7 +277,7 @@ void CameraAppDeviceImpl::RemoveResultMetadataObserver(
void CameraAppDeviceImpl::AddCameraEventObserver(
mojo::PendingRemote<cros::mojom::CameraEventObserver> observer,
AddCameraEventObserverCallback callback) {
- base::AutoLock lock(camera_event_observers_lock_);
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
uint32_t id = next_camera_event_observer_id_++;
camera_event_observers_[id] =
@@ -256,7 +288,7 @@ void CameraAppDeviceImpl::AddCameraEventObserver(
void CameraAppDeviceImpl::RemoveCameraEventObserver(
uint32_t id,
RemoveCameraEventObserverCallback callback) {
- base::AutoLock lock(camera_event_observers_lock_);
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
bool is_success = camera_event_observers_.erase(id) == 1;
std::move(callback).Run(is_success);
@@ -274,17 +306,26 @@ void CameraAppDeviceImpl::DisableEeNr(ReprocessTask* task) {
task->extra_metadata.push_back(std::move(nr_entry));
}
-void CameraAppDeviceImpl::SetReprocessResult(
+void CameraAppDeviceImpl::OnMojoConnectionError() {
+ CameraAppDeviceBridgeImpl::GetInstance()->OnDeviceMojoDisconnected(
+ device_id_);
+}
+
+void CameraAppDeviceImpl::SetReprocessResultOnMojoThread(
SetReprocessOptionCallback callback,
const int32_t status,
media::mojom::BlobPtr blob) {
- auto callback_on_mojo_thread = base::BindOnce(
- [](const int32_t status, media::mojom::BlobPtr blob,
- SetReprocessOptionCallback callback) {
- std::move(callback).Run(status, std::move(blob));
- },
- status, std::move(blob), std::move(callback));
- task_runner_->PostTask(FROM_HERE, std::move(callback_on_mojo_thread));
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
+ std::move(callback).Run(status, std::move(blob));
+}
+
+void CameraAppDeviceImpl::NotifyShutterDoneOnMojoThread() {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
+ for (auto& observer : camera_event_observers_) {
+ observer.second->OnShutterDone();
+ }
}
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_impl.h b/chromium/media/capture/video/chromeos/camera_app_device_impl.h
index a0853f0ac2e..4bdd9991899 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_impl.h
+++ b/chromium/media/capture/video/chromeos/camera_app_device_impl.h
@@ -66,6 +66,12 @@ class CAPTURE_EXPORT CameraAppDeviceImpl : public cros::mojom::CameraAppDevice {
void BindReceiver(
mojo::PendingReceiver<cros::mojom::CameraAppDevice> receiver);
+ // All the weak pointers should be dereferenced and invalidated on the camera
+ // device ipc thread.
+ base::WeakPtr<CameraAppDeviceImpl> GetWeakPtr();
+
+ void InvalidatePtrs(base::OnceClosure callback);
+
// Consumes all the pending reprocess tasks if there is any and eventually
// generates a ReprocessTaskQueue which contains:
// 1. A regular capture task with |take_photo_callback|.
@@ -121,9 +127,13 @@ class CAPTURE_EXPORT CameraAppDeviceImpl : public cros::mojom::CameraAppDevice {
private:
static void DisableEeNr(ReprocessTask* task);
- void SetReprocessResult(SetReprocessOptionCallback callback,
- const int32_t status,
- media::mojom::BlobPtr blob);
+ void OnMojoConnectionError();
+
+ void SetReprocessResultOnMojoThread(SetReprocessOptionCallback callback,
+ const int32_t status,
+ media::mojom::BlobPtr blob);
+
+ void NotifyShutterDoneOnMojoThread();
std::string device_id_;
@@ -131,7 +141,8 @@ class CAPTURE_EXPORT CameraAppDeviceImpl : public cros::mojom::CameraAppDevice {
cros::mojom::CameraInfoPtr camera_info_;
- const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ // It is used for calls which should run on the mojo thread.
+ scoped_refptr<base::SingleThreadTaskRunner> mojo_task_runner_;
// The queue will be enqueued and dequeued from different threads.
base::Lock reprocess_tasks_lock_;
@@ -159,14 +170,17 @@ class CAPTURE_EXPORT CameraAppDeviceImpl : public cros::mojom::CameraAppDevice {
base::flat_map<cros::mojom::StreamType, base::flat_set<uint32_t>>
stream_metadata_observer_ids_ GUARDED_BY(metadata_observers_lock_);
- // Those maps will be changed and used from different threads.
- base::Lock camera_event_observers_lock_;
- uint32_t next_camera_event_observer_id_
- GUARDED_BY(camera_event_observers_lock_);
+ uint32_t next_camera_event_observer_id_;
base::flat_map<uint32_t, mojo::Remote<cros::mojom::CameraEventObserver>>
- camera_event_observers_ GUARDED_BY(camera_event_observers_lock_);
+ camera_event_observers_;
+
+ // The weak pointers should be dereferenced and invalidated on camera device
+ // ipc thread.
+ base::WeakPtrFactory<CameraAppDeviceImpl> weak_ptr_factory_{this};
- std::unique_ptr<base::WeakPtrFactory<CameraAppDeviceImpl>> weak_ptr_factory_;
+ // The weak pointers should be dereferenced and invalidated on the Mojo
+ // thread.
+ base::WeakPtrFactory<CameraAppDeviceImpl> weak_ptr_factory_for_mojo_{this};
DISALLOW_COPY_AND_ASSIGN(CameraAppDeviceImpl);
};
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.cc b/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.cc
index 0cca22186c4..53defd23bee 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.cc
+++ b/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.cc
@@ -54,4 +54,27 @@ void CameraAppDeviceProviderImpl::IsSupported(IsSupportedCallback callback) {
bridge_->IsSupported(std::move(callback));
}
-} // namespace media \ No newline at end of file
+void CameraAppDeviceProviderImpl::SetMultipleStreamsEnabled(
+ const std::string& source_id,
+ bool enabled,
+ SetMultipleStreamsEnabledCallback callback) {
+ mapping_callback_.Run(
+ source_id,
+ media::BindToCurrentLoop(base::BindOnce(
+ &CameraAppDeviceProviderImpl::SetMultipleStreamsEnabledWithDeviceId,
+ weak_ptr_factory_.GetWeakPtr(), enabled, std::move(callback))));
+}
+
+void CameraAppDeviceProviderImpl::SetMultipleStreamsEnabledWithDeviceId(
+ bool enabled,
+ SetMultipleStreamsEnabledCallback callback,
+ const base::Optional<std::string>& device_id) {
+ if (!device_id.has_value()) {
+ std::move(callback).Run(false);
+ return;
+ }
+
+ bridge_->SetMultipleStreamsEnabled(*device_id, enabled, std::move(callback));
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h b/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h
index 615d6bdbaac..6bf036ef614 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h
+++ b/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h
@@ -32,12 +32,21 @@ class CAPTURE_EXPORT CameraAppDeviceProviderImpl
void GetCameraAppDevice(const std::string& source_id,
GetCameraAppDeviceCallback callback) override;
void IsSupported(IsSupportedCallback callback) override;
+ void SetMultipleStreamsEnabled(
+ const std::string& device_id,
+ bool enabled,
+ SetMultipleStreamsEnabledCallback callback) override;
private:
void GetCameraAppDeviceWithDeviceId(
GetCameraAppDeviceCallback callback,
const base::Optional<std::string>& device_id);
+ void SetMultipleStreamsEnabledWithDeviceId(
+ bool enable,
+ SetMultipleStreamsEnabledCallback callback,
+ const base::Optional<std::string>& device_id);
+
mojo::Remote<cros::mojom::CameraAppDeviceBridge> bridge_;
DeviceIdMappingCallback mapping_callback_;
@@ -51,4 +60,4 @@ class CAPTURE_EXPORT CameraAppDeviceProviderImpl
} // namespace media
-#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_PROVIDER_IMPL_H_ \ No newline at end of file
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_PROVIDER_IMPL_H_
diff --git a/chromium/media/capture/video/chromeos/camera_device_context.cc b/chromium/media/capture/video/chromeos/camera_device_context.cc
index 2de1625b056..6f5f6599821 100644
--- a/chromium/media/capture/video/chromeos/camera_device_context.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_context.cc
@@ -153,4 +153,9 @@ bool CameraDeviceContext::ReserveVideoCaptureBufferFromPool(
return result == VideoCaptureDevice::Client::ReserveResult::kSucceeded;
}
+bool CameraDeviceContext::HasClient() {
+ base::AutoLock lock(client_lock_);
+ return !clients_.empty();
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.cc b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
index ab213906718..36164f2f071 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
@@ -11,6 +11,7 @@
#include <utility>
#include <vector>
+#include "ash/constants/ash_features.h"
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/no_destructor.h"
@@ -22,6 +23,7 @@
#include "media/capture/mojom/image_capture_types.h"
#include "media/capture/video/blob_utils.h"
#include "media/capture/video/chromeos/camera_3a_controller.h"
+#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_hal_delegate.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
@@ -210,6 +212,8 @@ StreamType StreamIdToStreamType(uint64_t stream_id) {
return StreamType::kYUVInput;
case 3:
return StreamType::kYUVOutput;
+ case 4:
+ return StreamType::kRecordingOutput;
default:
return StreamType::kUnknown;
}
@@ -225,6 +229,8 @@ std::string StreamTypeToString(StreamType stream_type) {
return std::string("StreamType::kYUVInput");
case StreamType::kYUVOutput:
return std::string("StreamType::kYUVOutput");
+ case StreamType::kRecordingOutput:
+ return std::string("StreamType::kRecordingOutput");
default:
return std::string("Unknown StreamType value: ") +
base::NumberToString(static_cast<int32_t>(stream_type));
@@ -270,19 +276,15 @@ ResultMetadata::~ResultMetadata() = default;
CameraDeviceDelegate::CameraDeviceDelegate(
VideoCaptureDeviceDescriptor device_descriptor,
scoped_refptr<CameraHalDelegate> camera_hal_delegate,
- scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
- CameraAppDeviceImpl* camera_app_device,
- ClientType client_type)
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
: device_descriptor_(device_descriptor),
camera_hal_delegate_(std::move(camera_hal_delegate)),
- ipc_task_runner_(std::move(ipc_task_runner)),
- camera_app_device_(camera_app_device),
- client_type_(client_type) {}
+ ipc_task_runner_(std::move(ipc_task_runner)) {}
CameraDeviceDelegate::~CameraDeviceDelegate() = default;
void CameraDeviceDelegate::AllocateAndStart(
- const VideoCaptureParams& params,
+ const base::flat_map<ClientType, VideoCaptureParams>& params,
CameraDeviceContext* device_context) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
@@ -291,7 +293,10 @@ void CameraDeviceDelegate::AllocateAndStart(
is_set_awb_mode_ = false;
is_set_brightness_ = false;
is_set_contrast_ = false;
+ is_set_exposure_compensation_ = false;
is_set_exposure_time_ = false;
+ is_set_focus_distance_ = false;
+ is_set_iso_ = false;
is_set_pan_ = false;
is_set_saturation_ = false;
is_set_sharpness_ = false;
@@ -426,6 +431,13 @@ void CameraDeviceDelegate::SetPhotoOptions(
// Set the vendor tag into with given |name| and |value|. Returns true if
// the vendor tag is set and false otherwise.
+ auto to_uint8_vector = [](int32_t value) {
+ std::vector<uint8_t> temp(sizeof(int32_t));
+ auto* temp_ptr = reinterpret_cast<int32_t*>(temp.data());
+ *temp_ptr = value;
+ return temp;
+ };
+
auto set_vendor_int = [&](const std::string& name, bool has_field,
double value, bool is_set) {
const VendorTagInfo* info =
@@ -436,11 +448,8 @@ void CameraDeviceDelegate::SetPhotoOptions(
}
return false;
}
- std::vector<uint8_t> temp(sizeof(int32_t));
- auto* temp_ptr = reinterpret_cast<int32_t*>(temp.data());
- *temp_ptr = value;
request_manager_->SetRepeatingCaptureMetadata(info->tag, info->type, 1,
- std::move(temp));
+ to_uint8_vector(value));
return true;
};
is_set_brightness_ = set_vendor_int(kBrightness, settings->has_brightness,
@@ -524,6 +533,48 @@ void CameraDeviceDelegate::SetPhotoOptions(
is_set_exposure_time_ = false;
}
+ if (settings->has_focus_mode &&
+ settings->focus_mode == mojom::MeteringMode::MANUAL &&
+ settings->has_focus_distance) {
+ // The unit of settings is meter but it is diopter of android metadata.
+ float focus_distance_diopters_ = 1.0 / settings->focus_distance;
+ camera_3a_controller_->SetFocusDistance(false, focus_distance_diopters_);
+ is_set_focus_distance_ = true;
+ } else if (is_set_focus_distance_) {
+ camera_3a_controller_->SetFocusDistance(true, 0);
+ is_set_focus_distance_ = false;
+ }
+
+ if (settings->has_iso) {
+ request_manager_->SetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_SENSOR_SENSITIVITY,
+ cros::mojom::EntryType::TYPE_INT32, 1, to_uint8_vector(settings->iso));
+ is_set_iso_ = true;
+ if (!is_set_exposure_time_) {
+ LOG(WARNING) << "set iso doesn't work due to auto exposure time";
+ }
+ } else if (is_set_iso_) {
+ request_manager_->UnsetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_SENSOR_SENSITIVITY);
+ is_set_iso_ = false;
+ }
+
+ if (settings->has_exposure_compensation) {
+ int metadata_exposure_compensation =
+ std::round(settings->exposure_compensation / ae_compensation_step_);
+ request_manager_->SetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::
+ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+ cros::mojom::EntryType::TYPE_INT32, 1,
+ to_uint8_vector(metadata_exposure_compensation));
+ is_set_exposure_compensation_ = true;
+ } else if (is_set_exposure_compensation_) {
+ request_manager_->UnsetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::
+ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION);
+ is_set_exposure_compensation_ = false;
+ }
+
// If there is callback of SetPhotoOptions(), the streams might being
// reconfigured and we should notify them once the reconfiguration is done.
auto on_reconfigured_callback = base::BindOnce(
@@ -543,6 +594,18 @@ void CameraDeviceDelegate::SetPhotoOptions(
result_metadata_frame_number_for_photo_state_ = current_request_frame_number_;
}
+void CameraDeviceDelegate::ReconfigureStreams(
+ const base::flat_map<ClientType, VideoCaptureParams>& params) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ chrome_capture_params_ = params;
+ if (request_manager_) {
+ // ReconfigureStreams is used for video recording. It does not require
+ // photo.
+ request_manager_->StopPreview(base::BindOnce(
+ &CameraDeviceDelegate::OnFlushed, GetWeakPtr(), false, base::nullopt));
+ }
+}
+
void CameraDeviceDelegate::SetRotation(int rotation) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0);
@@ -570,11 +633,11 @@ bool CameraDeviceDelegate::MaybeReconfigureForPhotoStream(
gfx::Size new_blob_resolution(static_cast<int32_t>(settings->width),
static_cast<int32_t>(settings->height));
request_manager_->StopPreview(
- base::BindOnce(&CameraDeviceDelegate::OnFlushed, GetWeakPtr(),
+ base::BindOnce(&CameraDeviceDelegate::OnFlushed, GetWeakPtr(), true,
std::move(new_blob_resolution)));
} else {
request_manager_->StopPreview(base::BindOnce(
- &CameraDeviceDelegate::OnFlushed, GetWeakPtr(), base::nullopt));
+ &CameraDeviceDelegate::OnFlushed, GetWeakPtr(), true, base::nullopt));
}
return true;
}
@@ -595,7 +658,7 @@ void CameraDeviceDelegate::TakePhotoImpl() {
// Trigger the reconfigure process if it not yet triggered.
if (on_reconfigured_callbacks_.empty()) {
request_manager_->StopPreview(base::BindOnce(
- &CameraDeviceDelegate::OnFlushed, GetWeakPtr(), base::nullopt));
+ &CameraDeviceDelegate::OnFlushed, GetWeakPtr(), true, base::nullopt));
}
auto on_reconfigured_callback = base::BindOnce(
[](base::WeakPtr<Camera3AController> controller,
@@ -631,6 +694,7 @@ void CameraDeviceDelegate::OnMojoConnectionError() {
}
void CameraDeviceDelegate::OnFlushed(
+ bool require_photo,
base::Optional<gfx::Size> new_blob_resolution,
int32_t result) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
@@ -642,7 +706,7 @@ void CameraDeviceDelegate::OnFlushed(
return;
}
device_context_->SetState(CameraDeviceContext::State::kInitialized);
- ConfigureStreams(true, std::move(new_blob_resolution));
+ ConfigureStreams(require_photo, std::move(new_blob_resolution));
}
void CameraDeviceDelegate::OnClosed(int32_t result) {
@@ -710,13 +774,15 @@ void CameraDeviceDelegate::Initialize() {
DCHECK_EQ(device_context_->GetState(), CameraDeviceContext::State::kStarting);
mojo::PendingRemote<cros::mojom::Camera3CallbackOps> callback_ops;
+ // Assumes the buffer_type will be the same for all |chrome_capture_params|.
request_manager_ = std::make_unique<RequestManager>(
+ device_descriptor_.device_id,
callback_ops.InitWithNewPipeAndPassReceiver(),
std::make_unique<StreamCaptureInterfaceImpl>(GetWeakPtr()),
- device_context_, chrome_capture_params_.buffer_type,
+ device_context_,
+ chrome_capture_params_[ClientType::kPreviewClient].buffer_type,
std::make_unique<CameraBufferFactory>(),
- base::BindRepeating(&RotateAndBlobify), ipc_task_runner_,
- camera_app_device_, client_type_);
+ base::BindRepeating(&RotateAndBlobify), ipc_task_runner_);
camera_3a_controller_ = std::make_unique<Camera3AController>(
static_metadata_, request_manager_.get(), ipc_task_runner_);
device_ops_->Initialize(
@@ -744,10 +810,13 @@ void CameraDeviceDelegate::OnInitialized(int32_t result) {
}
device_context_->SetState(CameraDeviceContext::State::kInitialized);
bool require_photo = [&] {
- if (camera_app_device_ == nullptr) {
+ auto camera_app_device =
+ CameraAppDeviceBridgeImpl::GetInstance()->GetWeakCameraAppDevice(
+ device_descriptor_.device_id);
+ if (!camera_app_device) {
return false;
}
- auto capture_intent = camera_app_device_->GetCaptureIntent();
+ auto capture_intent = camera_app_device->GetCaptureIntent();
switch (capture_intent) {
case cros::mojom::CaptureIntent::DEFAULT:
return false;
@@ -770,27 +839,33 @@ void CameraDeviceDelegate::ConfigureStreams(
DCHECK_EQ(device_context_->GetState(),
CameraDeviceContext::State::kInitialized);
- // Set up context for preview stream.
- cros::mojom::Camera3StreamPtr preview_stream =
- cros::mojom::Camera3Stream::New();
- preview_stream->id = static_cast<uint64_t>(StreamType::kPreviewOutput);
- preview_stream->stream_type =
- cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
- preview_stream->width =
- chrome_capture_params_.requested_format.frame_size.width();
- preview_stream->height =
- chrome_capture_params_.requested_format.frame_size.height();
- preview_stream->format =
- cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888;
- preview_stream->usage = cros::mojom::GRALLOC_USAGE_HW_COMPOSER |
- cros::mojom::GRALLOC_USAGE_HW_VIDEO_ENCODER;
- preview_stream->data_space = 0;
- preview_stream->rotation =
- cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
-
cros::mojom::Camera3StreamConfigurationPtr stream_config =
cros::mojom::Camera3StreamConfiguration::New();
- stream_config->streams.push_back(std::move(preview_stream));
+ for (const auto& param : chrome_capture_params_) {
+ // Set up context for preview stream and record stream.
+ cros::mojom::Camera3StreamPtr stream = cros::mojom::Camera3Stream::New();
+ StreamType stream_type = (param.first == ClientType::kPreviewClient)
+ ? StreamType::kPreviewOutput
+ : StreamType::kRecordingOutput;
+ // TODO(henryhsu): PreviewClient should remove HW_VIDEO_ENCODER usage when
+ // multiple streams enabled.
+ auto usage = (param.first == ClientType::kPreviewClient)
+ ? (cros::mojom::GRALLOC_USAGE_HW_COMPOSER |
+ cros::mojom::GRALLOC_USAGE_HW_VIDEO_ENCODER)
+ : cros::mojom::GRALLOC_USAGE_HW_VIDEO_ENCODER;
+ stream->id = static_cast<uint64_t>(stream_type);
+ stream->stream_type = cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
+ stream->width = param.second.requested_format.frame_size.width();
+ stream->height = param.second.requested_format.frame_size.height();
+ stream->format =
+ cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888;
+ stream->usage = usage;
+ stream->data_space = 0;
+ stream->rotation =
+ cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
+
+ stream_config->streams.push_back(std::move(stream));
+ }
// Set up context for still capture stream. We set still capture stream to the
// JPEG stream configuration with maximum supported resolution.
@@ -898,22 +973,12 @@ void CameraDeviceDelegate::OnConfiguredStreams(
return;
}
- bool zero_shutter_lag_enabled = false;
- for (const auto& stream : updated_config->streams) {
- if (stream->usage & cros::mojom::GRALLOC_USAGE_ZERO_SHUTTER_LAG_ENABLED) {
- zero_shutter_lag_enabled = true;
- break;
- }
- }
- camera_3a_controller_->UpdateZeroShutterLagAvailability(
- zero_shutter_lag_enabled);
-
current_blob_resolution_.SetSize(blob_resolution.width(),
blob_resolution.height());
- request_manager_->SetUpStreamsAndBuffers(
- chrome_capture_params_.requested_format, static_metadata_,
- std::move(updated_config->streams));
+ request_manager_->SetUpStreamsAndBuffers(chrome_capture_params_,
+ static_metadata_,
+ std::move(updated_config->streams));
device_context_->SetState(CameraDeviceContext::State::kStreamConfigured);
// Kick off the preview stream.
@@ -1001,9 +1066,12 @@ void CameraDeviceDelegate::ConstructDefaultRequestSettings(
if (stream_type == StreamType::kPreviewOutput) {
// CCA uses the same stream for preview and video recording. Choose proper
// template here so the underlying camera HAL can set 3A tuning accordingly.
+ auto camera_app_device =
+ CameraAppDeviceBridgeImpl::GetInstance()->GetWeakCameraAppDevice(
+ device_descriptor_.device_id);
auto request_template =
- camera_app_device_ && camera_app_device_->GetCaptureIntent() ==
- cros::mojom::CaptureIntent::VIDEO_RECORD
+ camera_app_device && camera_app_device->GetCaptureIntent() ==
+ cros::mojom::CaptureIntent::VIDEO_RECORD
? cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_VIDEO_RECORD
: cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW;
device_ops_->ConstructDefaultRequestSettings(
@@ -1040,51 +1108,29 @@ void CameraDeviceDelegate::OnConstructedDefaultPreviewRequestSettings(
return;
}
- if (camera_app_device_) {
- OnGotFpsRange(std::move(settings), camera_app_device_->GetFpsRange());
- } else {
- OnGotFpsRange(std::move(settings), {});
- }
-}
-
-void CameraDeviceDelegate::OnConstructedDefaultStillCaptureRequestSettings(
- cros::mojom::CameraMetadataPtr settings) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- while (!take_photo_callbacks_.empty()) {
- auto take_photo_callback = base::BindOnce(
- &TakePhotoCallbackBundle, std::move(take_photo_callbacks_.front()),
- base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
- camera_3a_controller_->GetWeakPtr()));
- if (camera_app_device_) {
- camera_app_device_->ConsumeReprocessOptions(
- std::move(take_photo_callback),
- media::BindToCurrentLoop(base::BindOnce(
- &RequestManager::TakePhoto, request_manager_->GetWeakPtr(),
- settings.Clone())));
- } else {
- request_manager_->TakePhoto(
- settings.Clone(), CameraAppDeviceImpl::GetSingleShotReprocessOptions(
- std::move(take_photo_callback)));
- }
- take_photo_callbacks_.pop();
- }
-}
-
-void CameraDeviceDelegate::OnGotFpsRange(
- cros::mojom::CameraMetadataPtr settings,
- base::Optional<gfx::Range> specified_fps_range) {
device_context_->SetState(CameraDeviceContext::State::kCapturing);
camera_3a_controller_->SetAutoFocusModeForStillCapture();
- if (specified_fps_range.has_value()) {
+
+ auto camera_app_device =
+ CameraAppDeviceBridgeImpl::GetInstance()->GetWeakCameraAppDevice(
+ device_descriptor_.device_id);
+ auto specified_fps_range =
+ camera_app_device ? camera_app_device->GetFpsRange() : base::nullopt;
+ if (specified_fps_range) {
SetFpsRangeInMetadata(&settings, specified_fps_range->GetMin(),
specified_fps_range->GetMax());
} else {
+ // Assumes the frame_rate will be the same for all |chrome_capture_params|.
int32_t requested_frame_rate =
- std::round(chrome_capture_params_.requested_format.frame_rate);
+ std::round(chrome_capture_params_[ClientType::kPreviewClient]
+ .requested_format.frame_rate);
bool prefer_constant_frame_rate =
- camera_app_device_ && camera_app_device_->GetCaptureIntent() ==
- cros::mojom::CaptureIntent::VIDEO_RECORD;
+ base::FeatureList::IsEnabled(
+ chromeos::features::kPreferConstantFrameRate) ||
+ (camera_app_device && camera_app_device->GetCaptureIntent() ==
+ cros::mojom::CaptureIntent::VIDEO_RECORD);
int32_t target_min, target_max;
std::tie(target_min, target_max) = GetTargetFrameRateRange(
static_metadata_, requested_frame_rate, prefer_constant_frame_rate);
@@ -1110,8 +1156,38 @@ void CameraDeviceDelegate::OnGotFpsRange(
}
}
+void CameraDeviceDelegate::OnConstructedDefaultStillCaptureRequestSettings(
+ cros::mojom::CameraMetadataPtr settings) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+ auto camera_app_device =
+ CameraAppDeviceBridgeImpl::GetInstance()->GetWeakCameraAppDevice(
+ device_descriptor_.device_id);
+
+ while (!take_photo_callbacks_.empty()) {
+ auto take_photo_callback = base::BindOnce(
+ &TakePhotoCallbackBundle, std::move(take_photo_callbacks_.front()),
+ base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
+ camera_3a_controller_->GetWeakPtr()));
+ if (camera_app_device) {
+ camera_app_device->ConsumeReprocessOptions(
+ std::move(take_photo_callback),
+ media::BindToCurrentLoop(base::BindOnce(
+ &RequestManager::TakePhoto, request_manager_->GetWeakPtr(),
+ settings.Clone())));
+ } else {
+ request_manager_->TakePhoto(
+ settings.Clone(), CameraAppDeviceImpl::GetSingleShotReprocessOptions(
+ std::move(take_photo_callback)));
+ }
+ take_photo_callbacks_.pop();
+ }
+}
+
gfx::Size CameraDeviceDelegate::GetBlobResolution(
base::Optional<gfx::Size> new_blob_resolution) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
std::vector<gfx::Size> blob_resolutions;
GetStreamResolutions(
static_metadata_, cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT,
@@ -1128,9 +1204,12 @@ gfx::Size CameraDeviceDelegate::GetBlobResolution(
return *new_blob_resolution;
}
- if (camera_app_device_) {
+ auto camera_app_device =
+ CameraAppDeviceBridgeImpl::GetInstance()->GetWeakCameraAppDevice(
+ device_descriptor_.device_id);
+ if (camera_app_device) {
auto specified_capture_resolution =
- camera_app_device_->GetStillCaptureResolution();
+ camera_app_device->GetStillCaptureResolution();
if (!specified_capture_resolution.IsEmpty() &&
base::Contains(blob_resolutions, specified_capture_resolution)) {
return specified_capture_resolution;
@@ -1299,6 +1378,33 @@ void CameraDeviceDelegate::OnResultMetadataAvailable(
if (awb_mode.size() == 1)
result_metadata_.awb_mode = awb_mode[0];
+ result_metadata_.af_mode.reset();
+ auto af_mode = GetMetadataEntryAsSpan<uint8_t>(
+ result_metadata, cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE);
+ if (af_mode.size() == 1)
+ result_metadata_.af_mode = af_mode[0];
+
+ result_metadata_.focus_distance.reset();
+ auto focus_distance = GetMetadataEntryAsSpan<float>(
+ result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_LENS_FOCUS_DISTANCE);
+ if (focus_distance.size() == 1)
+ result_metadata_.focus_distance = focus_distance[0];
+
+ result_metadata_.sensitivity.reset();
+ auto sensitivity = GetMetadataEntryAsSpan<int32_t>(
+ result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_SENSOR_SENSITIVITY);
+ if (sensitivity.size() == 1)
+ result_metadata_.sensitivity = sensitivity[0];
+
+ result_metadata_.ae_compensation.reset();
+ auto ae_compensation = GetMetadataEntryAsSpan<int32_t>(
+ result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION);
+ if (ae_compensation.size() == 1)
+ result_metadata_.ae_compensation = ae_compensation[0];
+
result_metadata_frame_number_ = frame_number;
// We need to wait the new result metadata for new settings.
if (result_metadata_frame_number_ >
@@ -1470,6 +1576,96 @@ void CameraDeviceDelegate::DoGetPhotoState(
result_metadata_.exposure_time.value() / (100 * kMicroToNano);
}
+ auto af_available_modes = GetMetadataEntryAsSpan<uint8_t>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES);
+ bool support_manual_focus_distance = false;
+ if (af_available_modes.size() > 1 && result_metadata_.af_mode) {
+ support_manual_focus_distance = base::Contains(
+ af_available_modes,
+ static_cast<uint8_t>(
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF));
+ }
+
+ auto minimum_focus_distance = GetMetadataEntryAsSpan<float>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE);
+ // If the lens is fixed-focus, minimum_focus_distance will be 0.
+ if (support_manual_focus_distance && minimum_focus_distance.size() == 1 &&
+ minimum_focus_distance[0] != 0 && result_metadata_.focus_distance) {
+ photo_state->supported_focus_modes.push_back(mojom::MeteringMode::MANUAL);
+ photo_state->supported_focus_modes.push_back(
+ mojom::MeteringMode::CONTINUOUS);
+ if (result_metadata_.af_mode ==
+ static_cast<uint8_t>(
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF))
+ photo_state->current_focus_mode = mojom::MeteringMode::MANUAL;
+ else
+ photo_state->current_focus_mode = mojom::MeteringMode::CONTINUOUS;
+
+ // The unit of photo_state->focus_distance is meter and from metadata is
+ // diopter.
+ photo_state->focus_distance->min =
+ std::roundf(100.0 / minimum_focus_distance[0]) / 100.0;
+ photo_state->focus_distance->max = std::numeric_limits<double>::infinity();
+ photo_state->focus_distance->step = 0.01;
+ if (result_metadata_.focus_distance.value() == 0) {
+ photo_state->focus_distance->current =
+ std::numeric_limits<double>::infinity();
+ } else {
+ // We want to make sure |current| is a possible value of
+ // |min| + |steps(0.01)|*X. The minimum can be divided by step(0.01). So
+ // we only need to round the value less than 0.01.
+ double meters = 1.0 / result_metadata_.focus_distance.value();
+ photo_state->focus_distance->current = std::roundf(meters * 100) / 100.0;
+ }
+ }
+
+ auto sensitivity_range = GetMetadataEntryAsSpan<int32_t>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
+ if (sensitivity_range.size() == 2 && result_metadata_.sensitivity) {
+ photo_state->iso->min = sensitivity_range[0];
+ photo_state->iso->max = sensitivity_range[1];
+ photo_state->iso->step = 1;
+ photo_state->iso->current = result_metadata_.sensitivity.value();
+ }
+
+ auto ae_compensation_range = GetMetadataEntryAsSpan<int32_t>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_COMPENSATION_RANGE);
+ ae_compensation_step_ = 0.0;
+ if (ae_compensation_range.size() == 2) {
+ if (ae_compensation_range[0] != 0 || ae_compensation_range[1] != 0) {
+ auto ae_compensation_step = GetMetadataEntryAsSpan<Rational>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_COMPENSATION_STEP);
+ if (ae_compensation_step.size() == 1) {
+ if (ae_compensation_step[0].numerator == 0 ||
+ ae_compensation_step[0].denominator == 0) {
+ LOG(WARNING) << "AE_COMPENSATION_STEP: numerator:"
+ << ae_compensation_step[0].numerator << ", denominator:"
+ << ae_compensation_step[0].denominator;
+ } else {
+ ae_compensation_step_ =
+ static_cast<float>(ae_compensation_step[0].numerator) /
+ static_cast<float>(ae_compensation_step[0].denominator);
+ photo_state->exposure_compensation->min =
+ ae_compensation_range[0] * ae_compensation_step_;
+ photo_state->exposure_compensation->max =
+ ae_compensation_range[1] * ae_compensation_step_;
+ photo_state->exposure_compensation->step = ae_compensation_step_;
+ if (result_metadata_.ae_compensation)
+ photo_state->exposure_compensation->current =
+ result_metadata_.ae_compensation.value() *
+ ae_compensation_step_;
+ else
+ photo_state->exposure_compensation->current = 0;
+ }
+ }
+ }
+ }
+
std::move(callback).Run(std::move(photo_state));
}
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.h b/chromium/media/capture/video/chromeos/camera_device_delegate.h
index 01a37cfd67e..3f821f96b54 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.h
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.h
@@ -8,6 +8,7 @@
#include <memory>
#include <queue>
+#include "base/containers/flat_map.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
@@ -34,7 +35,19 @@ enum class StreamType : uint64_t {
kJpegOutput = 1,
kYUVInput = 2,
kYUVOutput = 3,
- kUnknown,
+ kRecordingOutput = 4,
+ kUnknown = 5,
+};
+
+// A map to know that each StreamType belongs to which ClientType.
+// The index is StreamType value.
+constexpr std::array<ClientType, static_cast<int>(StreamType::kUnknown)>
+ kStreamClientTypeMap = {
+ ClientType::kPreviewClient, // kPreviewOutput
+ ClientType::kPreviewClient, // kJpegOutput
+ ClientType::kPreviewClient, // kYUVInput
+ ClientType::kPreviewClient, // kYUVOutput
+ ClientType::kVideoClient, // kRecordingOutput
};
// The metadata might be large so clone a whole metadata might be relatively
@@ -44,12 +57,16 @@ struct ResultMetadata {
~ResultMetadata();
base::Optional<uint8_t> ae_mode;
+ base::Optional<int32_t> ae_compensation;
+ base::Optional<uint8_t> af_mode;
base::Optional<uint8_t> awb_mode;
base::Optional<int32_t> brightness;
base::Optional<int32_t> contrast;
base::Optional<int64_t> exposure_time;
+ base::Optional<float> focus_distance;
base::Optional<int32_t> pan;
base::Optional<int32_t> saturation;
+ base::Optional<int32_t> sensitivity;
base::Optional<int32_t> sharpness;
base::Optional<int32_t> tilt;
base::Optional<int32_t> zoom;
@@ -92,27 +109,34 @@ class CAPTURE_EXPORT StreamCaptureInterface {
// AllocateAndStart of VideoCaptureDeviceArcChromeOS runs on. All the methods
// in CameraDeviceDelegate run on |ipc_task_runner_| and hence all the
// access to member variables is sequenced.
+//
+// CameraDeviceDelegate supports multiple clients.
+// It will use the first client for preview stream and photo stream and use
+// second client for recording stream.
+// The second client will be a virtual camera device which is only used in CCA.
class CAPTURE_EXPORT CameraDeviceDelegate final
: public CaptureMetadataDispatcher::ResultMetadataObserver {
public:
CameraDeviceDelegate(
VideoCaptureDeviceDescriptor device_descriptor,
scoped_refptr<CameraHalDelegate> camera_hal_delegate,
- scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
- CameraAppDeviceImpl* camera_app_device,
- ClientType client_type);
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner);
~CameraDeviceDelegate() final;
// Delegation methods for the VideoCaptureDevice interface.
- void AllocateAndStart(const VideoCaptureParams& params,
- CameraDeviceContext* device_context);
+ void AllocateAndStart(
+ const base::flat_map<ClientType, VideoCaptureParams>& params,
+ CameraDeviceContext* device_context);
void StopAndDeAllocate(base::OnceClosure device_close_callback);
void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback);
void GetPhotoState(VideoCaptureDevice::GetPhotoStateCallback callback);
void SetPhotoOptions(mojom::PhotoSettingsPtr settings,
VideoCaptureDevice::SetPhotoOptionsCallback callback);
+ void ReconfigureStreams(
+ const base::flat_map<ClientType, VideoCaptureParams>& params);
+
// Sets the frame rotation angle in |rotation_|. |rotation_| is clockwise
// rotation in degrees, and is passed to |client_| along with the captured
// frames.
@@ -134,8 +158,10 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
// Mojo connection error handler.
void OnMojoConnectionError();
- // Reconfigure streams for picture taking.
- void OnFlushed(base::Optional<gfx::Size> new_blob_resolution, int32_t result);
+ // Reconfigure streams for picture taking and recording.
+ void OnFlushed(bool require_photo,
+ base::Optional<gfx::Size> new_blob_resolution,
+ int32_t result);
// Callback method for the Close Mojo IPC call. This method resets the Mojo
// connection and closes the camera device.
@@ -188,9 +214,6 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
void OnConstructedDefaultStillCaptureRequestSettings(
cros::mojom::CameraMetadataPtr settings);
- void OnGotFpsRange(cros::mojom::CameraMetadataPtr settings,
- base::Optional<gfx::Range> specified_fps_range);
-
gfx::Size GetBlobResolution(base::Optional<gfx::Size> new_blob_resolution);
// StreamCaptureInterface implementations. These methods are called by
@@ -222,7 +245,8 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
const scoped_refptr<CameraHalDelegate> camera_hal_delegate_;
- VideoCaptureParams chrome_capture_params_;
+ // Map client type to video capture parameter.
+ base::flat_map<ClientType, VideoCaptureParams> chrome_capture_params_;
CameraDeviceContext* device_context_;
@@ -246,13 +270,16 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
std::queue<base::OnceClosure> on_reconfigured_callbacks_;
- CameraAppDeviceImpl* camera_app_device_; // Weak.
+ base::WeakPtr<CameraAppDeviceImpl> camera_app_device_;
// States of SetPhotoOptions
bool is_set_awb_mode_;
bool is_set_brightness_;
bool is_set_contrast_;
+ bool is_set_exposure_compensation_;
bool is_set_exposure_time_;
+ bool is_set_focus_distance_;
+ bool is_set_iso_;
bool is_set_pan_;
bool is_set_saturation_;
bool is_set_sharpness_;
@@ -261,6 +288,8 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
std::vector<base::OnceClosure> get_photo_state_queue_;
bool use_digital_zoom_;
+ float ae_compensation_step_;
+
// We reply GetPhotoState when |result_metadata_frame_number_| >
// |result_metadata_frame_number_for_photo_state_|. Otherwise javascript API
// getSettings() will get non-updated settings.
@@ -272,8 +301,6 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
ResultMetadata result_metadata_;
gfx::Rect active_array_size_;
- ClientType client_type_;
-
base::WeakPtrFactory<CameraDeviceDelegate> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(CameraDeviceDelegate);
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
index 01ed4b806da..1c1bb4ad006 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
@@ -103,6 +103,15 @@ class MockCameraDevice : public cros::mojom::Camera3DeviceOps {
void Close(CloseCallback callback) override { DoClose(callback); }
MOCK_METHOD1(DoClose, void(CloseCallback& callback));
+ void ConfigureStreamsAndGetAllocatedBuffers(
+ cros::mojom::Camera3StreamConfigurationPtr config,
+ ConfigureStreamsAndGetAllocatedBuffersCallback callback) override {
+ DoConfigureStreamsAndGetAllocatedBuffers(config, callback);
+ }
+ MOCK_METHOD2(DoConfigureStreamsAndGetAllocatedBuffers,
+ void(cros::mojom::Camera3StreamConfigurationPtr& config,
+ ConfigureStreamsAndGetAllocatedBuffersCallback& callback));
+
private:
DISALLOW_COPY_AND_ASSIGN(MockCameraDevice);
};
@@ -111,11 +120,13 @@ constexpr int32_t kJpegMaxBufferSize = 1024;
constexpr size_t kDefaultWidth = 1280, kDefaultHeight = 720;
constexpr int32_t kDefaultMinFrameRate = 1, kDefaultMaxFrameRate = 30;
-VideoCaptureParams GetDefaultCaptureParams() {
+base::flat_map<ClientType, VideoCaptureParams> GetDefaultCaptureParams() {
VideoCaptureParams params;
+ base::flat_map<ClientType, VideoCaptureParams> capture_params;
params.requested_format = {gfx::Size(kDefaultWidth, kDefaultHeight),
float{kDefaultMaxFrameRate}, PIXEL_FORMAT_I420};
- return params;
+ capture_params[ClientType::kPreviewClient] = params;
+ return capture_params;
}
} // namespace
@@ -162,7 +173,7 @@ class CameraDeviceDelegateTest : public ::testing::Test {
camera_device_delegate_ = std::make_unique<CameraDeviceDelegate>(
devices_info[0].descriptor, camera_hal_delegate_,
- device_delegate_thread_.task_runner(), nullptr, client_type_);
+ device_delegate_thread_.task_runner());
}
void GetNumberOfFakeCameras(
@@ -354,7 +365,7 @@ class CameraDeviceDelegateTest : public ::testing::Test {
.Times(1)
.WillOnce(
Invoke(this, &CameraDeviceDelegateTest::GetNumberOfFakeCameras));
- EXPECT_CALL(mock_camera_module_, DoSetCallbacks(_, _)).Times(1);
+ EXPECT_CALL(mock_camera_module_, DoSetCallbacksAssociated(_, _)).Times(1);
EXPECT_CALL(mock_camera_module_, DoGetVendorTagOps(_, _))
.Times(1)
.WillOnce(Invoke(this, &CameraDeviceDelegateTest::GetFakeVendorTagOps));
@@ -381,12 +392,11 @@ class CameraDeviceDelegateTest : public ::testing::Test {
.Times(1)
.WillOnce(
Invoke(this, &CameraDeviceDelegateTest::ConfigureFakeStreams));
- EXPECT_CALL(
- mock_gpu_memory_buffer_manager_,
- CreateGpuMemoryBuffer(
- _, gfx::BufferFormat::YUV_420_BIPLANAR,
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
- gpu::kNullSurfaceHandle))
+ EXPECT_CALL(mock_gpu_memory_buffer_manager_,
+ CreateGpuMemoryBuffer(
+ _, gfx::BufferFormat::YUV_420_BIPLANAR,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gpu::kNullSurfaceHandle))
.Times(1)
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
CreateFakeGpuMemoryBuffer));
@@ -398,13 +408,12 @@ class CameraDeviceDelegateTest : public ::testing::Test {
.Times(AtMost(1))
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
CreateFakeGpuMemoryBuffer));
- EXPECT_CALL(
- mock_gpu_memory_buffer_manager_,
- CreateGpuMemoryBuffer(
- gfx::Size(kDefaultWidth, kDefaultHeight),
- gfx::BufferFormat::YUV_420_BIPLANAR,
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
- gpu::kNullSurfaceHandle))
+ EXPECT_CALL(mock_gpu_memory_buffer_manager_,
+ CreateGpuMemoryBuffer(
+ gfx::Size(kDefaultWidth, kDefaultHeight),
+ gfx::BufferFormat::YUV_420_BIPLANAR,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gpu::kNullSurfaceHandle))
.Times(1)
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
CreateFakeGpuMemoryBuffer));
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
index a0bee569637..3ce1c4c0583 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
@@ -19,11 +19,14 @@
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_piece.h"
#include "base/strings/string_split.h"
+#include "base/strings/string_util.h"
#include "base/system/system_monitor.h"
-#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
+#include "base/unguessable_token.h"
+#include "components/device_event_log/device_event_log.h"
+#include "media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
-#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
+#include "media/capture/video/chromeos/video_capture_device_chromeos_delegate.h"
#include "media/capture/video/chromeos/video_capture_device_chromeos_halv3.h"
namespace media {
@@ -31,6 +34,7 @@ namespace media {
namespace {
constexpr int32_t kDefaultFps = 30;
+constexpr char kVirtualPrefix[] = "VIRTUAL_";
constexpr base::TimeDelta kEventWaitTimeoutSecs =
base::TimeDelta::FromSeconds(1);
@@ -38,8 +42,11 @@ constexpr base::TimeDelta kEventWaitTimeoutSecs =
class LocalCameraClientObserver : public CameraClientObserver {
public:
explicit LocalCameraClientObserver(
- scoped_refptr<CameraHalDelegate> camera_hal_delegate)
- : camera_hal_delegate_(std::move(camera_hal_delegate)) {}
+ scoped_refptr<CameraHalDelegate> camera_hal_delegate,
+ cros::mojom::CameraClientType type,
+ base::UnguessableToken auth_token)
+ : CameraClientObserver(type, std::move(auth_token)),
+ camera_hal_delegate_(std::move(camera_hal_delegate)) {}
void OnChannelCreated(
mojo::PendingRemote<cros::mojom::CameraModule> camera_module) override {
@@ -117,7 +124,8 @@ base::flat_set<int32_t> GetAvailableFramerates(
CameraHalDelegate::CameraHalDelegate(
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
- : camera_module_has_been_set_(
+ : authenticated_(false),
+ camera_module_has_been_set_(
base::WaitableEvent::ResetPolicy::MANUAL,
base::WaitableEvent::InitialState::NOT_SIGNALED),
builtin_camera_info_updated_(
@@ -138,9 +146,27 @@ CameraHalDelegate::CameraHalDelegate(
CameraHalDelegate::~CameraHalDelegate() = default;
-void CameraHalDelegate::RegisterCameraClient() {
- CameraHalDispatcherImpl::GetInstance()->AddClientObserver(
- std::make_unique<LocalCameraClientObserver>(this));
+bool CameraHalDelegate::RegisterCameraClient() {
+ auto* dispatcher = CameraHalDispatcherImpl::GetInstance();
+ auto type = cros::mojom::CameraClientType::CHROME;
+ dispatcher->AddClientObserver(
+ std::make_unique<LocalCameraClientObserver>(
+ this, type, dispatcher->GetTokenForTrustedClient(type)),
+ base::BindOnce(&CameraHalDelegate::OnRegisteredCameraHalClient,
+ base::Unretained(this)));
+ camera_hal_client_registered_.Wait();
+ return authenticated_;
+}
+
+void CameraHalDelegate::OnRegisteredCameraHalClient(int32_t result) {
+ if (result != 0) {
+ LOG(ERROR) << "Failed to register camera HAL client";
+ camera_hal_client_registered_.Signal();
+ return;
+ }
+ CAMERA_LOG(EVENT) << "Registered camera HAL client";
+ authenticated_ = true;
+ camera_hal_client_registered_.Signal();
}
void CameraHalDelegate::SetCameraModule(
@@ -158,8 +184,7 @@ void CameraHalDelegate::Reset() {
std::unique_ptr<VideoCaptureDevice> CameraHalDelegate::CreateDevice(
scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer,
- const VideoCaptureDeviceDescriptor& device_descriptor,
- CameraAppDeviceBridgeImpl* camera_app_device_bridge) {
+ const VideoCaptureDeviceDescriptor& device_descriptor) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!UpdateBuiltInCameraInfo()) {
return nullptr;
@@ -170,35 +195,17 @@ std::unique_ptr<VideoCaptureDevice> CameraHalDelegate::CreateDevice(
return nullptr;
}
- if (camera_app_device_bridge) {
- auto* camera_app_device = camera_app_device_bridge->GetCameraAppDevice(
- device_descriptor.device_id);
- // Since the cleanup callback will be triggered when VideoCaptureDevice died
- // and |camera_app_device_bridge| is actually owned by
- // VideoCaptureServiceImpl, it should be safe to assume
- // |camera_app_device_bridge| is still valid here.
- auto cleanup_callback = base::BindOnce(
- [](const std::string& device_id, CameraAppDeviceBridgeImpl* bridge) {
- bridge->OnDeviceClosed(device_id);
- },
- device_descriptor.device_id, camera_app_device_bridge);
- return std::make_unique<VideoCaptureDeviceChromeOSHalv3>(
- std::move(task_runner_for_screen_observer), device_descriptor, this,
- camera_app_device, std::move(cleanup_callback));
- } else {
- return std::make_unique<VideoCaptureDeviceChromeOSHalv3>(
- std::move(task_runner_for_screen_observer), device_descriptor, this,
- nullptr, base::DoNothing());
- }
+ auto* delegate =
+ GetVCDDelegate(task_runner_for_screen_observer, device_descriptor);
+ return std::make_unique<VideoCaptureDeviceChromeOSHalv3>(delegate,
+ device_descriptor);
}
void CameraHalDelegate::GetSupportedFormats(
- int camera_id,
+ const cros::mojom::CameraInfoPtr& camera_info,
VideoCaptureFormats* supported_formats) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- const cros::mojom::CameraInfoPtr& camera_info = camera_info_[camera_id];
-
base::flat_set<int32_t> candidate_fps_set =
GetAvailableFramerates(camera_info);
@@ -244,7 +251,7 @@ void CameraHalDelegate::GetSupportedFormats(
// There's no consumer information here to determine the buffer usage, so
// hard-code the usage that all the clients should be using.
constexpr gfx::BufferUsage kClientBufferUsage =
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE;
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE;
const ChromiumPixelFormat cr_format =
camera_buffer_factory_->ResolveStreamBufferFormat(hal_format,
kClientBufferUsage);
@@ -257,8 +264,9 @@ void CameraHalDelegate::GetSupportedFormats(
continue;
}
- VLOG(1) << "Supported format: " << width << "x" << height
- << " fps=" << fps << " format=" << cr_format.video_format;
+ CAMERA_LOG(EVENT) << "Supported format: " << width << "x" << height
+ << " fps=" << fps
+ << " format=" << cr_format.video_format;
supported_formats->emplace_back(gfx::Size(width, height), fps,
cr_format.video_format);
}
@@ -287,6 +295,7 @@ void CameraHalDelegate::GetDevicesInfo(
{
base::AutoLock info_lock(camera_info_lock_);
base::AutoLock id_map_lock(device_id_to_camera_id_lock_);
+ base::AutoLock virtual_lock(enable_virtual_device_lock_);
for (const auto& it : camera_info_) {
int camera_id = it.first;
const cros::mojom::CameraInfoPtr& camera_info = it.second;
@@ -334,6 +343,12 @@ void CameraHalDelegate::GetDevicesInfo(
// Mojo validates the input parameters for us so we don't need to
// worry about malformed values.
}
+ case cros::mojom::CameraFacing::CAMERA_FACING_VIRTUAL_BACK:
+ case cros::mojom::CameraFacing::CAMERA_FACING_VIRTUAL_FRONT:
+ case cros::mojom::CameraFacing::CAMERA_FACING_VIRTUAL_EXTERNAL:
+ // |camera_info_| should not have these facing types.
+ LOG(ERROR) << "Invalid facing type: " << camera_info->facing;
+ break;
}
auto* vid = get_vendor_string("com.google.usb.vendorId");
auto* pid = get_vendor_string("com.google.usb.productId");
@@ -343,10 +358,22 @@ void CameraHalDelegate::GetDevicesInfo(
desc.set_control_support(GetControlSupport(camera_info));
device_id_to_camera_id_[desc.device_id] = camera_id;
devices_info.emplace_back(desc);
- GetSupportedFormats(camera_id, &devices_info.back().supported_formats);
+ GetSupportedFormats(camera_info_[camera_id],
+ &devices_info.back().supported_formats);
+
+ // Create a virtual device when multiple streams are enabled.
+ if (enable_virtual_device_[camera_id]) {
+ desc.facing = VideoFacingMode::MEDIA_VIDEO_FACING_NONE;
+ desc.device_id =
+ std::string(kVirtualPrefix) + base::NumberToString(camera_id);
+ desc.set_display_name("Virtual Camera");
+ device_id_to_camera_id_[desc.device_id] = camera_id;
+ devices_info.emplace_back(desc);
+ GetSupportedFormats(camera_info_[camera_id],
+ &devices_info.back().supported_formats);
+ }
}
}
-
// TODO(shik): Report external camera first when lid is closed.
// TODO(jcliang): Remove this after JS API supports query camera facing
// (http://crbug.com/543997).
@@ -404,7 +431,36 @@ cros::mojom::CameraInfoPtr CameraHalDelegate::GetCameraInfoFromDeviceId(
if (it == camera_info_.end()) {
return {};
}
- return it->second.Clone();
+ auto info = it->second.Clone();
+ if (base::StartsWith(device_id, std::string(kVirtualPrefix))) {
+ switch (it->second->facing) {
+ case cros::mojom::CameraFacing::CAMERA_FACING_BACK:
+ info->facing = cros::mojom::CameraFacing::CAMERA_FACING_VIRTUAL_BACK;
+ break;
+ case cros::mojom::CameraFacing::CAMERA_FACING_FRONT:
+ info->facing = cros::mojom::CameraFacing::CAMERA_FACING_VIRTUAL_FRONT;
+ break;
+ case cros::mojom::CameraFacing::CAMERA_FACING_EXTERNAL:
+ info->facing =
+ cros::mojom::CameraFacing::CAMERA_FACING_VIRTUAL_EXTERNAL;
+ break;
+ default:
+ break;
+ }
+ }
+ return info;
+}
+
+void CameraHalDelegate::EnableVirtualDevice(const std::string& device_id,
+ bool enable) {
+ if (base::StartsWith(device_id, std::string(kVirtualPrefix))) {
+ return;
+ }
+ auto camera_id = GetCameraIdFromDeviceId(device_id);
+ if (camera_id != -1) {
+ base::AutoLock lock(enable_virtual_device_lock_);
+ enable_virtual_device_[camera_id] = enable;
+ }
}
const VendorTagInfo* CameraHalDelegate::GetVendorTagInfoByName(
@@ -436,6 +492,27 @@ int CameraHalDelegate::GetCameraIdFromDeviceId(const std::string& device_id) {
return it->second;
}
+VideoCaptureDeviceChromeOSDelegate* CameraHalDelegate::GetVCDDelegate(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer,
+ const VideoCaptureDeviceDescriptor& device_descriptor) {
+ auto camera_id = GetCameraIdFromDeviceId(device_descriptor.device_id);
+ auto it = vcd_delegate_map_.find(camera_id);
+ if (it == vcd_delegate_map_.end() || it->second->HasDeviceClient() == 0) {
+ auto cleanup_callback = base::BindOnce(
+ [](int camera_id,
+ base::flat_map<int,
+ std::unique_ptr<VideoCaptureDeviceChromeOSDelegate>>*
+ vcd_delegate_map) { vcd_delegate_map->erase(camera_id); },
+ camera_id, &vcd_delegate_map_);
+ auto delegate = std::make_unique<VideoCaptureDeviceChromeOSDelegate>(
+ std::move(task_runner_for_screen_observer), device_descriptor, this,
+ std::move(cleanup_callback));
+ vcd_delegate_map_[camera_id] = std::move(delegate);
+ return vcd_delegate_map_[camera_id].get();
+ }
+ return it->second.get();
+}
+
void CameraHalDelegate::SetCameraModuleOnIpcThread(
mojo::PendingRemote<cros::mojom::CameraModule> camera_module) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
@@ -462,6 +539,7 @@ void CameraHalDelegate::ResetMojoInterfaceOnIpcThread() {
external_camera_info_updated_.Signal();
// Clear all cached camera info, especially external cameras.
+ base::AutoLock lock(camera_info_lock_);
camera_info_.clear();
pending_external_camera_info_.clear();
}
@@ -495,18 +573,20 @@ void CameraHalDelegate::UpdateBuiltInCameraInfoOnIpcThread() {
void CameraHalDelegate::OnGotNumberOfCamerasOnIpcThread(int32_t num_cameras) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+ base::AutoLock lock(camera_info_lock_);
if (num_cameras < 0) {
builtin_camera_info_updated_.Signal();
LOG(ERROR) << "Failed to get number of cameras: " << num_cameras;
return;
}
- VLOG(1) << "Number of built-in cameras: " << num_cameras;
+ CAMERA_LOG(EVENT) << "Number of built-in cameras: " << num_cameras;
num_builtin_cameras_ = num_cameras;
// Per camera HAL v3 specification SetCallbacks() should be called after the
// first time GetNumberOfCameras() is called, and before other CameraModule
// functions are called.
- camera_module_->SetCallbacks(
- camera_module_callbacks_.BindNewPipeAndPassRemote(),
+ camera_module_->SetCallbacksAssociated(
+ camera_module_callbacks_.BindNewEndpointAndPassRemote(),
base::BindOnce(&CameraHalDelegate::OnSetCallbacksOnIpcThread, this));
camera_module_->GetVendorTagOps(
@@ -516,6 +596,8 @@ void CameraHalDelegate::OnGotNumberOfCamerasOnIpcThread(int32_t num_cameras) {
void CameraHalDelegate::OnSetCallbacksOnIpcThread(int32_t result) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+ base::AutoLock lock(camera_info_lock_);
if (result) {
num_builtin_cameras_ = 0;
builtin_camera_info_updated_.Signal();
@@ -567,6 +649,7 @@ void CameraHalDelegate::OnGotCameraInfoOnIpcThread(
// |camera_info_| might contain some entries for external cameras as well,
// we should check all built-in cameras explicitly.
bool all_updated = [&]() {
+ camera_info_lock_.AssertAcquired();
for (size_t i = 0; i < num_builtin_cameras_; i++) {
if (camera_info_.find(i) == camera_info_.end()) {
return false;
@@ -606,7 +689,8 @@ void CameraHalDelegate::CameraDeviceStatusChange(
int32_t camera_id,
cros::mojom::CameraDeviceStatus new_status) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- VLOG(1) << "camera_id = " << camera_id << ", new_status = " << new_status;
+ CAMERA_LOG(EVENT) << "camera_id = " << camera_id
+ << ", new_status = " << new_status;
base::AutoLock lock(camera_info_lock_);
auto it = camera_info_.find(camera_id);
switch (new_status) {
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.h b/chromium/media/capture/video/chromeos/camera_hal_delegate.h
index aa0dacf2006..2aba005ad11 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.h
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.h
@@ -9,6 +9,7 @@
#include <string>
#include <unordered_map>
+#include "base/containers/flat_map.h"
#include "base/macros.h"
#include "base/sequence_checker.h"
#include "base/single_thread_task_runner.h"
@@ -20,15 +21,16 @@
#include "media/capture/video/chromeos/vendor_tag_ops_delegate.h"
#include "media/capture/video/video_capture_device_factory.h"
#include "media/capture/video_capture_types.h"
+#include "mojo/public/cpp/bindings/associated_receiver.h"
+#include "mojo/public/cpp/bindings/pending_associated_receiver.h"
#include "mojo/public/cpp/bindings/pending_receiver.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
-#include "mojo/public/cpp/bindings/receiver.h"
#include "mojo/public/cpp/bindings/remote.h"
namespace media {
-class CameraAppDeviceBridgeImpl;
class CameraBufferFactory;
+class VideoCaptureDeviceChromeOSDelegate;
// CameraHalDelegate is the component which does Mojo IPCs to the camera HAL
// process on Chrome OS to access the module-level camera functionalities such
@@ -47,7 +49,8 @@ class CAPTURE_EXPORT CameraHalDelegate final
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner);
// Registers the camera client observer to the CameraHalDispatcher instance.
- void RegisterCameraClient();
+ // Returns true if successful, false if failed (e.g., authentication failure).
+ bool RegisterCameraClient();
void SetCameraModule(
mojo::PendingRemote<cros::mojom::CameraModule> camera_module);
@@ -62,8 +65,7 @@ class CAPTURE_EXPORT CameraHalDelegate final
std::unique_ptr<VideoCaptureDevice> CreateDevice(
scoped_refptr<base::SingleThreadTaskRunner>
task_runner_for_screen_observer,
- const VideoCaptureDeviceDescriptor& device_descriptor,
- CameraAppDeviceBridgeImpl* app_device_bridge);
+ const VideoCaptureDeviceDescriptor& device_descriptor);
void GetDevicesInfo(
VideoCaptureDeviceFactory::GetDevicesInfoCallback callback);
@@ -89,14 +91,23 @@ class CAPTURE_EXPORT CameraHalDelegate final
const VendorTagInfo* GetVendorTagInfoByName(const std::string& full_name);
+ void EnableVirtualDevice(const std::string& device_id, bool enable);
+
private:
friend class base::RefCountedThreadSafe<CameraHalDelegate>;
~CameraHalDelegate() final;
- void GetSupportedFormats(int camera_id,
+ void OnRegisteredCameraHalClient(int32_t result);
+
+ void GetSupportedFormats(const cros::mojom::CameraInfoPtr& camera_info,
VideoCaptureFormats* supported_formats);
+ VideoCaptureDeviceChromeOSDelegate* GetVCDDelegate(
+ scoped_refptr<base::SingleThreadTaskRunner>
+ task_runner_for_screen_observer,
+ const VideoCaptureDeviceDescriptor& device_descriptor);
+
void SetCameraModuleOnIpcThread(
mojo::PendingRemote<cros::mojom::CameraModule> camera_module);
@@ -144,6 +155,9 @@ class CAPTURE_EXPORT CameraHalDelegate final
void TorchModeStatusChange(int32_t camera_id,
cros::mojom::TorchModeStatus new_status) final;
+ base::WaitableEvent camera_hal_client_registered_;
+ bool authenticated_;
+
base::WaitableEvent camera_module_has_been_set_;
// Signaled when |num_builtin_cameras_| and |camera_info_| are updated.
@@ -167,15 +181,21 @@ class CAPTURE_EXPORT CameraHalDelegate final
// conditions. For external cameras, the |camera_info_| would be read nad
// updated in CameraDeviceStatusChange, which is also protected by
// |camera_info_lock_|.
- size_t num_builtin_cameras_;
base::Lock camera_info_lock_;
- std::unordered_map<int, cros::mojom::CameraInfoPtr> camera_info_;
+ size_t num_builtin_cameras_ GUARDED_BY(camera_info_lock_);
+ std::unordered_map<int, cros::mojom::CameraInfoPtr> camera_info_
+ GUARDED_BY(camera_info_lock_);
// A map from |VideoCaptureDeviceDescriptor.device_id| to camera id, which is
// updated in GetDeviceDescriptors() and queried in
// GetCameraIdFromDeviceId().
base::Lock device_id_to_camera_id_lock_;
- std::map<std::string, int> device_id_to_camera_id_;
+ std::map<std::string, int> device_id_to_camera_id_
+ GUARDED_BY(device_id_to_camera_id_lock_);
+ // A virtual device is enabled/disabled for camera id.
+ base::Lock enable_virtual_device_lock_;
+ base::flat_map<int, bool> enable_virtual_device_
+ GUARDED_BY(enable_virtual_device_lock_);
SEQUENCE_CHECKER(sequence_checker_);
@@ -190,12 +210,17 @@ class CAPTURE_EXPORT CameraHalDelegate final
// The Mojo receiver serving the camera module callbacks. Bound to
// |ipc_task_runner_|.
- mojo::Receiver<cros::mojom::CameraModuleCallbacks> camera_module_callbacks_;
+ mojo::AssociatedReceiver<cros::mojom::CameraModuleCallbacks>
+ camera_module_callbacks_;
// An internal delegate to handle VendorTagOps mojo connection and query
// information of vendor tags. Bound to |ipc_task_runner_|.
VendorTagOpsDelegate vendor_tag_ops_delegate_;
+ // A map from camera id to corresponding delegate instance.
+ base::flat_map<int, std::unique_ptr<VideoCaptureDeviceChromeOSDelegate>>
+ vcd_delegate_map_;
+
DISALLOW_COPY_AND_ASSIGN(CameraHalDelegate);
};
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
index 89208e5610f..3e1bf9b0d6e 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
@@ -167,8 +167,9 @@ TEST_F(CameraHalDelegateTest, GetBuiltinCameraInfo) {
};
auto set_callbacks_cb =
- [&](mojo::PendingRemote<cros::mojom::CameraModuleCallbacks>& callbacks,
- cros::mojom::CameraModule::SetCallbacksCallback&) {
+ [&](mojo::PendingAssociatedRemote<cros::mojom::CameraModuleCallbacks>&
+ callbacks,
+ cros::mojom::CameraModule::SetCallbacksAssociatedCallback&) {
mock_camera_module_.NotifyCameraDeviceChange(
2, cros::mojom::CameraDeviceStatus::CAMERA_DEVICE_STATUS_PRESENT);
};
@@ -176,10 +177,12 @@ TEST_F(CameraHalDelegateTest, GetBuiltinCameraInfo) {
EXPECT_CALL(mock_camera_module_, DoGetNumberOfCameras(_))
.Times(1)
.WillOnce(Invoke(get_number_of_cameras_cb));
- EXPECT_CALL(mock_camera_module_,
- DoSetCallbacks(
- A<mojo::PendingRemote<cros::mojom::CameraModuleCallbacks>&>(),
- A<cros::mojom::CameraModule::SetCallbacksCallback&>()))
+ EXPECT_CALL(
+ mock_camera_module_,
+ DoSetCallbacksAssociated(
+ A<mojo::PendingAssociatedRemote<
+ cros::mojom::CameraModuleCallbacks>&>(),
+ A<cros::mojom::CameraModule::SetCallbacksAssociatedCallback&>()))
.Times(1)
.WillOnce(Invoke(set_callbacks_cb));
EXPECT_CALL(mock_camera_module_,
@@ -228,7 +231,7 @@ TEST_F(CameraHalDelegateTest, GetBuiltinCameraInfo) {
EXPECT_CALL(mock_gpu_memory_buffer_manager_,
CreateGpuMemoryBuffer(
_, gfx::BufferFormat::YUV_420_BIPLANAR,
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE,
gpu::kNullSurfaceHandle))
.Times(1)
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
deleted file mode 100644
index bf1475aaef8..00000000000
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
+++ /dev/null
@@ -1,146 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_HAL_DISPATCHER_IMPL_H_
-#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_HAL_DISPATCHER_IMPL_H_
-
-#include <memory>
-#include <set>
-
-#include "base/containers/unique_ptr_adapters.h"
-#include "base/files/scoped_file.h"
-#include "base/memory/singleton.h"
-#include "base/threading/thread.h"
-#include "components/chromeos_camera/common/jpeg_encode_accelerator.mojom.h"
-#include "components/chromeos_camera/common/mjpeg_decode_accelerator.mojom.h"
-#include "media/capture/capture_export.h"
-#include "media/capture/video/chromeos/mojom/cros_camera_service.mojom.h"
-#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
-#include "media/capture/video/video_capture_device_factory.h"
-#include "mojo/public/cpp/bindings/pending_receiver.h"
-#include "mojo/public/cpp/bindings/pending_remote.h"
-#include "mojo/public/cpp/bindings/receiver_set.h"
-#include "mojo/public/cpp/bindings/remote.h"
-#include "mojo/public/cpp/platform/platform_channel_server_endpoint.h"
-
-namespace base {
-
-class SingleThreadTaskRunner;
-class WaitableEvent;
-
-} // namespace base
-
-namespace media {
-
-using MojoJpegEncodeAcceleratorFactoryCB = base::RepeatingCallback<void(
- mojo::PendingReceiver<chromeos_camera::mojom::JpegEncodeAccelerator>)>;
-
-class CAPTURE_EXPORT CameraClientObserver {
- public:
- virtual ~CameraClientObserver();
- virtual void OnChannelCreated(
- mojo::PendingRemote<cros::mojom::CameraModule> camera_module) = 0;
-};
-
-// The CameraHalDispatcherImpl hosts and waits on the unix domain socket
-// /var/run/camera3.sock. CameraHalServer and CameraHalClients connect to the
-// unix domain socket to create the initial Mojo connections with the
-// CameraHalDisptcherImpl, and CameraHalDispatcherImpl then creates and
-// dispaches the Mojo channels between CameraHalServer and CameraHalClients to
-// establish direct Mojo connections between the CameraHalServer and the
-// CameraHalClients.
-//
-// For general documentation about the CameraHalDispater Mojo interface see the
-// comments in mojo/cros_camera_service.mojom.
-class CAPTURE_EXPORT CameraHalDispatcherImpl final
- : public cros::mojom::CameraHalDispatcher,
- public base::trace_event::TraceLog::EnabledStateObserver {
- public:
- static CameraHalDispatcherImpl* GetInstance();
-
- bool Start(MojoMjpegDecodeAcceleratorFactoryCB jda_factory,
- MojoJpegEncodeAcceleratorFactoryCB jea_factory);
-
- void AddClientObserver(std::unique_ptr<CameraClientObserver> observer);
-
- bool IsStarted();
-
- // CameraHalDispatcher implementations.
- void RegisterServer(
- mojo::PendingRemote<cros::mojom::CameraHalServer> server) final;
- void RegisterClient(
- mojo::PendingRemote<cros::mojom::CameraHalClient> client) final;
- void GetJpegDecodeAccelerator(
- mojo::PendingReceiver<chromeos_camera::mojom::MjpegDecodeAccelerator>
- jda_receiver) final;
- void GetJpegEncodeAccelerator(
- mojo::PendingReceiver<chromeos_camera::mojom::JpegEncodeAccelerator>
- jea_receiver) final;
-
- // base::trace_event::TraceLog::EnabledStateObserver implementation.
- void OnTraceLogEnabled() final;
- void OnTraceLogDisabled() final;
-
- private:
- friend struct base::DefaultSingletonTraits<CameraHalDispatcherImpl>;
- // Allow the test to construct the class directly.
- friend class CameraHalDispatcherImplTest;
-
- CameraHalDispatcherImpl();
- ~CameraHalDispatcherImpl() final;
-
- bool StartThreads();
-
- // Creates the unix domain socket for the camera client processes and the
- // camera HALv3 adapter process to connect.
- void CreateSocket(base::WaitableEvent* started);
-
- // Waits for incoming connections (from HAL process or from client processes).
- // Runs on |blocking_io_thread_|.
- void StartServiceLoop(base::ScopedFD socket_fd, base::WaitableEvent* started);
-
- void RegisterClientOnProxyThread(
- mojo::PendingRemote<cros::mojom::CameraHalClient> client);
- void AddClientObserverOnProxyThread(
- std::unique_ptr<CameraClientObserver> observer);
-
- void EstablishMojoChannel(CameraClientObserver* client_observer);
-
- // Handler for incoming Mojo connection on the unix domain socket.
- void OnPeerConnected(mojo::ScopedMessagePipeHandle message_pipe);
-
- // Mojo connection error handlers.
- void OnCameraHalServerConnectionError();
- void OnCameraHalClientConnectionError(CameraClientObserver* client);
-
- void StopOnProxyThread();
-
- void OnTraceLogEnabledOnProxyThread();
- void OnTraceLogDisabledOnProxyThread();
-
- base::ScopedFD proxy_fd_;
- base::ScopedFD cancel_pipe_;
-
- base::Thread proxy_thread_;
- base::Thread blocking_io_thread_;
- scoped_refptr<base::SingleThreadTaskRunner> proxy_task_runner_;
- scoped_refptr<base::SingleThreadTaskRunner> blocking_io_task_runner_;
-
- mojo::ReceiverSet<cros::mojom::CameraHalDispatcher> receiver_set_;
-
- mojo::Remote<cros::mojom::CameraHalServer> camera_hal_server_;
-
- std::set<std::unique_ptr<CameraClientObserver>, base::UniquePtrComparator>
- client_observers_;
-
- MojoMjpegDecodeAcceleratorFactoryCB jda_factory_;
-
- MojoJpegEncodeAcceleratorFactoryCB jea_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(CameraHalDispatcherImpl);
-};
-
-} // namespace media
-
-#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_HAL_DISPATCHER_IMPL_H_
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc
deleted file mode 100644
index 72e98ce2417..00000000000
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc
+++ /dev/null
@@ -1,227 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
-
-#include <memory>
-#include <utility>
-
-#include "base/bind.h"
-#include "base/run_loop.h"
-#include "base/single_thread_task_runner.h"
-#include "base/test/task_environment.h"
-#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
-#include "media/capture/video/chromeos/mojom/cros_camera_service.mojom.h"
-#include "mojo/public/cpp/bindings/pending_receiver.h"
-#include "mojo/public/cpp/bindings/pending_remote.h"
-#include "mojo/public/cpp/bindings/receiver.h"
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-using testing::_;
-using testing::InvokeWithoutArgs;
-
-namespace media {
-namespace {
-
-class MockCameraHalServer : public cros::mojom::CameraHalServer {
- public:
- MockCameraHalServer() = default;
-
- ~MockCameraHalServer() = default;
-
- void CreateChannel(mojo::PendingReceiver<cros::mojom::CameraModule>
- camera_module_receiver) override {
- DoCreateChannel(std::move(camera_module_receiver));
- }
- MOCK_METHOD1(DoCreateChannel,
- void(mojo::PendingReceiver<cros::mojom::CameraModule>
- camera_module_receiver));
-
- MOCK_METHOD1(SetTracingEnabled, void(bool enabled));
-
- mojo::PendingRemote<cros::mojom::CameraHalServer> GetPendingRemote() {
- return receiver_.BindNewPipeAndPassRemote();
- }
-
- private:
- mojo::Receiver<cros::mojom::CameraHalServer> receiver_{this};
- DISALLOW_COPY_AND_ASSIGN(MockCameraHalServer);
-};
-
-class MockCameraHalClient : public cros::mojom::CameraHalClient {
- public:
- MockCameraHalClient() = default;
-
- ~MockCameraHalClient() = default;
-
- void SetUpChannel(
- mojo::PendingRemote<cros::mojom::CameraModule> camera_module) override {
- DoSetUpChannel(std::move(camera_module));
- }
- MOCK_METHOD1(
- DoSetUpChannel,
- void(mojo::PendingRemote<cros::mojom::CameraModule> camera_module));
-
- mojo::PendingRemote<cros::mojom::CameraHalClient> GetPendingRemote() {
- return receiver_.BindNewPipeAndPassRemote();
- }
-
- private:
- mojo::Receiver<cros::mojom::CameraHalClient> receiver_{this};
- DISALLOW_COPY_AND_ASSIGN(MockCameraHalClient);
-};
-
-} // namespace
-
-class CameraHalDispatcherImplTest : public ::testing::Test {
- public:
- CameraHalDispatcherImplTest() = default;
-
- ~CameraHalDispatcherImplTest() override = default;
-
- void SetUp() override {
- dispatcher_ = new CameraHalDispatcherImpl();
- EXPECT_TRUE(dispatcher_->StartThreads());
- }
-
- void TearDown() override { delete dispatcher_; }
-
- scoped_refptr<base::SingleThreadTaskRunner> GetProxyTaskRunner() {
- return dispatcher_->proxy_task_runner_;
- }
-
- void DoLoop() {
- run_loop_.reset(new base::RunLoop());
- run_loop_->Run();
- }
-
- void QuitRunLoop() {
- if (run_loop_) {
- run_loop_->Quit();
- }
- }
-
- static void RegisterServer(
- CameraHalDispatcherImpl* dispatcher,
- mojo::PendingRemote<cros::mojom::CameraHalServer> server) {
- dispatcher->RegisterServer(std::move(server));
- }
-
- static void RegisterClient(
- CameraHalDispatcherImpl* dispatcher,
- mojo::PendingRemote<cros::mojom::CameraHalClient> client) {
- dispatcher->RegisterClient(std::move(client));
- }
-
- protected:
- // We can't use std::unique_ptr here because the constructor and destructor of
- // CameraHalDispatcherImpl are private.
- CameraHalDispatcherImpl* dispatcher_;
-
- private:
- base::test::TaskEnvironment task_environment_;
- std::unique_ptr<base::RunLoop> run_loop_;
- DISALLOW_COPY_AND_ASSIGN(CameraHalDispatcherImplTest);
-};
-
-// Test that the CameraHalDisptcherImpl correctly re-establishes a Mojo channel
-// for the client when the server crashes.
-TEST_F(CameraHalDispatcherImplTest, ServerConnectionError) {
- // First verify that a the CameraHalDispatcherImpl establishes a Mojo channel
- // between the server and the client.
- auto mock_server = std::make_unique<MockCameraHalServer>();
- auto mock_client = std::make_unique<MockCameraHalClient>();
-
- EXPECT_CALL(*mock_server, DoCreateChannel(_)).Times(1);
- EXPECT_CALL(*mock_client, DoSetUpChannel(_))
- .Times(1)
- .WillOnce(
- InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
-
- auto server = mock_server->GetPendingRemote();
- GetProxyTaskRunner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraHalDispatcherImplTest::RegisterServer,
- base::Unretained(dispatcher_), std::move(server)));
- auto client = mock_client->GetPendingRemote();
- GetProxyTaskRunner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraHalDispatcherImplTest::RegisterClient,
- base::Unretained(dispatcher_), std::move(client)));
-
- // Wait until the client gets the established Mojo channel.
- DoLoop();
-
- // Re-create a new server to simulate a server crash.
- mock_server = std::make_unique<MockCameraHalServer>();
-
- // Make sure we creates a new Mojo channel from the new server to the same
- // client.
- EXPECT_CALL(*mock_server, DoCreateChannel(_)).Times(1);
- EXPECT_CALL(*mock_client, DoSetUpChannel(_))
- .Times(1)
- .WillOnce(
- InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
-
- server = mock_server->GetPendingRemote();
- GetProxyTaskRunner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraHalDispatcherImplTest::RegisterServer,
- base::Unretained(dispatcher_), std::move(server)));
-
- // Wait until the clients gets the newly established Mojo channel.
- DoLoop();
-}
-
-// Test that the CameraHalDisptcherImpl correctly re-establishes a Mojo channel
-// for the client when the client reconnects after crash.
-TEST_F(CameraHalDispatcherImplTest, ClientConnectionError) {
- // First verify that a the CameraHalDispatcherImpl establishes a Mojo channel
- // between the server and the client.
- auto mock_server = std::make_unique<MockCameraHalServer>();
- auto mock_client = std::make_unique<MockCameraHalClient>();
-
- EXPECT_CALL(*mock_server, DoCreateChannel(_)).Times(1);
- EXPECT_CALL(*mock_client, DoSetUpChannel(_))
- .Times(1)
- .WillOnce(
- InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
-
- auto server = mock_server->GetPendingRemote();
- GetProxyTaskRunner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraHalDispatcherImplTest::RegisterServer,
- base::Unretained(dispatcher_), std::move(server)));
- auto client = mock_client->GetPendingRemote();
- GetProxyTaskRunner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraHalDispatcherImplTest::RegisterClient,
- base::Unretained(dispatcher_), std::move(client)));
-
- // Wait until the client gets the established Mojo channel.
- DoLoop();
-
- // Re-create a new server to simulate a server crash.
- mock_client = std::make_unique<MockCameraHalClient>();
-
- // Make sure we re-create the Mojo channel from the same server to the new
- // client.
- EXPECT_CALL(*mock_server, DoCreateChannel(_)).Times(1);
- EXPECT_CALL(*mock_client, DoSetUpChannel(_))
- .Times(1)
- .WillOnce(
- InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
-
- client = mock_client->GetPendingRemote();
- GetProxyTaskRunner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraHalDispatcherImplTest::RegisterClient,
- base::Unretained(dispatcher_), std::move(client)));
-
- // Wait until the clients gets the newly established Mojo channel.
- DoLoop();
-}
-
-} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_metadata_utils.cc b/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
index c3394d55b45..8b309353e85 100644
--- a/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
+++ b/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
@@ -35,6 +35,10 @@ template <>
const cros::mojom::EntryType entry_type_of<double>::value =
cros::mojom::EntryType::TYPE_DOUBLE;
+template <>
+const cros::mojom::EntryType entry_type_of<Rational>::value =
+ cros::mojom::EntryType::TYPE_RATIONAL;
+
// TODO(shik): support TYPE_RATIONAL
cros::mojom::CameraMetadataEntryPtr* GetMetadataEntry(
@@ -54,7 +58,7 @@ cros::mojom::CameraMetadataEntryPtr* GetMetadataEntry(
}
auto* entry_ptr = &(camera_metadata->entries.value()[(*iter)->index]);
- if (!(*entry_ptr)->data.data()) {
+ if ((*entry_ptr)->data.empty()) {
// Metadata tag found with no valid data.
LOG(WARNING) << "Found tag " << static_cast<int>(tag)
<< " but with invalid data";
@@ -123,6 +127,7 @@ void MergeMetadata(cros::mojom::CameraMetadataPtr* to,
}
for (const auto& entry : from->entries.value()) {
if (tags.find(entry->tag) != tags.end()) {
+ (*to)->entry_count -= 1;
LOG(ERROR) << "Found duplicated entries for tag " << entry->tag;
continue;
}
diff --git a/chromium/media/capture/video/chromeos/camera_metadata_utils.h b/chromium/media/capture/video/chromeos/camera_metadata_utils.h
index ed935bd6ff6..9d209ac8514 100644
--- a/chromium/media/capture/video/chromeos/camera_metadata_utils.h
+++ b/chromium/media/capture/video/chromeos/camera_metadata_utils.h
@@ -12,6 +12,11 @@
namespace media {
+struct Rational {
+ int32_t numerator;
+ int32_t denominator;
+};
+
// Helper traits for converting native types to cros::mojom::EntryType.
template <typename T, typename Enable = void>
struct entry_type_of {
diff --git a/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc b/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc
index 4d8fc096b75..3a3f9a02a60 100644
--- a/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc
+++ b/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc
@@ -31,7 +31,8 @@ bool GpuMemoryBufferTracker::Init(const gfx::Size& dimensions,
const gfx::BufferUsage usage =
*gfx_format == gfx::BufferFormat::R_8
? gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE
- : gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE;
+ : gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE;
+
buffer_ =
buffer_factory_.CreateGpuMemoryBuffer(dimensions, *gfx_format, usage);
if (!buffer_) {
diff --git a/chromium/media/capture/video/chromeos/mock_camera_module.cc b/chromium/media/capture/video/chromeos/mock_camera_module.cc
index c5079eaee5f..120cc6f1613 100644
--- a/chromium/media/capture/video/chromeos/mock_camera_module.cc
+++ b/chromium/media/capture/video/chromeos/mock_camera_module.cc
@@ -42,9 +42,7 @@ void MockCameraModule::GetCameraInfo(int32_t camera_id,
void MockCameraModule::SetCallbacks(
mojo::PendingRemote<cros::mojom::CameraModuleCallbacks> callbacks,
SetCallbacksCallback callback) {
- DoSetCallbacks(callbacks, callback);
- callbacks_.Bind(std::move(callbacks));
- std::move(callback).Run(0);
+ // Method deprecated and not expected to be called.
}
void MockCameraModule::Init(InitCallback callback) {
@@ -66,6 +64,13 @@ void MockCameraModule::GetVendorTagOps(
std::move(callback).Run();
}
+void MockCameraModule::SetCallbacksAssociated(
+ mojo::PendingAssociatedRemote<cros::mojom::CameraModuleCallbacks> callbacks,
+ SetCallbacksAssociatedCallback callback) {
+ DoSetCallbacksAssociated(callbacks, callback);
+ callbacks_.Bind(std::move(callbacks));
+ std::move(callback).Run(0);
+}
void MockCameraModule::NotifyCameraDeviceChange(
int camera_id,
cros::mojom::CameraDeviceStatus status) {
diff --git a/chromium/media/capture/video/chromeos/mock_camera_module.h b/chromium/media/capture/video/chromeos/mock_camera_module.h
index 8479c9ab899..12397b61455 100644
--- a/chromium/media/capture/video/chromeos/mock_camera_module.h
+++ b/chromium/media/capture/video/chromeos/mock_camera_module.h
@@ -11,6 +11,8 @@
#include "base/threading/thread.h"
#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
+#include "mojo/public/cpp/bindings/associated_remote.h"
+#include "mojo/public/cpp/bindings/pending_associated_remote.h"
#include "mojo/public/cpp/bindings/pending_receiver.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/bindings/receiver.h"
@@ -72,6 +74,14 @@ class MockCameraModule : public cros::mojom::CameraModule {
vendor_tag_ops_receiver,
GetVendorTagOpsCallback& callback));
+ void SetCallbacksAssociated(mojo::PendingAssociatedRemote<
+ cros::mojom::CameraModuleCallbacks> callbacks,
+ SetCallbacksAssociatedCallback callback) override;
+ MOCK_METHOD2(DoSetCallbacksAssociated,
+ void(mojo::PendingAssociatedRemote<
+ cros::mojom::CameraModuleCallbacks>& callbacks,
+ SetCallbacksAssociatedCallback& callback));
+
void NotifyCameraDeviceChange(int camera_id,
cros::mojom::CameraDeviceStatus status);
@@ -89,7 +99,7 @@ class MockCameraModule : public cros::mojom::CameraModule {
base::Thread mock_module_thread_;
mojo::Receiver<cros::mojom::CameraModule> receiver_{this};
- mojo::Remote<cros::mojom::CameraModuleCallbacks> callbacks_;
+ mojo::AssociatedRemote<cros::mojom::CameraModuleCallbacks> callbacks_;
DISALLOW_COPY_AND_ASSIGN(MockCameraModule);
};
diff --git a/chromium/media/capture/video/chromeos/mock_video_capture_client.cc b/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
index 9151ac99363..be1371a2e95 100644
--- a/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
+++ b/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
@@ -68,9 +68,8 @@ void MockVideoCaptureClient::OnIncomingCapturedGfxBuffer(
}
void MockVideoCaptureClient::OnIncomingCapturedExternalBuffer(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
base::TimeTicks reference_time,
base::TimeDelta timestamp) {
if (frame_cb_)
diff --git a/chromium/media/capture/video/chromeos/mock_video_capture_client.h b/chromium/media/capture/video/chromeos/mock_video_capture_client.h
index ddb9380aece..3c59b8033fa 100644
--- a/chromium/media/capture/video/chromeos/mock_video_capture_client.h
+++ b/chromium/media/capture/video/chromeos/mock_video_capture_client.h
@@ -55,9 +55,8 @@ class MockVideoCaptureClient : public VideoCaptureDevice::Client {
base::TimeDelta timestamp,
int frame_feedback_id = 0) override;
void OnIncomingCapturedExternalBuffer(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
base::TimeTicks reference_time,
base::TimeDelta timestamp) override;
// Trampoline methods to workaround GMOCK problems with std::unique_ptr<>.
diff --git a/chromium/media/capture/video/chromeos/mojom/camera3.mojom b/chromium/media/capture/video/chromeos/mojom/camera3.mojom
index 5e7e6ded75d..dd99ed86dde 100644
--- a/chromium/media/capture/video/chromeos/mojom/camera3.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/camera3.mojom
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-// Next min version: 3
+// Next min version: 4
module cros.mojom;
@@ -28,10 +28,6 @@ const uint32 GRALLOC_USAGE_FORCE_I420 = 0x10000000;
// into a new HAL request for Zero-Shutter Lag (ZSL). See crrev.com/c/1877636
// for the CL that does the aforementioned things.
const uint32 GRALLOC_USAGE_STILL_CAPTURE = 0x20000000;
-// Flag to indicate ZSL is enabled for this session. Returned in the updated
-// stream configuration returned from configure_streams(). Refer to
-// crrev.com/c/2055927 which returns this flag.
-const uint32 GRALLOC_USAGE_ZERO_SHUTTER_LAG_ENABLED = 0x40000000;
[Extensible]
enum HalPixelFormat {
@@ -102,7 +98,8 @@ enum Camera3BufferStatus {
CAMERA3_BUFFER_STATUS_ERROR = 1,
};
-// Structure that contains needed information about a camera buffer.
+// Structure that contains needed information about a camera buffer that could
+// be used to map in userspace.
struct CameraBufferHandle {
uint64 buffer_id;
array<handle> fds;
@@ -112,6 +109,7 @@ struct CameraBufferHandle {
uint32 height;
array<uint32> strides;
array<uint32> offsets;
+ [MinVersion=3] array<uint32>? sizes;
};
struct Camera3StreamBuffer {
@@ -253,7 +251,7 @@ interface Camera3CallbackOps {
//
// 7. Close() closes the camera device.
//
-// Next method ID: 8
+// Next method ID: 9
interface Camera3DeviceOps {
// Initialize() is called once after the camera device is opened to register
// the Camera3CallbackOps handle.
@@ -309,4 +307,12 @@ interface Camera3DeviceOps {
// Close() is called to close the camera device.
Close@7() => (int32 result);
+
+ // ConfigureStreamsAndGetAllocatedBuffers() is called every time the client
+ // needs to set up new set of streams. Also allocated buffers for clients that
+ // do not have capabilities to allocate DMA-bufs.
+ [MinVersion=3]
+ ConfigureStreamsAndGetAllocatedBuffers@8(Camera3StreamConfiguration config) =>
+ (int32 result, Camera3StreamConfiguration? updated_config,
+ map<uint64, array<Camera3StreamBuffer>> allocated_buffers);
};
diff --git a/chromium/media/capture/video/chromeos/mojom/camera_app.mojom b/chromium/media/capture/video/chromeos/mojom/camera_app.mojom
index 7bc0da4e710..59df9f820bd 100644
--- a/chromium/media/capture/video/chromeos/mojom/camera_app.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/camera_app.mojom
@@ -53,6 +53,11 @@ interface CameraAppDeviceProvider {
// and camera app. Currently only devices running camera HAL v3 support this
// feature.
IsSupported() => (bool is_supported);
+
+ // Add/Remove a virtual device for recording stream according to |enabled|.
+ // The virtual device has the same config as |device_id| except facing
+ // attribute.
+ SetMultipleStreamsEnabled(string device_id, bool enabled) => (bool success);
};
// Inner interface that used to communicate between browser process (Remote) and
@@ -68,6 +73,11 @@ interface CameraAppDeviceBridge {
// and camera app. Currently only devices running camera HAL v3 support this
// feature.
IsSupported() => (bool is_supported);
+
+ // Add/Remove a virtual device for recording stream according to |enabled|.
+ // The virtual device has the same config as |device_id| except facing
+ // attribute.
+ SetMultipleStreamsEnabled(string device_id, bool enabled) => (bool success);
};
// Interface for communication between Chrome Camera App (Remote) and camera
diff --git a/chromium/media/capture/video/chromeos/mojom/camera_common.mojom b/chromium/media/capture/video/chromeos/mojom/camera_common.mojom
index 7c0847267d6..f26bcd95d1b 100644
--- a/chromium/media/capture/video/chromeos/mojom/camera_common.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/camera_common.mojom
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-// Next min version: 3
+// Next min version: 4
module cros.mojom;
@@ -13,6 +13,9 @@ enum CameraFacing {
CAMERA_FACING_BACK = 0,
CAMERA_FACING_FRONT = 1,
CAMERA_FACING_EXTERNAL = 2,
+ CAMERA_FACING_VIRTUAL_BACK = 3,
+ CAMERA_FACING_VIRTUAL_FRONT = 4,
+ CAMERA_FACING_VIRTUAL_EXTERNAL = 5,
};
struct CameraResourceCost {
@@ -101,6 +104,7 @@ interface CameraModule {
// Gets various info about the camera specified by |camera_id|.
GetCameraInfo@2(int32 camera_id) => (int32 result, CameraInfo? camera_info);
+ // [Deprecated in version 3]
// Registers the CameraModuleCallbacks interface with the camera HAL.
SetCallbacks@3(pending_remote<CameraModuleCallbacks> callbacks)
=> (int32 result);
@@ -122,4 +126,11 @@ interface CameraModule {
[MinVersion=2]
GetVendorTagOps@6(pending_receiver<VendorTagOps> vendor_tag_ops_request)
=> ();
+
+ // Registers the CameraModuleCallbacks associated interface with the camera
+ // HAL. TODO(b/169324225): Migrate all camera HAL clients to use this.
+ [MinVersion=3]
+ SetCallbacksAssociated@7(
+ pending_associated_remote<CameraModuleCallbacks> callbacks)
+ => (int32 result);
};
diff --git a/chromium/media/capture/video/chromeos/mojom/cros_camera_service.mojom b/chromium/media/capture/video/chromeos/mojom/cros_camera_service.mojom
index 03f5a00fdac..dec0a5017ff 100644
--- a/chromium/media/capture/video/chromeos/mojom/cros_camera_service.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/cros_camera_service.mojom
@@ -2,13 +2,44 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-// Next min version: 4
+// Next min version: 6
module cros.mojom;
import "components/chromeos_camera/common/jpeg_encode_accelerator.mojom";
import "components/chromeos_camera/common/mjpeg_decode_accelerator.mojom";
import "media/capture/video/chromeos/mojom/camera_common.mojom";
+import "mojo/public/mojom/base/unguessable_token.mojom";
+
+// CameraClientType indicates the type of a CameraHalClient.
+// It should be kept in sync with the ChromeOSCameraClientType enum in
+// tools/metrics/histograms/enums.xml
+[Extensible]
+enum CameraClientType{
+ UNKNOWN = 0,
+ TESTING = 1,
+ CHROME = 2,
+ ANDROID = 3,
+ PLUGINVM = 4,
+ ASH_CHROME = 5,
+ LACROS_CHROME = 6,
+};
+
+// CameraPrivacySwitchState indicates the state of the camera privacy switch.
+enum CameraPrivacySwitchState{
+ // For devices which can only read the privacy switch status while the camera
+ // is streaming, it is possible that the state of privacy switch is currently
+ // unknown.
+ UNKNOWN = 0,
+
+ // State when the privacy switch is on, which means the black frames will be
+ // delivered when streaming.
+ ON = 1,
+
+ // State when the privacy switch is off, which means camera should stream
+ // normally.
+ OFF = 2,
+};
// The CrOS camera HAL v3 Mojo dispatcher. The dispatcher acts as a proxy and
// waits for the server and the clients to register. There can only be one
@@ -17,12 +48,14 @@ import "media/capture/video/chromeos/mojom/camera_common.mojom";
// channel to the server and pass the established Mojo channel to the client in
// order to set up a Mojo channel between the client and the server.
//
-// Next method ID: 4
+// Next method ID: 6
interface CameraHalDispatcher {
+ // [Deprecated in version 4]
// A CameraHalServer calls RegisterServer to register itself with the
// dispatcher.
RegisterServer@0(pending_remote<CameraHalServer> server);
+ // [Deprecated in version 4]
// A CameraHalClient calls RegisterClient to register itself with the
// dispatcher.
RegisterClient@1(pending_remote<CameraHalClient> client);
@@ -35,6 +68,25 @@ interface CameraHalDispatcher {
// Get JpegEncodeAccelerator from dispatcher.
[MinVersion=2] GetJpegEncodeAccelerator@3(
pending_receiver<chromeos_camera.mojom.JpegEncodeAccelerator> jea_receiver);
+
+ // A CameraHalServer calls RegisterServerWithToken to register itself with the
+ // dispatcher. CameraHalDispatcher would authenticate the server with the
+ // supplied |auth_token|. |callbacks| is fired by CameraHalServer to notify
+ // CameraHalDispatcher about CameraHalClient updates, for example when a
+ // CameraHalClient opens or closes a camera device.
+ [MinVersion=4] RegisterServerWithToken@4(
+ pending_remote<CameraHalServer> server,
+ mojo_base.mojom.UnguessableToken auth_token) =>
+ (int32 result, pending_remote<CameraHalServerCallbacks> callbacks);
+
+ // A CameraHalClient calls RegisterClient to register itself with the
+ // dispatcher. CameraHalDispatcher would authenticate the client with the
+ // given |type| and |auth_token|.
+ [MinVersion=4] RegisterClientWithToken@5(
+ pending_remote<CameraHalClient> client,
+ CameraClientType type,
+ mojo_base.mojom.UnguessableToken auth_token) => (int32 result);
+
};
// The CrOS camera HAL v3 Mojo server.
@@ -45,13 +97,34 @@ interface CameraHalServer {
// HAL v3 adapter. Upon successfully binding of |camera_module_request|, the
// caller will have a established Mojo channel to the camera HAL v3 adapter
// process.
- CreateChannel@0(pending_receiver<CameraModule> camera_module_request);
+ CreateChannel@0(pending_receiver<CameraModule> camera_module_request,
+ [MinVersion=4] CameraClientType type);
// Enable or disable tracing.
[MinVersion=3]
SetTracingEnabled@1(bool enabled);
};
+// CameraHalServerCallbacks is an interface for CameraHalServer to notify
+// CameraHalDispatcher for any changes on the server side, for example when a
+// CameraHalClient opens or closes a camera device.
+//
+// Next method ID: 2
+interface CameraHalServerCallbacks {
+ // Fired when a CameraHalClient opens or closes a camera device. When a
+ // CameraHalClient loses mojo connection to CameraHalServer, CameraHalServer
+ // would also use this to notify that cameras are closed (not being used).
+ CameraDeviceActivityChange@0(int32 camera_id,
+ bool opened,
+ CameraClientType type);
+
+ // Fired when the camera privacy switch status is changed. If the device has
+ // such switch, this callback will be fired immediately for once to notify its
+ // current status when the callbacks are registered.
+ [MinVersion=5]
+ CameraPrivacySwitchStateChange@1(CameraPrivacySwitchState state);
+};
+
// The CrOS camera HAL v3 Mojo client.
//
// Next method ID: 1
diff --git a/chromium/media/capture/video/chromeos/request_manager.cc b/chromium/media/capture/video/chromeos/request_manager.cc
index 7dedadf09b3..8944ef12a3a 100644
--- a/chromium/media/capture/video/chromeos/request_manager.cc
+++ b/chromium/media/capture/video/chromeos/request_manager.cc
@@ -17,6 +17,7 @@
#include "base/posix/safe_strerror.h"
#include "base/strings/string_number_conversions.h"
#include "base/trace_event/trace_event.h"
+#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
#include "media/capture/video/chromeos/video_capture_features_chromeos.h"
@@ -34,6 +35,7 @@ constexpr std::initializer_list<StreamType> kYUVReprocessStreams = {
} // namespace
RequestManager::RequestManager(
+ const std::string& device_id,
mojo::PendingReceiver<cros::mojom::Camera3CallbackOps>
callback_ops_receiver,
std::unique_ptr<StreamCaptureInterface> capture_interface,
@@ -41,10 +43,9 @@ RequestManager::RequestManager(
VideoCaptureBufferType buffer_type,
std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
BlobifyCallback blobify_callback,
- scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
- CameraAppDeviceImpl* camera_app_device,
- ClientType client_type)
- : callback_ops_(this, std::move(callback_ops_receiver)),
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
+ : device_id_(device_id),
+ callback_ops_(this, std::move(callback_ops_receiver)),
capture_interface_(std::move(capture_interface)),
device_context_(device_context),
video_capture_use_gmb_(buffer_type ==
@@ -52,15 +53,12 @@ RequestManager::RequestManager(
stream_buffer_manager_(
new StreamBufferManager(device_context_,
video_capture_use_gmb_,
- std::move(camera_buffer_factory),
- client_type)),
+ std::move(camera_buffer_factory))),
blobify_callback_(std::move(blobify_callback)),
ipc_task_runner_(std::move(ipc_task_runner)),
capturing_(false),
partial_result_count_(1),
- first_frame_shutter_time_(base::TimeTicks()),
- camera_app_device_(std::move(camera_app_device)),
- client_type_(client_type) {
+ first_frame_shutter_time_(base::TimeTicks()) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK(callback_ops_.is_bound());
DCHECK(device_context_);
@@ -80,9 +78,19 @@ RequestManager::RequestManager(
RequestManager::~RequestManager() = default;
void RequestManager::SetUpStreamsAndBuffers(
- VideoCaptureFormat capture_format,
+ base::flat_map<ClientType, VideoCaptureParams> capture_params,
const cros::mojom::CameraMetadataPtr& static_metadata,
std::vector<cros::mojom::Camera3StreamPtr> streams) {
+ auto request_keys = GetMetadataEntryAsSpan<int32_t>(
+ static_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+ zero_shutter_lag_supported_ = base::Contains(
+ request_keys,
+ static_cast<int32_t>(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_ENABLE_ZSL));
+ VLOG(1) << "Zero-shutter lag is "
+ << (zero_shutter_lag_supported_ ? "" : "not ") << "supported";
+
// The partial result count metadata is optional; defaults to 1 in case it
// is not set in the static metadata.
const cros::mojom::CameraMetadataEntryPtr* partial_count = GetMetadataEntry(
@@ -107,7 +115,7 @@ void RequestManager::SetUpStreamsAndBuffers(
}
stream_buffer_manager_->SetUpStreamsAndBuffers(
- capture_format, static_metadata, std::move(streams));
+ capture_params, static_metadata, std::move(streams));
}
cros::mojom::Camera3StreamPtr RequestManager::GetStreamConfiguration(
@@ -269,11 +277,13 @@ void RequestManager::PrepareCaptureRequest() {
// 2. Capture (YuvOutput)
// 3. Preview + Capture (YuvOutput)
// 4. Reprocess (YuvInput + BlobOutput)
+ // 5. Preview + Recording (YuvOutput)
//
// For device without reprocess capability:
// 1. Preview
// 2. Capture (BlobOutput)
// 3. Preview + Capture (BlobOutput)
+ // 4. Preview + Recording (YuvOutput)
std::set<StreamType> stream_types;
cros::mojom::CameraMetadataPtr settings;
TakePhotoCallback callback = base::NullCallback();
@@ -283,6 +293,7 @@ void RequestManager::PrepareCaptureRequest() {
bool is_reprocess_request = false;
bool is_preview_request = false;
bool is_oneshot_request = false;
+ bool is_recording_request = false;
// First, check if there are pending reprocess tasks.
is_reprocess_request = TryPrepareReprocessRequest(
@@ -291,16 +302,32 @@ void RequestManager::PrepareCaptureRequest() {
// If there is no pending reprocess task, then check if there are pending
// one-shot requests. And also try to put preview in the request.
if (!is_reprocess_request) {
- is_preview_request = TryPreparePreviewRequest(&stream_types, &settings);
+ if (!zero_shutter_lag_supported_) {
+ is_preview_request = TryPreparePreviewRequest(&stream_types, &settings);
+
+ // Order matters here. If the preview request and oneshot request are both
+ // added in single capture request, the settings will be overridden by the
+ // later.
+ is_oneshot_request =
+ TryPrepareOneShotRequest(&stream_types, &settings, &callback);
+ } else {
+ // Zero-shutter lag could potentially give a frame from the past. Don't
+ // prepare a preview request when a one shot request has been prepared.
+ is_oneshot_request =
+ TryPrepareOneShotRequest(&stream_types, &settings, &callback);
+
+ if (!is_oneshot_request) {
+ is_preview_request = TryPreparePreviewRequest(&stream_types, &settings);
+ }
+ }
+ }
- // Order matters here. If the preview request and oneshot request are both
- // added in single capture request, the settings will be overridden by the
- // later.
- is_oneshot_request =
- TryPrepareOneShotRequest(&stream_types, &settings, &callback);
+ if (is_preview_request) {
+ is_recording_request = TryPrepareRecordingRequest(&stream_types);
}
- if (!is_reprocess_request && !is_oneshot_request && !is_preview_request) {
+ if (!is_reprocess_request && !is_oneshot_request && !is_preview_request &&
+ !is_recording_request) {
// We have to keep the pipeline full.
if (preview_buffers_queued_ < pipeline_depth_) {
ipc_task_runner_->PostTask(
@@ -450,6 +477,17 @@ bool RequestManager::TryPrepareOneShotRequest(
return true;
}
+bool RequestManager::TryPrepareRecordingRequest(
+ std::set<StreamType>* stream_types) {
+ if (!stream_buffer_manager_->IsRecordingSupported() ||
+ !stream_buffer_manager_->HasFreeBuffers({StreamType::kRecordingOutput})) {
+ return false;
+ }
+
+ stream_types->insert({StreamType::kRecordingOutput});
+ return true;
+}
+
void RequestManager::OnProcessedCaptureRequest(int32_t result) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
@@ -664,8 +702,12 @@ void RequestManager::Notify(cros::mojom::Camera3NotifyMsgPtr message) {
first_frame_shutter_time_ = reference_time;
}
pending_result.timestamp = reference_time - first_frame_shutter_time_;
- if (camera_app_device_ && pending_result.still_capture_callback) {
- camera_app_device_->OnShutterDone();
+
+ auto camera_app_device =
+ CameraAppDeviceBridgeImpl::GetInstance()->GetWeakCameraAppDevice(
+ device_id_);
+ if (camera_app_device && pending_result.still_capture_callback) {
+ camera_app_device->OnShutterDone();
}
TrySubmitPendingBuffers(frame_number);
@@ -759,8 +801,11 @@ void RequestManager::SubmitCaptureResult(
observer->OnResultMetadataAvailable(frame_number, pending_result.metadata);
}
- if (camera_app_device_) {
- camera_app_device_->OnResultMetadataAvailable(
+ auto camera_app_device =
+ CameraAppDeviceBridgeImpl::GetInstance()->GetWeakCameraAppDevice(
+ device_id_);
+ if (camera_app_device) {
+ camera_app_device->OnResultMetadataAvailable(
pending_result.metadata,
static_cast<cros::mojom::StreamType>(stream_type));
}
@@ -790,8 +835,10 @@ void RequestManager::SubmitCaptureResult(
// Deliver the captured data to client.
if (stream_buffer->status ==
cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_OK) {
- if (stream_type == StreamType::kPreviewOutput) {
- SubmitCapturedPreviewBuffer(frame_number, buffer_ipc_id);
+ if (stream_type == StreamType::kPreviewOutput ||
+ stream_type == StreamType::kRecordingOutput) {
+ SubmitCapturedPreviewRecordingBuffer(frame_number, buffer_ipc_id,
+ stream_type);
} else if (stream_type == StreamType::kJpegOutput) {
SubmitCapturedJpegBuffer(frame_number, buffer_ipc_id);
} else if (stream_type == StreamType::kYUVOutput) {
@@ -827,14 +874,17 @@ void RequestManager::SubmitCaptureResult(
PrepareCaptureRequest();
}
-void RequestManager::SubmitCapturedPreviewBuffer(uint32_t frame_number,
- uint64_t buffer_ipc_id) {
+void RequestManager::SubmitCapturedPreviewRecordingBuffer(
+ uint32_t frame_number,
+ uint64_t buffer_ipc_id,
+ StreamType stream_type) {
const CaptureResult& pending_result = pending_results_[frame_number];
+ auto client_type = kStreamClientTypeMap[static_cast<int>(stream_type)];
if (video_capture_use_gmb_) {
VideoCaptureFormat format;
base::Optional<VideoCaptureDevice::Client::Buffer> buffer =
stream_buffer_manager_->AcquireBufferForClientById(
- StreamType::kPreviewOutput, buffer_ipc_id, &format);
+ stream_type, buffer_ipc_id, &format);
CHECK(buffer);
// TODO: Figure out the right color space for the camera frame. We may need
@@ -849,39 +899,38 @@ void RequestManager::SubmitCapturedPreviewBuffer(uint32_t frame_number,
auto translate_rotation = [](const int rotation) -> VideoRotation {
switch (rotation) {
case 0:
- return VideoRotation::VIDEO_ROTATION_0;
+ return VIDEO_ROTATION_0;
case 90:
- return VideoRotation::VIDEO_ROTATION_90;
+ return VIDEO_ROTATION_90;
case 180:
- return VideoRotation::VIDEO_ROTATION_180;
+ return VIDEO_ROTATION_180;
case 270:
- return VideoRotation::VIDEO_ROTATION_270;
+ return VIDEO_ROTATION_270;
}
- return VideoRotation::VIDEO_ROTATION_0;
+ return VIDEO_ROTATION_0;
};
- metadata.rotation =
+ metadata.transformation =
translate_rotation(device_context_->GetRotationForDisplay());
} else {
// All frames are pre-rotated to the display orientation.
- metadata.rotation = VideoRotation::VIDEO_ROTATION_0;
+ metadata.transformation = VIDEO_ROTATION_0;
}
device_context_->SubmitCapturedVideoCaptureBuffer(
- client_type_, std::move(*buffer), format, pending_result.reference_time,
+ client_type, std::move(*buffer), format, pending_result.reference_time,
pending_result.timestamp, metadata);
// |buffer| ownership is transferred to client, so we need to reserve a
// new video buffer.
- stream_buffer_manager_->ReserveBuffer(StreamType::kPreviewOutput);
+ stream_buffer_manager_->ReserveBuffer(stream_type);
} else {
gfx::GpuMemoryBuffer* gmb = stream_buffer_manager_->GetGpuMemoryBufferById(
- StreamType::kPreviewOutput, buffer_ipc_id);
+ stream_type, buffer_ipc_id);
CHECK(gmb);
device_context_->SubmitCapturedGpuMemoryBuffer(
- client_type_, gmb,
- stream_buffer_manager_->GetStreamCaptureFormat(
- StreamType::kPreviewOutput),
+ client_type, gmb,
+ stream_buffer_manager_->GetStreamCaptureFormat(stream_type),
pending_result.reference_time, pending_result.timestamp);
- stream_buffer_manager_->ReleaseBufferFromCaptureResult(
- StreamType::kPreviewOutput, buffer_ipc_id);
+ stream_buffer_manager_->ReleaseBufferFromCaptureResult(stream_type,
+ buffer_ipc_id);
}
}
diff --git a/chromium/media/capture/video/chromeos/request_manager.h b/chromium/media/capture/video/chromeos/request_manager.h
index f5d4fdad821..8739ceab41c 100644
--- a/chromium/media/capture/video/chromeos/request_manager.h
+++ b/chromium/media/capture/video/chromeos/request_manager.h
@@ -12,6 +12,7 @@
#include <set>
#include <vector>
+#include "base/containers/flat_map.h"
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
#include "media/capture/mojom/image_capture.mojom.h"
@@ -45,7 +46,6 @@ constexpr int32_t kMinConfiguredStreams = 1;
// Maximum configured streams could contain two optional YUV streams.
constexpr int32_t kMaxConfiguredStreams = 4;
-
// RequestManager is responsible for managing the flow for sending capture
// requests and receiving capture results. Having RequestBuilder to build
// request and StreamBufferManager to handles stream buffers, it focuses on
@@ -99,22 +99,21 @@ class CAPTURE_EXPORT RequestManager final
int32_t orientation;
};
- RequestManager(mojo::PendingReceiver<cros::mojom::Camera3CallbackOps>
+ RequestManager(const std::string& device_id,
+ mojo::PendingReceiver<cros::mojom::Camera3CallbackOps>
callback_ops_receiver,
std::unique_ptr<StreamCaptureInterface> capture_interface,
CameraDeviceContext* device_context,
VideoCaptureBufferType buffer_type,
std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
BlobifyCallback blobify_callback,
- scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
- CameraAppDeviceImpl* camera_app_device,
- ClientType client_type);
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner);
~RequestManager() override;
// Sets up the stream context and allocate buffers according to the
// configuration specified in |streams|.
void SetUpStreamsAndBuffers(
- VideoCaptureFormat capture_format,
+ base::flat_map<ClientType, VideoCaptureParams> capture_params,
const cros::mojom::CameraMetadataPtr& static_metadata,
std::vector<cros::mojom::Camera3StreamPtr> streams);
@@ -227,6 +226,8 @@ class CAPTURE_EXPORT RequestManager final
cros::mojom::CameraMetadataPtr* settings,
TakePhotoCallback* callback);
+ bool TryPrepareRecordingRequest(std::set<StreamType>* stream_types);
+
// Callback for ProcessCaptureRequest().
void OnProcessedCaptureRequest(int32_t result);
@@ -257,20 +258,26 @@ class CAPTURE_EXPORT RequestManager final
void SubmitCaptureResult(uint32_t frame_number,
StreamType stream_type,
cros::mojom::Camera3StreamBufferPtr stream_buffer);
- void SubmitCapturedPreviewBuffer(uint32_t frame_number,
- uint64_t buffer_ipc_id);
+ void SubmitCapturedPreviewRecordingBuffer(uint32_t frame_number,
+ uint64_t buffer_ipc_id,
+ StreamType stream_type);
void SubmitCapturedJpegBuffer(uint32_t frame_number, uint64_t buffer_ipc_id);
// If there are some metadata set by SetCaptureMetadata() or
// SetRepeatingCaptureMetadata(), update them onto |capture_settings|.
void UpdateCaptureSettings(cros::mojom::CameraMetadataPtr* capture_settings);
+ // The unique device id which is retrieved from VideoCaptureDeviceDescriptor.
+ std::string device_id_;
+
mojo::Receiver<cros::mojom::Camera3CallbackOps> callback_ops_;
std::unique_ptr<StreamCaptureInterface> capture_interface_;
CameraDeviceContext* device_context_;
+ bool zero_shutter_lag_supported_;
+
bool video_capture_use_gmb_;
// StreamBufferManager should be declared before RequestBuilder since
@@ -362,9 +369,7 @@ class CAPTURE_EXPORT RequestManager final
// duplicate or out of order of frames.
std::map<StreamType, uint32_t> last_received_frame_number_map_;
- CameraAppDeviceImpl* camera_app_device_; // Weak.
-
- ClientType client_type_;
+ base::WeakPtr<CameraAppDeviceImpl> camera_app_device_;
base::WeakPtrFactory<RequestManager> weak_ptr_factory_{this};
diff --git a/chromium/media/capture/video/chromeos/request_manager_unittest.cc b/chromium/media/capture/video/chromeos/request_manager_unittest.cc
index 6227b6f3e43..90b9e83aafe 100644
--- a/chromium/media/capture/video/chromeos/request_manager_unittest.cc
+++ b/chromium/media/capture/video/chromeos/request_manager_unittest.cc
@@ -87,12 +87,16 @@ class RequestManagerTest : public ::testing::Test {
void SetUp() override {
quit_ = false;
client_type_ = ClientType::kPreviewClient;
+ VideoCaptureParams params;
+ params.requested_format = kDefaultCaptureFormat;
+ capture_params_[client_type_] = params;
device_context_ = std::make_unique<CameraDeviceContext>();
if (device_context_->AddClient(
client_type_,
std::make_unique<unittest_internal::MockVideoCaptureClient>())) {
+ std::string fake_device_id = "0";
request_manager_ = std::make_unique<RequestManager>(
- mock_callback_ops_.BindNewPipeAndPassReceiver(),
+ fake_device_id, mock_callback_ops_.BindNewPipeAndPassReceiver(),
std::make_unique<MockStreamCaptureInterface>(), device_context_.get(),
VideoCaptureBufferType::kSharedMemory,
std::make_unique<FakeCameraBufferFactory>(),
@@ -100,7 +104,7 @@ class RequestManagerTest : public ::testing::Test {
[](const uint8_t* buffer, const uint32_t bytesused,
const VideoCaptureFormat& capture_format,
const int rotation) { return mojom::Blob::New(); }),
- base::ThreadTaskRunnerHandle::Get(), nullptr, client_type_);
+ base::ThreadTaskRunnerHandle::Get());
}
}
@@ -284,6 +288,7 @@ class RequestManagerTest : public ::testing::Test {
mojo::Remote<cros::mojom::Camera3CallbackOps> mock_callback_ops_;
std::unique_ptr<CameraDeviceContext> device_context_;
ClientType client_type_;
+ base::flat_map<ClientType, VideoCaptureParams> capture_params_;
private:
std::unique_ptr<base::RunLoop> run_loop_;
@@ -300,8 +305,7 @@ TEST_F(RequestManagerTest, SimpleCaptureTest) {
.WillRepeatedly(Invoke(this, &RequestManagerTest::ProcessCaptureRequest));
request_manager_->SetUpStreamsAndBuffers(
- kDefaultCaptureFormat,
- GetFakeStaticMetadata(/* partial_result_count */ 1),
+ capture_params_, GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
request_manager_->StartPreview(cros::mojom::CameraMetadata::New());
@@ -343,8 +347,7 @@ TEST_F(RequestManagerTest, PartialResultTest) {
}));
request_manager_->SetUpStreamsAndBuffers(
- kDefaultCaptureFormat,
- GetFakeStaticMetadata(/* partial_result_count */ 3),
+ capture_params_, GetFakeStaticMetadata(/* partial_result_count */ 3),
PrepareCaptureStream(/* max_buffers */ 1));
request_manager_->StartPreview(cros::mojom::CameraMetadata::New());
@@ -375,8 +378,7 @@ TEST_F(RequestManagerTest, DeviceErrorTest) {
}));
request_manager_->SetUpStreamsAndBuffers(
- kDefaultCaptureFormat,
- GetFakeStaticMetadata(/* partial_result_count */ 1),
+ capture_params_, GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
request_manager_->StartPreview(cros::mojom::CameraMetadata::New());
@@ -415,8 +417,7 @@ TEST_F(RequestManagerTest, RequestErrorTest) {
.WillRepeatedly(Invoke(this, &RequestManagerTest::ProcessCaptureRequest));
request_manager_->SetUpStreamsAndBuffers(
- kDefaultCaptureFormat,
- GetFakeStaticMetadata(/* partial_result_count */ 1),
+ capture_params_, GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
request_manager_->StartPreview(cros::mojom::CameraMetadata::New());
@@ -456,8 +457,7 @@ TEST_F(RequestManagerTest, ResultErrorTest) {
.WillRepeatedly(Invoke(this, &RequestManagerTest::ProcessCaptureRequest));
request_manager_->SetUpStreamsAndBuffers(
- kDefaultCaptureFormat,
- GetFakeStaticMetadata(/* partial_result_count */ 2),
+ capture_params_, GetFakeStaticMetadata(/* partial_result_count */ 2),
PrepareCaptureStream(/* max_buffers */ 1));
request_manager_->StartPreview(cros::mojom::CameraMetadata::New());
@@ -499,8 +499,7 @@ TEST_F(RequestManagerTest, BufferErrorTest) {
.WillRepeatedly(Invoke(this, &RequestManagerTest::ProcessCaptureRequest));
request_manager_->SetUpStreamsAndBuffers(
- kDefaultCaptureFormat,
- GetFakeStaticMetadata(/* partial_result_count */ 1),
+ capture_params_, GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
request_manager_->StartPreview(cros::mojom::CameraMetadata::New());
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
index 75bd224507a..4e3f277f890 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
@@ -27,12 +27,10 @@ namespace media {
StreamBufferManager::StreamBufferManager(
CameraDeviceContext* device_context,
bool video_capture_use_gmb,
- std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
- ClientType client_type)
+ std::unique_ptr<CameraBufferFactory> camera_buffer_factory)
: device_context_(device_context),
video_capture_use_gmb_(video_capture_use_gmb),
- camera_buffer_factory_(std::move(camera_buffer_factory)),
- client_type_(client_type) {
+ camera_buffer_factory_(std::move(camera_buffer_factory)) {
if (video_capture_use_gmb_) {
gmb_support_ = std::make_unique<gpu::GpuMemoryBufferSupport>();
}
@@ -155,8 +153,9 @@ StreamBufferManager::AcquireBufferForClientById(StreamType stream_type,
} else {
// We have to reserve a new buffer because the size is different.
Buffer rotated_buffer;
+ auto client_type = kStreamClientTypeMap[static_cast<int>(stream_type)];
if (!device_context_->ReserveVideoCaptureBufferFromPool(
- client_type_, format->frame_size, format->pixel_format,
+ client_type, format->frame_size, format->pixel_format,
&rotated_buffer)) {
DLOG(WARNING) << "Failed to reserve video capture buffer";
original_gmb->Unmap();
@@ -221,7 +220,7 @@ bool StreamBufferManager::HasStreamsConfigured(
}
void StreamBufferManager::SetUpStreamsAndBuffers(
- VideoCaptureFormat capture_format,
+ base::flat_map<ClientType, VideoCaptureParams> capture_params,
const cros::mojom::CameraMetadataPtr& static_metadata,
std::vector<cros::mojom::Camera3StreamPtr> streams) {
DestroyCurrentStreamsAndBuffers();
@@ -249,15 +248,18 @@ void StreamBufferManager::SetUpStreamsAndBuffers(
// flags of the stream.
StreamType stream_type = StreamIdToStreamType(stream->id);
auto stream_context = std::make_unique<StreamContext>();
- stream_context->capture_format = capture_format;
+ auto client_type = kStreamClientTypeMap[static_cast<int>(stream_type)];
+ stream_context->capture_format =
+ capture_params[client_type].requested_format;
stream_context->stream = std::move(stream);
switch (stream_type) {
case StreamType::kPreviewOutput:
+ case StreamType::kRecordingOutput:
stream_context->buffer_dimension = gfx::Size(
stream_context->stream->width, stream_context->stream->height);
stream_context->buffer_usage =
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE;
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE;
break;
case StreamType::kYUVInput:
case StreamType::kYUVOutput:
@@ -378,6 +380,11 @@ bool StreamBufferManager::IsReprocessSupported() {
return stream_context_.find(StreamType::kYUVOutput) != stream_context_.end();
}
+bool StreamBufferManager::IsRecordingSupported() {
+ return stream_context_.find(StreamType::kRecordingOutput) !=
+ stream_context_.end();
+}
+
// static
uint64_t StreamBufferManager::GetBufferIpcId(StreamType stream_type, int key) {
uint64_t id = 0;
@@ -441,8 +448,9 @@ void StreamBufferManager::ReserveBufferFromPool(StreamType stream_type) {
return;
}
Buffer vcd_buffer;
+ auto client_type = kStreamClientTypeMap[static_cast<int>(stream_type)];
if (!device_context_->ReserveVideoCaptureBufferFromPool(
- client_type_, stream_context->buffer_dimension,
+ client_type, stream_context->buffer_dimension,
stream_context->capture_format.pixel_format, &vcd_buffer)) {
DLOG(WARNING) << "Failed to reserve video capture buffer";
return;
@@ -452,8 +460,9 @@ void StreamBufferManager::ReserveBufferFromPool(StreamType stream_type) {
stream_context->buffer_dimension, *gfx_format,
stream_context->buffer_usage, base::NullCallback());
stream_context->free_buffers.push(vcd_buffer.id);
- stream_context->buffers.insert(std::make_pair(
- vcd_buffer.id, BufferPair(std::move(gmb), std::move(vcd_buffer))));
+ const int id = vcd_buffer.id;
+ stream_context->buffers.insert(
+ std::make_pair(id, BufferPair(std::move(gmb), std::move(vcd_buffer))));
}
void StreamBufferManager::DestroyCurrentStreamsAndBuffers() {
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.h b/chromium/media/capture/video/chromeos/stream_buffer_manager.h
index 50ed6a352b1..ba62fbe89f4 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.h
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.h
@@ -14,6 +14,7 @@
#include <unordered_map>
#include <vector>
+#include "base/containers/flat_map.h"
#include "base/containers/queue.h"
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
@@ -51,8 +52,7 @@ class CAPTURE_EXPORT StreamBufferManager final {
StreamBufferManager(
CameraDeviceContext* device_context,
bool video_capture_use_gmb,
- std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
- ClientType client_type);
+ std::unique_ptr<CameraBufferFactory> camera_buffer_factory);
~StreamBufferManager();
void ReserveBuffer(StreamType stream_type);
@@ -84,7 +84,7 @@ class CAPTURE_EXPORT StreamBufferManager final {
// Sets up the stream context and allocate buffers according to the
// configuration specified in |stream|.
void SetUpStreamsAndBuffers(
- VideoCaptureFormat capture_format,
+ base::flat_map<ClientType, VideoCaptureParams> capture_params,
const cros::mojom::CameraMetadataPtr& static_metadata,
std::vector<cros::mojom::Camera3StreamPtr> streams);
@@ -105,6 +105,8 @@ class CAPTURE_EXPORT StreamBufferManager final {
bool IsReprocessSupported();
+ bool IsRecordingSupported();
+
private:
friend class RequestManagerTest;
@@ -164,8 +166,6 @@ class CAPTURE_EXPORT StreamBufferManager final {
std::unique_ptr<CameraBufferFactory> camera_buffer_factory_;
- ClientType client_type_;
-
base::WeakPtrFactory<StreamBufferManager> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(StreamBufferManager);
diff --git a/chromium/media/capture/video/chromeos/token_manager.cc b/chromium/media/capture/video/chromeos/token_manager.cc
new file mode 100644
index 00000000000..1f5789c6aaa
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/token_manager.cc
@@ -0,0 +1,157 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/token_manager.h"
+
+#include <grp.h>
+#include <sys/types.h>
+#include <unistd.h>
+#include <string>
+
+#include <base/files/file_path.h>
+#include <base/files/file_util.h>
+#include <base/strings/string_number_conversions.h>
+#include <base/strings/string_util.h>
+
+namespace {
+
+gid_t GetArcCameraGid() {
+ auto* group = getgrnam("arc-camera");
+ return group != nullptr ? group->gr_gid : 0;
+}
+
+bool EnsureTokenDirectoryExists(const base::FilePath& token_path) {
+ static const gid_t gid = GetArcCameraGid();
+ if (gid == 0) {
+ LOG(ERROR) << "Failed to query the GID of arc-camera";
+ return false;
+ }
+
+ base::FilePath dir_name = token_path.DirName();
+ if (!base::CreateDirectory(dir_name) ||
+ !base::SetPosixFilePermissions(dir_name, 0770)) {
+ LOG(ERROR) << "Failed to create token directory at "
+ << token_path.AsUTF8Unsafe();
+ return false;
+ }
+
+ if (chown(dir_name.AsUTF8Unsafe().c_str(), -1, gid) != 0) {
+ LOG(ERROR) << "Failed to chown token directory to arc-camera";
+ return false;
+ }
+ return true;
+}
+
+bool WriteTokenToFile(const base::FilePath& token_path,
+ const base::UnguessableToken& token) {
+ if (!EnsureTokenDirectoryExists(token_path)) {
+ LOG(ERROR) << "Failed to ensure token directory exists";
+ return false;
+ }
+ base::File token_file(
+ token_path, base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
+ if (!token_file.IsValid()) {
+ LOG(ERROR) << "Failed to create token file at "
+ << token_path.AsUTF8Unsafe();
+ return false;
+ }
+ std::string token_string = token.ToString();
+ token_file.WriteAtCurrentPos(token_string.c_str(), token_string.length());
+ return true;
+}
+
+} // namespace
+
+namespace media {
+
+constexpr char TokenManager::kServerTokenPath[];
+constexpr char TokenManager::kTestClientTokenPath[];
+constexpr std::array<cros::mojom::CameraClientType, 3>
+ TokenManager::kTrustedClientTypes;
+
+TokenManager::TokenManager() = default;
+TokenManager::~TokenManager() = default;
+
+bool TokenManager::GenerateServerToken() {
+ server_token_ = base::UnguessableToken::Create();
+ return WriteTokenToFile(base::FilePath(kServerTokenPath), server_token_);
+}
+
+bool TokenManager::GenerateTestClientToken() {
+ return WriteTokenToFile(
+ base::FilePath(kTestClientTokenPath),
+ GetTokenForTrustedClient(cros::mojom::CameraClientType::TESTING));
+}
+
+base::UnguessableToken TokenManager::GetTokenForTrustedClient(
+ cros::mojom::CameraClientType type) {
+ base::AutoLock l(client_token_map_lock_);
+ if (std::find(kTrustedClientTypes.begin(), kTrustedClientTypes.end(), type) ==
+ kTrustedClientTypes.end()) {
+ return base::UnguessableToken();
+ }
+ auto& token_set = client_token_map_[type];
+ if (token_set.empty()) {
+ token_set.insert(base::UnguessableToken::Create());
+ }
+ return *token_set.begin();
+}
+
+void TokenManager::RegisterPluginVmToken(const base::UnguessableToken& token) {
+ base::AutoLock l(client_token_map_lock_);
+ auto result =
+ client_token_map_[cros::mojom::CameraClientType::PLUGINVM].insert(token);
+ if (!result.second) {
+ LOG(WARNING) << "The same token is already registered";
+ }
+}
+
+void TokenManager::UnregisterPluginVmToken(
+ const base::UnguessableToken& token) {
+ base::AutoLock l(client_token_map_lock_);
+ auto num_removed =
+ client_token_map_[cros::mojom::CameraClientType::PLUGINVM].erase(token);
+ if (num_removed != 1) {
+ LOG(WARNING) << "The token wasn't registered previously";
+ }
+}
+
+bool TokenManager::AuthenticateServer(const base::UnguessableToken& token) {
+ DCHECK(!server_token_.is_empty());
+ return server_token_ == token;
+}
+
+base::Optional<cros::mojom::CameraClientType> TokenManager::AuthenticateClient(
+ cros::mojom::CameraClientType type,
+ const base::UnguessableToken& token) {
+ base::AutoLock l(client_token_map_lock_);
+ if (type == cros::mojom::CameraClientType::UNKNOWN) {
+ for (const auto& client_token_map_pair : client_token_map_) {
+ const auto& token_set = client_token_map_pair.second;
+ if (token_set.find(token) != token_set.end()) {
+ return client_token_map_pair.first;
+ }
+ }
+ return base::nullopt;
+ }
+ auto& token_set = client_token_map_[type];
+ if (token_set.find(token) == token_set.end()) {
+ return base::nullopt;
+ }
+ return type;
+}
+
+void TokenManager::AssignServerTokenForTesting(
+ const base::UnguessableToken& token) {
+ server_token_ = token;
+}
+
+void TokenManager::AssignClientTokenForTesting(
+ cros::mojom::CameraClientType type,
+ const base::UnguessableToken& token) {
+ base::AutoLock l(client_token_map_lock_);
+ client_token_map_[type].insert(token);
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/token_manager.h b/chromium/media/capture/video/chromeos/token_manager.h
new file mode 100644
index 00000000000..c4a75ad5f9f
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/token_manager.h
@@ -0,0 +1,72 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_TOKEN_MANAGER_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_TOKEN_MANAGER_H_
+
+#include <array>
+
+#include "base/containers/flat_map.h"
+#include "base/containers/flat_set.h"
+#include "base/optional.h"
+#include "base/thread_annotations.h"
+#include "base/unguessable_token.h"
+#include "media/capture/capture_export.h"
+#include "media/capture/video/chromeos/mojom/cros_camera_service.mojom.h"
+
+namespace media {
+
+class CAPTURE_EXPORT TokenManager {
+ public:
+ static constexpr char kServerTokenPath[] = "/run/camera_tokens/server/token";
+ static constexpr char kTestClientTokenPath[] =
+ "/run/camera_tokens/testing/token";
+ static constexpr std::array<cros::mojom::CameraClientType, 3>
+ kTrustedClientTypes = {cros::mojom::CameraClientType::CHROME,
+ cros::mojom::CameraClientType::ANDROID,
+ cros::mojom::CameraClientType::TESTING};
+
+ TokenManager();
+ ~TokenManager();
+
+ bool GenerateServerToken();
+
+ bool GenerateTestClientToken();
+
+ base::UnguessableToken GetTokenForTrustedClient(
+ cros::mojom::CameraClientType type);
+
+ void RegisterPluginVmToken(const base::UnguessableToken& token);
+ void UnregisterPluginVmToken(const base::UnguessableToken& token);
+
+ bool AuthenticateServer(const base::UnguessableToken& token);
+
+ // Authenticates client with the given |type| and |token|. When |type| is
+ // cros::mojom::CameraClientType::UNKNOWN, it tries to figure out the actual
+ // client type by the supplied |token|. If authentication succeeds, it returns
+ // the authenticated type of the client. If authentication fails,
+ // base::nullopt is returned.
+ base::Optional<cros::mojom::CameraClientType> AuthenticateClient(
+ cros::mojom::CameraClientType type,
+ const base::UnguessableToken& token);
+
+ private:
+ friend class TokenManagerTest;
+ friend class CameraHalDispatcherImplTest;
+
+ void AssignServerTokenForTesting(const base::UnguessableToken& token);
+ void AssignClientTokenForTesting(cros::mojom::CameraClientType type,
+ const base::UnguessableToken& token);
+
+ base::UnguessableToken server_token_;
+
+ base::Lock client_token_map_lock_;
+ base::flat_map<cros::mojom::CameraClientType,
+ base::flat_set<base::UnguessableToken>>
+ client_token_map_ GUARDED_BY(client_token_map_lock_);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_TOKEN_MANAGER_H_
diff --git a/chromium/media/capture/video/chromeos/token_manager_unittest.cc b/chromium/media/capture/video/chromeos/token_manager_unittest.cc
new file mode 100644
index 00000000000..1cc351b590d
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/token_manager_unittest.cc
@@ -0,0 +1,97 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/token_manager.h"
+
+#include <string>
+
+#include "base/containers/flat_map.h"
+#include "base/files/file_util.h"
+#include "base/unguessable_token.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+class TokenManagerTest : public ::testing::Test {
+ public:
+ TokenManagerTest() {
+ // Generate server token.
+ server_token_ = base::UnguessableToken::Create();
+ manager_.AssignServerTokenForTesting(server_token_);
+
+ // Generate tokens from trusted clients.
+ for (const auto& type : TokenManager::kTrustedClientTypes) {
+ auto& token = client_token_map_[type];
+ token = base::UnguessableToken::Create();
+ manager_.AssignClientTokenForTesting(type, token);
+ }
+ }
+
+ ~TokenManagerTest() override = default;
+
+ protected:
+ TokenManager manager_;
+ base::UnguessableToken server_token_;
+ base::flat_map<cros::mojom::CameraClientType, base::UnguessableToken>
+ client_token_map_;
+};
+
+// Test that TokenManager authenticates token for CameraHalServer.
+TEST_F(TokenManagerTest, AuthenticateServer) {
+ EXPECT_TRUE(manager_.AuthenticateServer(server_token_));
+}
+
+// Test that TokenManager authenticates token for CameraHalClient.
+TEST_F(TokenManagerTest, AuthenticateClient) {
+ for (auto type : TokenManager::kTrustedClientTypes) {
+ auto authenticated_type =
+ manager_.AuthenticateClient(type, client_token_map_[type]);
+ ASSERT_TRUE(authenticated_type.has_value());
+ EXPECT_EQ(authenticated_type.value(), type);
+
+ // Verify that an empty token fails authentication.
+ authenticated_type =
+ manager_.AuthenticateClient(type, base::UnguessableToken());
+ EXPECT_FALSE(authenticated_type.has_value());
+ }
+}
+
+// Test that TokanManager authenticates token for pluginvm and the
+// authentication fails when the token is unregistered.
+TEST_F(TokenManagerTest, AuthenticatePluginvm) {
+ // Create a fake token for pluginvm.
+ auto token = base::UnguessableToken::Create();
+
+ manager_.RegisterPluginVmToken(token);
+ auto authenticated_type = manager_.AuthenticateClient(
+ cros::mojom::CameraClientType::UNKNOWN, token);
+ ASSERT_TRUE(authenticated_type.has_value());
+ EXPECT_EQ(authenticated_type.value(),
+ cros::mojom::CameraClientType::PLUGINVM);
+
+ manager_.UnregisterPluginVmToken(token);
+ authenticated_type = manager_.AuthenticateClient(
+ cros::mojom::CameraClientType::UNKNOWN, token);
+ EXPECT_FALSE(authenticated_type.has_value());
+}
+
+// Test that CameraClientType::UNKNOWN with an empty token is rejected.
+TEST_F(TokenManagerTest, AuthenticateUnknown) {
+ auto authenticated_type = manager_.AuthenticateClient(
+ cros::mojom::CameraClientType::UNKNOWN, base::UnguessableToken::Create());
+ EXPECT_FALSE(authenticated_type.has_value());
+}
+
+// Test that TokenManager::GetTokenForTrustedClient returns an empty token for
+// untrusted clients.
+TEST_F(TokenManagerTest, GetTokenForTrustedClientFailForUntrustedClients) {
+ EXPECT_TRUE(
+ manager_.GetTokenForTrustedClient(cros::mojom::CameraClientType::UNKNOWN)
+ .is_empty());
+ EXPECT_TRUE(
+ manager_.GetTokenForTrustedClient(cros::mojom::CameraClientType::PLUGINVM)
+ .is_empty());
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.cc b/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.cc
index 2ce163efb49..013b267fa77 100644
--- a/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.cc
+++ b/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.cc
@@ -13,7 +13,7 @@ namespace media {
VendorTagOpsDelegate::VendorTagOpsDelegate(
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
- : ipc_task_runner_(ipc_task_runner) {}
+ : ipc_task_runner_(ipc_task_runner), is_initializing_(false) {}
VendorTagOpsDelegate::~VendorTagOpsDelegate() = default;
@@ -28,17 +28,29 @@ VendorTagOpsDelegate::MakeReceiver() {
void VendorTagOpsDelegate::Initialize() {
DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
+
+ base::AutoLock lock(lock_);
+ is_initializing_ = true;
vendor_tag_ops_->GetTagCount(base::BindOnce(
&VendorTagOpsDelegate::OnGotTagCount, base::Unretained(this)));
}
void VendorTagOpsDelegate::Reset() {
DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
+
+ base::AutoLock lock(lock_);
vendor_tag_ops_.reset();
pending_info_.clear();
name_map_.clear();
tag_map_.clear();
initialized_.Reset();
+ is_initializing_ = false;
+}
+
+void VendorTagOpsDelegate::StopInitialization() {
+ base::AutoLock lock(lock_);
+ initialized_.Signal();
+ is_initializing_ = false;
}
void VendorTagOpsDelegate::RemovePending(uint32_t tag) {
@@ -47,7 +59,7 @@ void VendorTagOpsDelegate::RemovePending(uint32_t tag) {
DCHECK_EQ(removed, 1u);
if (pending_info_.empty()) {
DVLOG(1) << "VendorTagOpsDelegate initialized";
- initialized_.Signal();
+ StopInitialization();
}
}
@@ -55,13 +67,13 @@ void VendorTagOpsDelegate::OnGotTagCount(int32_t tag_count) {
DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
if (tag_count == -1) {
LOG(ERROR) << "Failed to get tag count";
- initialized_.Signal();
+ StopInitialization();
return;
}
if (tag_count == 0) {
// There is no vendor tag, we are done here.
- initialized_.Signal();
+ StopInitialization();
return;
}
@@ -134,6 +146,13 @@ void VendorTagOpsDelegate::OnGotTagType(uint32_t tag, int32_t type) {
const VendorTagInfo* VendorTagOpsDelegate::GetInfoByName(
const std::string& full_name) {
+ {
+ base::AutoLock lock(lock_);
+ if (!is_initializing_ && !initialized_.IsSignaled()) {
+ LOG(WARNING) << "VendorTagOps is accessed before calling Initialize()";
+ return nullptr;
+ }
+ }
initialized_.Wait();
auto it = name_map_.find(full_name);
if (it == name_map_.end()) {
@@ -144,6 +163,13 @@ const VendorTagInfo* VendorTagOpsDelegate::GetInfoByName(
const VendorTagInfo* VendorTagOpsDelegate::GetInfoByTag(
cros::mojom::CameraMetadataTag tag) {
+ {
+ base::AutoLock lock(lock_);
+ if (!is_initializing_ && !initialized_.IsSignaled()) {
+ LOG(WARNING) << "VendorTagOps is accessed before calling Initialize()";
+ return nullptr;
+ }
+ }
initialized_.Wait();
auto it = tag_map_.find(tag);
if (it == tag_map_.end()) {
diff --git a/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h b/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h
index 206394f3817..cd963e0cdd8 100644
--- a/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h
+++ b/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h
@@ -9,6 +9,7 @@
#include <string>
#include <vector>
+#include "base/synchronization/lock.h"
#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
#include "mojo/public/cpp/bindings/pending_receiver.h"
#include "mojo/public/cpp/bindings/remote.h"
@@ -41,6 +42,7 @@ class VendorTagOpsDelegate {
const VendorTagInfo* GetInfoByTag(cros::mojom::CameraMetadataTag tag);
private:
+ void StopInitialization();
void RemovePending(uint32_t tag);
void OnGotTagCount(int32_t tag_count);
@@ -63,6 +65,9 @@ class VendorTagOpsDelegate {
std::map<cros::mojom::CameraMetadataTag, VendorTagInfo> tag_map_;
base::WaitableEvent initialized_;
+
+ base::Lock lock_;
+ bool is_initializing_ GUARDED_BY(lock_);
};
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.cc b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.cc
new file mode 100644
index 00000000000..498542efce2
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.cc
@@ -0,0 +1,282 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/video_capture_device_chromeos_delegate.h"
+
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "base/bind.h"
+#include "base/callback_helpers.h"
+#include "base/location.h"
+#include "base/synchronization/waitable_event.h"
+#include "base/threading/platform_thread.h"
+#include "base/trace_event/trace_event.h"
+#include "media/base/bind_to_current_loop.h"
+#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
+#include "media/capture/video/chromeos/camera_device_delegate.h"
+#include "media/capture/video/chromeos/camera_hal_delegate.h"
+#include "ui/display/display.h"
+#include "ui/display/display_observer.h"
+#include "ui/display/screen.h"
+
+namespace media {
+
+VideoCaptureDeviceChromeOSDelegate::VideoCaptureDeviceChromeOSDelegate(
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
+ const VideoCaptureDeviceDescriptor& device_descriptor,
+ scoped_refptr<CameraHalDelegate> camera_hal_delegate,
+ base::OnceClosure cleanup_callback)
+ : device_descriptor_(device_descriptor),
+ camera_hal_delegate_(std::move(camera_hal_delegate)),
+ capture_task_runner_(base::ThreadTaskRunnerHandle::Get()),
+ camera_device_ipc_thread_(std::string("CameraDeviceIpcThread") +
+ device_descriptor.device_id),
+ screen_observer_delegate_(
+ ScreenObserverDelegate::Create(this, ui_task_runner)),
+ lens_facing_(device_descriptor.facing),
+ // External cameras have lens_facing as MEDIA_VIDEO_FACING_NONE.
+ // We don't want to rotate the frame even if the device rotates.
+ rotates_with_device_(lens_facing_ !=
+ VideoFacingMode::MEDIA_VIDEO_FACING_NONE),
+ rotation_(0),
+ cleanup_callback_(std::move(cleanup_callback)),
+ device_closed_(base::WaitableEvent::ResetPolicy::MANUAL,
+ base::WaitableEvent::InitialState::NOT_SIGNALED) {
+ // TODO(b/175168296): Hook power manager client on LaCrOS.
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ power_manager_client_proxy_ = base::MakeRefCounted<PowerManagerClientProxy>();
+ power_manager_client_proxy_->Init(
+ weak_ptr_factory_.GetWeakPtr(), "VideoCaptureDeviceChromeOSDelegate",
+ capture_task_runner_, std::move(ui_task_runner));
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+}
+
+VideoCaptureDeviceChromeOSDelegate::~VideoCaptureDeviceChromeOSDelegate() {}
+
+void VideoCaptureDeviceChromeOSDelegate::Shutdown() {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ if (!HasDeviceClient()) {
+ DCHECK(!camera_device_ipc_thread_.IsRunning());
+ screen_observer_delegate_->RemoveObserver();
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ power_manager_client_proxy_->Shutdown();
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+ std::move(cleanup_callback_).Run();
+ }
+}
+
+bool VideoCaptureDeviceChromeOSDelegate::HasDeviceClient() {
+ return device_context_ && device_context_->HasClient();
+}
+
+void VideoCaptureDeviceChromeOSDelegate::AllocateAndStart(
+ const VideoCaptureParams& params,
+ std::unique_ptr<VideoCaptureDevice::Client> client,
+ ClientType client_type) {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ DCHECK(!camera_device_delegate_);
+ if (!HasDeviceClient()) {
+ TRACE_EVENT0("camera", "Start Device");
+ if (!camera_device_ipc_thread_.Start()) {
+ std::string error_msg = "Failed to start device thread";
+ LOG(ERROR) << error_msg;
+ client->OnError(
+ media::VideoCaptureError::kCrosHalV3FailedToStartDeviceThread,
+ FROM_HERE, error_msg);
+ return;
+ }
+
+ device_context_ = std::make_unique<CameraDeviceContext>();
+ if (device_context_->AddClient(client_type, std::move(client))) {
+ capture_params_[client_type] = params;
+ camera_device_delegate_ = std::make_unique<CameraDeviceDelegate>(
+ device_descriptor_, camera_hal_delegate_,
+ camera_device_ipc_thread_.task_runner());
+ OpenDevice();
+ }
+ CameraAppDeviceBridgeImpl::GetInstance()->OnVideoCaptureDeviceCreated(
+ device_descriptor_.device_id, camera_device_ipc_thread_.task_runner());
+ } else {
+ if (device_context_->AddClient(client_type, std::move(client))) {
+ capture_params_[client_type] = params;
+ ReconfigureStreams();
+ }
+ }
+}
+
+void VideoCaptureDeviceChromeOSDelegate::StopAndDeAllocate(
+ ClientType client_type) {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ DCHECK(camera_device_delegate_);
+ device_context_->RemoveClient(client_type);
+ if (!HasDeviceClient()) {
+ CloseDevice();
+ CameraAppDeviceBridgeImpl::GetInstance()->OnVideoCaptureDeviceClosing(
+ device_descriptor_.device_id);
+ camera_device_ipc_thread_.Stop();
+ camera_device_delegate_.reset();
+ device_context_.reset();
+ }
+}
+
+void VideoCaptureDeviceChromeOSDelegate::TakePhoto(
+ VideoCaptureDevice::TakePhotoCallback callback) {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ DCHECK(camera_device_delegate_);
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE, base::BindOnce(&CameraDeviceDelegate::TakePhoto,
+ camera_device_delegate_->GetWeakPtr(),
+ std::move(callback)));
+}
+
+void VideoCaptureDeviceChromeOSDelegate::GetPhotoState(
+ VideoCaptureDevice::GetPhotoStateCallback callback) {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE, base::BindOnce(&CameraDeviceDelegate::GetPhotoState,
+ camera_device_delegate_->GetWeakPtr(),
+ std::move(callback)));
+}
+
+void VideoCaptureDeviceChromeOSDelegate::SetPhotoOptions(
+ mojom::PhotoSettingsPtr settings,
+ VideoCaptureDevice::SetPhotoOptionsCallback callback) {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE, base::BindOnce(&CameraDeviceDelegate::SetPhotoOptions,
+ camera_device_delegate_->GetWeakPtr(),
+ std::move(settings), std::move(callback)));
+}
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+
+void VideoCaptureDeviceChromeOSDelegate::SuspendDone() {
+ OpenDevice();
+}
+
+void VideoCaptureDeviceChromeOSDelegate::SuspendImminent() {
+ CloseDevice();
+}
+
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
+void VideoCaptureDeviceChromeOSDelegate::OpenDevice() {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+
+ if (!camera_device_delegate_) {
+ return;
+ }
+ // It's safe to pass unretained |device_context_| here since
+ // VideoCaptureDeviceChromeOSDelegate owns |camera_device_delegate_| and makes
+ // sure |device_context_| outlives |camera_device_delegate_|.
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(&CameraDeviceDelegate::AllocateAndStart,
+ camera_device_delegate_->GetWeakPtr(), capture_params_,
+ base::Unretained(device_context_.get())));
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(&CameraDeviceDelegate::SetRotation,
+ camera_device_delegate_->GetWeakPtr(), rotation_));
+}
+
+void VideoCaptureDeviceChromeOSDelegate::CloseDevice() {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+
+ if (!camera_device_delegate_) {
+ return;
+ }
+ // We do our best to allow the camera HAL cleanly shut down the device. In
+ // general we don't trust the camera HAL so if the device does not close in
+ // time we simply terminate the Mojo channel by resetting
+ // |camera_device_delegate_|.
+ //
+ // VideoCaptureDeviceChromeOSDelegate owns both |camera_device_delegate_| and
+ // |device_closed_| and it stops |camera_device_ipc_thread_| in
+ // StopAndDeAllocate, so it's safe to pass |device_closed_| as unretained in
+ // the callback.
+ device_closed_.Reset();
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE, base::BindOnce(&CameraDeviceDelegate::StopAndDeAllocate,
+ camera_device_delegate_->GetWeakPtr(),
+ base::BindOnce(
+ [](base::WaitableEvent* device_closed) {
+ device_closed->Signal();
+ },
+ base::Unretained(&device_closed_))));
+ base::TimeDelta kWaitTimeoutSecs = base::TimeDelta::FromSeconds(3);
+ device_closed_.TimedWait(kWaitTimeoutSecs);
+}
+
+void VideoCaptureDeviceChromeOSDelegate::ReconfigureStreams() {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ DCHECK(camera_device_delegate_);
+
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(&CameraDeviceDelegate::ReconfigureStreams,
+ camera_device_delegate_->GetWeakPtr(), capture_params_));
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(&CameraDeviceDelegate::SetRotation,
+ camera_device_delegate_->GetWeakPtr(), rotation_));
+}
+
+void VideoCaptureDeviceChromeOSDelegate::SetDisplayRotation(
+ const display::Display& display) {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ if (display.IsInternal())
+ SetRotation(display.rotation() * 90);
+}
+
+void VideoCaptureDeviceChromeOSDelegate::SetRotation(int rotation) {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ if (!rotates_with_device_) {
+ rotation = 0;
+ } else if (lens_facing_ == VideoFacingMode::MEDIA_VIDEO_FACING_ENVIRONMENT) {
+ // Original frame when |rotation| = 0
+ // -----------------------
+ // | * |
+ // | * * |
+ // | * * |
+ // | ******* |
+ // | * * |
+ // | * * |
+ // -----------------------
+ //
+ // |rotation| = 90, this is what back camera sees
+ // -----------------------
+ // | ******** |
+ // | * **** |
+ // | * *** |
+ // | * *** |
+ // | * **** |
+ // | ******** |
+ // -----------------------
+ //
+ // |rotation| = 90, this is what front camera sees
+ // -----------------------
+ // | ******** |
+ // | **** * |
+ // | *** * |
+ // | *** * |
+ // | **** * |
+ // | ******** |
+ // -----------------------
+ //
+ // Therefore, for back camera, we need to rotate (360 - |rotation|).
+ rotation = (360 - rotation) % 360;
+ }
+ rotation_ = rotation;
+ if (camera_device_ipc_thread_.IsRunning()) {
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(&CameraDeviceDelegate::SetRotation,
+ camera_device_delegate_->GetWeakPtr(), rotation_));
+ }
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.h b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.h
new file mode 100644
index 00000000000..0e4a6a31b5f
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.h
@@ -0,0 +1,133 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_VIDEO_CAPTURE_DEVICE_CHROMEOS_DELEGATE_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_VIDEO_CAPTURE_DEVICE_CHROMEOS_DELEGATE_H_
+
+#include <memory>
+
+#include "base/containers/flat_map.h"
+#include "base/macros.h"
+#include "base/memory/weak_ptr.h"
+#include "base/single_thread_task_runner.h"
+#include "base/synchronization/lock.h"
+#include "base/threading/thread.h"
+#include "media/capture/video/chromeos/camera_device_context.h"
+#include "media/capture/video/chromeos/display_rotation_observer.h"
+#include "media/capture/video/video_capture_device.h"
+#include "media/capture/video/video_capture_device_descriptor.h"
+#include "media/capture/video_capture_types.h"
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+#include "media/capture/video/chromeos/ash/power_manager_client_proxy.h"
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
+namespace display {
+
+class Display;
+
+} // namespace display
+
+namespace media {
+
+class CameraHalDelegate;
+class CameraDeviceDelegate;
+
+// Implementation of delegate for ChromeOS with CrOS camera HALv3.
+class CAPTURE_EXPORT VideoCaptureDeviceChromeOSDelegate final
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ : public DisplayRotationObserver,
+ public PowerManagerClientProxy::Observer {
+#else
+ : public DisplayRotationObserver {
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+ public:
+ VideoCaptureDeviceChromeOSDelegate(
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
+ const VideoCaptureDeviceDescriptor& device_descriptor,
+ scoped_refptr<CameraHalDelegate> camera_hal_delegate,
+ base::OnceClosure cleanup_callback);
+
+ ~VideoCaptureDeviceChromeOSDelegate();
+ void Shutdown();
+ bool HasDeviceClient();
+
+ void AllocateAndStart(const VideoCaptureParams& params,
+ std::unique_ptr<VideoCaptureDevice::Client> client,
+ ClientType client_type);
+ void StopAndDeAllocate(ClientType client_type);
+ void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback);
+ void GetPhotoState(VideoCaptureDevice::GetPhotoStateCallback callback);
+ void SetPhotoOptions(mojom::PhotoSettingsPtr settings,
+ VideoCaptureDevice::SetPhotoOptionsCallback callback);
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // Implementation of PowerManagerClientProxy::Observer.
+ void SuspendDone() final;
+ void SuspendImminent() final;
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
+ void OpenDevice();
+ void CloseDevice();
+
+ private:
+ void ReconfigureStreams();
+
+ // DisplayRotationDelegate implementation.
+ void SetDisplayRotation(const display::Display& display) final;
+ void SetRotation(int rotation);
+
+ const VideoCaptureDeviceDescriptor device_descriptor_;
+
+ // A reference to the CameraHalDelegate instance in the VCD factory. This is
+ // used by AllocateAndStart to query camera info and create the camera device.
+ const scoped_refptr<CameraHalDelegate> camera_hal_delegate_;
+
+ // A reference to the thread that all the VideoCaptureDevice interface methods
+ // are expected to be called on.
+ const scoped_refptr<base::SingleThreadTaskRunner> capture_task_runner_;
+
+ // The thread that all the Mojo operations of |camera_device_delegate_| take
+ // place. Started in AllocateAndStart and stopped in StopAndDeAllocate, where
+ // the access to the base::Thread methods are sequenced on
+ // |capture_task_runner_|.
+ base::Thread camera_device_ipc_thread_;
+
+ // Map client type to VideoCaptureParams.
+ base::flat_map<ClientType, VideoCaptureParams> capture_params_;
+
+ // |device_context_| is created and owned by
+ // VideoCaptureDeviceChromeOSDelegate and is only accessed by
+ // |camera_device_delegate_|.
+ std::unique_ptr<CameraDeviceContext> device_context_;
+
+ // Internal delegate doing the actual capture setting, buffer allocation and
+ // circulation with the camera HAL. Created in AllocateAndStart and deleted in
+ // StopAndDeAllocate on |capture_task_runner_|. All methods of
+ // |camera_device_delegate_| operate on |camera_device_ipc_thread_|.
+ std::unique_ptr<CameraDeviceDelegate> camera_device_delegate_;
+
+ scoped_refptr<ScreenObserverDelegate> screen_observer_delegate_;
+ const VideoFacingMode lens_facing_;
+ // Whether the incoming frames should rotate when the device rotates.
+ const bool rotates_with_device_;
+ int rotation_;
+
+ base::OnceClosure cleanup_callback_;
+
+ base::WaitableEvent device_closed_;
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ scoped_refptr<PowerManagerClientProxy> power_manager_client_proxy_;
+
+ base::WeakPtrFactory<PowerManagerClientProxy::Observer> weak_ptr_factory_{
+ this};
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceChromeOSDelegate);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_VIDEO_CAPTURE_DEVICE_CHROMEOS_DELEGATE_H_
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
index 07f2996154d..d7dc818b102 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
@@ -1,309 +1,53 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/capture/video/chromeos/video_capture_device_chromeos_halv3.h"
-#include <memory>
-#include <string>
-#include <utility>
-
-#include "base/bind.h"
-#include "base/callback_helpers.h"
-#include "base/location.h"
-#include "base/synchronization/waitable_event.h"
-#include "base/threading/platform_thread.h"
-#include "base/trace_event/trace_event.h"
-#include "chromeos/dbus/power/power_manager_client.h"
-#include "media/base/bind_to_current_loop.h"
-#include "media/capture/video/chromeos/camera_device_context.h"
-#include "media/capture/video/chromeos/camera_device_delegate.h"
-#include "media/capture/video/chromeos/camera_hal_delegate.h"
-#include "ui/display/display.h"
-#include "ui/display/display_observer.h"
-#include "ui/display/screen.h"
+#include "base/strings/string_util.h"
+#include "media/capture/video/chromeos/video_capture_device_chromeos_delegate.h"
namespace media {
-class VideoCaptureDeviceChromeOSHalv3::PowerManagerClientProxy
- : public base::RefCountedThreadSafe<PowerManagerClientProxy>,
- public chromeos::PowerManagerClient::Observer {
- public:
- PowerManagerClientProxy() = default;
-
- void Init(base::WeakPtr<VideoCaptureDeviceChromeOSHalv3> device,
- scoped_refptr<base::SingleThreadTaskRunner> device_task_runner,
- scoped_refptr<base::SingleThreadTaskRunner> dbus_task_runner) {
- device_ = std::move(device);
- device_task_runner_ = std::move(device_task_runner);
- dbus_task_runner_ = std::move(dbus_task_runner);
-
- dbus_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&PowerManagerClientProxy::InitOnDBusThread, this));
- }
-
- void Shutdown() {
- dbus_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&PowerManagerClientProxy::ShutdownOnDBusThread, this));
- }
-
- void UnblockSuspend(const base::UnguessableToken& unblock_suspend_token) {
- dbus_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&PowerManagerClientProxy::UnblockSuspendOnDBusThread,
- this, unblock_suspend_token));
- }
-
- private:
- friend class base::RefCountedThreadSafe<PowerManagerClientProxy>;
-
- ~PowerManagerClientProxy() override = default;
-
- void InitOnDBusThread() {
- DCHECK(dbus_task_runner_->RunsTasksInCurrentSequence());
- chromeos::PowerManagerClient::Get()->AddObserver(this);
- }
-
- void ShutdownOnDBusThread() {
- DCHECK(dbus_task_runner_->RunsTasksInCurrentSequence());
- chromeos::PowerManagerClient::Get()->RemoveObserver(this);
- }
-
- void UnblockSuspendOnDBusThread(
- const base::UnguessableToken& unblock_suspend_token) {
- DCHECK(dbus_task_runner_->RunsTasksInCurrentSequence());
- chromeos::PowerManagerClient::Get()->UnblockSuspend(unblock_suspend_token);
- }
-
- // chromeos::PowerManagerClient::Observer:
- void SuspendImminent(power_manager::SuspendImminent::Reason reason) final {
- auto token = base::UnguessableToken::Create();
- chromeos::PowerManagerClient::Get()->BlockSuspend(
- token, "VideoCaptureDeviceChromeOSHalv3");
- device_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&VideoCaptureDeviceChromeOSHalv3::CloseDevice,
- device_, token));
- }
-
- void SuspendDone(const base::TimeDelta& sleep_duration) final {
- device_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&VideoCaptureDeviceChromeOSHalv3::OpenDevice, device_));
- }
-
- base::WeakPtr<VideoCaptureDeviceChromeOSHalv3> device_;
- scoped_refptr<base::SingleThreadTaskRunner> device_task_runner_;
- scoped_refptr<base::SingleThreadTaskRunner> dbus_task_runner_;
-
- DISALLOW_COPY_AND_ASSIGN(PowerManagerClientProxy);
-};
+constexpr char kVirtualPrefix[] = "VIRTUAL_";
VideoCaptureDeviceChromeOSHalv3::VideoCaptureDeviceChromeOSHalv3(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- const VideoCaptureDeviceDescriptor& device_descriptor,
- scoped_refptr<CameraHalDelegate> camera_hal_delegate,
- CameraAppDeviceImpl* camera_app_device,
- base::OnceClosure cleanup_callback)
- : device_descriptor_(device_descriptor),
- camera_hal_delegate_(std::move(camera_hal_delegate)),
- capture_task_runner_(base::ThreadTaskRunnerHandle::Get()),
- camera_device_ipc_thread_(std::string("CameraDeviceIpcThread") +
- device_descriptor.device_id),
- screen_observer_delegate_(
- ScreenObserverDelegate::Create(this, ui_task_runner)),
- lens_facing_(device_descriptor.facing),
- // External cameras have lens_facing as MEDIA_VIDEO_FACING_NONE.
- // We don't want to rotate the frame even if the device rotates.
- rotates_with_device_(lens_facing_ !=
- VideoFacingMode::MEDIA_VIDEO_FACING_NONE),
- rotation_(0),
- camera_app_device_(camera_app_device),
- cleanup_callback_(std::move(cleanup_callback)),
- power_manager_client_proxy_(
- base::MakeRefCounted<PowerManagerClientProxy>()),
- client_type_(ClientType::kPreviewClient) {
- power_manager_client_proxy_->Init(weak_ptr_factory_.GetWeakPtr(),
- capture_task_runner_,
- std::move(ui_task_runner));
+ VideoCaptureDeviceChromeOSDelegate* delegate,
+ const VideoCaptureDeviceDescriptor& vcd_descriptor)
+ : vcd_delegate_(delegate) {
+ client_type_ = base::StartsWith(vcd_descriptor.device_id, kVirtualPrefix)
+ ? ClientType::kVideoClient
+ : ClientType::kPreviewClient;
}
VideoCaptureDeviceChromeOSHalv3::~VideoCaptureDeviceChromeOSHalv3() {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
- DCHECK(!camera_device_ipc_thread_.IsRunning());
- screen_observer_delegate_->RemoveObserver();
- power_manager_client_proxy_->Shutdown();
- std::move(cleanup_callback_).Run();
+ vcd_delegate_->Shutdown();
}
// VideoCaptureDevice implementation.
void VideoCaptureDeviceChromeOSHalv3::AllocateAndStart(
const VideoCaptureParams& params,
std::unique_ptr<Client> client) {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
- DCHECK(!camera_device_delegate_);
- TRACE_EVENT0("camera", "Start Device");
- if (!camera_device_ipc_thread_.Start()) {
- std::string error_msg = "Failed to start device thread";
- LOG(ERROR) << error_msg;
- client->OnError(
- media::VideoCaptureError::kCrosHalV3FailedToStartDeviceThread,
- FROM_HERE, error_msg);
- return;
- }
- capture_params_ = params;
- device_context_ = std::make_unique<CameraDeviceContext>();
- if (device_context_->AddClient(client_type_, std::move(client))) {
- camera_device_delegate_ = std::make_unique<CameraDeviceDelegate>(
- device_descriptor_, camera_hal_delegate_,
- camera_device_ipc_thread_.task_runner(), camera_app_device_,
- client_type_);
- OpenDevice();
- }
+ vcd_delegate_->AllocateAndStart(params, std::move(client), client_type_);
}
void VideoCaptureDeviceChromeOSHalv3::StopAndDeAllocate() {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
-
- if (!camera_device_delegate_) {
- return;
- }
- CloseDevice(base::UnguessableToken());
- camera_device_ipc_thread_.Stop();
- camera_device_delegate_.reset();
- device_context_->RemoveClient(client_type_);
- device_context_.reset();
+ vcd_delegate_->StopAndDeAllocate(client_type_);
}
void VideoCaptureDeviceChromeOSHalv3::TakePhoto(TakePhotoCallback callback) {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
- DCHECK(camera_device_delegate_);
- camera_device_ipc_thread_.task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&CameraDeviceDelegate::TakePhoto,
- camera_device_delegate_->GetWeakPtr(),
- std::move(callback)));
+ vcd_delegate_->TakePhoto(std::move(callback));
}
void VideoCaptureDeviceChromeOSHalv3::GetPhotoState(
GetPhotoStateCallback callback) {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
- camera_device_ipc_thread_.task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&CameraDeviceDelegate::GetPhotoState,
- camera_device_delegate_->GetWeakPtr(),
- std::move(callback)));
+ vcd_delegate_->GetPhotoState(std::move(callback));
}
void VideoCaptureDeviceChromeOSHalv3::SetPhotoOptions(
mojom::PhotoSettingsPtr settings,
SetPhotoOptionsCallback callback) {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
- camera_device_ipc_thread_.task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&CameraDeviceDelegate::SetPhotoOptions,
- camera_device_delegate_->GetWeakPtr(),
- std::move(settings), std::move(callback)));
-}
-
-void VideoCaptureDeviceChromeOSHalv3::OpenDevice() {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
-
- if (!camera_device_delegate_) {
- return;
- }
- // It's safe to pass unretained |device_context_| here since
- // VideoCaptureDeviceChromeOSHalv3 owns |camera_device_delegate_| and makes
- // sure |device_context_| outlives |camera_device_delegate_|.
- camera_device_ipc_thread_.task_runner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraDeviceDelegate::AllocateAndStart,
- camera_device_delegate_->GetWeakPtr(), capture_params_,
- base::Unretained(device_context_.get())));
- camera_device_ipc_thread_.task_runner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraDeviceDelegate::SetRotation,
- camera_device_delegate_->GetWeakPtr(), rotation_));
-}
-
-void VideoCaptureDeviceChromeOSHalv3::CloseDevice(
- base::UnguessableToken unblock_suspend_token) {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
-
- if (!camera_device_delegate_) {
- return;
- }
- // We do our best to allow the camera HAL cleanly shut down the device. In
- // general we don't trust the camera HAL so if the device does not close in
- // time we simply terminate the Mojo channel by resetting
- // |camera_device_delegate_|.
- base::WaitableEvent device_closed(
- base::WaitableEvent::ResetPolicy::MANUAL,
- base::WaitableEvent::InitialState::NOT_SIGNALED);
- camera_device_ipc_thread_.task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&CameraDeviceDelegate::StopAndDeAllocate,
- camera_device_delegate_->GetWeakPtr(),
- base::BindOnce(
- [](base::WaitableEvent* device_closed) {
- device_closed->Signal();
- },
- base::Unretained(&device_closed))));
- base::TimeDelta kWaitTimeoutSecs = base::TimeDelta::FromSeconds(3);
- device_closed.TimedWait(kWaitTimeoutSecs);
- if (!unblock_suspend_token.is_empty())
- power_manager_client_proxy_->UnblockSuspend(unblock_suspend_token);
-}
-
-void VideoCaptureDeviceChromeOSHalv3::SetDisplayRotation(
- const display::Display& display) {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
- if (display.IsInternal())
- SetRotation(display.rotation() * 90);
-}
-
-void VideoCaptureDeviceChromeOSHalv3::SetRotation(int rotation) {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
- if (!rotates_with_device_) {
- rotation = 0;
- } else if (lens_facing_ == VideoFacingMode::MEDIA_VIDEO_FACING_ENVIRONMENT) {
- // Original frame when |rotation| = 0
- // -----------------------
- // | * |
- // | * * |
- // | * * |
- // | ******* |
- // | * * |
- // | * * |
- // -----------------------
- //
- // |rotation| = 90, this is what back camera sees
- // -----------------------
- // | ******** |
- // | * **** |
- // | * *** |
- // | * *** |
- // | * **** |
- // | ******** |
- // -----------------------
- //
- // |rotation| = 90, this is what front camera sees
- // -----------------------
- // | ******** |
- // | **** * |
- // | *** * |
- // | *** * |
- // | **** * |
- // | ******** |
- // -----------------------
- //
- // Therefore, for back camera, we need to rotate (360 - |rotation|).
- rotation = (360 - rotation) % 360;
- }
- rotation_ = rotation;
- if (camera_device_ipc_thread_.IsRunning()) {
- camera_device_ipc_thread_.task_runner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraDeviceDelegate::SetRotation,
- camera_device_delegate_->GetWeakPtr(), rotation_));
- }
+ vcd_delegate_->SetPhotoOptions(std::move(settings), std::move(callback));
}
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
index 9f5a03ef70a..5c5188a82e5 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
@@ -1,4 +1,4 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@@ -7,39 +7,21 @@
#include <memory>
-#include "base/macros.h"
-#include "base/memory/weak_ptr.h"
-#include "base/single_thread_task_runner.h"
-#include "base/threading/thread.h"
#include "media/capture/video/chromeos/camera_device_context.h"
-#include "media/capture/video/chromeos/display_rotation_observer.h"
#include "media/capture/video/video_capture_device.h"
#include "media/capture/video/video_capture_device_descriptor.h"
-#include "media/capture/video_capture_types.h"
-
-namespace display {
-
-class Display;
-
-} // namespace display
namespace media {
-class CameraAppDeviceImpl;
-class CameraHalDelegate;
-class CameraDeviceDelegate;
+class VideoCaptureDeviceChromeOSDelegate;
// Implementation of VideoCaptureDevice for ChromeOS with CrOS camera HALv3.
class CAPTURE_EXPORT VideoCaptureDeviceChromeOSHalv3 final
- : public VideoCaptureDevice,
- public DisplayRotationObserver {
+ : public VideoCaptureDevice {
public:
VideoCaptureDeviceChromeOSHalv3(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- const VideoCaptureDeviceDescriptor& device_descriptor,
- scoped_refptr<CameraHalDelegate> camera_hal_delegate,
- CameraAppDeviceImpl* camera_app_device,
- base::OnceClosure cleanup_callback);
+ VideoCaptureDeviceChromeOSDelegate* delegate,
+ const VideoCaptureDeviceDescriptor& vcd_descriptor);
~VideoCaptureDeviceChromeOSHalv3() final;
@@ -53,60 +35,10 @@ class CAPTURE_EXPORT VideoCaptureDeviceChromeOSHalv3 final
SetPhotoOptionsCallback callback) final;
private:
- // Helper to interact with PowerManagerClient on DBus original thread.
- class PowerManagerClientProxy;
-
- void OpenDevice();
- void CloseDevice(base::UnguessableToken unblock_suspend_token);
-
- // DisplayRotationDelegate implementation.
- void SetDisplayRotation(const display::Display& display) final;
- void SetRotation(int rotation);
-
- const VideoCaptureDeviceDescriptor device_descriptor_;
-
- // A reference to the CameraHalDelegate instance in the VCD factory. This is
- // used by AllocateAndStart to query camera info and create the camera device.
- const scoped_refptr<CameraHalDelegate> camera_hal_delegate_;
-
- // A reference to the thread that all the VideoCaptureDevice interface methods
- // are expected to be called on.
- const scoped_refptr<base::SingleThreadTaskRunner> capture_task_runner_;
+ VideoCaptureDeviceChromeOSDelegate* vcd_delegate_;
- // The thread that all the Mojo operations of |camera_device_delegate_| take
- // place. Started in AllocateAndStart and stopped in StopAndDeAllocate, where
- // the access to the base::Thread methods are sequenced on
- // |capture_task_runner_|.
- base::Thread camera_device_ipc_thread_;
-
- VideoCaptureParams capture_params_;
- // |device_context_| is created and owned by VideoCaptureDeviceChromeOSHalv3
- // and is only accessed by |camera_device_delegate_|.
- std::unique_ptr<CameraDeviceContext> device_context_;
-
- // Internal delegate doing the actual capture setting, buffer allocation and
- // circulation with the camera HAL. Created in AllocateAndStart and deleted in
- // StopAndDeAllocate on |capture_task_runner_|. All methods of
- // |camera_device_delegate_| operate on |camera_device_ipc_thread_|.
- std::unique_ptr<CameraDeviceDelegate> camera_device_delegate_;
-
- scoped_refptr<ScreenObserverDelegate> screen_observer_delegate_;
- const VideoFacingMode lens_facing_;
- // Whether the incoming frames should rotate when the device rotates.
- const bool rotates_with_device_;
- int rotation_;
-
- CameraAppDeviceImpl* camera_app_device_; // Weak.
-
- base::OnceClosure cleanup_callback_;
-
- scoped_refptr<PowerManagerClientProxy> power_manager_client_proxy_;
-
- // The client type in CameraDeviceContext.
ClientType client_type_;
- base::WeakPtrFactory<VideoCaptureDeviceChromeOSHalv3> weak_ptr_factory_{this};
-
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceChromeOSHalv3);
};
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
index 6c0862e3cea..687bed919fe 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
@@ -9,7 +9,10 @@
#include "base/memory/ptr_util.h"
#include "media/base/bind_to_current_loop.h"
#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
-#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+#include "media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h"
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
namespace media {
@@ -20,17 +23,18 @@ gpu::GpuMemoryBufferManager* g_gpu_buffer_manager = nullptr;
} // namespace
VideoCaptureDeviceFactoryChromeOS::VideoCaptureDeviceFactoryChromeOS(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer,
- CameraAppDeviceBridgeImpl* camera_app_device_bridge)
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer)
: task_runner_for_screen_observer_(task_runner_for_screen_observer),
camera_hal_ipc_thread_("CameraHalIpcThread"),
- camera_app_device_bridge_(camera_app_device_bridge),
initialized_(Init()) {}
VideoCaptureDeviceFactoryChromeOS::~VideoCaptureDeviceFactoryChromeOS() {
- if (camera_app_device_bridge_) {
- camera_app_device_bridge_->UnsetCameraInfoGetter();
- }
+ CameraAppDeviceBridgeImpl::GetInstance()->UnsetCameraInfoGetter();
+
+ auto* camera_app_device_bridge = CameraAppDeviceBridgeImpl::GetInstance();
+ camera_app_device_bridge->UnsetCameraInfoGetter();
+ camera_app_device_bridge->UnsetVirtualDeviceController();
+
camera_hal_delegate_->Reset();
camera_hal_ipc_thread_.Stop();
}
@@ -43,8 +47,7 @@ VideoCaptureDeviceFactoryChromeOS::CreateDevice(
return std::unique_ptr<VideoCaptureDevice>();
}
return camera_hal_delegate_->CreateDevice(task_runner_for_screen_observer_,
- device_descriptor,
- camera_app_device_bridge_);
+ device_descriptor);
}
void VideoCaptureDeviceFactoryChromeOS::GetDevicesInfo(
@@ -76,27 +79,30 @@ bool VideoCaptureDeviceFactoryChromeOS::Init() {
return false;
}
+#if BUILDFLAG(IS_CHROMEOS_ASH)
if (!CameraHalDispatcherImpl::GetInstance()->IsStarted()) {
LOG(ERROR) << "CameraHalDispatcherImpl is not started";
return false;
}
-
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
camera_hal_delegate_ =
new CameraHalDelegate(camera_hal_ipc_thread_.task_runner());
- camera_hal_delegate_->RegisterCameraClient();
-
- // Since the |camera_hal_delegate_| is initialized on the constructor of this
- // object and is destroyed after |camera_app_device_bridge_| unsetting its
- // reference, it is safe to use base::Unretained() here.
- if (camera_app_device_bridge_) {
- camera_app_device_bridge_->SetCameraInfoGetter(
- base::BindRepeating(&CameraHalDelegate::GetCameraInfoFromDeviceId,
- base::Unretained(camera_hal_delegate_.get())));
+
+ if (!camera_hal_delegate_->RegisterCameraClient()) {
+ LOG(ERROR) << "Failed to register camera client";
+ return false;
}
- return true;
-}
-bool VideoCaptureDeviceFactoryChromeOS::IsSupportedCameraAppDeviceBridge() {
+ // Since we will unset camera info getter and virtual device controller before
+ // invalidate |camera_hal_delegate_| in the destructor, it should be safe to
+ // use base::Unretained() here.
+ auto* camera_app_device_bridge = CameraAppDeviceBridgeImpl::GetInstance();
+ camera_app_device_bridge->SetCameraInfoGetter(
+ base::BindRepeating(&CameraHalDelegate::GetCameraInfoFromDeviceId,
+ base::Unretained(camera_hal_delegate_.get())));
+ camera_app_device_bridge->SetVirtualDeviceController(
+ base::BindRepeating(&CameraHalDelegate::EnableVirtualDevice,
+ base::Unretained(camera_hal_delegate_.get())));
return true;
}
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
index efa7c10bdf4..970c199e4d2 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
+++ b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
@@ -16,8 +16,6 @@
namespace media {
-class CameraAppDeviceBridgeImpl;
-
using MojoMjpegDecodeAcceleratorFactoryCB = base::RepeatingCallback<void(
mojo::PendingReceiver<chromeos_camera::mojom::MjpegDecodeAccelerator>)>;
@@ -26,8 +24,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
public:
explicit VideoCaptureDeviceFactoryChromeOS(
scoped_refptr<base::SingleThreadTaskRunner>
- task_runner_for_screen_observer,
- CameraAppDeviceBridgeImpl* camera_app_device_bridge);
+ task_runner_for_screen_observer);
~VideoCaptureDeviceFactoryChromeOS() override;
@@ -36,8 +33,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
const VideoCaptureDeviceDescriptor& device_descriptor) final;
void GetDevicesInfo(GetDevicesInfoCallback callback) override;
- bool IsSupportedCameraAppDeviceBridge() override;
-
static gpu::GpuMemoryBufferManager* GetBufferManager();
static void SetGpuBufferManager(gpu::GpuMemoryBufferManager* buffer_manager);
@@ -59,8 +54,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
// |camera_hal_ipc_thread_|.
scoped_refptr<CameraHalDelegate> camera_hal_delegate_;
- CameraAppDeviceBridgeImpl* camera_app_device_bridge_; // Weak.
-
bool initialized_;
base::WeakPtrFactory<VideoCaptureDeviceFactoryChromeOS> weak_ptr_factory_{
diff --git a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder.h b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder.h
index 34ce0dba2cc..59850884926 100644
--- a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder.h
+++ b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder.h
@@ -26,12 +26,9 @@ class CAPTURE_EXPORT VideoCaptureJpegDecoder {
// decode error.
};
- using DecodeDoneCB = base::RepeatingCallback<void(
- int buffer_id,
- int frame_feedback_id,
- std::unique_ptr<VideoCaptureDevice::Client::Buffer::
- ScopedAccessPermission> buffer_read_permission,
- mojom::VideoFrameInfoPtr frame_info)>;
+ using DecodeDoneCB =
+ base::RepeatingCallback<void(ReadyFrameInBuffer,
+ std::vector<ReadyFrameInBuffer>)>;
virtual ~VideoCaptureJpegDecoder() {}
diff --git a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
index c272d15e92b..6b16b2cc2fb 100644
--- a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
@@ -25,15 +25,16 @@ VideoCaptureJpegDecoderImpl::VideoCaptureJpegDecoderImpl(
decode_done_cb_(std::move(decode_done_cb)),
send_log_message_cb_(std::move(send_log_message_cb)),
has_received_decoded_frame_(false),
+ decoder_status_(INIT_PENDING),
next_task_id_(0),
- task_id_(chromeos_camera::MjpegDecodeAccelerator::kInvalidTaskId),
- decoder_status_(INIT_PENDING) {}
+ task_id_(chromeos_camera::MjpegDecodeAccelerator::kInvalidTaskId) {}
VideoCaptureJpegDecoderImpl::~VideoCaptureJpegDecoderImpl() {
DCHECK(decoder_task_runner_->RunsTasksInCurrentSequence());
}
void VideoCaptureJpegDecoderImpl::Initialize() {
+ base::AutoLock lock(lock_);
if (!IsVideoCaptureAcceleratedJpegDecodingEnabled()) {
decoder_status_ = FAILED;
RecordInitDecodeUMA_Locked();
@@ -130,8 +131,8 @@ void VideoCaptureJpegDecoderImpl::DecodeCapturedData(
out_frame->BackWithOwnedSharedMemory(std::move(out_region),
std::move(out_mapping));
- out_frame->metadata()->frame_rate = frame_format.frame_rate;
- out_frame->metadata()->reference_time = reference_time;
+ out_frame->metadata().frame_rate = frame_format.frame_rate;
+ out_frame->metadata().reference_time = reference_time;
media::mojom::VideoFrameInfoPtr out_frame_info =
media::mojom::VideoFrameInfo::New();
@@ -139,14 +140,17 @@ void VideoCaptureJpegDecoderImpl::DecodeCapturedData(
out_frame_info->pixel_format = media::PIXEL_FORMAT_I420;
out_frame_info->coded_size = dimensions;
out_frame_info->visible_rect = gfx::Rect(dimensions);
- out_frame_info->metadata = *(out_frame->metadata());
+ out_frame_info->metadata = out_frame->metadata();
out_frame_info->color_space = out_frame->ColorSpace();
{
base::AutoLock lock(lock_);
decode_done_closure_ = base::BindOnce(
- decode_done_cb_, out_buffer.id, out_buffer.frame_feedback_id,
- std::move(out_buffer.access_permission), std::move(out_frame_info));
+ decode_done_cb_,
+ ReadyFrameInBuffer(out_buffer.id, out_buffer.frame_feedback_id,
+ std::move(out_buffer.access_permission),
+ std::move(out_frame_info)),
+ std::vector<ReadyFrameInBuffer>());
}
// base::Unretained is safe because |decoder_| is deleted on
@@ -236,6 +240,7 @@ bool VideoCaptureJpegDecoderImpl::IsDecoding_Locked() const {
}
void VideoCaptureJpegDecoderImpl::RecordInitDecodeUMA_Locked() {
+ lock_.AssertAcquired();
UMA_HISTOGRAM_BOOLEAN("Media.VideoCaptureGpuJpegDecoder.InitDecodeSuccess",
decoder_status_ == INIT_PASSED);
}
diff --git a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h
index 7bb0296b25a..fbc3a41d95e 100644
--- a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h
+++ b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h
@@ -87,11 +87,10 @@ class CAPTURE_EXPORT VideoCaptureJpegDecoderImpl
const base::RepeatingCallback<void(const std::string&)> send_log_message_cb_;
bool has_received_decoded_frame_;
- // Guards |decode_done_closure_| and |decoder_status_|.
- mutable base::Lock lock_;
-
// The closure of |decode_done_cb_| with bound parameters.
- base::OnceClosure decode_done_closure_;
+ mutable base::Lock lock_;
+ STATUS decoder_status_ GUARDED_BY(lock_);
+ base::OnceClosure decode_done_closure_ GUARDED_BY(lock_);
// Next id for input BitstreamBuffer.
int32_t next_task_id_;
@@ -104,8 +103,6 @@ class CAPTURE_EXPORT VideoCaptureJpegDecoderImpl
base::UnsafeSharedMemoryRegion in_shared_region_;
base::WritableSharedMemoryMapping in_shared_mapping_;
- STATUS decoder_status_;
-
SEQUENCE_CHECKER(sequence_checker_);
base::WeakPtrFactory<VideoCaptureJpegDecoderImpl> weak_ptr_factory_{this};
diff --git a/chromium/media/capture/video/create_video_capture_device_factory.cc b/chromium/media/capture/video/create_video_capture_device_factory.cc
index bab72603630..435ede8bd3b 100644
--- a/chromium/media/capture/video/create_video_capture_device_factory.cc
+++ b/chromium/media/capture/video/create_video_capture_device_factory.cc
@@ -11,10 +11,9 @@
#include "media/capture/video/fake_video_capture_device_factory.h"
#include "media/capture/video/file_video_capture_device_factory.h"
-#if defined(OS_LINUX) || BUILDFLAG(IS_LACROS)
+#if defined(OS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
#include "media/capture/video/linux/video_capture_device_factory_linux.h"
-#elif BUILDFLAG(IS_ASH)
-#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
+#elif BUILDFLAG(IS_CHROMEOS_ASH)
#include "media/capture/video/chromeos/public/cros_features.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
#include "media/capture/video/linux/video_capture_device_factory_linux.h"
@@ -57,11 +56,10 @@ CreateFakeVideoCaptureDeviceFactory() {
}
}
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
std::unique_ptr<VideoCaptureDeviceFactory>
CreateChromeOSVideoCaptureDeviceFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- media::CameraAppDeviceBridgeImpl* camera_app_device_bridge) {
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner) {
// On Chrome OS we have to support two use cases:
//
// 1. For devices that have the camera HAL v3 service running on Chrome OS,
@@ -72,21 +70,20 @@ CreateChromeOSVideoCaptureDeviceFactory(
// some special devices that may never be able to implement a camera HAL
// v3.
if (ShouldUseCrosCameraService()) {
- return std::make_unique<VideoCaptureDeviceFactoryChromeOS>(
- ui_task_runner, camera_app_device_bridge);
+ return std::make_unique<VideoCaptureDeviceFactoryChromeOS>(ui_task_runner);
} else {
return std::make_unique<VideoCaptureDeviceFactoryLinux>(ui_task_runner);
}
}
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
std::unique_ptr<VideoCaptureDeviceFactory>
CreatePlatformSpecificVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner) {
-#if defined(OS_LINUX) || BUILDFLAG(IS_LACROS)
+#if defined(OS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
return std::make_unique<VideoCaptureDeviceFactoryLinux>(ui_task_runner);
-#elif BUILDFLAG(IS_ASH)
- return CreateChromeOSVideoCaptureDeviceFactory(ui_task_runner, {});
+#elif BUILDFLAG(IS_CHROMEOS_ASH)
+ return CreateChromeOSVideoCaptureDeviceFactory(ui_task_runner);
#elif defined(OS_WIN)
return std::make_unique<VideoCaptureDeviceFactoryWin>();
#elif defined(OS_MAC)
@@ -115,20 +112,4 @@ std::unique_ptr<VideoCaptureDeviceFactory> CreateVideoCaptureDeviceFactory(
}
}
-#if BUILDFLAG(IS_ASH)
-std::unique_ptr<VideoCaptureDeviceFactory> CreateVideoCaptureDeviceFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- media::CameraAppDeviceBridgeImpl* camera_app_device_bridge) {
- auto fake_device_factory = CreateFakeVideoCaptureDeviceFactory();
- if (fake_device_factory) {
- return fake_device_factory;
- } else {
- // |ui_task_runner| is needed for the Linux ChromeOS factory to retrieve
- // screen rotations.
- return CreateChromeOSVideoCaptureDeviceFactory(ui_task_runner,
- camera_app_device_bridge);
- }
-}
-#endif // BUILDFLAG(IS_ASH)
-
} // namespace media
diff --git a/chromium/media/capture/video/create_video_capture_device_factory.h b/chromium/media/capture/video/create_video_capture_device_factory.h
index 9a9c85d756c..5e8a77090c8 100644
--- a/chromium/media/capture/video/create_video_capture_device_factory.h
+++ b/chromium/media/capture/video/create_video_capture_device_factory.h
@@ -14,19 +14,10 @@
namespace media {
-class CameraAppDeviceBridgeImpl;
-
std::unique_ptr<VideoCaptureDeviceFactory> CAPTURE_EXPORT
CreateVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner);
-#if BUILDFLAG(IS_ASH)
-std::unique_ptr<VideoCaptureDeviceFactory> CAPTURE_EXPORT
-CreateVideoCaptureDeviceFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- media::CameraAppDeviceBridgeImpl* camera_app_device_bridge);
-#endif // BUILDFLAG(IS_ASH)
-
} // namespace media
#endif // MEDIA_CAPTURE_VIDEO_CREATE_VIDEO_CAPTURE_DEVICE_FACTORY_H_
diff --git a/chromium/media/capture/video/fake_video_capture_device.cc b/chromium/media/capture/video/fake_video_capture_device.cc
index aef12478e12..b3892136940 100644
--- a/chromium/media/capture/video/fake_video_capture_device.cc
+++ b/chromium/media/capture/video/fake_video_capture_device.cc
@@ -162,6 +162,7 @@ gfx::ColorSpace GetDefaultColorSpace(VideoPixelFormat format) {
case PIXEL_FORMAT_XR30:
case PIXEL_FORMAT_XB30:
case PIXEL_FORMAT_BGRA:
+ case PIXEL_FORMAT_RGBAF16:
return gfx::ColorSpace::CreateSRGB();
case PIXEL_FORMAT_UNKNOWN:
return gfx::ColorSpace();
diff --git a/chromium/media/capture/video/file_video_capture_device_factory.cc b/chromium/media/capture/video/file_video_capture_device_factory.cc
index d899c509444..29fd7c77a0e 100644
--- a/chromium/media/capture/video/file_video_capture_device_factory.cc
+++ b/chromium/media/capture/video/file_video_capture_device_factory.cc
@@ -8,6 +8,7 @@
#include "base/files/file_path.h"
#include "base/strings/sys_string_conversions.h"
#include "base/threading/scoped_blocking_call.h"
+#include "base/threading/thread_restrictions.h"
#include "build/build_config.h"
#include "media/base/media_switches.h"
#include "media/capture/video/file_video_capture_device.h"
diff --git a/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc b/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc
index 538f01d2a1e..1c42b270512 100644
--- a/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc
+++ b/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc
@@ -208,8 +208,11 @@ void VideoCaptureDeviceFactoryFuchsia::OnWatchDevicesResult(
}
if (it->second->is_pending()) {
// If the device info request was still pending then consider it
- // complete now.
- OnDeviceInfoFetched();
+ // complete now. If this was the only device in pending state then all
+ // callbacks will be resolved in
+ // MaybeResolvePendingDeviceInfoCallbacks() called below.
+ DCHECK_GT(num_pending_device_info_requests_, 0U);
+ num_pending_device_info_requests_--;
}
devices_->erase(it);
continue;
diff --git a/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc b/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc
index 2f892b7fdbc..2692ac2a3ca 100644
--- a/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc
+++ b/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc
@@ -113,7 +113,8 @@ bool VideoCaptureDeviceFuchsia::IsSupportedPixelFormat(
}
VideoCaptureDeviceFuchsia::VideoCaptureDeviceFuchsia(
- fidl::InterfaceHandle<fuchsia::camera3::Device> device) {
+ fidl::InterfaceHandle<fuchsia::camera3::Device> device)
+ : sysmem_allocator_("CrVideoCaptureDeviceFuchsia") {
device_.Bind(std::move(device));
device_.set_error_handler(
fit::bind_member(this, &VideoCaptureDeviceFuchsia::OnDeviceError));
@@ -263,6 +264,11 @@ void VideoCaptureDeviceFuchsia::InitializeBufferCollection(
SysmemBufferReader::GetRecommendedConstraints(
kMaxUsedOutputFrames,
/*min_buffer_size=*/base::nullopt);
+ // This is not an actual device driver, so the priority should be > 1. It's
+ // also not a high-level system, so the name should be < 100.
+ constexpr uint32_t kNamePriority = 10;
+ buffer_collection_creator_->SetName(kNamePriority,
+ "CrVideoCaptureDeviceFuchsia");
buffer_collection_creator_->Create(
std::move(constraints),
base::BindOnce(&VideoCaptureDeviceFuchsia::OnBufferCollectionCreated,
diff --git a/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc b/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc
index 849fa5f8086..61ae93f0ad6 100644
--- a/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc
+++ b/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc
@@ -163,9 +163,8 @@ class TestVideoCaptureClient : public VideoCaptureDevice::Client {
NOTREACHED();
}
void OnIncomingCapturedExternalBuffer(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
base::TimeTicks reference_time,
base::TimeDelta timestamp) override {
NOTREACHED();
diff --git a/chromium/media/capture/video/gpu_memory_buffer_utils.cc b/chromium/media/capture/video/gpu_memory_buffer_utils.cc
index 2d1117a8501..2cc0655ff76 100644
--- a/chromium/media/capture/video/gpu_memory_buffer_utils.cc
+++ b/chromium/media/capture/video/gpu_memory_buffer_utils.cc
@@ -62,7 +62,7 @@ VideoCaptureDevice::Client::ReserveResult AllocateNV12GpuMemoryBuffer(
*out_gpu_memory_buffer = gmb_support->CreateGpuMemoryBufferImplFromHandle(
out_capture_buffer->handle_provider->GetGpuMemoryBufferHandle(),
buffer_size, kOpaqueGfxFormat,
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE,
base::NullCallback());
return reserve_result;
}
diff --git a/chromium/media/capture/video/linux/v4l2_capture_delegate.cc b/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
index 242aef391fd..185ca779fd4 100644
--- a/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
+++ b/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
@@ -66,6 +66,7 @@ struct {
size_t num_planes;
} constexpr kSupportedFormatsAndPlanarity[] = {
{V4L2_PIX_FMT_YUV420, PIXEL_FORMAT_I420, 1},
+ {V4L2_PIX_FMT_NV12, PIXEL_FORMAT_NV12, 1},
{V4L2_PIX_FMT_Y16, PIXEL_FORMAT_Y16, 1},
{V4L2_PIX_FMT_Z16, PIXEL_FORMAT_Y16, 1},
{V4L2_PIX_FMT_INVZ, PIXEL_FORMAT_Y16, 1},
@@ -264,10 +265,18 @@ void V4L2CaptureDelegate::AllocateAndStart(
ResetUserAndCameraControlsToDefault();
+ // In theory, checking for CAPTURE/OUTPUT in caps.capabilities should only
+ // be done if V4L2_CAP_DEVICE_CAPS is not set. However, this was not done
+ // in the past and it is unclear if it breaks with existing devices. And if
+ // a device is accepted incorrectly then it will not have any usable
+ // formats and is skipped anyways.
v4l2_capability cap = {};
if (!(DoIoctl(VIDIOC_QUERYCAP, &cap) == 0 &&
- ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) &&
- !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)))) {
+ (((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) &&
+ !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)) ||
+ ((cap.capabilities & V4L2_CAP_DEVICE_CAPS) &&
+ (cap.device_caps & V4L2_CAP_VIDEO_CAPTURE) &&
+ !(cap.device_caps & V4L2_CAP_VIDEO_OUTPUT))))) {
device_fd_.reset();
SetErrorState(VideoCaptureError::kV4L2ThisIsNotAV4L2VideoCaptureDevice,
FROM_HERE, "This is not a V4L2 video capture device");
diff --git a/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc b/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
index 48bcb6e168f..677d733460e 100644
--- a/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
+++ b/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
@@ -28,7 +28,7 @@
#include <linux/videodev2.h>
#endif
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "media/capture/video/linux/camera_config_chromeos.h"
#include "media/capture/video/linux/video_capture_device_chromeos.h"
#endif
@@ -53,7 +53,7 @@ const char kPidPathTemplate[] = "/sys/class/video4linux/%s/device/../idProduct";
const char kInterfacePathTemplate[] =
"/sys/class/video4linux/%s/device/interface";
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
static CameraConfigChromeOS* GetCameraConfig() {
static CameraConfigChromeOS* config = new CameraConfigChromeOS();
return config;
@@ -125,7 +125,7 @@ class DevVideoFilePathsDeviceProvider
VideoFacingMode GetCameraFacing(const std::string& device_id,
const std::string& model_id) override {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
return GetCameraConfig()->GetCameraFacing(device_id, model_id);
#else
NOTREACHED();
@@ -135,7 +135,7 @@ class DevVideoFilePathsDeviceProvider
int GetOrientation(const std::string& device_id,
const std::string& model_id) override {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
return GetCameraConfig()->GetOrientation(device_id, model_id);
#else
NOTREACHED();
@@ -166,7 +166,7 @@ std::unique_ptr<VideoCaptureDevice>
VideoCaptureDeviceFactoryLinux::CreateDevice(
const VideoCaptureDeviceDescriptor& device_descriptor) {
DCHECK(thread_checker_.CalledOnValidThread());
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
ChromeOSDeviceCameraConfig camera_config(
device_provider_->GetCameraFacing(device_descriptor.device_id,
device_descriptor.model_id),
@@ -210,10 +210,18 @@ void VideoCaptureDeviceFactoryLinux::GetDevicesInfo(
// one supported capture format. Devices that have capture and output
// capabilities at the same time are memory-to-memory and are skipped, see
// http://crbug.com/139356.
+ // In theory, checking for CAPTURE/OUTPUT in caps.capabilities should only
+ // be done if V4L2_CAP_DEVICE_CAPS is not set. However, this was not done
+ // in the past and it is unclear if it breaks with existing devices. And if
+ // a device is accepted incorrectly then it will not have any usable
+ // formats and is skipped anyways.
v4l2_capability cap;
if ((DoIoctl(fd.get(), VIDIOC_QUERYCAP, &cap) == 0) &&
- (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE &&
- !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)) &&
+ ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE &&
+ !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)) ||
+ (cap.capabilities & V4L2_CAP_DEVICE_CAPS &&
+ cap.device_caps & V4L2_CAP_VIDEO_CAPTURE &&
+ !(cap.device_caps & V4L2_CAP_VIDEO_OUTPUT))) &&
HasUsableFormats(fd.get(), cap.capabilities)) {
const std::string model_id =
device_provider_->GetDeviceModelId(unique_id);
@@ -223,19 +231,25 @@ void VideoCaptureDeviceFactoryLinux::GetDevicesInfo(
display_name = reinterpret_cast<char*>(cap.card);
VideoFacingMode facing_mode =
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
device_provider_->GetCameraFacing(unique_id, model_id);
#else
VideoFacingMode::MEDIA_VIDEO_FACING_NONE;
#endif
+ VideoCaptureFormats supported_formats;
+ GetSupportedFormatsForV4L2BufferType(fd.get(), &supported_formats);
+ if (supported_formats.empty()) {
+ DVLOG(1) << "No supported formats: " << unique_id;
+ continue;
+ }
+
devices_info.emplace_back(VideoCaptureDeviceDescriptor(
display_name, unique_id, model_id,
VideoCaptureApi::LINUX_V4L2_SINGLE_PLANE, GetControlSupport(fd.get()),
VideoCaptureTransportType::OTHER_TRANSPORT, facing_mode));
- GetSupportedFormatsForV4L2BufferType(
- fd.get(), &devices_info.back().supported_formats);
+ devices_info.back().supported_formats = std::move(supported_formats);
}
}
diff --git a/chromium/media/capture/video/mac/DEPS b/chromium/media/capture/video/mac/DEPS
index 577e795b73b..b17486db0f9 100644
--- a/chromium/media/capture/video/mac/DEPS
+++ b/chromium/media/capture/video/mac/DEPS
@@ -2,3 +2,9 @@ include_rules = [
"+third_party/decklink",
"+services/video_capture/public/uma",
]
+
+specific_include_rules = {
+"video_capture_metrics_mac_unittest.mm": [
+ "+third_party/ocmock"
+]
+}
diff --git a/chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm b/chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm
index 55befbd1574..bf0b561b96c 100644
--- a/chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm
+++ b/chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm
@@ -137,6 +137,36 @@ TEST(PixelBufferPoolTest,
IOSurfaceGetID(second_buffer_io_surface));
}
+TEST(PixelBufferPoolTest, RecreatePoolAndObserveRecycledIOSurfaceID) {
+ constexpr size_t kPoolMaxBuffers = 1;
+ std::unique_ptr<PixelBufferPool> pool = PixelBufferPool::Create(
+ kPixelFormatNv12, kVgaWidth, kVgaHeight, kPoolMaxBuffers);
+ base::ScopedCFTypeRef<CVPixelBufferRef> first_buffer = pool->CreateBuffer();
+ EXPECT_TRUE(first_buffer);
+ IOSurfaceID first_buffer_id =
+ IOSurfaceGetID(CVPixelBufferGetIOSurface(first_buffer));
+
+ // Free references and recreate the pool. There is nothing preventing the
+ // IOSurfaceID from being recycled, even by a different CVPixelBufferPool with
+ // a different resolution!
+ first_buffer.reset();
+ pool = PixelBufferPool::Create(kPixelFormatNv12, kVgaWidth / 2,
+ kVgaHeight / 2, kPoolMaxBuffers);
+
+ base::ScopedCFTypeRef<CVPixelBufferRef> second_buffer = pool->CreateBuffer();
+ EXPECT_TRUE(second_buffer);
+ IOSurfaceID second_buffer_id =
+ IOSurfaceGetID(CVPixelBufferGetIOSurface(second_buffer));
+
+ // The new pool is allowed to recycle the old IOSurface ID.
+ //
+ // This test documents "foot gun" behavior that is not documented by Apple
+ // anywhere. If the test starts failing, it may be because this behavior is
+ // specific to version or hardware. In such cases, feel free to disable the
+ // test.
+ EXPECT_EQ(first_buffer_id, second_buffer_id);
+}
+
TEST(PixelBufferPoolTest, BuffersCanOutliveThePool) {
std::unique_ptr<PixelBufferPool> pool =
PixelBufferPool::Create(kPixelFormatNv12, kVgaWidth, kVgaHeight, 1);
diff --git a/chromium/media/capture/video/mac/pixel_buffer_transferer_mac_unittest.mm b/chromium/media/capture/video/mac/pixel_buffer_transferer_mac_unittest.mm
index fa00dfca33d..ea1334d2506 100644
--- a/chromium/media/capture/video/mac/pixel_buffer_transferer_mac_unittest.mm
+++ b/chromium/media/capture/video/mac/pixel_buffer_transferer_mac_unittest.mm
@@ -8,6 +8,7 @@
#include <vector>
#include "base/logging.h"
+#include "build/build_config.h"
#include "media/capture/video/mac/pixel_buffer_pool_mac.h"
#include "media/capture/video/mac/test/pixel_buffer_test_utils_mac.h"
#include "media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h"
@@ -59,7 +60,14 @@ TEST(PixelBufferTransfererTest, CanCopyYuvsAndVerifyColor) {
kColorR, kColorG, kColorB));
}
-TEST(PixelBufferTransfererTest, CanScaleYuvsAndVerifyColor) {
+#if defined(ARCH_CPU_ARM64)
+// Bulk-disabled as part of arm64 bot stabilization: https://crbug.com/1154345
+#define MAYBE_CanScaleYuvsAndVerifyColor DISABLED_CanScaleYuvsAndVerifyColor
+#else
+#define MAYBE_CanScaleYuvsAndVerifyColor CanScaleYuvsAndVerifyColor
+#endif
+
+TEST(PixelBufferTransfererTest, MAYBE_CanScaleYuvsAndVerifyColor) {
constexpr OSType kPixelFormat = kPixelFormatYuvs;
constexpr int kSourceWidth = 32;
constexpr int kSourceHeight = 32;
@@ -115,7 +123,16 @@ TEST(PixelBufferTransfererTest, CanScaleYuvsAndVerifyCheckerPattern) {
EXPECT_EQ(num_tiles_across_y, kSourceNumTilesAcross);
}
-TEST(PixelBufferTransfererTest, CanStretchYuvsAndVerifyCheckerPattern) {
+#if defined(ARCH_CPU_ARM64)
+// Bulk-disabled as part of arm64 bot stabilization: https://crbug.com/1154345
+#define MAYBE_CanStretchYuvsAndVerifyCheckerPattern \
+ DISABLED_CanStretchYuvsAndVerifyCheckerPattern
+#else
+#define MAYBE_CanStretchYuvsAndVerifyCheckerPattern \
+ CanStretchYuvsAndVerifyCheckerPattern
+#endif
+
+TEST(PixelBufferTransfererTest, MAYBE_CanStretchYuvsAndVerifyCheckerPattern) {
// Note: The ARGB -> YUVS -> ARGB conversions results in a small loss of
// information, so for the checker pattern to be intact the buffer can't be
// tiny (e.g. 4x4).
@@ -149,7 +166,14 @@ TEST(PixelBufferTransfererTest, CanStretchYuvsAndVerifyCheckerPattern) {
EXPECT_EQ(num_tiles_across_y, kSourceNumTilesAcross);
}
-TEST(PixelBufferTransfererTest, CanStretchYuvsAndVerifyColor) {
+#if defined(ARCH_CPU_ARM64)
+// Bulk-disabled as part of arm64 bot stabilization: https://crbug.com/1154345
+#define MAYBE_CanStretchYuvsAndVerifyColor DISABLED_CanStretchYuvsAndVerifyColor
+#else
+#define MAYBE_CanStretchYuvsAndVerifyColor CanStretchYuvsAndVerifyColor
+#endif
+
+TEST(PixelBufferTransfererTest, MAYBE_CanStretchYuvsAndVerifyColor) {
constexpr OSType kPixelFormat = kPixelFormatYuvs;
constexpr int kSourceWidth = 32;
constexpr int kSourceHeight = 32;
diff --git a/chromium/media/capture/video/mac/sample_buffer_transformer_mac.cc b/chromium/media/capture/video/mac/sample_buffer_transformer_mac.cc
index daa96cb01b0..043381c1b6f 100644
--- a/chromium/media/capture/video/mac/sample_buffer_transformer_mac.cc
+++ b/chromium/media/capture/video/mac/sample_buffer_transformer_mac.cc
@@ -18,17 +18,8 @@ namespace media {
const base::Feature kInCaptureConvertToNv12{"InCaptureConvertToNv12",
base::FEATURE_ENABLED_BY_DEFAULT};
-const base::Feature kInCaptureConvertToNv12WithPixelTransfer{
- "InCaptureConvertToNv12WithPixelTransfer",
- base::FEATURE_DISABLED_BY_DEFAULT};
-
-const base::Feature kInCaptureConvertToNv12WithLibyuv{
- "InCaptureConvertToNv12WithLibyuv", base::FEATURE_DISABLED_BY_DEFAULT};
-
namespace {
-constexpr size_t kDefaultBufferPoolSize = 10;
-
// NV12 a.k.a. 420v
constexpr OSType kPixelFormatNv12 =
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
@@ -139,6 +130,24 @@ struct NV12Planes {
size_t uv_plane_stride;
};
+// TODO(eshr): Move this to libyuv.
+void CopyNV12(const uint8_t* src_y,
+ int src_y_stride,
+ const uint8_t* src_uv,
+ int src_uv_stride,
+ uint8_t* dst_y,
+ int dst_y_stride,
+ uint8_t* dst_uv,
+ int dst_uv_stride,
+ int width,
+ int height) {
+ libyuv::CopyPlane(src_y, src_y_stride, dst_y, dst_y_stride, width, height);
+ size_t half_width = (width + 1) >> 1;
+ size_t half_height = (height + 1) >> 1;
+ libyuv::CopyPlane(src_uv, src_uv_stride, dst_uv, dst_uv_stride,
+ half_width * 2, half_height);
+}
+
size_t GetContiguousNV12BufferSize(size_t width, size_t height) {
gfx::Size dimensions(width, height);
return VideoFrame::PlaneSize(PIXEL_FORMAT_NV12, VideoFrame::kYPlane,
@@ -204,44 +213,34 @@ bool ConvertFromMjpegToI420(uint8_t* source_buffer_base_address,
return result == 0;
}
-// Returns true on success. Converting uncompressed pixel formats should never
-// fail, however MJPEG frames produces by some webcams have been observed to be
-// invalid in special circumstances (see https://crbug.com/1147867). To support
-// a graceful failure path in this case, this function may return false.
-bool ConvertFromAnyToI420(CVPixelBufferRef source_pixel_buffer,
- const I420Planes& destination) {
+void ConvertFromAnyToNV12(CVPixelBufferRef source_pixel_buffer,
+ const NV12Planes& destination) {
auto pixel_format = CVPixelBufferGetPixelFormatType(source_pixel_buffer);
+ int ret;
switch (pixel_format) {
// UYVY a.k.a. 2vuy
case kCVPixelFormatType_422YpCbCr8: {
const uint8_t* src_uyvy = static_cast<const uint8_t*>(
CVPixelBufferGetBaseAddress(source_pixel_buffer));
size_t src_stride_uyvy = CVPixelBufferGetBytesPerRow(source_pixel_buffer);
- return libyuv::UYVYToI420(
- src_uyvy, src_stride_uyvy, destination.y_plane_data,
- destination.y_plane_stride, destination.u_plane_data,
- destination.u_plane_stride, destination.v_plane_data,
- destination.v_plane_stride, destination.width,
- destination.height) == 0;
+ ret = libyuv::UYVYToNV12(
+ src_uyvy, src_stride_uyvy, destination.y_plane_data,
+ destination.y_plane_stride, destination.uv_plane_data,
+ destination.uv_plane_stride, destination.width, destination.height);
+ DCHECK_EQ(ret, 0);
+ return;
}
// YUY2 a.k.a. yuvs
case kCMPixelFormat_422YpCbCr8_yuvs: {
const uint8_t* src_yuy2 = static_cast<const uint8_t*>(
CVPixelBufferGetBaseAddress(source_pixel_buffer));
size_t src_stride_yuy2 = CVPixelBufferGetBytesPerRow(source_pixel_buffer);
- return libyuv::YUY2ToI420(
- src_yuy2, src_stride_yuy2, destination.y_plane_data,
- destination.y_plane_stride, destination.u_plane_data,
- destination.u_plane_stride, destination.v_plane_data,
- destination.v_plane_stride, destination.width,
- destination.height) == 0;
- }
- // MJPEG a.k.a. dmb1
- case kCMVideoCodecType_JPEG_OpenDML: {
- uint8_t* src_jpg = static_cast<uint8_t*>(
- CVPixelBufferGetBaseAddress(source_pixel_buffer));
- size_t src_jpg_size = CVPixelBufferGetDataSize(source_pixel_buffer);
- return ConvertFromMjpegToI420(src_jpg, src_jpg_size, destination);
+ ret = libyuv::YUY2ToNV12(
+ src_yuy2, src_stride_yuy2, destination.y_plane_data,
+ destination.y_plane_stride, destination.uv_plane_data,
+ destination.uv_plane_stride, destination.width, destination.height);
+ DCHECK_EQ(ret, 0);
+ return;
}
// NV12 a.k.a. 420v
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
@@ -255,12 +254,11 @@ bool ConvertFromAnyToI420(CVPixelBufferRef source_pixel_buffer,
CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 1));
size_t src_stride_uv =
CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 1);
- return libyuv::NV12ToI420(
- src_y, src_stride_y, src_uv, src_stride_uv,
- destination.y_plane_data, destination.y_plane_stride,
- destination.u_plane_data, destination.u_plane_stride,
- destination.v_plane_data, destination.v_plane_stride,
- destination.width, destination.height) == 0;
+ CopyNV12(src_y, src_stride_y, src_uv, src_stride_uv,
+ destination.y_plane_data, destination.y_plane_stride,
+ destination.uv_plane_data, destination.uv_plane_stride,
+ destination.width, destination.height);
+ return;
}
// I420 a.k.a. y420
case kCVPixelFormatType_420YpCbCr8Planar: {
@@ -278,32 +276,98 @@ bool ConvertFromAnyToI420(CVPixelBufferRef source_pixel_buffer,
CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 2));
size_t src_stride_v =
CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 2);
- return libyuv::I420Copy(
- src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
- destination.y_plane_data, destination.y_plane_stride,
- destination.u_plane_data, destination.u_plane_stride,
- destination.v_plane_data, destination.v_plane_stride,
- destination.width, destination.height) == 0;
+ ret = libyuv::I420ToNV12(
+ src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+ destination.y_plane_data, destination.y_plane_stride,
+ destination.uv_plane_data, destination.uv_plane_stride,
+ destination.width, destination.height);
+ DCHECK_EQ(ret, 0);
+ return;
}
default:
NOTREACHED() << "Pixel format " << pixel_format << " not supported.";
}
- return false;
}
-void ConvertFromI420ToNV12(const I420Planes& source,
- const NV12Planes& destination) {
- DCHECK_EQ(source.width, destination.width);
- DCHECK_EQ(source.height, destination.height);
- int result = libyuv::I420ToNV12(
- source.y_plane_data, source.y_plane_stride, source.u_plane_data,
- source.u_plane_stride, source.v_plane_data, source.v_plane_stride,
- destination.y_plane_data, destination.y_plane_stride,
- destination.uv_plane_data, destination.uv_plane_stride, source.width,
- source.height);
- // A webcam has never been observed to produce invalid uncompressed pixel
- // buffer, so we do not support a graceful failure path in this case.
- DCHECK_EQ(result, 0);
+void ConvertFromAnyToI420(CVPixelBufferRef source_pixel_buffer,
+ const I420Planes& destination) {
+ auto pixel_format = CVPixelBufferGetPixelFormatType(source_pixel_buffer);
+ int ret;
+ switch (pixel_format) {
+ // UYVY a.k.a. 2vuy
+ case kCVPixelFormatType_422YpCbCr8: {
+ const uint8_t* src_uyvy = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddress(source_pixel_buffer));
+ size_t src_stride_uyvy = CVPixelBufferGetBytesPerRow(source_pixel_buffer);
+ ret = libyuv::UYVYToI420(
+ src_uyvy, src_stride_uyvy, destination.y_plane_data,
+ destination.y_plane_stride, destination.u_plane_data,
+ destination.u_plane_stride, destination.v_plane_data,
+ destination.v_plane_stride, destination.width, destination.height);
+ DCHECK_EQ(ret, 0);
+ return;
+ }
+ // YUY2 a.k.a. yuvs
+ case kCMPixelFormat_422YpCbCr8_yuvs: {
+ const uint8_t* src_yuy2 = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddress(source_pixel_buffer));
+ size_t src_stride_yuy2 = CVPixelBufferGetBytesPerRow(source_pixel_buffer);
+ ret = libyuv::YUY2ToI420(
+ src_yuy2, src_stride_yuy2, destination.y_plane_data,
+ destination.y_plane_stride, destination.u_plane_data,
+ destination.u_plane_stride, destination.v_plane_data,
+ destination.v_plane_stride, destination.width, destination.height);
+ DCHECK_EQ(ret, 0);
+ return;
+ }
+ // NV12 a.k.a. 420v
+ case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
+ DCHECK(CVPixelBufferIsPlanar(source_pixel_buffer));
+ DCHECK_EQ(2u, CVPixelBufferGetPlaneCount(source_pixel_buffer));
+ const uint8_t* src_y = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 0));
+ size_t src_stride_y =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 0);
+ const uint8_t* src_uv = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 1));
+ size_t src_stride_uv =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 1);
+ ret = libyuv::NV12ToI420(
+ src_y, src_stride_y, src_uv, src_stride_uv, destination.y_plane_data,
+ destination.y_plane_stride, destination.u_plane_data,
+ destination.u_plane_stride, destination.v_plane_data,
+ destination.v_plane_stride, destination.width, destination.height);
+ DCHECK_EQ(ret, 0);
+ return;
+ }
+ // I420 a.k.a. y420
+ case kCVPixelFormatType_420YpCbCr8Planar: {
+ DCHECK(CVPixelBufferIsPlanar(source_pixel_buffer));
+ DCHECK_EQ(3u, CVPixelBufferGetPlaneCount(source_pixel_buffer));
+ const uint8_t* src_y = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 0));
+ size_t src_stride_y =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 0);
+ const uint8_t* src_u = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 1));
+ size_t src_stride_u =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 1);
+ const uint8_t* src_v = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 2));
+ size_t src_stride_v =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 2);
+ ret = libyuv::I420Copy(
+ src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+ destination.y_plane_data, destination.y_plane_stride,
+ destination.u_plane_data, destination.u_plane_stride,
+ destination.v_plane_data, destination.v_plane_stride,
+ destination.width, destination.height);
+ DCHECK_EQ(ret, 0);
+ return;
+ }
+ default:
+ NOTREACHED() << "Pixel format " << pixel_format << " not supported.";
+ }
}
// Returns true on success. MJPEG frames produces by some webcams have been
@@ -361,44 +425,40 @@ void ScaleNV12(const NV12Planes& source, const NV12Planes& destination) {
void CopyNV12(const NV12Planes& source, const NV12Planes& destination) {
DCHECK_EQ(source.width, destination.width);
DCHECK_EQ(source.height, destination.height);
- libyuv::CopyPlane(source.y_plane_data, source.y_plane_stride,
- destination.y_plane_data, destination.y_plane_stride,
- destination.width, destination.height);
- size_t half_width = (destination.width + 1) >> 1;
- size_t half_height = (destination.height + 1) >> 1;
- libyuv::CopyPlane(source.uv_plane_data, source.uv_plane_stride,
- destination.uv_plane_data, destination.uv_plane_stride,
- half_width * 2, half_height);
+ CopyNV12(source.y_plane_data, source.y_plane_stride, source.uv_plane_data,
+ source.uv_plane_stride, destination.y_plane_data,
+ destination.y_plane_stride, destination.uv_plane_data,
+ destination.uv_plane_stride, source.width, source.height);
}
} // namespace
// static
-std::unique_ptr<SampleBufferTransformer>
-SampleBufferTransformer::CreateIfAutoReconfigureEnabled() {
- return IsAutoReconfigureEnabled()
- ? std::make_unique<SampleBufferTransformer>()
- : nullptr;
-}
+const SampleBufferTransformer::Transformer
+ SampleBufferTransformer::kBestTransformerForPixelBufferToNv12Output =
+ SampleBufferTransformer::Transformer::kPixelBufferTransfer;
// static
-std::unique_ptr<SampleBufferTransformer> SampleBufferTransformer::Create() {
- return std::make_unique<SampleBufferTransformer>();
+SampleBufferTransformer::Transformer
+SampleBufferTransformer::GetBestTransformerForNv12Output(
+ CMSampleBufferRef sample_buffer) {
+ if (CVPixelBufferRef pixel_buffer =
+ CMSampleBufferGetImageBuffer(sample_buffer)) {
+ return kBestTransformerForPixelBufferToNv12Output;
+ }
+ // When we don't have a pixel buffer (e.g. it's MJPEG or we get a SW-backed
+ // byte buffer) only libyuv is able to perform the transform.
+ return Transformer::kLibyuv;
}
// static
-bool SampleBufferTransformer::IsAutoReconfigureEnabled() {
- return base::FeatureList::IsEnabled(kInCaptureConvertToNv12) ||
- base::FeatureList::IsEnabled(
- kInCaptureConvertToNv12WithPixelTransfer) ||
- base::FeatureList::IsEnabled(kInCaptureConvertToNv12WithLibyuv);
+std::unique_ptr<SampleBufferTransformer> SampleBufferTransformer::Create() {
+ return std::make_unique<SampleBufferTransformer>();
}
SampleBufferTransformer::SampleBufferTransformer()
: transformer_(Transformer::kNotConfigured),
- destination_pixel_format_(0x0),
- destination_width_(0),
- destination_height_(0) {}
+ destination_pixel_format_(0x0) {}
SampleBufferTransformer::~SampleBufferTransformer() {}
@@ -411,26 +471,14 @@ OSType SampleBufferTransformer::destination_pixel_format() const {
return destination_pixel_format_;
}
-size_t SampleBufferTransformer::destination_width() const {
- return destination_width_;
-}
-
-size_t SampleBufferTransformer::destination_height() const {
- return destination_height_;
-}
-
-base::ScopedCFTypeRef<CVPixelBufferRef>
-SampleBufferTransformer::AutoReconfigureAndTransform(
- CMSampleBufferRef sample_buffer) {
- AutoReconfigureBasedOnInputAndFeatureFlags(sample_buffer);
- return Transform(sample_buffer);
+const gfx::Size& SampleBufferTransformer::destination_size() const {
+ return destination_size_;
}
void SampleBufferTransformer::Reconfigure(
Transformer transformer,
OSType destination_pixel_format,
- size_t destination_width,
- size_t destination_height,
+ const gfx::Size& destination_size,
base::Optional<size_t> buffer_pool_size) {
DCHECK(transformer != Transformer::kLibyuv ||
destination_pixel_format == kPixelFormatI420 ||
@@ -438,18 +486,16 @@ void SampleBufferTransformer::Reconfigure(
<< "Destination format is unsupported when running libyuv";
if (transformer_ == transformer &&
destination_pixel_format_ == destination_pixel_format &&
- destination_width_ == destination_width &&
- destination_height_ == destination_height) {
+ destination_size_ == destination_size) {
// Already configured as desired, abort.
return;
}
transformer_ = transformer;
destination_pixel_format_ = destination_pixel_format;
- destination_width_ = destination_width;
- destination_height_ = destination_height;
- destination_pixel_buffer_pool_ =
- PixelBufferPool::Create(destination_pixel_format_, destination_width_,
- destination_height_, buffer_pool_size);
+ destination_size_ = destination_size;
+ destination_pixel_buffer_pool_ = PixelBufferPool::Create(
+ destination_pixel_format_, destination_size_.width(),
+ destination_size_.height(), buffer_pool_size);
if (transformer == Transformer::kPixelBufferTransfer) {
pixel_buffer_transferer_ = std::make_unique<PixelBufferTransferer>();
} else {
@@ -459,57 +505,21 @@ void SampleBufferTransformer::Reconfigure(
intermediate_nv12_buffer_.resize(0);
}
-void SampleBufferTransformer::AutoReconfigureBasedOnInputAndFeatureFlags(
- CMSampleBufferRef sample_buffer) {
- DCHECK(IsAutoReconfigureEnabled());
- Transformer desired_transformer = Transformer::kNotConfigured;
- size_t desired_width;
- size_t desired_height;
- if (CVPixelBufferRef pixel_buffer =
- CMSampleBufferGetImageBuffer(sample_buffer)) {
- // We have a pixel buffer.
- if (base::FeatureList::IsEnabled(kInCaptureConvertToNv12)) {
- // Pixel transfers are believed to be more efficient for X -> NV12.
- desired_transformer = Transformer::kPixelBufferTransfer;
- }
- desired_width = CVPixelBufferGetWidth(pixel_buffer);
- desired_height = CVPixelBufferGetHeight(pixel_buffer);
- } else {
- // We don't have a pixel buffer. Reconfigure to be prepared for MJPEG.
- if (base::FeatureList::IsEnabled(kInCaptureConvertToNv12)) {
- // Only libyuv supports MJPEG -> NV12.
- desired_transformer = Transformer::kLibyuv;
- }
- CMFormatDescriptionRef format_description =
- CMSampleBufferGetFormatDescription(sample_buffer);
- CMVideoDimensions dimensions =
- CMVideoFormatDescriptionGetDimensions(format_description);
- desired_width = dimensions.width;
- desired_height = dimensions.height;
- }
- if (base::FeatureList::IsEnabled(kInCaptureConvertToNv12WithPixelTransfer)) {
- desired_transformer = Transformer::kPixelBufferTransfer;
- } else if (base::FeatureList::IsEnabled(kInCaptureConvertToNv12WithLibyuv)) {
- desired_transformer = Transformer::kLibyuv;
- }
- Reconfigure(desired_transformer, kPixelFormatNv12, desired_width,
- desired_height, kDefaultBufferPoolSize);
-}
-
base::ScopedCFTypeRef<CVPixelBufferRef> SampleBufferTransformer::Transform(
- CMSampleBufferRef sample_buffer) {
+ CVPixelBufferRef pixel_buffer) {
DCHECK(transformer_ != Transformer::kNotConfigured);
- CVPixelBufferRef source_pixel_buffer =
- CMSampleBufferGetImageBuffer(sample_buffer);
+ DCHECK(pixel_buffer);
// Fast path: If source and destination formats are identical, return the
// source pixel buffer.
- if (source_pixel_buffer &&
- destination_width_ == CVPixelBufferGetWidth(source_pixel_buffer) &&
- destination_height_ == CVPixelBufferGetHeight(source_pixel_buffer) &&
+ if (pixel_buffer &&
+ static_cast<size_t>(destination_size_.width()) ==
+ CVPixelBufferGetWidth(pixel_buffer) &&
+ static_cast<size_t>(destination_size_.height()) ==
+ CVPixelBufferGetHeight(pixel_buffer) &&
destination_pixel_format_ ==
- CVPixelBufferGetPixelFormatType(source_pixel_buffer) &&
- CVPixelBufferGetIOSurface(source_pixel_buffer)) {
- return base::ScopedCFTypeRef<CVPixelBufferRef>(source_pixel_buffer,
+ CVPixelBufferGetPixelFormatType(pixel_buffer) &&
+ CVPixelBufferGetIOSurface(pixel_buffer)) {
+ return base::ScopedCFTypeRef<CVPixelBufferRef>(pixel_buffer,
base::scoped_policy::RETAIN);
}
// Create destination buffer from pool.
@@ -521,10 +531,28 @@ base::ScopedCFTypeRef<CVPixelBufferRef> SampleBufferTransformer::Transform(
LOG(ERROR) << "Maximum destination buffers exceeded";
return base::ScopedCFTypeRef<CVPixelBufferRef>();
}
- if (source_pixel_buffer) {
- // Pixel buffer path. Do pixel transfer or libyuv conversion + rescale.
- TransformPixelBuffer(source_pixel_buffer, destination_pixel_buffer);
- return destination_pixel_buffer;
+ // Do pixel transfer or libyuv conversion + rescale.
+ TransformPixelBuffer(pixel_buffer, destination_pixel_buffer);
+ return destination_pixel_buffer;
+}
+
+base::ScopedCFTypeRef<CVPixelBufferRef> SampleBufferTransformer::Transform(
+ CMSampleBufferRef sample_buffer) {
+ DCHECK(transformer_ != Transformer::kNotConfigured);
+ DCHECK(sample_buffer);
+ // If the sample buffer has a pixel buffer, run the pixel buffer path instead.
+ if (CVPixelBufferRef pixel_buffer =
+ CMSampleBufferGetImageBuffer(sample_buffer)) {
+ return Transform(pixel_buffer);
+ }
+ // Create destination buffer from pool.
+ base::ScopedCFTypeRef<CVPixelBufferRef> destination_pixel_buffer =
+ destination_pixel_buffer_pool_->CreateBuffer();
+ if (!destination_pixel_buffer) {
+ // Maximum destination buffers exceeded. Old buffers are not being released
+ // (and thus not returned to the pool) in time.
+ LOG(ERROR) << "Maximum destination buffers exceeded";
+ return base::ScopedCFTypeRef<CVPixelBufferRef>();
}
// Sample buffer path - it's MJPEG. Do libyuv conversion + rescale.
if (!TransformSampleBuffer(sample_buffer, destination_pixel_buffer)) {
@@ -602,8 +630,9 @@ void SampleBufferTransformer::TransformPixelBufferWithLibyuvFromAnyToI420(
CVPixelBufferGetPixelFormatType(source_pixel_buffer);
// Rescaling has to be done in a separate step.
- const bool rescale_needed = destination_width_ != source_width ||
- destination_height_ != source_height;
+ const bool rescale_needed =
+ static_cast<size_t>(destination_size_.width()) != source_width ||
+ static_cast<size_t>(destination_size_.height()) != source_height;
// Step 1: Convert to I420.
I420Planes i420_fullscale_buffer;
@@ -628,11 +657,7 @@ void SampleBufferTransformer::TransformPixelBufferWithLibyuvFromAnyToI420(
i420_fullscale_buffer = EnsureI420BufferSizeAndGetPlanes(
source_width, source_height, &intermediate_i420_buffer_);
}
- if (!ConvertFromAnyToI420(source_pixel_buffer, i420_fullscale_buffer)) {
- // Only MJPEG conversions are known to be able to fail. Because X is an
- // uncompressed pixel format, this conversion should never fail.
- NOTREACHED();
- }
+ ConvertFromAnyToI420(source_pixel_buffer, i420_fullscale_buffer);
}
// Step 2: Rescale I420.
@@ -653,8 +678,9 @@ void SampleBufferTransformer::TransformPixelBufferWithLibyuvFromAnyToNV12(
CVPixelBufferGetPixelFormatType(source_pixel_buffer);
// Rescaling has to be done in a separate step.
- const bool rescale_needed = destination_width_ != source_width ||
- destination_height_ != source_height;
+ const bool rescale_needed =
+ static_cast<size_t>(destination_size_.width()) != source_width ||
+ static_cast<size_t>(destination_size_.height()) != source_height;
// Step 1: Convert to NV12.
NV12Planes nv12_fullscale_buffer;
@@ -671,21 +697,6 @@ void SampleBufferTransformer::TransformPixelBufferWithLibyuvFromAnyToNV12(
return;
}
} else {
- // Convert X -> I420 -> NV12. (We don't know how to do X -> NV12.)
- // TODO(https://crbug.com/1154273): Convert to NV12 directly.
- I420Planes i420_fullscale_buffer;
- if (source_pixel_format == kPixelFormatI420) {
- // We are already at I420.
- i420_fullscale_buffer = GetI420PlanesFromPixelBuffer(source_pixel_buffer);
- } else {
- // Convert X -> I420.
- i420_fullscale_buffer = EnsureI420BufferSizeAndGetPlanes(
- source_width, source_height, &intermediate_i420_buffer_);
- if (!ConvertFromAnyToI420(source_pixel_buffer, i420_fullscale_buffer)) {
- NOTREACHED();
- }
- }
- // Convert I420 -> NV12.
if (!rescale_needed) {
nv12_fullscale_buffer =
GetNV12PlanesFromPixelBuffer(destination_pixel_buffer);
@@ -693,7 +704,7 @@ void SampleBufferTransformer::TransformPixelBufferWithLibyuvFromAnyToNV12(
nv12_fullscale_buffer = EnsureNV12BufferSizeAndGetPlanes(
source_width, source_height, &intermediate_nv12_buffer_);
}
- ConvertFromI420ToNV12(i420_fullscale_buffer, nv12_fullscale_buffer);
+ ConvertFromAnyToNV12(source_pixel_buffer, nv12_fullscale_buffer);
}
// Step 2: Rescale NV12.
@@ -713,7 +724,7 @@ bool SampleBufferTransformer::TransformSampleBuffer(
CMSampleBufferGetFormatDescription(source_sample_buffer);
FourCharCode source_pixel_format =
CMFormatDescriptionGetMediaSubType(source_format_description);
- DCHECK(source_pixel_format == kPixelFormatMjpeg);
+ CHECK_EQ(source_pixel_format, kPixelFormatMjpeg);
CMVideoDimensions source_dimensions =
CMVideoFormatDescriptionGetDimensions(source_format_description);
@@ -759,8 +770,9 @@ bool SampleBufferTransformer::TransformSampleBufferFromMjpegToI420(
CVPixelBufferRef destination_pixel_buffer) {
DCHECK(destination_pixel_format_ == kPixelFormatI420);
// Rescaling has to be done in a separate step.
- const bool rescale_needed = destination_width_ != source_width ||
- destination_height_ != source_height;
+ const bool rescale_needed =
+ static_cast<size_t>(destination_size_.width()) != source_width ||
+ static_cast<size_t>(destination_size_.height()) != source_height;
// Step 1: Convert MJPEG -> I420.
I420Planes i420_fullscale_buffer;
@@ -793,8 +805,9 @@ bool SampleBufferTransformer::TransformSampleBufferFromMjpegToNV12(
CVPixelBufferRef destination_pixel_buffer) {
DCHECK(destination_pixel_format_ == kPixelFormatNv12);
// Rescaling has to be done in a separate step.
- const bool rescale_needed = destination_width_ != source_width ||
- destination_height_ != source_height;
+ const bool rescale_needed =
+ static_cast<size_t>(destination_size_.width()) != source_width ||
+ static_cast<size_t>(destination_size_.height()) != source_height;
// Step 1: Convert MJPEG -> NV12.
NV12Planes nv12_fullscale_buffer;
diff --git a/chromium/media/capture/video/mac/sample_buffer_transformer_mac.h b/chromium/media/capture/video/mac/sample_buffer_transformer_mac.h
index 4f9dc45bbe3..24d42cff78d 100644
--- a/chromium/media/capture/video/mac/sample_buffer_transformer_mac.h
+++ b/chromium/media/capture/video/mac/sample_buffer_transformer_mac.h
@@ -14,28 +14,15 @@
#include "media/capture/capture_export.h"
#include "media/capture/video/mac/pixel_buffer_pool_mac.h"
#include "media/capture/video/mac/pixel_buffer_transferer_mac.h"
+#include "ui/gfx/geometry/size.h"
namespace media {
-// When enabled, AutoReconfigureAndTransform() configures the
-// SampleBufferTransformer to use the conversion path (pixel transfer or libyuv)
-// that is believed to be most efficient for the input sample buffer.
+// This flag is used to decide whether or not to use the SampleBufferTransformer
+// to convert captured images to NV12, see
+// video_capture_device_avfoundation_mac.mm.
CAPTURE_EXPORT extern const base::Feature kInCaptureConvertToNv12;
-// Feature flag used for performance measurements. This will not be shipped.
-//
-// When enabled, AutoReconfigureAndTransform() configures the
-// SampleBufferTransformer to use the pixel transfer path. Transforming an MJPEG
-// sample buffer with this configuration will DCHECK crash.
-CAPTURE_EXPORT extern const base::Feature
- kInCaptureConvertToNv12WithPixelTransfer;
-
-// Feature flag used for performance measurements. This will not be shipped.
-//
-// When enabled, AutoReconfigureAndTransform() configures the
-// SampleBufferTransformer to use the libyuv path.
-CAPTURE_EXPORT extern const base::Feature kInCaptureConvertToNv12WithLibyuv;
-
// Capable of converting from any supported capture format (NV12, YUY2, UYVY and
// MJPEG) to NV12 or I420 and doing rescaling. This class can be configured to
// use VTPixelTransferSession (sometimes HW-accelerated) or third_party/libyuv
@@ -51,37 +38,35 @@ class CAPTURE_EXPORT SampleBufferTransformer {
kLibyuv,
};
- // Only construct a sample transformer if one of the "InCaptureConvertToNv12"
- // flags are enabled and AutoReconfigureAndTransform() is supported. See
- // IsAutoReconfigureEnabled().
- static std::unique_ptr<SampleBufferTransformer>
- CreateIfAutoReconfigureEnabled();
- static std::unique_ptr<SampleBufferTransformer> Create();
+ // TODO(https://crbug.com/1175763): Make determining the optimal Transformer
+ // an implementation detail determined at Transform()-time, making
+ // Reconfigure() only care about destination resolution and pixel format. Then
+ // make it possible to override this decision explicitly but only do that for
+ // testing and measurements purposes, not in default capturer integration.
+ static const Transformer kBestTransformerForPixelBufferToNv12Output;
+ static Transformer GetBestTransformerForNv12Output(
+ CMSampleBufferRef sample_buffer);
+ static std::unique_ptr<SampleBufferTransformer> Create();
~SampleBufferTransformer();
Transformer transformer() const;
OSType destination_pixel_format() const;
- size_t destination_width() const;
- size_t destination_height() const;
-
- // Automatically reconfigures based on |sample_buffer| and base::Feature flags
- // if needed before performing a Transform().
- base::ScopedCFTypeRef<CVPixelBufferRef> AutoReconfigureAndTransform(
- CMSampleBufferRef sample_buffer);
+ const gfx::Size& destination_size() const;
// Future calls to Transform() will output pixel buffers according to this
- // configuration.
+ // configuration. Changing configuration will allocate a new buffer pool, but
+ // calling Reconfigure() multiple times with the same parameters is a NO-OP.
void Reconfigure(Transformer transformer,
OSType destination_pixel_format,
- size_t destination_width,
- size_t destination_height,
- base::Optional<size_t> buffer_pool_size);
+ const gfx::Size& destination_size,
+ base::Optional<size_t> buffer_pool_size = base::nullopt);
- // Converts the sample buffer to an IOSurface-backed pixel buffer according to
+ // Converts the input buffer to an IOSurface-backed pixel buffer according to
// current configurations. If no transformation is needed (input format is the
- // same as the configured output format), the sample buffer's pixel buffer is
- // returned.
+ // same as the configured output format), the input pixel buffer is returned.
+ base::ScopedCFTypeRef<CVPixelBufferRef> Transform(
+ CVPixelBufferRef pixel_buffer);
base::ScopedCFTypeRef<CVPixelBufferRef> Transform(
CMSampleBufferRef sample_buffer);
@@ -89,13 +74,8 @@ class CAPTURE_EXPORT SampleBufferTransformer {
friend std::unique_ptr<SampleBufferTransformer>
std::make_unique<SampleBufferTransformer>();
- static bool IsAutoReconfigureEnabled();
-
SampleBufferTransformer();
- void AutoReconfigureBasedOnInputAndFeatureFlags(
- CMSampleBufferRef sample_buffer);
-
// Sample buffers from the camera contain pixel buffers when an uncompressed
// pixel format is used (i.e. it's not MJPEG).
void TransformPixelBuffer(CVPixelBufferRef source_pixel_buffer,
@@ -132,8 +112,7 @@ class CAPTURE_EXPORT SampleBufferTransformer {
Transformer transformer_;
OSType destination_pixel_format_;
- size_t destination_width_;
- size_t destination_height_;
+ gfx::Size destination_size_;
std::unique_ptr<PixelBufferPool> destination_pixel_buffer_pool_;
// For kPixelBufferTransfer.
std::unique_ptr<PixelBufferTransferer> pixel_buffer_transferer_;
diff --git a/chromium/media/capture/video/mac/sample_buffer_transformer_mac_unittest.mm b/chromium/media/capture/video/mac/sample_buffer_transformer_mac_unittest.mm
index dde2a940139..3c83dd24d4c 100644
--- a/chromium/media/capture/video/mac/sample_buffer_transformer_mac_unittest.mm
+++ b/chromium/media/capture/video/mac/sample_buffer_transformer_mac_unittest.mm
@@ -7,7 +7,8 @@
#include <tuple>
#include "base/logging.h"
-#include "base/test/scoped_feature_list.h"
+#include "base/mac/scoped_cftyperef.h"
+#include "build/build_config.h"
#include "media/capture/video/mac/test/pixel_buffer_test_utils_mac.h"
#include "media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -147,6 +148,116 @@ base::ScopedCFTypeRef<CVPixelBufferRef> CreatePixelBuffer(OSType pixel_format,
return pixel_buffer;
}
+enum class PixelBufferType {
+ kIoSurfaceBacked,
+ kIoSurfaceMissing,
+};
+
+void NonPlanarCvPixelBufferReleaseCallback(void* releaseRef, const void* data) {
+ free(const_cast<void*>(data));
+}
+
+void PlanarCvPixelBufferReleaseCallback(void* releaseRef,
+ const void* data,
+ size_t size,
+ size_t num_planes,
+ const void* planes[]) {
+ free(const_cast<void*>(data));
+ for (size_t plane = 0; plane < num_planes; ++plane)
+ free(const_cast<void*>(planes[plane]));
+}
+
+std::pair<uint8_t*, size_t> GetDataAndStride(CVPixelBufferRef pixel_buffer,
+ size_t plane) {
+ if (CVPixelBufferIsPlanar(pixel_buffer)) {
+ return {static_cast<uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, plane)),
+ CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, plane)};
+ } else {
+ DCHECK_EQ(plane, 0u) << "Non-planar pixel buffers only have 1 plane.";
+ return {static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(pixel_buffer)),
+ CVPixelBufferGetBytesPerRow(pixel_buffer)};
+ }
+}
+
+base::ScopedCFTypeRef<CVPixelBufferRef> AddPadding(
+ CVPixelBufferRef pixel_buffer,
+ OSType pixel_format,
+ int width,
+ int height,
+ int padding) {
+ size_t num_planes = CVPixelBufferGetPlaneCount(pixel_buffer);
+ size_t padded_size = 0;
+ std::vector<size_t> plane_widths;
+ std::vector<size_t> plane_heights;
+ std::vector<size_t> plane_strides;
+ if (CVPixelBufferIsPlanar(pixel_buffer)) {
+ for (size_t plane = 0; plane < num_planes; ++plane) {
+ size_t plane_stride =
+ CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, plane);
+ size_t padded_stride = plane_stride + padding;
+ size_t h = CVPixelBufferGetHeightOfPlane(pixel_buffer, plane);
+ size_t w = CVPixelBufferGetWidthOfPlane(pixel_buffer, plane);
+ plane_heights.push_back(h);
+ plane_widths.push_back(w);
+ plane_strides.push_back(padded_stride);
+ padded_size += h * padded_stride;
+ }
+ } else {
+ // CVPixelBufferGetPlaneCount returns 0 for non-planar buffers.
+ num_planes = 1;
+ size_t plane_stride = CVPixelBufferGetBytesPerRow(pixel_buffer);
+ size_t padded_stride = plane_stride + padding;
+ size_t h = CVPixelBufferGetHeight(pixel_buffer);
+ padded_size += h * padded_stride;
+ plane_heights.push_back(h);
+ plane_strides.push_back(padded_stride);
+ }
+ std::vector<void*> plane_address;
+ CHECK_EQ(
+ CVPixelBufferLockBaseAddress(pixel_buffer, kCVPixelBufferLock_ReadOnly),
+ kCVReturnSuccess);
+ // Allocate and copy each plane.
+ for (size_t plane = 0; plane < num_planes; ++plane) {
+ plane_address.push_back(
+ calloc(1, plane_strides[plane] * plane_heights[plane]));
+ uint8_t* dst_ptr = static_cast<uint8_t*>(plane_address[plane]);
+ uint8_t* src_ptr;
+ size_t plane_stride;
+ std::tie(src_ptr, plane_stride) = GetDataAndStride(pixel_buffer, plane);
+ CHECK(dst_ptr);
+ CHECK(src_ptr);
+ for (size_t r = 0; r < plane_heights[plane]; ++r) {
+ memcpy(dst_ptr, src_ptr, plane_stride);
+ src_ptr += plane_stride;
+ dst_ptr += plane_strides[plane];
+ }
+ }
+ CHECK_EQ(
+ CVPixelBufferUnlockBaseAddress(pixel_buffer, kCVPixelBufferLock_ReadOnly),
+ kCVReturnSuccess);
+
+ base::ScopedCFTypeRef<CVPixelBufferRef> padded_pixel_buffer;
+ CVReturn create_buffer_result;
+ if (CVPixelBufferIsPlanar(pixel_buffer)) {
+ // Without some memory block the callback won't be called and we leak the
+ // planar data.
+ void* descriptor = calloc(1, sizeof(CVPlanarPixelBufferInfo_YCbCrPlanar));
+ create_buffer_result = CVPixelBufferCreateWithPlanarBytes(
+ nullptr, width, height, pixel_format, descriptor, 0, num_planes,
+ plane_address.data(), plane_widths.data(), plane_heights.data(),
+ plane_strides.data(), &PlanarCvPixelBufferReleaseCallback,
+ plane_strides.data(), nullptr, padded_pixel_buffer.InitializeInto());
+ } else {
+ create_buffer_result = CVPixelBufferCreateWithBytes(
+ nullptr, width, height, pixel_format, plane_address[0],
+ plane_strides[0], &NonPlanarCvPixelBufferReleaseCallback, nullptr,
+ nullptr, padded_pixel_buffer.InitializeInto());
+ }
+ DCHECK_EQ(create_buffer_result, kCVReturnSuccess);
+ return padded_pixel_buffer;
+}
+
base::ScopedCFTypeRef<CMSampleBufferRef> CreateSampleBuffer(
OSType pixel_format,
int width,
@@ -154,19 +265,40 @@ base::ScopedCFTypeRef<CMSampleBufferRef> CreateSampleBuffer(
uint8_t r,
uint8_t g,
uint8_t b,
- bool iosurface_backed = true) {
- base::ScopedCFTypeRef<CVPixelBufferRef> pixel_buffer;
- if (iosurface_backed) {
- pixel_buffer = CreatePixelBuffer(pixel_format, width, height, r, g, b);
- } else {
- CVPixelBufferCreate(nullptr, width, height, pixel_format, nullptr,
- pixel_buffer.InitializeInto());
+ PixelBufferType pixel_buffer_type,
+ size_t padding = 0) {
+ base::ScopedCFTypeRef<CVPixelBufferRef> pixel_buffer =
+ CreatePixelBuffer(pixel_format, width, height, r, g, b);
+ if (padding != 0) {
+ CHECK_EQ(pixel_buffer_type, PixelBufferType::kIoSurfaceMissing)
+ << "Padding does not work with IOSurfaces.";
+ }
+ if (pixel_buffer_type == PixelBufferType::kIoSurfaceMissing) {
+ // Our pixel buffer currently has an IOSurface. To get rid of it, we perform
+ // a pixel buffer transfer to a destination pixel buffer that is not backed
+ // by an IOSurface. The resulting pixel buffer will have the desired color.
+ base::ScopedCFTypeRef<CVPixelBufferRef> iosurfaceless_pixel_buffer;
+ CVReturn create_buffer_result =
+ CVPixelBufferCreate(nullptr, width, height, pixel_format, nullptr,
+ iosurfaceless_pixel_buffer.InitializeInto());
+ DCHECK_EQ(create_buffer_result, kCVReturnSuccess);
+ PixelBufferTransferer transferer;
+ bool success =
+ transferer.TransferImage(pixel_buffer, iosurfaceless_pixel_buffer);
+ DCHECK(success);
+ DCHECK(!CVPixelBufferGetIOSurface(iosurfaceless_pixel_buffer));
+ pixel_buffer = iosurfaceless_pixel_buffer;
+
+ if (padding > 0) {
+ pixel_buffer =
+ AddPadding(pixel_buffer, pixel_format, width, height, padding);
+ }
}
// Wrap the pixel buffer in a sample buffer.
- CMFormatDescriptionRef format_description;
+ base::ScopedCFTypeRef<CMFormatDescriptionRef> format_description;
OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(
- nil, pixel_buffer, &format_description);
+ nil, pixel_buffer, format_description.InitializeInto());
DCHECK(status == noErr);
// Dummy information to make CMSampleBufferCreateForImageBuffer() happy.
@@ -189,16 +321,24 @@ base::ScopedCFTypeRef<CMSampleBufferRef> CreateMjpegSampleBuffer(
size_t mjpeg_data_size,
size_t width,
size_t height) {
- CMBlockBufferRef data_buffer;
+ CMBlockBufferCustomBlockSource source = {0};
+ source.FreeBlock = [](void* refcon, void* doomedMemoryBlock,
+ size_t sizeInBytes) {
+ // Do nothing. The data to be released is not dynamically allocated in this
+ // test code.
+ };
+
+ base::ScopedCFTypeRef<CMBlockBufferRef> data_buffer;
OSStatus status = CMBlockBufferCreateWithMemoryBlock(
nil, const_cast<void*>(static_cast<const void*>(mjpeg_data)),
- mjpeg_data_size, nil, nil, 0, mjpeg_data_size, 0, &data_buffer);
+ mjpeg_data_size, nil, &source, 0, mjpeg_data_size, 0,
+ data_buffer.InitializeInto());
DCHECK(status == noErr);
- CMFormatDescriptionRef format_description;
- status =
- CMVideoFormatDescriptionCreate(nil, kCMVideoCodecType_JPEG_OpenDML, width,
- height, nil, &format_description);
+ base::ScopedCFTypeRef<CMFormatDescriptionRef> format_description;
+ status = CMVideoFormatDescriptionCreate(nil, kCMVideoCodecType_JPEG_OpenDML,
+ width, height, nil,
+ format_description.InitializeInto());
DCHECK(status == noErr);
// Dummy information to make CMSampleBufferCreateReady() happy.
@@ -247,38 +387,130 @@ TEST_P(SampleBufferTransformerPixelTransferTest, CanConvertFullScale) {
base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
- kFullResolutionHeight, kColorR, kColorG, kColorB);
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceBacked);
std::unique_ptr<SampleBufferTransformer> transformer =
SampleBufferTransformer::Create();
transformer->Reconfigure(
SampleBufferTransformer::Transformer::kPixelBufferTransfer,
- output_pixel_format, kFullResolutionWidth, kFullResolutionHeight, 1);
+ output_pixel_format,
+ gfx::Size(kFullResolutionWidth, kFullResolutionHeight), 1);
base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
transformer->Transform(input_sample_buffer);
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_pixel_buffer));
EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_pixel_buffer));
EXPECT_TRUE(
PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
}
-TEST_P(SampleBufferTransformerPixelTransferTest, CanConvertAndScaleDown) {
+#if defined(ARCH_CPU_ARM64)
+// Bulk-disabled for arm64 bot stabilization: https://crbug.com/1154345
+#define MAYBE_CanConvertAndScaleDown DISABLED_CanConvertAndScaleDown
+#else
+#define MAYBE_CanConvertAndScaleDown CanConvertAndScaleDown
+#endif
+
+TEST_P(SampleBufferTransformerPixelTransferTest, MAYBE_CanConvertAndScaleDown) {
OSType input_pixel_format;
OSType output_pixel_format;
std::tie(input_pixel_format, output_pixel_format) = GetParam();
base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
- kFullResolutionHeight, kColorR, kColorG, kColorB);
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceBacked);
std::unique_ptr<SampleBufferTransformer> transformer =
SampleBufferTransformer::Create();
transformer->Reconfigure(
SampleBufferTransformer::Transformer::kPixelBufferTransfer,
- output_pixel_format, kScaledDownResolutionWidth,
- kScaledDownResolutionHeight, 1);
+ output_pixel_format,
+ gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight), 1);
base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
transformer->Transform(input_sample_buffer);
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionWidth,
+ CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight,
+ CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerPixelTransferTest,
+ CanConvertAndScaleDownWhenIoSurfaceIsMissing) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceMissing);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kPixelBufferTransfer,
+ output_pixel_format,
+ gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight), 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionWidth,
+ CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight,
+ CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerPixelTransferTest,
+ CanConvertWithPaddingFullScale) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceMissing, /*padding*/ 100);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kPixelBufferTransfer,
+ output_pixel_format,
+ gfx::Size(kFullResolutionWidth, kFullResolutionHeight), 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
+ EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerPixelTransferTest,
+ CanConvertAndScaleWithPadding) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceMissing, /*padding*/ 100);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kPixelBufferTransfer,
+ output_pixel_format,
+ gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight), 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
EXPECT_EQ(kScaledDownResolutionWidth,
CVPixelBufferGetWidth(output_pixel_buffer));
EXPECT_EQ(kScaledDownResolutionHeight,
@@ -304,37 +536,116 @@ TEST_P(SampleBufferTransformerLibyuvTest, CanConvertFullScale) {
base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
- kFullResolutionHeight, kColorR, kColorG, kColorB);
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceBacked);
std::unique_ptr<SampleBufferTransformer> transformer =
SampleBufferTransformer::Create();
- transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
- output_pixel_format, kFullResolutionWidth,
- kFullResolutionHeight, 1);
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kLibyuv, output_pixel_format,
+ gfx::Size(kFullResolutionWidth, kFullResolutionHeight), 1);
base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
transformer->Transform(input_sample_buffer);
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_pixel_buffer));
EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_pixel_buffer));
EXPECT_TRUE(
PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
}
-TEST_P(SampleBufferTransformerLibyuvTest, CanConvertAndScaleDown) {
+TEST_P(SampleBufferTransformerLibyuvTest, MAYBE_CanConvertAndScaleDown) {
OSType input_pixel_format;
OSType output_pixel_format;
std::tie(input_pixel_format, output_pixel_format) = GetParam();
base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
- kFullResolutionHeight, kColorR, kColorG, kColorB);
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceBacked);
std::unique_ptr<SampleBufferTransformer> transformer =
SampleBufferTransformer::Create();
- transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
- output_pixel_format, kScaledDownResolutionWidth,
- kScaledDownResolutionHeight, 1);
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kLibyuv, output_pixel_format,
+ gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight), 1);
base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
transformer->Transform(input_sample_buffer);
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionWidth,
+ CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight,
+ CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerLibyuvTest, CanConvertWithPaddingFullScale) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceMissing, /*padding*/ 100);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kLibyuv, output_pixel_format,
+ gfx::Size(kFullResolutionWidth, kFullResolutionHeight), 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
+ EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerLibyuvTest, CanConvertAndScaleWithPadding) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceMissing, /*padding*/ 100);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kLibyuv, output_pixel_format,
+ gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight), 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionWidth,
+ CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight,
+ CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerLibyuvTest,
+ CanConvertAndScaleDownWhenIoSurfaceIsMissing) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceMissing);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kLibyuv, output_pixel_format,
+ gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight), 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
EXPECT_EQ(kScaledDownResolutionWidth,
CVPixelBufferGetWidth(output_pixel_buffer));
EXPECT_EQ(kScaledDownResolutionHeight,
@@ -361,8 +672,8 @@ TEST_P(SampleBufferTransformerMjpegTest, CanConvertFullScale) {
std::unique_ptr<SampleBufferTransformer> transformer =
SampleBufferTransformer::Create();
transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
- output_pixel_format, kExampleJpegWidth,
- kExampleJpegHeight, 1);
+ output_pixel_format,
+ gfx::Size(kExampleJpegWidth, kExampleJpegHeight), 1);
base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
transformer->Transform(input_sample_buffer);
@@ -372,16 +683,16 @@ TEST_P(SampleBufferTransformerMjpegTest, CanConvertFullScale) {
PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
}
-TEST_P(SampleBufferTransformerMjpegTest, CanConvertAndScaleDown) {
+TEST_P(SampleBufferTransformerMjpegTest, MAYBE_CanConvertAndScaleDown) {
OSType output_pixel_format = GetParam();
base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
CreateExampleMjpegSampleBuffer();
std::unique_ptr<SampleBufferTransformer> transformer =
SampleBufferTransformer::Create();
- transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
- output_pixel_format, kExampleJpegScaledDownWidth,
- kExampleJpegScaledDownHeight, 1);
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kLibyuv, output_pixel_format,
+ gfx::Size(kExampleJpegScaledDownWidth, kExampleJpegScaledDownHeight), 1);
base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
transformer->Transform(input_sample_buffer);
@@ -402,8 +713,8 @@ TEST_P(SampleBufferTransformerMjpegTest,
std::unique_ptr<SampleBufferTransformer> transformer =
SampleBufferTransformer::Create();
transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
- output_pixel_format, kExampleJpegWidth,
- kExampleJpegHeight, 1);
+ output_pixel_format,
+ gfx::Size(kExampleJpegWidth, kExampleJpegHeight), 1);
base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
transformer->Transform(input_sample_buffer);
EXPECT_FALSE(output_pixel_buffer);
@@ -414,28 +725,23 @@ INSTANTIATE_TEST_SUITE_P(SampleBufferTransformerTest,
SupportedOutputFormats(),
TestParametersOSTypeToString);
-TEST(SampleBufferTransformerAutoReconfigureTest,
- AutoReconfigureIsEnabledByDefault) {
- EXPECT_TRUE(SampleBufferTransformer::CreateIfAutoReconfigureEnabled());
-}
-
-TEST(SampleBufferTransformerAutoReconfigureTest,
- SourceAndDestinationResolutionMatches) {
- base::test::ScopedFeatureList scoped_feature_list;
- scoped_feature_list.InitAndEnableFeature(kInCaptureConvertToNv12);
+TEST(SampleBufferTransformerBestTransformerForNv12OutputTest,
+ SourceAndDestinationResolutionMatches_InputSampleBuffer) {
std::unique_ptr<SampleBufferTransformer> transformer =
- SampleBufferTransformer::CreateIfAutoReconfigureEnabled();
- ASSERT_TRUE(transformer);
+ SampleBufferTransformer::Create();
base::ScopedCFTypeRef<CMSampleBufferRef> sample0 = CreateSampleBuffer(
kPixelFormatNv12, kFullResolutionWidth, kFullResolutionHeight, kColorR,
- kColorG, kColorB, /*iosurface_backed=*/false);
+ kColorG, kColorB, PixelBufferType::kIoSurfaceMissing);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample0),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample0));
base::ScopedCFTypeRef<CVPixelBufferRef> output_buffer =
- transformer->AutoReconfigureAndTransform(sample0);
+ transformer->Transform(sample0);
- EXPECT_EQ(kFullResolutionWidth, transformer->destination_width());
- EXPECT_EQ(kFullResolutionHeight, transformer->destination_height());
+ EXPECT_EQ(gfx::Size(kFullResolutionWidth, kFullResolutionHeight),
+ transformer->destination_size());
EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_buffer));
EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_buffer));
EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
@@ -445,12 +751,15 @@ TEST(SampleBufferTransformerAutoReconfigureTest,
base::ScopedCFTypeRef<CMSampleBufferRef> sample1 = CreateSampleBuffer(
kPixelFormatNv12, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
- kColorR, kColorG, kColorB);
+ kColorR, kColorG, kColorB, PixelBufferType::kIoSurfaceBacked);
- output_buffer = transformer->AutoReconfigureAndTransform(sample1);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample1),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample1));
+ output_buffer = transformer->Transform(sample1);
- EXPECT_EQ(kScaledDownResolutionWidth, transformer->destination_width());
- EXPECT_EQ(kScaledDownResolutionHeight, transformer->destination_height());
+ EXPECT_EQ(gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight),
+ transformer->destination_size());
EXPECT_EQ(kScaledDownResolutionWidth, CVPixelBufferGetWidth(output_buffer));
EXPECT_EQ(kScaledDownResolutionHeight, CVPixelBufferGetHeight(output_buffer));
EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
@@ -458,88 +767,194 @@ TEST(SampleBufferTransformerAutoReconfigureTest,
EXPECT_EQ(output_buffer.get(), CMSampleBufferGetImageBuffer(sample1.get()));
}
-TEST(SampleBufferTransformerAutoReconfigureTest,
+// Same test as above, verifying that Transform() methods work on pixel buffers
+// directly (so that there's no need to have a sample buffer).
+TEST(SampleBufferTransformerBestTransformerForNv12OutputTest,
+ SourceAndDestinationResolutionMatches_InputPixelBuffer) {
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> sample0 = CreateSampleBuffer(
+ kPixelFormatNv12, kFullResolutionWidth, kFullResolutionHeight, kColorR,
+ kColorG, kColorB, PixelBufferType::kIoSurfaceMissing);
+ CVPixelBufferRef pixel0 = CMSampleBufferGetImageBuffer(sample0);
+ ASSERT_TRUE(pixel0);
+
+ transformer->Reconfigure(
+ SampleBufferTransformer::kBestTransformerForPixelBufferToNv12Output,
+ kPixelFormatNv12, media::GetPixelBufferSize(pixel0));
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_buffer =
+ transformer->Transform(pixel0);
+
+ EXPECT_EQ(gfx::Size(kFullResolutionWidth, kFullResolutionHeight),
+ transformer->destination_size());
+ EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_buffer));
+ EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_buffer));
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
+ // Because pixel0 has no underlying IOSurface, it should not be returned from
+ // the transformer.
+ EXPECT_NE(output_buffer.get(), pixel0);
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> sample1 = CreateSampleBuffer(
+ kPixelFormatNv12, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
+ kColorR, kColorG, kColorB, PixelBufferType::kIoSurfaceBacked);
+ CVPixelBufferRef pixel1 = CMSampleBufferGetImageBuffer(sample1);
+ ASSERT_TRUE(pixel1);
+
+ transformer->Reconfigure(
+ SampleBufferTransformer::kBestTransformerForPixelBufferToNv12Output,
+ kPixelFormatNv12, media::GetPixelBufferSize(pixel1));
+ output_buffer = transformer->Transform(pixel1);
+
+ EXPECT_EQ(gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight),
+ transformer->destination_size());
+ EXPECT_EQ(kScaledDownResolutionWidth, CVPixelBufferGetWidth(output_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight, CVPixelBufferGetHeight(output_buffer));
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
+ // Because pixel1 does have an IOSurface, it can be returned directly.
+ EXPECT_EQ(output_buffer.get(), pixel1);
+}
+
+TEST(SampleBufferTransformerBestTransformerForNv12OutputTest,
+ CanConvertAndScaleDown_InputPixelBuffer) {
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> sample_buffer = CreateSampleBuffer(
+ kPixelFormatNv12, kFullResolutionWidth, kFullResolutionHeight, kColorR,
+ kColorG, kColorB, PixelBufferType::kIoSurfaceBacked);
+ CVPixelBufferRef pixel_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
+ ASSERT_TRUE(pixel_buffer);
+
+ transformer->Reconfigure(
+ SampleBufferTransformer::kBestTransformerForPixelBufferToNv12Output,
+ kPixelFormatNv12,
+ gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight));
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_buffer =
+ transformer->Transform(pixel_buffer);
+
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
+ EXPECT_EQ(kScaledDownResolutionWidth, CVPixelBufferGetWidth(output_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight, CVPixelBufferGetHeight(output_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST(SampleBufferTransformerBestTransformerForNv12OutputTest,
DestinationPixelFormatIsAlwaysNv12) {
- base::test::ScopedFeatureList scoped_feature_list;
- scoped_feature_list.InitAndEnableFeature(kInCaptureConvertToNv12);
std::unique_ptr<SampleBufferTransformer> transformer =
- SampleBufferTransformer::CreateIfAutoReconfigureEnabled();
- ASSERT_TRUE(transformer);
+ SampleBufferTransformer::Create();
+ base::ScopedCFTypeRef<CMSampleBufferRef> sample = CreateSampleBuffer(
+ kPixelFormatNv12, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
+ kColorR, kColorG, kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
base::ScopedCFTypeRef<CVPixelBufferRef> output_buffer =
- transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatNv12, kScaledDownResolutionWidth,
- kScaledDownResolutionHeight, kColorR, kColorG, kColorB));
+ transformer->Transform(sample);
EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
EXPECT_EQ(kPixelFormatNv12,
IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
- output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatUyvy, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
- kColorR, kColorG, kColorB));
+ sample = CreateSampleBuffer(kPixelFormatUyvy, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, kColorR, kColorG,
+ kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
EXPECT_EQ(kPixelFormatNv12,
IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
- output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatYuy2, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
- kColorR, kColorG, kColorB));
+ sample = CreateSampleBuffer(kPixelFormatYuy2, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, kColorR, kColorG,
+ kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
EXPECT_EQ(kPixelFormatNv12,
IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
- output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatI420, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
- kColorR, kColorG, kColorB));
+ sample = CreateSampleBuffer(kPixelFormatI420, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, kColorR, kColorG,
+ kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
EXPECT_EQ(kPixelFormatNv12,
IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
- output_buffer = transformer->AutoReconfigureAndTransform(
- CreateExampleMjpegSampleBuffer());
+ sample = CreateExampleMjpegSampleBuffer();
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
EXPECT_EQ(kPixelFormatNv12,
IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
}
-TEST(SampleBufferTransformerAutoReconfigureTest, UsesBestTransformerPaths) {
- base::test::ScopedFeatureList scoped_feature_list;
- scoped_feature_list.InitAndEnableFeature(kInCaptureConvertToNv12);
+TEST(SampleBufferTransformerBestTransformerForNv12OutputTest,
+ UsesBestTransformerPaths) {
std::unique_ptr<SampleBufferTransformer> transformer =
- SampleBufferTransformer::CreateIfAutoReconfigureEnabled();
- ASSERT_TRUE(transformer);
+ SampleBufferTransformer::Create();
+ base::ScopedCFTypeRef<CMSampleBufferRef> sample = CreateSampleBuffer(
+ kPixelFormatNv12, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
+ kColorR, kColorG, kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
base::ScopedCFTypeRef<CVPixelBufferRef> output_buffer =
- transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatNv12, kScaledDownResolutionWidth,
- kScaledDownResolutionHeight, kColorR, kColorG, kColorB));
+ transformer->Transform(sample);
EXPECT_EQ(SampleBufferTransformer::Transformer::kPixelBufferTransfer,
transformer->transformer());
EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
- output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatUyvy, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
- kColorR, kColorG, kColorB));
+ sample = CreateSampleBuffer(kPixelFormatUyvy, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, kColorR, kColorG,
+ kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(SampleBufferTransformer::Transformer::kPixelBufferTransfer,
transformer->transformer());
EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
- output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatYuy2, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
- kColorR, kColorG, kColorB));
+ sample = CreateSampleBuffer(kPixelFormatYuy2, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, kColorR, kColorG,
+ kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(SampleBufferTransformer::Transformer::kPixelBufferTransfer,
transformer->transformer());
EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
- output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatI420, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
- kColorR, kColorG, kColorB));
+ sample = CreateSampleBuffer(kPixelFormatI420, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, kColorR, kColorG,
+ kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(SampleBufferTransformer::Transformer::kPixelBufferTransfer,
transformer->transformer());
EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
- output_buffer = transformer->AutoReconfigureAndTransform(
- CreateExampleMjpegSampleBuffer());
+ sample = CreateExampleMjpegSampleBuffer();
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(SampleBufferTransformer::Transformer::kLibyuv,
transformer->transformer());
EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_legacy_mac.mm b/chromium/media/capture/video/mac/video_capture_device_avfoundation_legacy_mac.mm
index c8a62dff46b..fe80ca2db2f 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_legacy_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_legacy_mac.mm
@@ -405,6 +405,10 @@ void ExtractBaseAddressAndLength(char** base_address,
return YES;
}
+- (void)setScaledResolutions:(std::vector<gfx::Size>)resolutions {
+ // The legacy capturer does not implement in-capturer scaling.
+}
+
- (BOOL)startCapture {
DCHECK(_main_thread_checker.CalledOnValidThread());
if (!_captureSession) {
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h
index 120f8c656ed..885f631591e 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h
@@ -7,6 +7,7 @@
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
+#include "base/callback_forward.h"
#include "base/mac/scoped_dispatch_object.h"
#include "base/mac/scoped_nsobject.h"
@@ -19,6 +20,14 @@
namespace media {
+// When this feature is enabled, the capturer can be configured using
+// setScaledResolutions to output scaled versions of the captured frame (in
+// addition to the original frame), whenever NV12 IOSurfaces are available to
+// the capturer. These are available either when the camera supports it and
+// kAVFoundationCaptureV2ZeroCopy is enabled or when kInCaptureConvertToNv12 is
+// used to convert frames to NV12.
+CAPTURE_EXPORT extern const base::Feature kInCapturerScaling;
+
// Find the best capture format from |formats| for the specified dimensions and
// frame rate. Returns an element of |formats|, or nil.
AVCaptureDeviceFormat* CAPTURE_EXPORT
@@ -53,6 +62,13 @@ CAPTURE_EXPORT
base::Lock _lock;
media::VideoCaptureDeviceAVFoundationFrameReceiver* _frameReceiver
GUARDED_BY(_lock); // weak.
+ bool _capturedFirstFrame GUARDED_BY(_lock);
+ bool _capturedFrameSinceLastStallCheck GUARDED_BY(_lock);
+ std::unique_ptr<base::WeakPtrFactory<VideoCaptureDeviceAVFoundation>>
+ _weakPtrFactoryForStallCheck;
+
+ // Used to rate-limit crash reports for https://crbug.com/1168112.
+ bool _hasDumpedForFrameSizeMismatch;
base::scoped_nsobject<AVCaptureSession> _captureSession;
@@ -64,6 +80,12 @@ CAPTURE_EXPORT
// When enabled, converts captured frames to NV12.
std::unique_ptr<media::SampleBufferTransformer> _sampleBufferTransformer;
+ // Transformers used to create downscaled versions of the captured image.
+ // Enabled when setScaledResolutions is called (i.e. media::VideoFrameFeedback
+ // asks for scaled frames on behalf of a consumer in the Renderer process),
+ // NV12 output is enabled and the kInCapturerScaling feature is on.
+ std::vector<std::unique_ptr<media::SampleBufferTransformer>>
+ _scaledFrameTransformers;
// An AVDataOutput specialized for taking pictures out of |captureSession_|.
base::scoped_nsobject<AVCaptureStillImageOutput> _stillImageOutput;
@@ -87,6 +109,20 @@ CAPTURE_EXPORT
- (void)setOnStillImageOutputStoppedForTesting:
(base::RepeatingCallback<void()>)onStillImageOutputStopped;
+// Use the below only for test.
+- (void)callLocked:(base::OnceClosure)lambda;
+
+- (void)processPixelBufferNV12IOSurface:(CVPixelBufferRef)pixelBuffer
+ captureFormat:
+ (const media::VideoCaptureFormat&)captureFormat
+ colorSpace:(const gfx::ColorSpace&)colorSpace
+ timestamp:(const base::TimeDelta)timestamp;
+
+- (BOOL)processPixelBufferPlanes:(CVImageBufferRef)pixelBuffer
+ captureFormat:(const media::VideoCaptureFormat&)captureFormat
+ colorSpace:(const gfx::ColorSpace&)colorSpace
+ timestamp:(const base::TimeDelta)timestamp;
+
@end
#endif // MEDIA_CAPTURE_VIDEO_MAC_VIDEO_CAPTURE_DEVICE_AVFOUNDATION_MAC_H_
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
index 31d5516bf2e..6dd77ee0eb9 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
@@ -10,13 +10,15 @@
#include <stddef.h>
#include <stdint.h>
+#include "base/debug/dump_without_crashing.h"
#include "base/location.h"
#include "base/mac/foundation_util.h"
-#include "base/mac/mac_util.h"
#include "base/metrics/histogram_macros.h"
#include "base/sequenced_task_runner.h"
#include "base/strings/string_util.h"
+#include "base/strings/stringprintf.h"
#include "base/strings/sys_string_conversions.h"
+#include "components/crash/core/common/crash_key.h"
#include "media/base/mac/color_space_util_mac.h"
#include "media/base/media_switches.h"
#include "media/base/timestamp_constants.h"
@@ -24,6 +26,7 @@
#import "media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h"
#include "media/capture/video/mac/video_capture_device_factory_mac.h"
#include "media/capture/video/mac/video_capture_device_mac.h"
+#import "media/capture/video/mac/video_capture_metrics_mac.h"
#include "media/capture/video_capture_types.h"
#include "services/video_capture/public/uma/video_capture_service_event.h"
#include "ui/gfx/geometry/size.h"
@@ -54,10 +57,15 @@ base::TimeDelta GetCMSampleBufferTimestamp(CMSampleBufferRef sampleBuffer) {
return timestamp;
}
+constexpr size_t kPixelBufferPoolSize = 10;
+
} // anonymous namespace
namespace media {
+const base::Feature kInCapturerScaling{"InCapturerScaling",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
AVCaptureDeviceFormat* FindBestCaptureFormat(
NSArray<AVCaptureDeviceFormat*>* formats,
int width,
@@ -163,14 +171,14 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
DISPATCH_QUEUE_SERIAL),
base::scoped_policy::ASSUME);
DCHECK(frameReceiver);
+ _capturedFirstFrame = false;
_weakPtrFactoryForTakePhoto =
std::make_unique<base::WeakPtrFactory<VideoCaptureDeviceAVFoundation>>(
self);
[self setFrameReceiver:frameReceiver];
_captureSession.reset([[AVCaptureSession alloc] init]);
- _sampleBufferTransformer =
- media::SampleBufferTransformer::CreateIfAutoReconfigureEnabled();
- if (_sampleBufferTransformer) {
+ if (base::FeatureList::IsEnabled(media::kInCaptureConvertToNv12)) {
+ _sampleBufferTransformer = media::SampleBufferTransformer::Create();
VLOG(1) << "Capturing with SampleBufferTransformer enabled";
}
}
@@ -268,7 +276,6 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
media::FindBestCaptureFormat([_captureDevice formats], width, height,
frameRate),
base::scoped_policy::RETAIN);
- // Default to NV12, a pixel format commonly supported by web cameras.
FourCharCode best_fourcc = kDefaultFourCCPixelFormat;
if (_bestCaptureFormat) {
best_fourcc = CMFormatDescriptionGetMediaSubType(
@@ -328,6 +335,39 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
return YES;
}
+- (void)setScaledResolutions:(std::vector<gfx::Size>)resolutions {
+ if (!base::FeatureList::IsEnabled(media::kInCapturerScaling)) {
+ return;
+ }
+ // The lock is needed for |_scaledFrameTransformers|.
+ base::AutoLock lock(_lock);
+ bool reconfigureScaledFrameTransformers = false;
+ if (resolutions.size() != _scaledFrameTransformers.size()) {
+ reconfigureScaledFrameTransformers = true;
+ } else {
+ for (size_t i = 0; i < resolutions.size(); ++i) {
+ if (resolutions[i] != _scaledFrameTransformers[i]->destination_size()) {
+ reconfigureScaledFrameTransformers = true;
+ break;
+ }
+ }
+ }
+ if (!reconfigureScaledFrameTransformers)
+ return;
+ _scaledFrameTransformers.clear();
+ for (const auto& resolution : resolutions) {
+ // Configure the transformer to and from NV12 pixel buffers - we only want
+ // to pay scaling costs, not conversion costs.
+ auto scaledFrameTransformer = media::SampleBufferTransformer::Create();
+ scaledFrameTransformer->Reconfigure(
+ media::SampleBufferTransformer::
+ kBestTransformerForPixelBufferToNv12Output,
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, resolution,
+ kPixelBufferPoolSize);
+ _scaledFrameTransformers.push_back(std::move(scaledFrameTransformer));
+ }
+}
+
- (BOOL)startCapture {
DCHECK(_mainThreadTaskRunner->BelongsToCurrentThread());
if (!_captureSession) {
@@ -351,11 +391,18 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
}
}
+ {
+ base::AutoLock lock(_lock);
+ _capturedFirstFrame = false;
+ _capturedFrameSinceLastStallCheck = NO;
+ }
+ [self doStallCheck:0];
return YES;
}
- (void)stopCapture {
DCHECK(_mainThreadTaskRunner->BelongsToCurrentThread());
+ _weakPtrFactoryForStallCheck.reset();
[self stopStillImageOutput];
if ([_captureSession isRunning])
[_captureSession stopRunning]; // Synchronous.
@@ -557,10 +604,10 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
timestamp);
}
-- (BOOL)processPixelBuffer:(CVImageBufferRef)pixelBuffer
- captureFormat:(const media::VideoCaptureFormat&)captureFormat
- colorSpace:(const gfx::ColorSpace&)colorSpace
- timestamp:(const base::TimeDelta)timestamp {
+- (BOOL)processPixelBufferPlanes:(CVImageBufferRef)pixelBuffer
+ captureFormat:(const media::VideoCaptureFormat&)captureFormat
+ colorSpace:(const gfx::ColorSpace&)colorSpace
+ timestamp:(const base::TimeDelta)timestamp {
VLOG(3) << __func__;
if (CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly) !=
kCVReturnSuccess) {
@@ -620,20 +667,40 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
packedBufferSize += bytesPerRow * height;
}
+ // If media::VideoFrame::PlaneSize differs from the CVPixelBuffer's size then
+ // generate a crash report to show the difference.
+ // https://crbug.com/1168112
+ CHECK_EQ(pixelBufferHeights.size(), packedHeights.size());
+ for (size_t plane = 0; plane < pixelBufferHeights.size(); ++plane) {
+ if (pixelBufferHeights[plane] != packedHeights[plane] &&
+ !_hasDumpedForFrameSizeMismatch) {
+ static crash_reporter::CrashKeyString<64> planeInfoKey(
+ "core-video-plane-info");
+ planeInfoKey.Set(
+ base::StringPrintf("plane:%zu cv_height:%zu packed_height:%zu", plane,
+ pixelBufferHeights[plane], packedHeights[plane]));
+ base::debug::DumpWithoutCrashing();
+ _hasDumpedForFrameSizeMismatch = true;
+ }
+ }
+
// If |pixelBuffer| is not tightly packed, then copy it to |packedBufferCopy|,
// because ReceiveFrame() below assumes tight packing.
// https://crbug.com/1151936
bool needsCopyToPackedBuffer = pixelBufferBytesPerRows != packedBytesPerRows;
- CHECK(pixelBufferHeights == packedHeights);
std::vector<uint8_t> packedBufferCopy;
if (needsCopyToPackedBuffer) {
- CHECK(pixelBufferHeights == packedHeights);
- packedBufferCopy.resize(packedBufferSize);
+ packedBufferCopy.resize(packedBufferSize, 0);
uint8_t* dstAddr = packedBufferCopy.data();
for (size_t plane = 0; plane < numPlanes; ++plane) {
uint8_t* srcAddr = pixelBufferAddresses[plane];
- for (size_t row = 0; row < packedHeights[plane]; ++row) {
- memcpy(dstAddr, srcAddr, packedBytesPerRows[plane]);
+ size_t row = 0;
+ for (row = 0;
+ row < std::min(packedHeights[plane], pixelBufferHeights[plane]);
+ ++row) {
+ memcpy(dstAddr, srcAddr,
+ std::min(packedBytesPerRows[plane],
+ pixelBufferBytesPerRows[plane]));
dstAddr += packedBytesPerRows[plane];
srcAddr += pixelBufferBytesPerRows[plane];
}
@@ -649,12 +716,68 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
return YES;
}
-- (void)processNV12IOSurface:(IOSurfaceRef)ioSurface
- captureFormat:(const media::VideoCaptureFormat&)captureFormat
- colorSpace:(const gfx::ColorSpace&)colorSpace
- timestamp:(const base::TimeDelta)timestamp {
+- (void)processPixelBufferNV12IOSurface:(CVPixelBufferRef)pixelBuffer
+ captureFormat:
+ (const media::VideoCaptureFormat&)captureFormat
+ colorSpace:(const gfx::ColorSpace&)colorSpace
+ timestamp:(const base::TimeDelta)timestamp {
VLOG(3) << __func__;
DCHECK_EQ(captureFormat.pixel_format, media::PIXEL_FORMAT_NV12);
+
+ IOSurfaceRef ioSurface = CVPixelBufferGetIOSurface(pixelBuffer);
+ DCHECK(ioSurface);
+ media::CapturedExternalVideoBuffer externalBuffer =
+ [self capturedExternalVideoBufferFromNV12IOSurface:ioSurface
+ captureFormat:captureFormat
+ colorSpace:colorSpace];
+
+ // The lock is needed for |_scaledFrameTransformers| and |_frameReceiver|.
+ _lock.AssertAcquired();
+ std::vector<media::CapturedExternalVideoBuffer> scaledExternalBuffers;
+ scaledExternalBuffers.reserve(_scaledFrameTransformers.size());
+ for (auto& scaledFrameTransformer : _scaledFrameTransformers) {
+ gfx::Size scaledFrameSize = scaledFrameTransformer->destination_size();
+ // Only proceed if this results in downscaling in one or both dimensions.
+ //
+ // It is not clear that we want to continue to allow changing the aspect
+ // ratio like this since this causes visible stretching in the image if the
+ // stretch is significantly large.
+ // TODO(https://crbug.com/1157072): When we know what to do about aspect
+ // ratios, consider adding a DCHECK here or otherwise ignore wrong aspect
+ // ratios (within some fault tolerance).
+ if (scaledFrameSize.width() > captureFormat.frame_size.width() ||
+ scaledFrameSize.height() > captureFormat.frame_size.height() ||
+ scaledFrameSize == captureFormat.frame_size) {
+ continue;
+ }
+ base::ScopedCFTypeRef<CVPixelBufferRef> scaledPixelBuffer =
+ scaledFrameTransformer->Transform(pixelBuffer);
+ if (!scaledPixelBuffer) {
+ LOG(ERROR) << "Failed to downscale frame, skipping resolution "
+ << scaledFrameSize.ToString();
+ continue;
+ }
+ IOSurfaceRef scaledIoSurface = CVPixelBufferGetIOSurface(scaledPixelBuffer);
+ media::VideoCaptureFormat scaledCaptureFormat = captureFormat;
+ scaledCaptureFormat.frame_size = scaledFrameSize;
+ scaledExternalBuffers.push_back([self
+ capturedExternalVideoBufferFromNV12IOSurface:scaledIoSurface
+ captureFormat:scaledCaptureFormat
+ colorSpace:colorSpace]);
+ }
+
+ _frameReceiver->ReceiveExternalGpuMemoryBufferFrame(
+ std::move(externalBuffer), std::move(scaledExternalBuffers), timestamp);
+}
+
+- (media::CapturedExternalVideoBuffer)
+ capturedExternalVideoBufferFromNV12IOSurface:(IOSurfaceRef)ioSurface
+ captureFormat:
+ (const media::VideoCaptureFormat&)
+ captureFormat
+ colorSpace:
+ (const gfx::ColorSpace&)colorSpace {
+ DCHECK(ioSurface);
gfx::GpuMemoryBufferHandle handle;
handle.id.id = -1;
handle.type = gfx::GpuMemoryBufferType::IO_SURFACE_BUFFER;
@@ -673,9 +796,63 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
kCGColorSpaceSRGB);
}
- _lock.AssertAcquired();
- _frameReceiver->ReceiveExternalGpuMemoryBufferFrame(
- std::move(handle), captureFormat, overriddenColorSpace, timestamp);
+ return media::CapturedExternalVideoBuffer(std::move(handle), captureFormat,
+ overriddenColorSpace);
+}
+
+// Sometimes (especially when the camera is accessed by another process, e.g,
+// Photo Booth), the AVCaptureSession will stop producing new frames. This check
+// happens with no errors or notifications being produced. To recover from this,
+// check to see if a new frame has been captured second. If 5 of these checks
+// fail consecutively, restart the capture session.
+// https://crbug.com/1176568
+- (void)doStallCheck:(int)failedCheckCount {
+ DCHECK(_mainThreadTaskRunner->BelongsToCurrentThread());
+
+ int nextFailedCheckCount = failedCheckCount + 1;
+ {
+ base::AutoLock lock(_lock);
+ // This is to detect a capture was working, but stopped submitting new
+ // frames. If we haven't received any frames yet, don't do anything.
+ if (!_capturedFirstFrame)
+ nextFailedCheckCount = 0;
+
+ // If we captured a frame since last check, then we aren't stalled.
+ if (_capturedFrameSinceLastStallCheck)
+ nextFailedCheckCount = 0;
+ _capturedFrameSinceLastStallCheck = NO;
+ }
+
+ constexpr int kMaxFailedCheckCount = 5;
+ if (nextFailedCheckCount < kMaxFailedCheckCount) {
+ // Post a task to check for progress in 1 second. Create the weak factory
+ // for the posted task, if needed.
+ if (!_weakPtrFactoryForStallCheck) {
+ _weakPtrFactoryForStallCheck = std::make_unique<
+ base::WeakPtrFactory<VideoCaptureDeviceAVFoundation>>(self);
+ }
+ constexpr base::TimeDelta kStallCheckInterval =
+ base::TimeDelta::FromSeconds(1);
+ auto callback_lambda =
+ [](base::WeakPtr<VideoCaptureDeviceAVFoundation> weakSelf,
+ int failedCheckCount) {
+ VideoCaptureDeviceAVFoundation* strongSelf = weakSelf.get();
+ if (!strongSelf)
+ return;
+ [strongSelf doStallCheck:failedCheckCount];
+ };
+ _mainThreadTaskRunner->PostDelayedTask(
+ FROM_HERE,
+ base::BindOnce(callback_lambda,
+ _weakPtrFactoryForStallCheck->GetWeakPtr(),
+ nextFailedCheckCount),
+ kStallCheckInterval);
+ } else {
+ // Capture appears to be stalled. Restart it.
+ LOG(ERROR) << "Capture appears to have stalled, restarting.";
+ [self stopCapture];
+ [self startCapture];
+ }
}
// |captureOutput| is called by the capture device to deliver a new frame.
@@ -689,10 +866,15 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
// Concurrent calls into |_frameReceiver| are not supported, so take |_lock|
// before any of the subsequent paths.
base::AutoLock lock(_lock);
+ _capturedFrameSinceLastStallCheck = YES;
if (!_frameReceiver)
return;
const base::TimeDelta timestamp = GetCMSampleBufferTimestamp(sampleBuffer);
+ bool logUma = !std::exchange(_capturedFirstFrame, true);
+ if (logUma) {
+ media::LogFirstCapturedVideoFrame(_bestCaptureFormat, sampleBuffer);
+ }
// The SampleBufferTransformer CHECK-crashes if the sample buffer is not MJPEG
// and does not have a pixel buffer (https://crbug.com/1160647) so we fall
@@ -700,26 +882,30 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
// TODO(https://crbug.com/1160315): When the SampleBufferTransformer is
// patched to support non-MJPEG-and-non-pixel-buffer sample buffers, remove
// this workaround.
- bool sampleBufferLacksPixelBufferAndIsNotMjpeg =
- !CMSampleBufferGetImageBuffer(sampleBuffer) &&
+ bool sampleHasPixelBufferOrIsMjpeg =
+ CMSampleBufferGetImageBuffer(sampleBuffer) ||
CMFormatDescriptionGetMediaSubType(CMSampleBufferGetFormatDescription(
- sampleBuffer)) != kCMVideoCodecType_JPEG_OpenDML;
+ sampleBuffer)) == kCMVideoCodecType_JPEG_OpenDML;
// If the SampleBufferTransformer is enabled, convert all possible capture
// formats to an IOSurface-backed NV12 pixel buffer.
- // TODO(hbos): If |_sampleBufferTransformer| gets shipped 100%, delete the
+ // TODO(https://crbug.com/1175142): Update this code path so that it is
+ // possible to turn on/off the kAVFoundationCaptureV2ZeroCopy feature and the
+ // kInCaptureConvertToNv12 feature separately.
+ // TODO(hbos): When |_sampleBufferTransformer| gets shipped 100%, delete the
// other code paths.
- if (_sampleBufferTransformer && !sampleBufferLacksPixelBufferAndIsNotMjpeg) {
+ if (_sampleBufferTransformer && sampleHasPixelBufferOrIsMjpeg) {
+ _sampleBufferTransformer->Reconfigure(
+ media::SampleBufferTransformer::GetBestTransformerForNv12Output(
+ sampleBuffer),
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
+ media::GetSampleBufferSize(sampleBuffer), kPixelBufferPoolSize);
base::ScopedCFTypeRef<CVPixelBufferRef> pixelBuffer =
- _sampleBufferTransformer->AutoReconfigureAndTransform(sampleBuffer);
+ _sampleBufferTransformer->Transform(sampleBuffer);
if (!pixelBuffer) {
LOG(ERROR) << "Failed to transform captured frame. Dropping frame.";
return;
}
- IOSurfaceRef ioSurface = CVPixelBufferGetIOSurface(pixelBuffer);
- CHECK(ioSurface);
- CHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer),
- kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); // NV12
const media::VideoCaptureFormat captureFormat(
gfx::Size(CVPixelBufferGetWidth(pixelBuffer),
CVPixelBufferGetHeight(pixelBuffer)),
@@ -730,14 +916,14 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
// results in log spam and a default color space format is returned. To
// avoid this, we pretend the color space is kColorSpaceRec709Apple which
// triggers a path that avoids color space parsing inside of
- // processNV12IOSurface.
+ // processPixelBufferNV12IOSurface.
// TODO(hbos): Investigate how to successfully parse and/or configure the
// color space correctly. The implications of this hack is not fully
// understood.
- [self processNV12IOSurface:ioSurface
- captureFormat:captureFormat
- colorSpace:kColorSpaceRec709Apple
- timestamp:timestamp];
+ [self processPixelBufferNV12IOSurface:pixelBuffer
+ captureFormat:captureFormat
+ colorSpace:kColorSpaceRec709Apple
+ timestamp:timestamp];
return;
}
@@ -773,29 +959,28 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
static const bool kEnableGpuMemoryBuffers =
base::FeatureList::IsEnabled(media::kAVFoundationCaptureV2ZeroCopy);
if (kEnableGpuMemoryBuffers) {
- IOSurfaceRef ioSurface = CVPixelBufferGetIOSurface(pixelBuffer);
- if (ioSurface && videoPixelFormat == media::PIXEL_FORMAT_NV12) {
- [self processNV12IOSurface:ioSurface
- captureFormat:captureFormat
- colorSpace:colorSpace
- timestamp:timestamp];
+ if (CVPixelBufferGetIOSurface(pixelBuffer) &&
+ videoPixelFormat == media::PIXEL_FORMAT_NV12) {
+ [self processPixelBufferNV12IOSurface:pixelBuffer
+ captureFormat:captureFormat
+ colorSpace:colorSpace
+ timestamp:timestamp];
return;
}
}
- // Second preference is to read the CVPixelBuffer.
- if ([self processPixelBuffer:pixelBuffer
- captureFormat:captureFormat
- colorSpace:colorSpace
- timestamp:timestamp]) {
+ // Second preference is to read the CVPixelBuffer's planes.
+ if ([self processPixelBufferPlanes:pixelBuffer
+ captureFormat:captureFormat
+ colorSpace:colorSpace
+ timestamp:timestamp]) {
return;
}
}
// Last preference is to read the CMSampleBuffer.
- gfx::ColorSpace colorSpace;
- if (@available(macOS 10.11, *))
- colorSpace = media::GetFormatDescriptionColorSpace(formatDescription);
+ gfx::ColorSpace colorSpace =
+ media::GetFormatDescriptionColorSpace(formatDescription);
[self processSample:sampleBuffer
captureFormat:captureFormat
colorSpace:colorSpace
@@ -821,4 +1006,9 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
FROM_HERE, base::SysNSStringToUTF8(error));
}
+- (void)callLocked:(base::OnceClosure)lambda {
+ base::AutoLock lock(_lock);
+ std::move(lambda).Run();
+}
+
@end
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac_unittest.mm b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac_unittest.mm
index 63e3cf84b1e..cd55822cea3 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac_unittest.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac_unittest.mm
@@ -9,16 +9,411 @@
#include "base/bind.h"
#include "base/mac/scoped_nsobject.h"
#include "base/run_loop.h"
+#include "base/test/bind.h"
#include "base/test/gmock_callback_support.h"
-#import "media/capture/video/mac/test/mock_video_capture_device_avfoundation_frame_receiver_mac.h"
-#import "media/capture/video/mac/test/video_capture_test_utils_mac.h"
+#include "base/test/scoped_feature_list.h"
+#include "base/time/time.h"
+#include "media/base/video_types.h"
+#include "media/capture/video/mac/sample_buffer_transformer_mac.h"
+#include "media/capture/video/mac/test/mock_video_capture_device_avfoundation_frame_receiver_mac.h"
+#include "media/capture/video/mac/test/pixel_buffer_test_utils_mac.h"
+#include "media/capture/video/mac/test/video_capture_test_utils_mac.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "ui/gfx/color_space.h"
using testing::_;
namespace media {
+TEST(VideoCaptureDeviceAVFoundationMacTest,
+ OutputsNv12WithoutScalingByDefault) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitWithFeatures(
+ {kInCaptureConvertToNv12, kInCapturerScaling}, {});
+
+ RunTestCase(base::BindOnce([] {
+ NSString* deviceId = GetFirstDeviceId();
+ if (!deviceId) {
+ LOG(ERROR) << "No camera available. Exiting test.";
+ return;
+ }
+
+ testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
+ frame_receiver;
+ base::scoped_nsobject<VideoCaptureDeviceAVFoundation> captureDevice(
+ [[VideoCaptureDeviceAVFoundation alloc]
+ initWithFrameReceiver:&frame_receiver]);
+
+ NSString* errorMessage = nil;
+ ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
+ errorMessage:&errorMessage]);
+ ASSERT_TRUE([captureDevice startCapture]);
+
+ bool has_received_first_frame = false;
+ base::RunLoop first_frame_received(
+ base::RunLoop::Type::kNestableTasksAllowed);
+ EXPECT_CALL(frame_receiver, ReceiveExternalGpuMemoryBufferFrame)
+ .WillRepeatedly(testing::Invoke(
+ [&](CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
+ base::TimeDelta timestamp) {
+ if (has_received_first_frame) {
+ // Ignore subsequent frames.
+ return;
+ }
+ EXPECT_EQ(frame.format.pixel_format, PIXEL_FORMAT_NV12);
+ EXPECT_TRUE(scaled_frames.empty());
+ has_received_first_frame = true;
+ first_frame_received.Quit();
+ }));
+ first_frame_received.Run();
+
+ [captureDevice stopCapture];
+ }));
+}
+
+TEST(VideoCaptureDeviceAVFoundationMacTest,
+ SpecifiedScalingIsIgnoredWhenInCapturerScalingIsNotEnabled) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitAndEnableFeature(kInCaptureConvertToNv12);
+ // By default, kInCapturerScaling is false.
+ EXPECT_FALSE(base::FeatureList::IsEnabled(kInCapturerScaling));
+
+ RunTestCase(base::BindOnce([] {
+ NSString* deviceId = GetFirstDeviceId();
+ if (!deviceId) {
+ LOG(ERROR) << "No camera available. Exiting test.";
+ return;
+ }
+
+ testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
+ frame_receiver;
+ base::scoped_nsobject<VideoCaptureDeviceAVFoundation> captureDevice(
+ [[VideoCaptureDeviceAVFoundation alloc]
+ initWithFrameReceiver:&frame_receiver]);
+
+ NSString* errorMessage = nil;
+ ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
+ errorMessage:&errorMessage]);
+
+ std::vector<gfx::Size> scaled_resolutions;
+ scaled_resolutions.emplace_back(320, 240);
+ [captureDevice setScaledResolutions:scaled_resolutions];
+
+ ASSERT_TRUE([captureDevice startCapture]);
+
+ bool has_received_first_frame = false;
+ base::RunLoop first_frame_received(
+ base::RunLoop::Type::kNestableTasksAllowed);
+ EXPECT_CALL(frame_receiver, ReceiveExternalGpuMemoryBufferFrame)
+ .WillRepeatedly(testing::Invoke(
+ [&](CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
+ base::TimeDelta timestamp) {
+ if (has_received_first_frame) {
+ // Ignore subsequent frames.
+ return;
+ }
+ EXPECT_TRUE(scaled_frames.empty());
+ has_received_first_frame = true;
+ first_frame_received.Quit();
+ }));
+ first_frame_received.Run();
+
+ [captureDevice stopCapture];
+ }));
+}
+
+TEST(VideoCaptureDeviceAVFoundationMacTest, SpecifiedScalingOutputsNv12) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitWithFeatures(
+ {kInCaptureConvertToNv12, kInCapturerScaling}, {});
+
+ RunTestCase(base::BindOnce([] {
+ NSString* deviceId = GetFirstDeviceId();
+ if (!deviceId) {
+ LOG(ERROR) << "No camera available. Exiting test.";
+ return;
+ }
+
+ testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
+ frame_receiver;
+ base::scoped_nsobject<VideoCaptureDeviceAVFoundation> captureDevice(
+ [[VideoCaptureDeviceAVFoundation alloc]
+ initWithFrameReceiver:&frame_receiver]);
+
+ NSString* errorMessage = nil;
+ ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
+ errorMessage:&errorMessage]);
+
+ std::vector<gfx::Size> scaled_resolutions;
+ scaled_resolutions.emplace_back(320, 240);
+ [captureDevice setScaledResolutions:scaled_resolutions];
+
+ ASSERT_TRUE([captureDevice startCapture]);
+
+ bool has_received_first_frame = false;
+ base::RunLoop first_frame_received(
+ base::RunLoop::Type::kNestableTasksAllowed);
+ EXPECT_CALL(frame_receiver, ReceiveExternalGpuMemoryBufferFrame)
+ .WillRepeatedly(testing::Invoke(
+ [&](CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
+ base::TimeDelta timestamp) {
+ if (has_received_first_frame) {
+ // Ignore subsequent frames.
+ return;
+ }
+ EXPECT_EQ(frame.format.pixel_format, PIXEL_FORMAT_NV12);
+ ASSERT_EQ(scaled_frames.size(), 1u);
+ EXPECT_EQ(scaled_frames[0].format.frame_size,
+ scaled_resolutions[0]);
+ EXPECT_EQ(scaled_frames[0].format.pixel_format,
+ PIXEL_FORMAT_NV12);
+ has_received_first_frame = true;
+ first_frame_received.Quit();
+ }));
+ first_frame_received.Run();
+
+ [captureDevice stopCapture];
+ }));
+}
+
+TEST(VideoCaptureDeviceAVFoundationMacTest,
+ SpecifiedScalingCanChangeDuringCapture) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitWithFeatures(
+ {kInCaptureConvertToNv12, kInCapturerScaling}, {});
+
+ RunTestCase(base::BindOnce([] {
+ NSString* deviceId = GetFirstDeviceId();
+ if (!deviceId) {
+ LOG(ERROR) << "No camera available. Exiting test.";
+ return;
+ }
+
+ testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
+ frame_receiver;
+ base::scoped_nsobject<VideoCaptureDeviceAVFoundation> captureDevice(
+ [[VideoCaptureDeviceAVFoundation alloc]
+ initWithFrameReceiver:&frame_receiver]);
+
+ NSString* errorMessage = nil;
+ ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
+ errorMessage:&errorMessage]);
+
+ // Start capture without scaling and wait until frames are flowing.
+ [captureDevice setScaledResolutions:{}];
+ ASSERT_TRUE([captureDevice startCapture]);
+
+ bool has_received_first_frame = false;
+ base::RunLoop first_frame_received(
+ base::RunLoop::Type::kNestableTasksAllowed);
+ EXPECT_CALL(frame_receiver, ReceiveExternalGpuMemoryBufferFrame)
+ .WillRepeatedly(testing::Invoke(
+ [&](CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
+ base::TimeDelta timestamp) {
+ if (has_received_first_frame) {
+ // Ignore subsequent frames.
+ return;
+ }
+ EXPECT_TRUE(scaled_frames.empty());
+ has_received_first_frame = true;
+ first_frame_received.Quit();
+ }));
+ first_frame_received.Run();
+
+ // Specify scaling and wait for scaled frames to arrive.
+ std::vector<gfx::Size> scaled_resolutions;
+ scaled_resolutions.emplace_back(320, 240);
+ [captureDevice setScaledResolutions:scaled_resolutions];
+
+ bool has_received_scaled_frame = false;
+ base::RunLoop scaled_frame_received(
+ base::RunLoop::Type::kNestableTasksAllowed);
+ EXPECT_CALL(frame_receiver, ReceiveExternalGpuMemoryBufferFrame)
+ .WillRepeatedly(testing::Invoke(
+ [&](CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
+ base::TimeDelta timestamp) {
+ if (has_received_scaled_frame || scaled_frames.empty()) {
+ // Ignore subsequent frames.
+ return;
+ }
+ has_received_scaled_frame = true;
+ scaled_frame_received.Quit();
+ }));
+ scaled_frame_received.Run();
+
+ [captureDevice stopCapture];
+ }));
+}
+
+TEST(VideoCaptureDeviceAVFoundationMacTest,
+ SpecifiedScalingUsesGoodSizesButNotBadSizes) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitWithFeatures(
+ {kInCaptureConvertToNv12, kInCapturerScaling}, {});
+
+ RunTestCase(base::BindOnce([] {
+ VideoCaptureDeviceFactoryMac video_capture_device_factory;
+ std::vector<VideoCaptureDeviceInfo> device_infos =
+ GetDevicesInfo(&video_capture_device_factory);
+ if (device_infos.empty()) {
+ LOG(ERROR) << "No camera available. Exiting test.";
+ return;
+ }
+ const auto& device_info = device_infos.front();
+ NSString* deviceId = [NSString
+ stringWithUTF8String:device_info.descriptor.device_id.c_str()];
+ VideoCaptureFormat camera_format = device_info.supported_formats.front();
+
+ testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
+ frame_receiver;
+ base::scoped_nsobject<VideoCaptureDeviceAVFoundation> captureDevice(
+ [[VideoCaptureDeviceAVFoundation alloc]
+ initWithFrameReceiver:&frame_receiver]);
+
+ NSString* errorMessage = nil;
+ ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
+ errorMessage:&errorMessage]);
+
+ // Capture at a lower resolution than we request to scale.
+ ASSERT_TRUE([captureDevice
+ setCaptureHeight:camera_format.frame_size.height()
+ width:camera_format.frame_size.width()
+ frameRate:camera_format.frame_rate]);
+ std::vector<gfx::Size> scaled_resolutions;
+ // Bad resolution because it causes upscale.
+ scaled_resolutions.emplace_back(camera_format.frame_size.width() * 2,
+ camera_format.frame_size.height() * 2);
+ // Bad resolution because it is the same as the captured resolution.
+ scaled_resolutions.push_back(camera_format.frame_size);
+ // Good resolution because it causes downscale in both dimensions.
+ scaled_resolutions.emplace_back(camera_format.frame_size.width() / 2,
+ camera_format.frame_size.height() / 2);
+ // Good resolution because it causes downscale in both dimensions.
+ scaled_resolutions.emplace_back(camera_format.frame_size.width() / 4,
+ camera_format.frame_size.height() / 4);
+ // Good resolution because it causes downscale in one dimension (stretch).
+ scaled_resolutions.emplace_back(camera_format.frame_size.width() / 2,
+ camera_format.frame_size.height());
+ [captureDevice setScaledResolutions:scaled_resolutions];
+
+ ASSERT_TRUE([captureDevice startCapture]);
+
+ bool has_received_first_frame = false;
+ base::RunLoop first_frame_received(
+ base::RunLoop::Type::kNestableTasksAllowed);
+ EXPECT_CALL(frame_receiver, ReceiveExternalGpuMemoryBufferFrame)
+ .WillRepeatedly(testing::Invoke(
+ [&](CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
+ base::TimeDelta timestamp) {
+ if (has_received_first_frame) {
+ // Normally we have time to stop capturing before multiple
+ // frames are received but in order for the test to be able to
+ // run on slow bots we are prepared for this method to be
+ // invoked any number of times. Frames subsequent the first one
+ // are ignored.
+ return;
+ }
+
+ EXPECT_EQ(scaled_frames.size(), 3u);
+ // The bad resolutions were ignored and the good resolutions are
+ // outputted in the requested order.
+ EXPECT_EQ(scaled_frames[0].format.frame_size,
+ scaled_resolutions[2]);
+ EXPECT_EQ(scaled_frames[1].format.frame_size,
+ scaled_resolutions[3]);
+ EXPECT_EQ(scaled_frames[2].format.frame_size,
+ scaled_resolutions[4]);
+
+ has_received_first_frame = true;
+ first_frame_received.Quit();
+ }));
+ first_frame_received.Run();
+
+ [captureDevice stopCapture];
+ }));
+}
+
+// This is approximately the same test as the one above except it does not rely
+// on having a camera. Instead we mock-invoke processPixelBufferNV12IOSurface
+// from the test as-if a camera had produced a frame.
+TEST(VideoCaptureDeviceAVFoundationMacTest,
+ ProcessPixelBufferNV12IOSurfaceWithGoodAndBadScaling) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitAndEnableFeature(kInCapturerScaling);
+
+ RunTestCase(base::BindOnce([] {
+ testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
+ frame_receiver;
+ base::scoped_nsobject<VideoCaptureDeviceAVFoundation> captureDevice(
+ [[VideoCaptureDeviceAVFoundation alloc]
+ initWithFrameReceiver:&frame_receiver]);
+
+ // Capture at a lower resolution than we request to scale.
+ gfx::Size capture_resolution(640, 360);
+ std::vector<gfx::Size> scaled_resolutions;
+ // Bad resolution because it causes upscale.
+ scaled_resolutions.emplace_back(capture_resolution.width() * 2,
+ capture_resolution.height() * 2);
+ // Bad resolution because it is the same as the captured resolution.
+ scaled_resolutions.push_back(capture_resolution);
+ // Good resolution because it causes downscale in both dimensions.
+ scaled_resolutions.emplace_back(capture_resolution.width() / 2,
+ capture_resolution.height() / 2);
+ // Good resolution because it causes downscale in both dimensions.
+ scaled_resolutions.emplace_back(capture_resolution.width() / 4,
+ capture_resolution.height() / 4);
+ // Good resolution because it causes downscale in one dimension (stretch).
+ scaled_resolutions.emplace_back(capture_resolution.width() / 2,
+ capture_resolution.height());
+ [captureDevice setScaledResolutions:scaled_resolutions];
+
+ // Create a blank NV12 pixel buffer that we pretend was captured.
+ VideoCaptureFormat capture_format(capture_resolution, 30,
+ PIXEL_FORMAT_NV12);
+ std::unique_ptr<ByteArrayPixelBuffer> yuvs_buffer =
+ CreateYuvsPixelBufferFromSingleRgbColor(
+ capture_resolution.width(), capture_resolution.height(), 0, 0, 0);
+ base::ScopedCFTypeRef<CVPixelBufferRef> pixelBuffer =
+ PixelBufferPool::Create(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
+ capture_resolution.width(),
+ capture_resolution.height(), 1)
+ ->CreateBuffer();
+ DCHECK(PixelBufferTransferer().TransferImage(yuvs_buffer->pixel_buffer,
+ pixelBuffer));
+
+ [captureDevice
+ callLocked:base::BindLambdaForTesting([&] {
+ EXPECT_CALL(frame_receiver, ReceiveExternalGpuMemoryBufferFrame)
+ .WillOnce(testing::Invoke(
+ [&](CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
+ base::TimeDelta timestamp) {
+ EXPECT_EQ(scaled_frames.size(), 3u);
+ // The bad resolutions were ignored and the good
+ // resolutions are outputted in the requested order.
+ EXPECT_EQ(scaled_frames[0].format.frame_size,
+ scaled_resolutions[2]);
+ EXPECT_EQ(scaled_frames[1].format.frame_size,
+ scaled_resolutions[3]);
+ EXPECT_EQ(scaled_frames[2].format.frame_size,
+ scaled_resolutions[4]);
+ }));
+ [captureDevice
+ processPixelBufferNV12IOSurface:pixelBuffer
+ captureFormat:capture_format
+ colorSpace:gfx::ColorSpace::CreateSRGB()
+ timestamp:base::TimeDelta()];
+ })];
+ }));
+}
+
TEST(VideoCaptureDeviceAVFoundationMacTest, TakePhoto) {
RunTestCase(base::BindOnce([] {
NSString* deviceId = GetFirstDeviceId();
@@ -213,4 +608,29 @@ TEST(VideoCaptureDeviceAVFoundationMacTest,
}));
}
+TEST(VideoCaptureDeviceAVFoundationMacTest, ForwardsOddPixelBufferResolution) {
+ // See crbug/1168112.
+ RunTestCase(base::BindOnce([] {
+ testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
+ frame_receiver;
+ base::scoped_nsobject<VideoCaptureDeviceAVFoundation> captureDevice(
+ [[VideoCaptureDeviceAVFoundation alloc]
+ initWithFrameReceiver:&frame_receiver]);
+
+ gfx::Size size(1280, 719);
+ VideoCaptureFormat format(size, 30, PIXEL_FORMAT_YUY2);
+ std::unique_ptr<ByteArrayPixelBuffer> buffer =
+ CreateYuvsPixelBufferFromSingleRgbColor(size.width(), size.height(), 0,
+ 0, 0);
+ [captureDevice
+ callLocked:base::BindLambdaForTesting([&] {
+ EXPECT_CALL(frame_receiver, ReceiveFrame(_, _, format, _, _, _, _));
+ [captureDevice processPixelBufferPlanes:buffer->pixel_buffer
+ captureFormat:format
+ colorSpace:gfx::ColorSpace::CreateSRGB()
+ timestamp:base::TimeDelta()];
+ })];
+ }));
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h b/chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h
index f121dd8fe2e..63ccc6ddb1d 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h
@@ -8,11 +8,14 @@
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
+#include <vector>
+
#import "base/mac/scoped_nsobject.h"
#include "base/synchronization/lock.h"
#include "base/threading/thread_checker.h"
#include "media/capture/video/video_capture_device.h"
#include "media/capture/video_capture_types.h"
+#include "ui/gfx/geometry/size.h"
namespace media {
class VideoCaptureDeviceMac;
@@ -35,9 +38,8 @@ class CAPTURE_EXPORT VideoCaptureDeviceAVFoundationFrameReceiver {
// function may be called from any thread, including those controlled by
// AVFoundation.
virtual void ReceiveExternalGpuMemoryBufferFrame(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& frame_format,
- const gfx::ColorSpace color_space,
+ CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
base::TimeDelta timestamp) = 0;
// Callbacks with the result of a still image capture, or in case of error,
@@ -94,6 +96,13 @@ class CAPTURE_EXPORT VideoCaptureDeviceAVFoundationFrameReceiver {
width:(int)width
frameRate:(float)frameRate;
+// If an efficient path is available, the capturer will perform scaling and
+// deliver scaled frames to the |frameReceiver| as specified by |resolutions|.
+// The scaled frames are delivered in addition to the original captured frame.
+// Resolutions that match the captured frame or that would result in upscaling
+// are ignored.
+- (void)setScaledResolutions:(std::vector<gfx::Size>)resolutions;
+
// Starts video capturing and registers notification listeners. Must be
// called after setCaptureDevice:, and, eventually, also after
// setCaptureHeight:width:frameRate:.
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h b/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h
index 82956f85a20..b5fa0a7178c 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h
@@ -39,6 +39,9 @@ void ExtractBaseAddressAndLength(char** base_address,
// on if |kMacNextGenerationCapturer| is enabled or disabled.
Class GetVideoCaptureDeviceAVFoundationImplementationClass();
+gfx::Size CAPTURE_EXPORT GetPixelBufferSize(CVPixelBufferRef pixel_buffer);
+gfx::Size CAPTURE_EXPORT GetSampleBufferSize(CMSampleBufferRef sample_buffer);
+
} // namespace media
#endif // MEDIA_CAPTURE_VIDEO_MAC_VIDEO_CAPTURE_DEVICE_AVFOUNDATION_UTILS_MAC_H_
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm b/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm
index 164e850a526..ca211aeb10d 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm
@@ -233,4 +233,21 @@ Class GetVideoCaptureDeviceAVFoundationImplementationClass() {
return [VideoCaptureDeviceAVFoundationLegacy class];
}
+gfx::Size GetPixelBufferSize(CVPixelBufferRef pixel_buffer) {
+ return gfx::Size(CVPixelBufferGetWidth(pixel_buffer),
+ CVPixelBufferGetHeight(pixel_buffer));
+}
+
+gfx::Size GetSampleBufferSize(CMSampleBufferRef sample_buffer) {
+ if (CVPixelBufferRef pixel_buffer =
+ CMSampleBufferGetImageBuffer(sample_buffer)) {
+ return GetPixelBufferSize(pixel_buffer);
+ }
+ CMFormatDescriptionRef format_description =
+ CMSampleBufferGetFormatDescription(sample_buffer);
+ CMVideoDimensions dimensions =
+ CMVideoFormatDescriptionGetDimensions(format_description);
+ return gfx::Size(dimensions.width, dimensions.height);
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/mac/video_capture_device_mac.h b/chromium/media/capture/video/mac/video_capture_device_mac.h
index 1090327d09b..8178526d54e 100644
--- a/chromium/media/capture/video/mac/video_capture_device_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_mac.h
@@ -81,9 +81,8 @@ class VideoCaptureDeviceMac
int aspect_denominator,
base::TimeDelta timestamp) override;
void ReceiveExternalGpuMemoryBufferFrame(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& frame_format,
- const gfx::ColorSpace color_space,
+ CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
base::TimeDelta timestamp) override;
void OnPhotoTaken(const uint8_t* image_data,
size_t image_length,
diff --git a/chromium/media/capture/video/mac/video_capture_device_mac.mm b/chromium/media/capture/video/mac/video_capture_device_mac.mm
index 3a0c2e11974..d5691a42324 100644
--- a/chromium/media/capture/video/mac/video_capture_device_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_mac.mm
@@ -803,20 +803,19 @@ void VideoCaptureDeviceMac::ReceiveFrame(const uint8_t* video_frame,
}
void VideoCaptureDeviceMac::ReceiveExternalGpuMemoryBufferFrame(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace color_space,
+ CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
base::TimeDelta timestamp) {
- if (capture_format_.frame_size != format.frame_size) {
+ if (capture_format_.frame_size != frame.format.frame_size) {
ReceiveError(VideoCaptureError::kMacReceivedFrameWithUnexpectedResolution,
FROM_HERE,
- "Captured resolution " + format.frame_size.ToString() +
+ "Captured resolution " + frame.format.frame_size.ToString() +
", and expected " + capture_format_.frame_size.ToString());
return;
}
- client_->OnIncomingCapturedExternalBuffer(std::move(handle), format,
- color_space, base::TimeTicks::Now(),
- timestamp);
+ client_->OnIncomingCapturedExternalBuffer(std::move(frame),
+ std::move(scaled_frames),
+ base::TimeTicks::Now(), timestamp);
}
void VideoCaptureDeviceMac::OnPhotoTaken(const uint8_t* image_data,
diff --git a/chromium/media/capture/video/mac/video_capture_metrics_mac.h b/chromium/media/capture/video/mac/video_capture_metrics_mac.h
new file mode 100644
index 00000000000..c56789a5b22
--- /dev/null
+++ b/chromium/media/capture/video/mac/video_capture_metrics_mac.h
@@ -0,0 +1,23 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_MAC_VIDEO_CAPTURE_METRICS_MAC_H_
+#define MEDIA_CAPTURE_VIDEO_MAC_VIDEO_CAPTURE_METRICS_MAC_H_
+
+#import <AVFoundation/AVFoundation.h>
+#include <CoreMedia/CoreMedia.h>
+#import <Foundation/Foundation.h>
+
+#include "media/capture/capture_export.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace media {
+
+CAPTURE_EXPORT
+void LogFirstCapturedVideoFrame(const AVCaptureDeviceFormat* bestCaptureFormat,
+ const CMSampleBufferRef buffer);
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_MAC_VIDEO_CAPTURE_METRICS_MAC_H_ \ No newline at end of file
diff --git a/chromium/media/capture/video/mac/video_capture_metrics_mac.mm b/chromium/media/capture/video/mac/video_capture_metrics_mac.mm
new file mode 100644
index 00000000000..9c6a64dd643
--- /dev/null
+++ b/chromium/media/capture/video/mac/video_capture_metrics_mac.mm
@@ -0,0 +1,88 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "media/capture/video/mac/video_capture_metrics_mac.h"
+
+#include "base/metrics/histogram_functions.h"
+#import "media/capture/video/mac/video_capture_device_avfoundation_mac.h"
+#include "media/capture/video/video_capture_device_info.h"
+
+namespace media {
+
+namespace {
+
+enum class ResolutionComparison {
+ kWidthGtHeightEq = 0,
+ kWidthLtHeightEq = 1,
+ kWidthEqHeightGt = 2,
+ kWidthEqHeightLt = 3,
+ kEq = 4,
+ kWidthGtHeightGt = 5,
+ kWidthLtHeightGt = 6,
+ kWidthGtHeightLt = 7,
+ kWidthLtHeightLt = 8,
+ kMaxValue = kWidthLtHeightLt,
+};
+
+ResolutionComparison CompareDimensions(const CMVideoDimensions& requested,
+ const CMVideoDimensions& captured) {
+ if (requested.width > captured.width) {
+ if (requested.height > captured.height)
+ return ResolutionComparison::kWidthGtHeightGt;
+ if (requested.height < captured.height)
+ return ResolutionComparison::kWidthGtHeightLt;
+ return ResolutionComparison::kWidthGtHeightEq;
+ } else if (requested.width < captured.width) {
+ if (requested.height > captured.height)
+ return ResolutionComparison::kWidthLtHeightGt;
+ if (requested.height < captured.height)
+ return ResolutionComparison::kWidthLtHeightLt;
+ return ResolutionComparison::kWidthLtHeightEq;
+ } else {
+ if (requested.height > captured.height)
+ return ResolutionComparison::kWidthEqHeightGt;
+ if (requested.height < captured.height)
+ return ResolutionComparison::kWidthEqHeightLt;
+ return ResolutionComparison::kEq;
+ }
+}
+
+} // namespace
+
+void LogFirstCapturedVideoFrame(const AVCaptureDeviceFormat* bestCaptureFormat,
+ const CMSampleBufferRef buffer) {
+ if (bestCaptureFormat) {
+ const CMFormatDescriptionRef requestedFormat =
+ [bestCaptureFormat formatDescription];
+ base::UmaHistogramEnumeration(
+ "Media.VideoCapture.Mac.Device.RequestedPixelFormat",
+ [VideoCaptureDeviceAVFoundation
+ FourCCToChromiumPixelFormat:CMFormatDescriptionGetMediaSubType(
+ requestedFormat)],
+ media::VideoPixelFormat::PIXEL_FORMAT_MAX);
+
+ if (buffer) {
+ const CMFormatDescriptionRef capturedFormat =
+ CMSampleBufferGetFormatDescription(buffer);
+ base::UmaHistogramBoolean(
+ "Media.VideoCapture.Mac.Device.CapturedWithRequestedPixelFormat",
+ CMFormatDescriptionGetMediaSubType(capturedFormat) ==
+ CMFormatDescriptionGetMediaSubType(requestedFormat));
+ base::UmaHistogramEnumeration(
+ "Media.VideoCapture.Mac.Device.CapturedWithRequestedResolution",
+ CompareDimensions(
+ CMVideoFormatDescriptionGetDimensions(requestedFormat),
+ CMVideoFormatDescriptionGetDimensions(capturedFormat)));
+
+ const CVPixelBufferRef pixelBufferRef =
+ CMSampleBufferGetImageBuffer(buffer);
+ bool is_io_sufrace =
+ pixelBufferRef && CVPixelBufferGetIOSurface(pixelBufferRef);
+ base::UmaHistogramBoolean(
+ "Media.VideoCapture.Mac.Device.CapturedIOSurface", is_io_sufrace);
+ }
+ }
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/mac/video_capture_metrics_mac_unittest.mm b/chromium/media/capture/video/mac/video_capture_metrics_mac_unittest.mm
new file mode 100644
index 00000000000..205b20ec9bc
--- /dev/null
+++ b/chromium/media/capture/video/mac/video_capture_metrics_mac_unittest.mm
@@ -0,0 +1,87 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "media/capture/video/mac/video_capture_metrics_mac.h"
+
+#import <AVFoundation/AVFoundation.h>
+#include <CoreMedia/CoreMedia.h>
+#import <Foundation/Foundation.h>
+
+#include "base/mac/scoped_cftyperef.h"
+#include "base/test/metrics/histogram_tester.h"
+#include "media/base/video_types.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#import "third_party/ocmock/OCMock/OCMock.h"
+#include "third_party/ocmock/gtest_support.h"
+
+namespace media {
+
+namespace {} // namespace
+
+TEST(VideoCaptureMetricsMacTest, NoMetricsLoggedIfNullRequestedCaptureFormat) {
+ base::HistogramTester histogram_tester;
+ LogFirstCapturedVideoFrame(nullptr, nullptr);
+ EXPECT_THAT(histogram_tester.GetTotalCountsForPrefix("Media."),
+ testing::IsEmpty());
+}
+
+TEST(VideoCaptureMetricsMacTest, LogRequestedPixelFormat) {
+ base::HistogramTester histogram_tester;
+
+ base::ScopedCFTypeRef<CMFormatDescriptionRef> requested_format;
+ OSStatus status = CMVideoFormatDescriptionCreate(
+ kCFAllocatorDefault,
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange /*NV12*/, 320, 180,
+ nullptr, requested_format.InitializeInto());
+ ASSERT_EQ(0, status);
+ id capture_format = OCMClassMock([AVCaptureDeviceFormat class]);
+ OCMStub([capture_format formatDescription]).andReturn(requested_format.get());
+
+ LogFirstCapturedVideoFrame(capture_format, nullptr);
+ EXPECT_THAT(histogram_tester.GetAllSamples(
+ "Media.VideoCapture.Mac.Device.RequestedPixelFormat"),
+ testing::UnorderedElementsAre(
+ base::Bucket(VideoPixelFormat::PIXEL_FORMAT_NV12, 1)));
+}
+
+TEST(VideoCaptureMetricsMacTest, LogFirstFrameWhenAsRequested) {
+ base::HistogramTester histogram_tester;
+
+ base::ScopedCFTypeRef<CMFormatDescriptionRef> requested_format;
+ OSStatus status = CMVideoFormatDescriptionCreate(
+ kCFAllocatorDefault,
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange /*NV12*/, 320, 180,
+ nullptr, requested_format.InitializeInto());
+ ASSERT_EQ(0, status);
+ id capture_format = OCMClassMock([AVCaptureDeviceFormat class]);
+ OCMStub([capture_format formatDescription]).andReturn(requested_format.get());
+
+ // First frame equal.
+ base::ScopedCFTypeRef<CMSampleBufferRef> first_frame;
+ status = CMSampleBufferCreate(kCFAllocatorDefault, nullptr, false, nullptr,
+ nullptr, requested_format, 0, 0, nullptr, 0,
+ nullptr, first_frame.InitializeInto());
+ ASSERT_EQ(0, status);
+
+ LogFirstCapturedVideoFrame(capture_format, first_frame);
+
+ EXPECT_THAT(histogram_tester.GetAllSamples(
+ "Media.VideoCapture.Mac.Device.RequestedPixelFormat"),
+ testing::UnorderedElementsAre(
+ base::Bucket(VideoPixelFormat::PIXEL_FORMAT_NV12, 1)));
+ EXPECT_THAT(
+ histogram_tester.GetAllSamples(
+ "Media.VideoCapture.Mac.Device.CapturedWithRequestedPixelFormat"),
+ testing::UnorderedElementsAre(base::Bucket(1, 1)));
+ EXPECT_THAT(
+ histogram_tester.GetAllSamples(
+ "Media.VideoCapture.Mac.Device.CapturedWithRequestedResolution"),
+ testing::UnorderedElementsAre(base::Bucket(4, 1)));
+ EXPECT_THAT(histogram_tester.GetAllSamples(
+ "Media.VideoCapture.Mac.Device.CapturedIOSurface"),
+ testing::UnorderedElementsAre(base::Bucket(0, 1)));
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
index adac8b74a2b..0246f6cbe99 100644
--- a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
+++ b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
@@ -7,7 +7,7 @@
#include "media/video/fake_gpu_memory_buffer.h"
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "media/capture/video/chromeos/request_manager.h"
#endif
@@ -26,7 +26,7 @@ MockGpuMemoryBufferManager::CreateFakeGpuMemoryBuffer(
gfx::BufferUsage usage,
gpu::SurfaceHandle surface_handle) {
auto gmb = std::make_unique<FakeGpuMemoryBuffer>(size, format);
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
// For faking a valid JPEG blob buffer.
if (base::checked_cast<size_t>(size.width()) >= sizeof(Camera3JpegBlob)) {
Camera3JpegBlob* header = reinterpret_cast<Camera3JpegBlob*>(
diff --git a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.h b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.h
index 20bbf07e4f3..614cf8b8ac6 100644
--- a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.h
+++ b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.h
@@ -30,6 +30,15 @@ class MockGpuMemoryBufferManager : public gpu::GpuMemoryBufferManager {
void(gfx::GpuMemoryBuffer* buffer,
const gpu::SyncToken& sync_token));
+ MOCK_METHOD3(CopyGpuMemoryBufferAsync,
+ void(gfx::GpuMemoryBufferHandle buffer_handle,
+ base::UnsafeSharedMemoryRegion memory_region,
+ base::OnceCallback<void(bool)> callback));
+
+ MOCK_METHOD2(CopyGpuMemoryBufferSync,
+ bool(gfx::GpuMemoryBufferHandle buffer_handle,
+ base::UnsafeSharedMemoryRegion memory_region));
+
static std::unique_ptr<gfx::GpuMemoryBuffer> CreateFakeGpuMemoryBuffer(
const gfx::Size& size,
gfx::BufferFormat format,
diff --git a/chromium/media/capture/video/mock_video_capture_device_client.h b/chromium/media/capture/video/mock_video_capture_device_client.h
index 1adfc805885..fd0ed66b802 100644
--- a/chromium/media/capture/video/mock_video_capture_device_client.h
+++ b/chromium/media/capture/video/mock_video_capture_device_client.h
@@ -35,10 +35,9 @@ class MockVideoCaptureDeviceClient : public VideoCaptureDevice::Client {
base::TimeTicks reference_time,
base::TimeDelta timestamp,
int frame_feedback_id));
- MOCK_METHOD5(OnIncomingCapturedExternalBuffer,
- void(gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ MOCK_METHOD4(OnIncomingCapturedExternalBuffer,
+ void(CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
base::TimeTicks reference_time,
base::TimeDelta timestamp));
MOCK_METHOD4(ReserveOutputBuffer,
diff --git a/chromium/media/capture/video/mock_video_frame_receiver.h b/chromium/media/capture/video/mock_video_frame_receiver.h
index cee00ad229a..01b05db9b2c 100644
--- a/chromium/media/capture/video/mock_video_frame_receiver.h
+++ b/chromium/media/capture/video/mock_video_frame_receiver.h
@@ -37,14 +37,10 @@ class MockVideoFrameReceiver : public VideoFrameReceiver {
}
void OnFrameReadyInBuffer(
- int32_t buffer_id,
- int frame_feedback_id,
- std::unique_ptr<
- VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
- buffer_read_permission,
- media::mojom::VideoFrameInfoPtr frame_info) override {
- MockOnFrameReadyInBuffer(buffer_id, &buffer_read_permission,
- frame_info->coded_size);
+ ReadyFrameInBuffer frame,
+ std::vector<ReadyFrameInBuffer> scaled_frames) override {
+ MockOnFrameReadyInBuffer(frame.buffer_id, &frame.buffer_read_permission,
+ frame.frame_info->coded_size);
}
};
diff --git a/chromium/media/capture/video/video_capture_buffer_pool_impl.cc b/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
index 6913bfed465..acc9a427b1b 100644
--- a/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
+++ b/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
@@ -15,6 +15,10 @@
#include "media/capture/video/video_capture_buffer_tracker_factory_impl.h"
#include "ui/gfx/buffer_format_util.h"
+#if defined(OS_WIN)
+#include "media/capture/video/win/video_capture_buffer_tracker_factory_win.h"
+#endif // defined(OS_WIN)
+
namespace media {
VideoCaptureBufferPoolImpl::VideoCaptureBufferPoolImpl(
@@ -22,8 +26,14 @@ VideoCaptureBufferPoolImpl::VideoCaptureBufferPoolImpl(
int count)
: buffer_type_(buffer_type),
count_(count),
+#if defined(OS_WIN)
+ buffer_tracker_factory_(
+ std::make_unique<media::VideoCaptureBufferTrackerFactoryWin>())
+#else
buffer_tracker_factory_(
- std::make_unique<media::VideoCaptureBufferTrackerFactoryImpl>()) {
+ std::make_unique<media::VideoCaptureBufferTrackerFactoryImpl>())
+#endif
+{
DCHECK_GT(count, 0);
}
diff --git a/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc b/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc
index 92c2a96d7b1..0fb7b918cdd 100644
--- a/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc
+++ b/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc
@@ -10,7 +10,7 @@
#include "build/build_config.h"
#include "media/capture/video/shared_memory_buffer_tracker.h"
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "media/capture/video/chromeos/gpu_memory_buffer_tracker.h"
#endif
@@ -25,7 +25,7 @@ VideoCaptureBufferTrackerFactoryImpl::CreateTracker(
VideoCaptureBufferType buffer_type) {
switch (buffer_type) {
case VideoCaptureBufferType::kGpuMemoryBuffer:
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
return std::make_unique<GpuMemoryBufferTracker>();
#elif defined(OS_MAC)
return std::make_unique<GpuMemoryBufferTrackerMac>();
diff --git a/chromium/media/capture/video/video_capture_device.cc b/chromium/media/capture/video/video_capture_device.cc
index 4085a0c505f..648ca2fb57c 100644
--- a/chromium/media/capture/video/video_capture_device.cc
+++ b/chromium/media/capture/video/video_capture_device.cc
@@ -13,6 +13,30 @@
namespace media {
+CapturedExternalVideoBuffer::CapturedExternalVideoBuffer(
+ gfx::GpuMemoryBufferHandle handle,
+ VideoCaptureFormat format,
+ gfx::ColorSpace color_space)
+ : handle(std::move(handle)),
+ format(std::move(format)),
+ color_space(std::move(color_space)) {}
+
+CapturedExternalVideoBuffer::CapturedExternalVideoBuffer(
+ CapturedExternalVideoBuffer&& other)
+ : handle(std::move(other.handle)),
+ format(std::move(other.format)),
+ color_space(std::move(other.color_space)) {}
+
+CapturedExternalVideoBuffer::~CapturedExternalVideoBuffer() = default;
+
+CapturedExternalVideoBuffer& CapturedExternalVideoBuffer::operator=(
+ CapturedExternalVideoBuffer&& other) {
+ handle = std::move(other.handle);
+ format = std::move(other.format);
+ color_space = std::move(other.color_space);
+ return *this;
+}
+
VideoCaptureDevice::Client::Buffer::Buffer() : id(0), frame_feedback_id(0) {}
VideoCaptureDevice::Client::Buffer::Buffer(
diff --git a/chromium/media/capture/video/video_capture_device.h b/chromium/media/capture/video/video_capture_device.h
index 8fee0be95e5..b0c4c0466a0 100644
--- a/chromium/media/capture/video/video_capture_device.h
+++ b/chromium/media/capture/video/video_capture_device.h
@@ -67,6 +67,20 @@ class CAPTURE_EXPORT VideoFrameConsumerFeedbackObserver {
media::VideoFrameFeedback feedback) {}
};
+struct CAPTURE_EXPORT CapturedExternalVideoBuffer {
+ CapturedExternalVideoBuffer(gfx::GpuMemoryBufferHandle handle,
+ VideoCaptureFormat format,
+ gfx::ColorSpace color_space);
+ CapturedExternalVideoBuffer(CapturedExternalVideoBuffer&& other);
+ ~CapturedExternalVideoBuffer();
+
+ CapturedExternalVideoBuffer& operator=(CapturedExternalVideoBuffer&& other);
+
+ gfx::GpuMemoryBufferHandle handle;
+ VideoCaptureFormat format;
+ gfx::ColorSpace color_space;
+};
+
class CAPTURE_EXPORT VideoCaptureDevice
: public VideoFrameConsumerFeedbackObserver {
public:
@@ -187,9 +201,8 @@ class CAPTURE_EXPORT VideoCaptureDevice
// gfx::ScopedInUseIOSurface is used to prevent reuse of buffers until all
// consumers have consumed them.
virtual void OnIncomingCapturedExternalBuffer(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
base::TimeTicks reference_time,
base::TimeDelta timestamp) = 0;
diff --git a/chromium/media/capture/video/video_capture_device_client.cc b/chromium/media/capture/video/video_capture_device_client.cc
index d11eece6e42..cbe37e1b753 100644
--- a/chromium/media/capture/video/video_capture_device_client.cc
+++ b/chromium/media/capture/video/video_capture_device_client.cc
@@ -24,9 +24,9 @@
#include "media/capture/video_capture_types.h"
#include "third_party/libyuv/include/libyuv.h"
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "media/capture/video/chromeos/video_capture_jpeg_decoder.h"
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
namespace {
@@ -163,7 +163,7 @@ class BufferPoolBufferHandleProvider
const int buffer_id_;
};
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
VideoCaptureDeviceClient::VideoCaptureDeviceClient(
VideoCaptureBufferType target_buffer_type,
std::unique_ptr<VideoFrameReceiver> receiver,
@@ -188,7 +188,7 @@ VideoCaptureDeviceClient::VideoCaptureDeviceClient(
receiver_(std::move(receiver)),
buffer_pool_(std::move(buffer_pool)),
last_captured_pixel_format_(PIXEL_FORMAT_UNKNOWN) {}
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {
for (int buffer_id : buffer_ids_known_by_receiver_)
@@ -226,7 +226,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
OnLog("Pixel format: " + VideoPixelFormatToString(format.pixel_format));
last_captured_pixel_format_ = format.pixel_format;
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
if (format.pixel_format == PIXEL_FORMAT_MJPEG &&
optional_jpeg_decoder_factory_callback_) {
external_jpeg_decoder_ =
@@ -234,7 +234,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
DCHECK(external_jpeg_decoder_);
external_jpeg_decoder_->Initialize();
}
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
}
if (!format.IsValid()) {
@@ -353,7 +353,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
// paddings and/or alignments, but it cannot be smaller.
DCHECK_GE(static_cast<size_t>(length), format.ImageAllocationSize());
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
if (external_jpeg_decoder_) {
const VideoCaptureJpegDecoder::STATUS status =
external_jpeg_decoder_->GetStatus();
@@ -369,7 +369,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
return;
}
}
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
// libyuv::ConvertToI420 use Rec601 to convert RGB to YUV.
if (libyuv::ConvertToI420(
@@ -465,16 +465,31 @@ void VideoCaptureDeviceClient::OnIncomingCapturedGfxBuffer(
}
void VideoCaptureDeviceClient::OnIncomingCapturedExternalBuffer(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp) {
+ auto ready_frame = CreateReadyFrameFromExternalBuffer(
+ std::move(buffer), reference_time, timestamp);
+ std::vector<ReadyFrameInBuffer> scaled_ready_frames;
+ scaled_ready_frames.reserve(scaled_buffers.size());
+ for (auto& scaled_buffer : scaled_buffers) {
+ scaled_ready_frames.push_back(CreateReadyFrameFromExternalBuffer(
+ std::move(scaled_buffer), reference_time, timestamp));
+ }
+ receiver_->OnFrameReadyInBuffer(std::move(ready_frame),
+ std::move(scaled_ready_frames));
+}
+
+ReadyFrameInBuffer VideoCaptureDeviceClient::CreateReadyFrameFromExternalBuffer(
+ CapturedExternalVideoBuffer buffer,
base::TimeTicks reference_time,
base::TimeDelta timestamp) {
// Reserve an ID for this buffer that will not conflict with any of the IDs
// used by |buffer_pool_|.
int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId;
- int buffer_id =
- buffer_pool_->ReserveIdForExternalBuffer(handle, &buffer_id_to_drop);
+ int buffer_id = buffer_pool_->ReserveIdForExternalBuffer(buffer.handle,
+ &buffer_id_to_drop);
// If a buffer to retire was specified, retire one.
if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) {
@@ -491,30 +506,30 @@ void VideoCaptureDeviceClient::OnIncomingCapturedExternalBuffer(
if (!base::Contains(buffer_ids_known_by_receiver_, buffer_id)) {
media::mojom::VideoBufferHandlePtr buffer_handle =
media::mojom::VideoBufferHandle::New();
- buffer_handle->set_gpu_memory_buffer_handle(std::move(handle));
+ buffer_handle->set_gpu_memory_buffer_handle(std::move(buffer.handle));
receiver_->OnNewBuffer(buffer_id, std::move(buffer_handle));
buffer_ids_known_by_receiver_.push_back(buffer_id);
}
- // Tell |receiver_| that the frame has been received.
- {
- mojom::VideoFrameInfoPtr info = mojom::VideoFrameInfo::New();
- info->timestamp = timestamp;
- info->pixel_format = format.pixel_format;
- info->color_space = color_space;
- info->coded_size = format.frame_size;
- info->visible_rect = gfx::Rect(format.frame_size);
- info->metadata.frame_rate = format.frame_rate;
- info->metadata.reference_time = reference_time;
-
- buffer_pool_->HoldForConsumers(buffer_id, 1);
- buffer_pool_->RelinquishProducerReservation(buffer_id);
- receiver_->OnFrameReadyInBuffer(
- buffer_id, 0 /* frame_feedback_id */,
- std::make_unique<ScopedBufferPoolReservation<ConsumerReleaseTraits>>(
- buffer_pool_, buffer_id),
- std::move(info));
- }
+ // Construct the ready frame, to be passed on to the |receiver_| by the caller
+ // of this method.
+ mojom::VideoFrameInfoPtr info = mojom::VideoFrameInfo::New();
+ info->timestamp = timestamp;
+ info->pixel_format = buffer.format.pixel_format;
+ info->color_space = buffer.color_space;
+ info->coded_size = buffer.format.frame_size;
+ info->visible_rect = gfx::Rect(buffer.format.frame_size);
+ info->metadata.frame_rate = buffer.format.frame_rate;
+ info->metadata.reference_time = reference_time;
+
+ buffer_pool_->HoldForConsumers(buffer_id, 1);
+ buffer_pool_->RelinquishProducerReservation(buffer_id);
+
+ return ReadyFrameInBuffer(
+ buffer_id, 0 /* frame_feedback_id */,
+ std::make_unique<ScopedBufferPoolReservation<ConsumerReleaseTraits>>(
+ buffer_pool_, buffer_id),
+ std::move(info));
}
VideoCaptureDevice::Client::ReserveResult
@@ -614,10 +629,12 @@ void VideoCaptureDeviceClient::OnIncomingCapturedBufferExt(
buffer_pool_->HoldForConsumers(buffer.id, 1);
receiver_->OnFrameReadyInBuffer(
- buffer.id, buffer.frame_feedback_id,
- std::make_unique<ScopedBufferPoolReservation<ConsumerReleaseTraits>>(
- buffer_pool_, buffer.id),
- std::move(info));
+ ReadyFrameInBuffer(
+ buffer.id, buffer.frame_feedback_id,
+ std::make_unique<ScopedBufferPoolReservation<ConsumerReleaseTraits>>(
+ buffer_pool_, buffer.id),
+ std::move(info)),
+ {});
}
void VideoCaptureDeviceClient::OnError(VideoCaptureError error,
diff --git a/chromium/media/capture/video/video_capture_device_client.h b/chromium/media/capture/video/video_capture_device_client.h
index 7e9d6eb82ff..07d39c57cb5 100644
--- a/chromium/media/capture/video/video_capture_device_client.h
+++ b/chromium/media/capture/video/video_capture_device_client.h
@@ -18,6 +18,7 @@
#include "media/capture/capture_export.h"
#include "media/capture/mojom/video_capture_types.mojom.h"
#include "media/capture/video/video_capture_device.h"
+#include "media/capture/video/video_frame_receiver.h"
namespace media {
class VideoCaptureBufferPool;
@@ -44,7 +45,7 @@ using VideoCaptureJpegDecoderFactoryCB =
class CAPTURE_EXPORT VideoCaptureDeviceClient
: public VideoCaptureDevice::Client {
public:
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
VideoCaptureDeviceClient(
VideoCaptureBufferType target_buffer_type,
std::unique_ptr<VideoFrameReceiver> receiver,
@@ -54,7 +55,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
VideoCaptureDeviceClient(VideoCaptureBufferType target_buffer_type,
std::unique_ptr<VideoFrameReceiver> receiver,
scoped_refptr<VideoCaptureBufferPool> buffer_pool);
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
~VideoCaptureDeviceClient() override;
static Buffer MakeBufferStruct(
@@ -81,9 +82,8 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
base::TimeDelta timestamp,
int frame_feedback_id = 0) override;
void OnIncomingCapturedExternalBuffer(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
base::TimeTicks reference_time,
base::TimeDelta timestamp) override;
ReserveResult ReserveOutputBuffer(const gfx::Size& dimensions,
@@ -111,6 +111,11 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
double GetBufferPoolUtilization() const override;
private:
+ ReadyFrameInBuffer CreateReadyFrameFromExternalBuffer(
+ CapturedExternalVideoBuffer buffer,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp);
+
// A branch of OnIncomingCapturedData for Y16 frame_format.pixel_format.
void OnIncomingCapturedY16Data(const uint8_t* data,
int length,
@@ -125,11 +130,11 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
const std::unique_ptr<VideoFrameReceiver> receiver_;
std::vector<int> buffer_ids_known_by_receiver_;
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
VideoCaptureJpegDecoderFactoryCB optional_jpeg_decoder_factory_callback_;
std::unique_ptr<VideoCaptureJpegDecoder> external_jpeg_decoder_;
base::OnceClosure on_started_using_gpu_cb_;
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
// The pool of shared-memory buffers used for capturing.
const scoped_refptr<VideoCaptureBufferPool> buffer_pool_;
diff --git a/chromium/media/capture/video/video_capture_device_client_unittest.cc b/chromium/media/capture/video/video_capture_device_client_unittest.cc
index e23ddf03e51..7933122fc55 100644
--- a/chromium/media/capture/video/video_capture_device_client_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_client_unittest.cc
@@ -22,9 +22,9 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "media/capture/video/chromeos/video_capture_jpeg_decoder.h"
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
using ::testing::_;
using ::testing::AtLeast;
@@ -38,11 +38,11 @@ namespace media {
namespace {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
std::unique_ptr<VideoCaptureJpegDecoder> ReturnNullPtrAsJpecDecoder() {
return nullptr;
}
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
} // namespace
@@ -61,7 +61,7 @@ class VideoCaptureDeviceClientTest : public ::testing::Test {
receiver_ = controller.get();
gpu_memory_buffer_manager_ =
std::make_unique<unittest_internal::MockGpuMemoryBufferManager>();
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
device_client_ = std::make_unique<VideoCaptureDeviceClient>(
VideoCaptureBufferType::kSharedMemory, std::move(controller),
buffer_pool, base::BindRepeating(&ReturnNullPtrAsJpecDecoder));
@@ -69,7 +69,7 @@ class VideoCaptureDeviceClientTest : public ::testing::Test {
device_client_ = std::make_unique<VideoCaptureDeviceClient>(
VideoCaptureBufferType::kSharedMemory, std::move(controller),
buffer_pool);
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
}
~VideoCaptureDeviceClientTest() override = default;
@@ -110,7 +110,7 @@ TEST_F(VideoCaptureDeviceClientTest, Minimal) {
std::unique_ptr<gfx::GpuMemoryBuffer> buffer =
gpu_memory_buffer_manager_->CreateFakeGpuMemoryBuffer(
kBufferDimensions, gfx::BufferFormat::YUV_420_BIPLANAR,
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE,
gpu::kNullSurfaceHandle);
{
InSequence s;
diff --git a/chromium/media/capture/video/video_capture_device_factory.cc b/chromium/media/capture/video/video_capture_device_factory.cc
index 7bcdbb65e3f..dc668051cf7 100644
--- a/chromium/media/capture/video/video_capture_device_factory.cc
+++ b/chromium/media/capture/video/video_capture_device_factory.cc
@@ -21,10 +21,4 @@ VideoCaptureDeviceFactory::VideoCaptureDeviceFactory() {
VideoCaptureDeviceFactory::~VideoCaptureDeviceFactory() = default;
-#if BUILDFLAG(IS_ASH)
-bool VideoCaptureDeviceFactory::IsSupportedCameraAppDeviceBridge() {
- return false;
-}
-#endif // BUILDFLAG(IS_ASH)
-
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_device_factory.h b/chromium/media/capture/video/video_capture_device_factory.h
index 8b653ef4750..565343b975e 100644
--- a/chromium/media/capture/video/video_capture_device_factory.h
+++ b/chromium/media/capture/video/video_capture_device_factory.h
@@ -44,10 +44,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactory {
std::vector<VideoCaptureDeviceInfo> devices_info)>;
virtual void GetDevicesInfo(GetDevicesInfoCallback callback) = 0;
-#if BUILDFLAG(IS_ASH)
- virtual bool IsSupportedCameraAppDeviceBridge();
-#endif // BUILDFLAG(IS_ASH)
-
protected:
base::ThreadChecker thread_checker_;
diff --git a/chromium/media/capture/video/video_capture_device_unittest.cc b/chromium/media/capture/video/video_capture_device_unittest.cc
index 563f687e126..ec301cbf976 100644
--- a/chromium/media/capture/video/video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_unittest.cc
@@ -49,10 +49,10 @@
#include "media/capture/video/android/video_capture_device_factory_android.h"
#endif
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "chromeos/dbus/power/power_manager_client.h"
+#include "media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
-#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
#include "media/capture/video/chromeos/public/cros_features.h"
#include "media/capture/video/chromeos/video_capture_device_chromeos_halv3.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
@@ -66,9 +66,19 @@
DISABLED_UsingRealWebcam_AllocateBadSize
// We will always get YUYV from the Mac AVFoundation implementations.
#define MAYBE_UsingRealWebcam_CaptureMjpeg DISABLED_UsingRealWebcam_CaptureMjpeg
-#define MAYBE_UsingRealWebcam_TakePhoto UsingRealWebcam_TakePhoto
-#define MAYBE_UsingRealWebcam_GetPhotoState UsingRealWebcam_GetPhotoState
-#define MAYBE_UsingRealWebcam_CaptureWithSize UsingRealWebcam_CaptureWithSize
+
+// TODO(crbug.com/1128470): Re-enable as soon as issues with resource access
+// are fixed.
+#define MAYBE_UsingRealWebcam_TakePhoto DISABLED_UsingRealWebcam_TakePhoto
+// TODO(crbug.com/1128470): Re-enable as soon as issues with resource access
+// are fixed.
+#define MAYBE_UsingRealWebcam_GetPhotoState \
+ DISABLED_UsingRealWebcam_GetPhotoState
+// TODO(crbug.com/1128470): Re-enable as soon as issues with resource access
+// are fixed.
+#define MAYBE_UsingRealWebcam_CaptureWithSize \
+ DISABLED_UsingRealWebcam_CaptureWithSize
+
#define MAYBE_UsingRealWebcam_CheckPhotoCallbackRelease \
UsingRealWebcam_CheckPhotoCallbackRelease
#elif defined(OS_WIN) || defined(OS_FUCHSIA)
@@ -94,7 +104,7 @@
#define MAYBE_UsingRealWebcam_CaptureWithSize UsingRealWebcam_CaptureWithSize
#define MAYBE_UsingRealWebcam_CheckPhotoCallbackRelease \
UsingRealWebcam_CheckPhotoCallbackRelease
-#elif BUILDFLAG(IS_ASH)
+#elif BUILDFLAG(IS_CHROMEOS_ASH)
#define MAYBE_UsingRealWebcam_AllocateBadSize \
DISABLED_UsingRealWebcam_AllocateBadSize
#define MAYBE_UsingRealWebcam_CaptureMjpeg UsingRealWebcam_CaptureMjpeg
@@ -107,7 +117,7 @@
DISABLED_UsingRealWebcam_CaptureWithSize
#define MAYBE_UsingRealWebcam_CheckPhotoCallbackRelease \
UsingRealWebcam_CheckPhotoCallbackRelease
-#elif defined(OS_LINUX)
+#elif defined(OS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
// UsingRealWebcam_AllocateBadSize will hang when a real camera is attached and
// if more than one test is trying to use the camera (even across processes). Do
// NOT renable this test without fixing the many bugs associated with it:
@@ -270,7 +280,7 @@ class VideoCaptureDeviceTest
main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()),
video_capture_client_(CreateDeviceClient()),
image_capture_client_(new MockImageCaptureClient()) {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
local_gpu_memory_buffer_manager_ =
std::make_unique<LocalGpuMemoryBufferManager>();
VideoCaptureDeviceFactoryChromeOS::SetGpuBufferManager(
@@ -289,7 +299,7 @@ class VideoCaptureDeviceTest
}
void SetUp() override {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
chromeos::PowerManagerClient::InitializeFake();
#endif
#if defined(OS_ANDROID)
@@ -304,7 +314,7 @@ class VideoCaptureDeviceTest
}
void TearDown() override {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
chromeos::PowerManagerClient::Shutdown();
#endif
}
@@ -459,7 +469,7 @@ class VideoCaptureDeviceTest
std::unique_ptr<MockVideoCaptureDeviceClient> video_capture_client_;
const scoped_refptr<MockImageCaptureClient> image_capture_client_;
VideoCaptureFormat last_format_;
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
std::unique_ptr<LocalGpuMemoryBufferManager> local_gpu_memory_buffer_manager_;
#endif
std::unique_ptr<VideoCaptureDeviceFactory> video_capture_device_factory_;
@@ -467,7 +477,7 @@ class VideoCaptureDeviceTest
// Causes a flaky crash on Chrome OS. https://crbug.com/1069608
// Cause hangs on Windows Debug. http://crbug.com/417824
-#if BUILDFLAG(IS_ASH) || (defined(OS_WIN) && !defined(NDEBUG))
+#if BUILDFLAG(IS_CHROMEOS_ASH) || (defined(OS_WIN) && !defined(NDEBUG))
#define MAYBE_OpenInvalidDevice DISABLED_OpenInvalidDevice
#else
#define MAYBE_OpenInvalidDevice OpenInvalidDevice
@@ -654,7 +664,7 @@ WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_UsingRealWebcam_CaptureMjpeg) {
base::Unretained(this)));
}
void VideoCaptureDeviceTest::RunCaptureMjpegTestCase() {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
if (media::ShouldUseCrosCameraService()) {
VLOG(1)
<< "Skipped on Chrome OS device where HAL v3 camera service is used";
@@ -695,7 +705,7 @@ void VideoCaptureDeviceTest::RunCaptureMjpegTestCase() {
}
// Flaky on ChromeOS. See https://crbug.com/1096082
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#define MAYBE_NoCameraSupportsPixelFormatMax \
DISABLED_NoCameraSupportsPixelFormatMax
#else
diff --git a/chromium/media/capture/video/video_capture_metrics.cc b/chromium/media/capture/video/video_capture_metrics.cc
new file mode 100644
index 00000000000..23e222bca44
--- /dev/null
+++ b/chromium/media/capture/video/video_capture_metrics.cc
@@ -0,0 +1,180 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/video_capture_metrics.h"
+
+#include "base/containers/fixed_flat_map.h"
+#include "base/containers/flat_set.h"
+#include "base/containers/span.h"
+#include "base/logging.h"
+#include "base/metrics/histogram_functions.h"
+#include "base/metrics/histogram_macros.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace {
+
+// These resolutions are ones supported on a test webcam. Names given
+// where possible, from https://en.wikipedia.org/wiki/List_of_common_resolutions
+enum class VideoResolutionDesignation {
+ kUnknown = 0, // Catch-all for resolutions not understood.
+ // Video Graphics Array resolutions
+ kQQVGA = 1, // 160x120
+ kHQVGA = 2, // 240x160
+ kQVGA = 3, // 320x240
+ kWQVGA = 4, // 432x240
+ kHVGA = 5, // 480x320
+ kVGA = 6, // 640x480
+ kWVGA = 7, // 720x480
+ kWSVGA = 8, // 1024x576
+ kSVGA = 9, // 800x600
+
+ // Extended Graphics Array resolutions
+ kSXGA_MINUS = 10, // 1280x960
+ kUXGA = 11, // 1600x1200
+ kQXGA = 12, // 2048x1536
+
+ // Common Intermediate Format resolutions
+ kQCIF = 13, // 176x144
+ kCIF = 14, // 352x288
+
+ // High-definition resolutions.
+ kNHD = 15, // 640x360
+ kQHD = 16, // 960x540
+ kHD_FULLSCREEN = 17, // 960x720
+ kHD = 18, // 1280x720
+ kHD_PLUS = 19, // 1600x900
+ kFHD = 20, // 1920x1080
+ kWQHD = 21, // 2560x1440
+ kQHD_PLUS = 22, // 3200x1800
+ k4K_UHD = 23, // 3840x2160
+ kDCI_4K = 24, // 4096x2160
+ k5K = 25, // 5120x2880
+ k8K_UHD = 26, // 7680x4320
+
+ // Odd resolutions with no name
+ k160x90 = 27,
+ k320x176 = 28,
+ k320x180 = 29,
+ k480x270 = 30,
+ k544x288 = 31,
+ k752x416 = 32,
+ k864x480 = 33,
+ k800x448 = 34,
+ k960x544 = 35,
+ k1184x656 = 36,
+ k1392x768 = 37,
+ k1504x832 = 38,
+ k1600x896 = 39,
+ k1712x960 = 40,
+ k1792x1008 = 41,
+ k2592x1944 = 42,
+
+ kMaxValue = k2592x1944,
+};
+
+struct FrameSizeCompare {
+ // Return true iff lhs < rhs.
+ constexpr bool operator()(const gfx::Size& lhs, const gfx::Size& rhs) const {
+ return (lhs.height() < rhs.height() ||
+ (lhs.height() == rhs.height() && lhs.width() < rhs.width()));
+ }
+};
+
+constexpr auto kResolutions =
+ base::MakeFixedFlatMap<gfx::Size, VideoResolutionDesignation>(
+ {
+ {{160, 120}, VideoResolutionDesignation::kQQVGA},
+ {{240, 160}, VideoResolutionDesignation::kHQVGA},
+ {{320, 240}, VideoResolutionDesignation::kQVGA},
+ {{432, 240}, VideoResolutionDesignation::kWQVGA},
+ {{480, 320}, VideoResolutionDesignation::kHVGA},
+ {{640, 480}, VideoResolutionDesignation::kVGA},
+ {{720, 480}, VideoResolutionDesignation::kWVGA},
+ {{1024, 576}, VideoResolutionDesignation::kWSVGA},
+ {{800, 600}, VideoResolutionDesignation::kSVGA},
+ {{1280, 960}, VideoResolutionDesignation::kSXGA_MINUS},
+ {{1600, 1200}, VideoResolutionDesignation::kUXGA},
+ {{2048, 1536}, VideoResolutionDesignation::kQXGA},
+ {{176, 144}, VideoResolutionDesignation::kQCIF},
+ {{352, 288}, VideoResolutionDesignation::kCIF},
+ {{640, 360}, VideoResolutionDesignation::kNHD},
+ {{960, 540}, VideoResolutionDesignation::kQHD},
+ {{960, 720}, VideoResolutionDesignation::kHD_FULLSCREEN},
+ {{1280, 720}, VideoResolutionDesignation::kHD},
+ {{1600, 900}, VideoResolutionDesignation::kHD_PLUS},
+ {{1920, 1080}, VideoResolutionDesignation::kFHD},
+ {{2560, 1440}, VideoResolutionDesignation::kWQHD},
+ {{3200, 1800}, VideoResolutionDesignation::kQHD_PLUS},
+ {{3840, 2160}, VideoResolutionDesignation::k4K_UHD},
+ {{4096, 2160}, VideoResolutionDesignation::kDCI_4K},
+ {{5120, 2880}, VideoResolutionDesignation::k5K},
+ {{7680, 4320}, VideoResolutionDesignation::k8K_UHD},
+ {{160, 90}, VideoResolutionDesignation::k160x90},
+ {{320, 176}, VideoResolutionDesignation::k320x176},
+ {{320, 180}, VideoResolutionDesignation::k320x180},
+ {{480, 270}, VideoResolutionDesignation::k480x270},
+ {{544, 288}, VideoResolutionDesignation::k544x288},
+ {{752, 416}, VideoResolutionDesignation::k752x416},
+ {{864, 480}, VideoResolutionDesignation::k864x480},
+ {{800, 448}, VideoResolutionDesignation::k800x448},
+ {{960, 544}, VideoResolutionDesignation::k960x544},
+ {{1184, 656}, VideoResolutionDesignation::k1184x656},
+ {{1392, 768}, VideoResolutionDesignation::k1392x768},
+ {{1504, 832}, VideoResolutionDesignation::k1504x832},
+ {{1600, 896}, VideoResolutionDesignation::k1600x896},
+ {{1712, 960}, VideoResolutionDesignation::k1712x960},
+ {{1792, 1008}, VideoResolutionDesignation::k1792x1008},
+ {{2592, 1944}, VideoResolutionDesignation::k2592x1944},
+ },
+ FrameSizeCompare());
+
+static_assert(kResolutions.size() ==
+ static_cast<size_t>(VideoResolutionDesignation::kMaxValue),
+ "Each resolution must have one entry in kResolutions.");
+
+VideoResolutionDesignation ResolutionNameFromSize(gfx::Size frame_size) {
+ // Rotate such that we are always in landscape.
+ if (frame_size.width() < frame_size.height()) {
+ int tmp = frame_size.width();
+ frame_size.set_width(frame_size.height());
+ frame_size.set_width(tmp);
+ }
+ auto* it = kResolutions.find(frame_size);
+ return it != kResolutions.end() ? it->second
+ : VideoResolutionDesignation::kUnknown;
+}
+
+} // namespace
+
+namespace media {
+
+void LogCaptureDeviceMetrics(
+ base::span<const media::VideoCaptureDeviceInfo> devices_info) {
+ for (const auto& device : devices_info) {
+ base::flat_set<media::VideoPixelFormat> supported_pixel_formats;
+ base::flat_set<gfx::Size, FrameSizeCompare> resolutions;
+ for (const auto& format : device.supported_formats) {
+ VLOG(2) << "Device supports "
+ << media::VideoPixelFormatToString(format.pixel_format) << " at "
+ << format.frame_size.ToString() << " ("
+ << static_cast<int>(ResolutionNameFromSize(format.frame_size))
+ << ")";
+ media::VideoPixelFormat pixel_format = format.pixel_format;
+ bool inserted = supported_pixel_formats.insert(pixel_format).second;
+ if (inserted) {
+ base::UmaHistogramEnumeration(
+ "Media.VideoCapture.Device.SupportedPixelFormat", pixel_format,
+ media::VideoPixelFormat::PIXEL_FORMAT_MAX);
+ }
+ if (!resolutions.contains(format.frame_size)) {
+ resolutions.insert(format.frame_size);
+ base::UmaHistogramEnumeration(
+ "Media.VideoCapture.Device.SupportedResolution",
+ ResolutionNameFromSize(format.frame_size));
+ }
+ }
+ }
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/video_capture_metrics.h b/chromium/media/capture/video/video_capture_metrics.h
new file mode 100644
index 00000000000..f9a963c5a65
--- /dev/null
+++ b/chromium/media/capture/video/video_capture_metrics.h
@@ -0,0 +1,19 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_VIDEO_CAPTURE_METRICS_H_
+#define MEDIA_CAPTURE_VIDEO_VIDEO_CAPTURE_METRICS_H_
+
+#include "base/containers/span.h"
+#include "media/capture/video/video_capture_device_info.h"
+
+namespace media {
+
+CAPTURE_EXPORT
+void LogCaptureDeviceMetrics(
+ base::span<const media::VideoCaptureDeviceInfo> devices_info);
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_VIDEO_CAPTURE_METRICS_H_ \ No newline at end of file
diff --git a/chromium/media/capture/video/video_capture_metrics_unittest.cc b/chromium/media/capture/video/video_capture_metrics_unittest.cc
new file mode 100644
index 00000000000..d259f2098f2
--- /dev/null
+++ b/chromium/media/capture/video/video_capture_metrics_unittest.cc
@@ -0,0 +1,59 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/video_capture_metrics.h"
+
+#include "base/test/metrics/histogram_tester.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+namespace test {
+
+TEST(VideoCaptureMetricsTest, TestLogCaptureDeviceMetrics) {
+ base::HistogramTester histogram_tester;
+ std::vector<media::VideoCaptureDeviceInfo> devices_info;
+ // First device
+ VideoCaptureDeviceInfo first_device;
+ first_device.supported_formats = {
+ // NV12 QQVGA at 30fps, 15fps
+ {{160, 120}, 30.0, media::PIXEL_FORMAT_NV12},
+ {{160, 120}, 15.0, media::PIXEL_FORMAT_NV12},
+ // NV12 VGA
+ {{640, 480}, 30.0, media::PIXEL_FORMAT_NV12},
+ // UYVY VGA
+ {{640, 480}, 30.0, media::PIXEL_FORMAT_UYVY},
+ // MJPEG 4K
+ {{3840, 2160}, 30.0, media::PIXEL_FORMAT_MJPEG},
+ // Odd resolution
+ {{844, 400}, 30.0, media::PIXEL_FORMAT_NV12},
+ // HD at unknown pixel format
+ {{1280, 720}, 30.0, media::PIXEL_FORMAT_UNKNOWN}};
+ devices_info.push_back(first_device);
+ VideoCaptureDeviceInfo second_device;
+ second_device.supported_formats = {
+ // UYVY VGA to test that we get 2 UYVY and 2 VGA in metrics.
+ {{640, 480}, 30.0, media::PIXEL_FORMAT_UYVY}};
+ devices_info.push_back(second_device);
+
+ LogCaptureDeviceMetrics(devices_info);
+
+ EXPECT_THAT(histogram_tester.GetAllSamples(
+ "Media.VideoCapture.Device.SupportedPixelFormat"),
+ testing::UnorderedElementsAre(
+ base::Bucket(media::PIXEL_FORMAT_NV12, 1),
+ base::Bucket(media::PIXEL_FORMAT_UYVY, 2),
+ base::Bucket(media::PIXEL_FORMAT_MJPEG, 1),
+ base::Bucket(media::PIXEL_FORMAT_UNKNOWN, 1)));
+
+ EXPECT_THAT(histogram_tester.GetAllSamples(
+ "Media.VideoCapture.Device.SupportedResolution"),
+ testing::UnorderedElementsAre(
+ base::Bucket(0 /*other*/, 1), base::Bucket(1 /*qqvga*/, 1),
+ base::Bucket(6 /*vga*/, 2), base::Bucket(23 /*4k_UHD*/, 1),
+ base::Bucket(18 /*hd*/, 1)));
+}
+
+} // namespace test
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/video_capture_system_impl.cc b/chromium/media/capture/video/video_capture_system_impl.cc
index 6f13af9ff0e..5ff37450c12 100644
--- a/chromium/media/capture/video/video_capture_system_impl.cc
+++ b/chromium/media/capture/video/video_capture_system_impl.cc
@@ -10,6 +10,7 @@
#include "base/callback_helpers.h"
#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/capture/video/video_capture_metrics.h"
namespace {
@@ -111,6 +112,11 @@ void VideoCaptureSystemImpl::DevicesInfoReady(
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!device_enum_request_queue_.empty());
+ // Only save metrics the first time device infos are populated.
+ if (devices_info_cache_.empty()) {
+ LogCaptureDeviceMetrics(devices_info);
+ }
+
for (auto& device_info : devices_info) {
ConsolidateCaptureFormats(&device_info.supported_formats);
}
diff --git a/chromium/media/capture/video/video_frame_receiver.cc b/chromium/media/capture/video/video_frame_receiver.cc
new file mode 100644
index 00000000000..627143e4487
--- /dev/null
+++ b/chromium/media/capture/video/video_frame_receiver.cc
@@ -0,0 +1,36 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/video_frame_receiver.h"
+
+namespace media {
+
+ReadyFrameInBuffer::ReadyFrameInBuffer(
+ int buffer_id,
+ int frame_feedback_id,
+ std::unique_ptr<VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
+ buffer_read_permission,
+ mojom::VideoFrameInfoPtr frame_info)
+ : buffer_id(buffer_id),
+ frame_feedback_id(frame_feedback_id),
+ buffer_read_permission(std::move(buffer_read_permission)),
+ frame_info(std::move(frame_info)) {}
+
+ReadyFrameInBuffer::ReadyFrameInBuffer(ReadyFrameInBuffer&& other)
+ : buffer_id(other.buffer_id),
+ frame_feedback_id(other.frame_feedback_id),
+ buffer_read_permission(std::move(other.buffer_read_permission)),
+ frame_info(std::move(other.frame_info)) {}
+
+ReadyFrameInBuffer::~ReadyFrameInBuffer() = default;
+
+ReadyFrameInBuffer& ReadyFrameInBuffer::operator=(ReadyFrameInBuffer&& other) {
+ buffer_id = other.buffer_id;
+ frame_feedback_id = other.frame_feedback_id;
+ buffer_read_permission = std::move(other.buffer_read_permission);
+ frame_info = std::move(other.frame_info);
+ return *this;
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/video_frame_receiver.h b/chromium/media/capture/video/video_frame_receiver.h
index e153aa99885..05794e7e715 100644
--- a/chromium/media/capture/video/video_frame_receiver.h
+++ b/chromium/media/capture/video/video_frame_receiver.h
@@ -12,6 +12,26 @@
namespace media {
+struct CAPTURE_EXPORT ReadyFrameInBuffer {
+ ReadyFrameInBuffer(
+ int buffer_id,
+ int frame_feedback_id,
+ std::unique_ptr<
+ VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
+ buffer_read_permission,
+ mojom::VideoFrameInfoPtr frame_info);
+ ReadyFrameInBuffer(ReadyFrameInBuffer&& other);
+ ~ReadyFrameInBuffer();
+
+ ReadyFrameInBuffer& operator=(ReadyFrameInBuffer&& other);
+
+ int buffer_id;
+ int frame_feedback_id;
+ std::unique_ptr<VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
+ buffer_read_permission;
+ mojom::VideoFrameInfoPtr frame_info;
+};
+
// Callback interface for VideoCaptureDeviceClient to communicate with its
// clients. On some platforms, VideoCaptureDeviceClient calls these methods from
// OS or capture driver provided threads which do not have a task runner and
@@ -40,12 +60,8 @@ class CAPTURE_EXPORT VideoFrameReceiver {
// alive and unchanged until VideoFrameReceiver releases the given
// |buffer_read_permission|.
virtual void OnFrameReadyInBuffer(
- int buffer_id,
- int frame_feedback_id,
- std::unique_ptr<
- VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
- buffer_read_permission,
- mojom::VideoFrameInfoPtr frame_info) = 0;
+ ReadyFrameInBuffer frame,
+ std::vector<ReadyFrameInBuffer> scaled_frames) = 0;
// Tells the VideoFrameReceiver that the producer is no longer going to use
// the buffer with id |buffer_id| for frame delivery. This may be called even
diff --git a/chromium/media/capture/video/video_frame_receiver_on_task_runner.cc b/chromium/media/capture/video/video_frame_receiver_on_task_runner.cc
index d563ed5e2ac..db8990cf1c7 100644
--- a/chromium/media/capture/video/video_frame_receiver_on_task_runner.cc
+++ b/chromium/media/capture/video/video_frame_receiver_on_task_runner.cc
@@ -27,16 +27,12 @@ void VideoFrameReceiverOnTaskRunner::OnNewBuffer(
}
void VideoFrameReceiverOnTaskRunner::OnFrameReadyInBuffer(
- int buffer_id,
- int frame_feedback_id,
- std::unique_ptr<VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
- buffer_read_permission,
- mojom::VideoFrameInfoPtr frame_info) {
+ ReadyFrameInBuffer frame,
+ std::vector<ReadyFrameInBuffer> scaled_frames) {
task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&VideoFrameReceiver::OnFrameReadyInBuffer, receiver_,
- buffer_id, frame_feedback_id,
- std::move(buffer_read_permission), std::move(frame_info)));
+ std::move(frame), std::move(scaled_frames)));
}
void VideoFrameReceiverOnTaskRunner::OnBufferRetired(int buffer_id) {
diff --git a/chromium/media/capture/video/video_frame_receiver_on_task_runner.h b/chromium/media/capture/video/video_frame_receiver_on_task_runner.h
index cb472218258..ff2d8113d3a 100644
--- a/chromium/media/capture/video/video_frame_receiver_on_task_runner.h
+++ b/chromium/media/capture/video/video_frame_receiver_on_task_runner.h
@@ -26,12 +26,8 @@ class CAPTURE_EXPORT VideoFrameReceiverOnTaskRunner
void OnNewBuffer(int32_t buffer_id,
media::mojom::VideoBufferHandlePtr buffer_handle) override;
void OnFrameReadyInBuffer(
- int buffer_id,
- int frame_feedback_id,
- std::unique_ptr<
- VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
- buffer_read_permission,
- mojom::VideoFrameInfoPtr frame_info) override;
+ ReadyFrameInBuffer frame,
+ std::vector<ReadyFrameInBuffer> scaled_frames) override;
void OnBufferRetired(int buffer_id) override;
void OnError(VideoCaptureError error) override;
void OnFrameDropped(VideoCaptureFrameDropReason reason) override;
diff --git a/chromium/media/capture/video/win/OWNERS b/chromium/media/capture/video/win/OWNERS
new file mode 100644
index 00000000000..1829bc129d7
--- /dev/null
+++ b/chromium/media/capture/video/win/OWNERS
@@ -0,0 +1,6 @@
+ilnik@chromium.org
+
+# Original (legacy) owners.
+chfremer@chromium.org
+emircan@chromium.org
+mcasas@chromium.org
diff --git a/chromium/media/capture/video/win/d3d_capture_test_utils.cc b/chromium/media/capture/video/win/d3d_capture_test_utils.cc
new file mode 100644
index 00000000000..6f82581cc79
--- /dev/null
+++ b/chromium/media/capture/video/win/d3d_capture_test_utils.cc
@@ -0,0 +1,918 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/win/d3d_capture_test_utils.h"
+
+namespace media {
+
+MockD3D11DeviceContext::MockD3D11DeviceContext() = default;
+MockD3D11DeviceContext::~MockD3D11DeviceContext() = default;
+
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSSetConstantBuffers(
+ UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers_out) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views_out) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSSetShader(ID3D11PixelShader* pixel_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers_out) {
+}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSSetShader(ID3D11VertexShader* vertex_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DrawIndexed(UINT index_count,
+ UINT start_index_location,
+ INT base_vertex_location) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::Draw(UINT vertex_count, UINT start_vertex_location) {}
+IFACEMETHODIMP MockD3D11DeviceContext::Map(
+ ID3D11Resource* resource,
+ UINT subresource,
+ D3D11_MAP MapType,
+ UINT MapFlags,
+ D3D11_MAPPED_SUBRESOURCE* mapped_resource) {
+ return E_NOTIMPL;
+}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::Unmap(ID3D11Resource* resource, UINT subresource) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSSetConstantBuffers(
+ UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IASetInputLayout(ID3D11InputLayout* input_layout) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IASetVertexBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* vertex_buffers,
+ const UINT* strides,
+ const UINT* offsets) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IASetIndexBuffer(ID3D11Buffer* index_buffer,
+ DXGI_FORMAT format,
+ UINT offset) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DrawIndexedInstanced(UINT index_count_per_instance,
+ UINT instance_count,
+ UINT start_index_location,
+ INT base_vertex_location,
+ UINT start_instance_location) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DrawInstanced(UINT vertex_count_per_instance,
+ UINT instance_count,
+ UINT start_vertex_location,
+ UINT start_instance_location) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSSetConstantBuffers(
+ UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSSetShader(ID3D11GeometryShader* shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IASetPrimitiveTopology(
+ D3D11_PRIMITIVE_TOPOLOGY topology) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::Begin(ID3D11Asynchronous* async) {}
+IFACEMETHODIMP_(void) MockD3D11DeviceContext::End(ID3D11Asynchronous* async) {}
+IFACEMETHODIMP MockD3D11DeviceContext::GetData(ID3D11Asynchronous* async,
+ void* data,
+ UINT data_size,
+ UINT get_data_flags) {
+ return E_NOTIMPL;
+}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::SetPredication(ID3D11Predicate* pPredicate,
+ BOOL PredicateValue) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMSetRenderTargets(
+ UINT num_views,
+ ID3D11RenderTargetView* const* render_target_views,
+ ID3D11DepthStencilView* depth_stencil_view) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMSetRenderTargetsAndUnorderedAccessViews(
+ UINT num_rtvs,
+ ID3D11RenderTargetView* const* render_target_views,
+ ID3D11DepthStencilView* depth_stencil_view,
+ UINT uav_start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView* const* unordered_access_views,
+ const UINT* uav_initial_counts) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMSetBlendState(ID3D11BlendState* blend_state,
+ const FLOAT blend_factor[4],
+ UINT sample_mask) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMSetDepthStencilState(
+ ID3D11DepthStencilState* depth_stencil_state,
+ UINT stencil_ref) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::SOSetTargets(UINT num_buffers,
+ ID3D11Buffer* const* so_targets,
+ const UINT* offsets) {}
+IFACEMETHODIMP_(void) MockD3D11DeviceContext::DrawAuto() {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DrawIndexedInstancedIndirect(
+ ID3D11Buffer* buffer_for_args,
+ UINT aligned_byte_offset_for_args) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DrawInstancedIndirect(
+ ID3D11Buffer* buffer_for_args,
+ UINT aligned_byte_offset_for_args) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::Dispatch(UINT thread_group_count_x,
+ UINT thread_group_count_y,
+ UINT thread_group_count_z) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DispatchIndirect(ID3D11Buffer* buffer_for_args,
+ UINT aligned_byte_offset_for_args) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::RSSetState(ID3D11RasterizerState* rasterizer_state) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::RSSetViewports(UINT num_viewports,
+ const D3D11_VIEWPORT* viewports) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::RSSetScissorRects(UINT num_rects,
+ const D3D11_RECT* rects) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CopySubresourceRegion(ID3D11Resource* dest_resource,
+ UINT dest_subresource,
+ UINT dest_x,
+ UINT dest_y,
+ UINT dest_z,
+ ID3D11Resource* source_resource,
+ UINT source_subresource,
+ const D3D11_BOX* source_box) {
+ OnCopySubresourceRegion(dest_resource, dest_subresource, dest_x, dest_y,
+ dest_z, source_resource, source_subresource,
+ source_box);
+}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CopyResource(ID3D11Resource* dest_resource,
+ ID3D11Resource* source_resource) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::UpdateSubresource(ID3D11Resource* dest_resource,
+ UINT dest_subresource,
+ const D3D11_BOX* dest_box,
+ const void* source_data,
+ UINT source_row_pitch,
+ UINT source_depth_pitch) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CopyStructureCount(
+ ID3D11Buffer* dest_buffer,
+ UINT dest_aligned_byte_offset,
+ ID3D11UnorderedAccessView* source_view) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::ClearRenderTargetView(
+ ID3D11RenderTargetView* render_target_view,
+ const FLOAT color_rgba[4]) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::ClearUnorderedAccessViewUint(
+ ID3D11UnorderedAccessView* unordered_access_view,
+ const UINT values[4]) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::ClearUnorderedAccessViewFloat(
+ ID3D11UnorderedAccessView* unordered_access_view,
+ const FLOAT values[4]) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::ClearDepthStencilView(
+ ID3D11DepthStencilView* depth_stencil_view,
+ UINT clear_flags,
+ FLOAT depth,
+ UINT8 stencil) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GenerateMips(
+ ID3D11ShaderResourceView* shader_resource_view) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::SetResourceMinLOD(ID3D11Resource* resource,
+ FLOAT min_lod) {}
+IFACEMETHODIMP_(FLOAT)
+MockD3D11DeviceContext::GetResourceMinLOD(ID3D11Resource* resource) {
+ return 0;
+}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::ResolveSubresource(ID3D11Resource* dest_resource,
+ UINT dest_subresource,
+ ID3D11Resource* source_resource,
+ UINT source_subresource,
+ DXGI_FORMAT format) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::ExecuteCommandList(ID3D11CommandList* command_list,
+ BOOL restore_context_state) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSSetShader(ID3D11HullShader* hull_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSSetConstantBuffers(
+ UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSSetShader(ID3D11DomainShader* domain_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSSetConstantBuffers(
+ UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSSetUnorderedAccessViews(
+ UINT start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView* const* unordered_access_views,
+ const UINT* uav_initial_counts) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSSetShader(ID3D11ComputeShader* computer_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSSetConstantBuffers(
+ UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSGetShader(ID3D11PixelShader** pixel_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSGetShader(ID3D11VertexShader** vertex_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IAGetInputLayout(ID3D11InputLayout** input_layout) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IAGetVertexBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** vertex_buffers,
+ UINT* strides,
+ UINT* offsets) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IAGetIndexBuffer(ID3D11Buffer** index_buffer,
+ DXGI_FORMAT* format,
+ UINT* offset) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSGetShader(ID3D11GeometryShader** geometry_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IAGetPrimitiveTopology(
+ D3D11_PRIMITIVE_TOPOLOGY* topology) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GetPredication(ID3D11Predicate** predicate,
+ BOOL* predicate_value) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMGetRenderTargets(
+ UINT num_views,
+ ID3D11RenderTargetView** render_target_views,
+ ID3D11DepthStencilView** depth_stencil_view) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMGetRenderTargetsAndUnorderedAccessViews(
+ UINT num_rtvs,
+ ID3D11RenderTargetView** render_target_views,
+ ID3D11DepthStencilView** depth_stencil_view,
+ UINT uav_start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView** unordered_access_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMGetBlendState(ID3D11BlendState** blend_state,
+ FLOAT blend_factor[4],
+ UINT* sample_mask) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMGetDepthStencilState(
+ ID3D11DepthStencilState** depth_stencil_state,
+ UINT* stencil_ref) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::SOGetTargets(UINT num_buffers,
+ ID3D11Buffer** so_targets) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::RSGetState(ID3D11RasterizerState** rasterizer_state) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::RSGetViewports(UINT* num_viewports,
+ D3D11_VIEWPORT* viewports) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::RSGetScissorRects(UINT* num_rects, D3D11_RECT* rects) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSGetShader(ID3D11HullShader** hull_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSGetShader(ID3D11DomainShader** domain_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSGetUnorderedAccessViews(
+ UINT start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView** unordered_access_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSGetShader(ID3D11ComputeShader** pcomputer_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) {}
+IFACEMETHODIMP_(void) MockD3D11DeviceContext::ClearState() {}
+IFACEMETHODIMP_(void) MockD3D11DeviceContext::Flush() {}
+IFACEMETHODIMP_(D3D11_DEVICE_CONTEXT_TYPE) MockD3D11DeviceContext::GetType() {
+ return D3D11_DEVICE_CONTEXT_IMMEDIATE;
+}
+IFACEMETHODIMP_(UINT) MockD3D11DeviceContext::GetContextFlags() {
+ return 0;
+}
+IFACEMETHODIMP MockD3D11DeviceContext::FinishCommandList(
+ BOOL restore_deferred_context_state,
+ ID3D11CommandList** command_list) {
+ return E_NOTIMPL;
+}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GetDevice(ID3D11Device** device) {}
+IFACEMETHODIMP MockD3D11DeviceContext::GetPrivateData(REFGUID guid,
+ UINT* data_size,
+ void* data) {
+ return E_NOTIMPL;
+}
+IFACEMETHODIMP MockD3D11DeviceContext::SetPrivateData(REFGUID guid,
+ UINT data_size,
+ const void* data) {
+ return E_NOTIMPL;
+}
+IFACEMETHODIMP MockD3D11DeviceContext::SetPrivateDataInterface(
+ REFGUID guid,
+ const IUnknown* data) {
+ return E_NOTIMPL;
+}
+
+MockD3D11Device::MockD3D11Device()
+ : mock_immediate_context_(new MockD3D11DeviceContext()) {}
+MockD3D11Device::~MockD3D11Device() {}
+
+IFACEMETHODIMP MockD3D11Device::CreateBuffer(
+ const D3D11_BUFFER_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+ ID3D11Buffer** ppBuffer) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateTexture1D(
+ const D3D11_TEXTURE1D_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+ ID3D11Texture1D** texture1D) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateTexture2D(
+ const D3D11_TEXTURE2D_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+ ID3D11Texture2D** texture2D) {
+ OnCreateTexture2D(desc, initial_data, texture2D);
+ Microsoft::WRL::ComPtr<MockD3D11Texture2D> mock_texture(
+ new MockD3D11Texture2D());
+ return mock_texture.CopyTo(IID_PPV_ARGS(texture2D));
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateTexture3D(
+ const D3D11_TEXTURE3D_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+ ID3D11Texture3D** texture2D) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateShaderResourceView(
+ ID3D11Resource* resource,
+ const D3D11_SHADER_RESOURCE_VIEW_DESC* desc,
+ ID3D11ShaderResourceView** srv) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateUnorderedAccessView(
+ ID3D11Resource* resource,
+ const D3D11_UNORDERED_ACCESS_VIEW_DESC* desc,
+ ID3D11UnorderedAccessView** uaview) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateRenderTargetView(
+ ID3D11Resource* resource,
+ const D3D11_RENDER_TARGET_VIEW_DESC* desc,
+ ID3D11RenderTargetView** rtv) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateDepthStencilView(
+ ID3D11Resource* resource,
+ const D3D11_DEPTH_STENCIL_VIEW_DESC* desc,
+ ID3D11DepthStencilView** depth_stencil_view) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateInputLayout(
+ const D3D11_INPUT_ELEMENT_DESC* input_element_descs,
+ UINT num_elements,
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11InputLayout** input_layout) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateVertexShader(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11VertexShader** vertex_shader) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateGeometryShader(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11GeometryShader** geometry_shader) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateGeometryShaderWithStreamOutput(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ const D3D11_SO_DECLARATION_ENTRY* so_declaration,
+ UINT num_entries,
+ const UINT* buffer_strides,
+ UINT num_strides,
+ UINT rasterized_stream,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11GeometryShader** geometry_shader) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreatePixelShader(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11PixelShader** pixel_shader) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateHullShader(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11HullShader** hull_shader) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateDomainShader(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11DomainShader** domain_shader) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateComputeShader(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11ComputeShader** compute_shader) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateClassLinkage(
+ ID3D11ClassLinkage** linkage) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateBlendState(
+ const D3D11_BLEND_DESC* blend_state_desc,
+ ID3D11BlendState** blend_state) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateDepthStencilState(
+ const D3D11_DEPTH_STENCIL_DESC* depth_stencil_desc,
+ ID3D11DepthStencilState** depth_stencil_state) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateRasterizerState(
+ const D3D11_RASTERIZER_DESC* rasterizer_desc,
+ ID3D11RasterizerState** rasterizer_state) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateSamplerState(
+ const D3D11_SAMPLER_DESC* sampler_desc,
+ ID3D11SamplerState** sampler_state) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateQuery(const D3D11_QUERY_DESC* query_desc,
+ ID3D11Query** query) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreatePredicate(
+ const D3D11_QUERY_DESC* predicate_desc,
+ ID3D11Predicate** predicate) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateCounter(
+ const D3D11_COUNTER_DESC* counter_desc,
+ ID3D11Counter** counter) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateDeferredContext(
+ UINT context_flags,
+ ID3D11DeviceContext** deferred_context) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::OpenSharedResource(HANDLE resource,
+ REFIID returned_interface,
+ void** resource_out) {
+ return DoOpenSharedResource1(resource, returned_interface, resource_out);
+}
+
+IFACEMETHODIMP MockD3D11Device::CheckFormatSupport(DXGI_FORMAT format,
+ UINT* format_support) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CheckMultisampleQualityLevels(
+ DXGI_FORMAT format,
+ UINT sample_count,
+ UINT* num_quality_levels) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP_(void)
+MockD3D11Device::CheckCounterInfo(D3D11_COUNTER_INFO* counter_info) {}
+
+IFACEMETHODIMP MockD3D11Device::CheckCounter(const D3D11_COUNTER_DESC* desc,
+ D3D11_COUNTER_TYPE* type,
+ UINT* active_counters,
+ LPSTR name,
+ UINT* name_length,
+ LPSTR units,
+ UINT* units_length,
+ LPSTR description,
+ UINT* description_length) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CheckFeatureSupport(
+ D3D11_FEATURE feature,
+ void* feature_support_data,
+ UINT feature_support_data_size) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::GetPrivateData(REFGUID guid,
+ UINT* data_size,
+ void* data) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::SetPrivateData(REFGUID guid,
+ UINT data_size,
+ const void* data) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::SetPrivateDataInterface(REFGUID guid,
+ const IUnknown* data) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP_(D3D_FEATURE_LEVEL) MockD3D11Device::GetFeatureLevel() {
+ return D3D_FEATURE_LEVEL_11_1;
+}
+
+IFACEMETHODIMP_(UINT) MockD3D11Device::GetCreationFlags() {
+ return 0;
+}
+
+IFACEMETHODIMP MockD3D11Device::GetDeviceRemovedReason() {
+ return OnGetDeviceRemovedReason();
+}
+
+IFACEMETHODIMP_(void)
+MockD3D11Device::GetImmediateContext(ID3D11DeviceContext** immediate_context) {
+ mock_immediate_context_.CopyTo(immediate_context);
+}
+
+IFACEMETHODIMP MockD3D11Device::SetExceptionMode(UINT raise_flags) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP_(UINT) MockD3D11Device::GetExceptionMode() {
+ return 0;
+}
+
+IFACEMETHODIMP_(void)
+MockD3D11Device::GetImmediateContext1(
+ ID3D11DeviceContext1** immediate_context) {}
+
+IFACEMETHODIMP MockD3D11Device::CreateDeferredContext1(
+ UINT context_flags,
+ ID3D11DeviceContext1** deferred_context) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateBlendState1(
+ const D3D11_BLEND_DESC1* blend_state_desc,
+ ID3D11BlendState1** blend_state) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateRasterizerState1(
+ const D3D11_RASTERIZER_DESC1* rasterizer_desc,
+ ID3D11RasterizerState1** rasterizer_state) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateDeviceContextState(
+ UINT flags,
+ const D3D_FEATURE_LEVEL* feature_levels,
+ UINT feature_level_count,
+ UINT sdk_version,
+ REFIID emulated_interface,
+ D3D_FEATURE_LEVEL* chosen_feature_level,
+ ID3DDeviceContextState** context_state) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::OpenSharedResource1(HANDLE resource,
+ REFIID returned_interface,
+ void** resource_out) {
+ return DoOpenSharedResource1(resource, returned_interface, resource_out);
+}
+
+IFACEMETHODIMP MockD3D11Device::OpenSharedResourceByName(
+ LPCWSTR name,
+ DWORD desired_access,
+ REFIID returned_interface,
+ void** resource_out) {
+ return E_NOTIMPL;
+}
+
+// Setup default actions for mocked methods
+void MockD3D11Device::SetupDefaultMocks() {
+ ON_CALL(*this, OnGetDeviceRemovedReason).WillByDefault([]() { return S_OK; });
+ ON_CALL(*this, DoOpenSharedResource1)
+ .WillByDefault([](HANDLE, REFIID, void**) { return E_NOTIMPL; });
+}
+
+IFACEMETHODIMP MockDXGIResource::CreateSubresourceSurface(
+ UINT index,
+ IDXGISurface2** surface) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::CreateSharedHandle(
+ const SECURITY_ATTRIBUTES* attributes,
+ DWORD access,
+ LPCWSTR name,
+ HANDLE* handle) {
+ // Need to provide a real handle to client, so create an event handle
+ *handle = CreateEvent(nullptr, FALSE, FALSE, nullptr);
+ return S_OK;
+}
+
+IFACEMETHODIMP MockDXGIResource::GetSharedHandle(HANDLE* shared_handle) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::GetUsage(DXGI_USAGE* usage) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::SetEvictionPriority(UINT eviction_priority) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::GetEvictionPriority(UINT* eviction_priority) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::GetDevice(REFIID riid, void** device) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::SetPrivateData(REFGUID name,
+ UINT data_size,
+ const void* data) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::SetPrivateDataInterface(
+ REFGUID name,
+ const IUnknown* unknown) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::GetPrivateData(REFGUID name,
+ UINT* data_size,
+ void* data) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::GetParent(REFIID riid, void** parent) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::AcquireSync(UINT64 key, DWORD milliseconds) {
+ return S_OK;
+}
+IFACEMETHODIMP MockDXGIResource::ReleaseSync(UINT64 key) {
+ return S_OK;
+}
+
+MockDXGIResource::~MockDXGIResource() {}
+
+MockD3D11Texture2D::MockD3D11Texture2D(D3D11_TEXTURE2D_DESC desc,
+ ID3D11Device* device)
+ : desc_(desc), device_(device) {}
+MockD3D11Texture2D::MockD3D11Texture2D() {}
+
+IFACEMETHODIMP MockD3D11Texture2D::QueryInterface(REFIID riid, void** object) {
+ if (riid == __uuidof(IDXGIResource1) || riid == __uuidof(IDXGIKeyedMutex)) {
+ if (!mock_resource_) {
+ mock_resource_ = new MockDXGIResource();
+ }
+ return mock_resource_.CopyTo(riid, object);
+ }
+ return MockInterface::QueryInterface(riid, object);
+}
+
+IFACEMETHODIMP_(void) MockD3D11Texture2D::GetDesc(D3D11_TEXTURE2D_DESC* desc) {
+ *desc = desc_;
+}
+IFACEMETHODIMP_(void)
+MockD3D11Texture2D::GetType(D3D11_RESOURCE_DIMENSION* resource_dimension) {}
+IFACEMETHODIMP_(void)
+MockD3D11Texture2D::SetEvictionPriority(UINT eviction_priority) {}
+IFACEMETHODIMP_(UINT) MockD3D11Texture2D::GetEvictionPriority() {
+ return 0;
+}
+IFACEMETHODIMP_(void) MockD3D11Texture2D::GetDevice(ID3D11Device** device) {
+ if (device_) {
+ device_.CopyTo(device);
+ }
+}
+IFACEMETHODIMP MockD3D11Texture2D::GetPrivateData(REFGUID guid,
+ UINT* data_size,
+ void* data) {
+ return E_NOTIMPL;
+}
+IFACEMETHODIMP MockD3D11Texture2D::SetPrivateData(REFGUID guid,
+ UINT data_size,
+ const void* data) {
+ return E_NOTIMPL;
+}
+IFACEMETHODIMP MockD3D11Texture2D::SetPrivateDataInterface(
+ REFGUID guid,
+ const IUnknown* data) {
+ return E_NOTIMPL;
+}
+MockD3D11Texture2D::~MockD3D11Texture2D() {}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/win/d3d_capture_test_utils.h b/chromium/media/capture/video/win/d3d_capture_test_utils.h
new file mode 100644
index 00000000000..c9dc9198086
--- /dev/null
+++ b/chromium/media/capture/video/win/d3d_capture_test_utils.h
@@ -0,0 +1,731 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_WIN_D3D_CAPTURE_TEST_UTILS_H_
+#define MEDIA_CAPTURE_VIDEO_WIN_D3D_CAPTURE_TEST_UTILS_H_
+
+#include <d3d11_4.h>
+#include <wrl.h>
+#include "base/memory/ref_counted.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+template <class... Interface>
+class MockInterface
+ : public base::RefCountedThreadSafe<MockInterface<Interface...>> {
+ public:
+ // IUnknown
+ IFACEMETHODIMP QueryInterface(REFIID riid, void** object) {
+ if (riid == __uuidof(IUnknown)) {
+ this->AddRef();
+ *object = this;
+ return S_OK;
+ }
+ return E_NOINTERFACE;
+ }
+ IFACEMETHODIMP_(ULONG) AddRef() {
+ base::RefCountedThreadSafe<MockInterface<Interface...>>::AddRef();
+ return 1U;
+ }
+ IFACEMETHODIMP_(ULONG) Release() {
+ base::RefCountedThreadSafe<MockInterface<Interface...>>::Release();
+ return 1U;
+ }
+
+ protected:
+ friend class base::RefCountedThreadSafe<MockInterface<Interface...>>;
+ virtual ~MockInterface() = default;
+};
+
+template <class Interface, class... Interfaces>
+class MockInterface<Interface, Interfaces...>
+ : public MockInterface<Interfaces...>, public Interface {
+ public:
+ IFACEMETHODIMP QueryInterface(REFIID riid, void** object) override {
+ if (riid == __uuidof(Interface)) {
+ this->AddRef();
+ *object = static_cast<Interface*>(this);
+ return S_OK;
+ }
+ return MockInterface<Interfaces...>::QueryInterface(riid, object);
+ }
+
+ IFACEMETHODIMP_(ULONG) AddRef() override {
+ return MockInterface<Interfaces...>::AddRef();
+ }
+
+ IFACEMETHODIMP_(ULONG) Release() override {
+ return MockInterface<Interfaces...>::Release();
+ }
+};
+
+class MockD3D11DeviceContext final : public MockInterface<ID3D11DeviceContext> {
+ public:
+ MockD3D11DeviceContext();
+
+ // ID3D11DeviceContext
+ IFACEMETHODIMP_(void)
+ VSSetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers_out) override;
+ IFACEMETHODIMP_(void)
+ PSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views_out) override;
+ IFACEMETHODIMP_(void)
+ PSSetShader(ID3D11PixelShader* pixel_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ PSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers_out) override;
+ IFACEMETHODIMP_(void)
+ VSSetShader(ID3D11VertexShader* vertex_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ DrawIndexed(UINT index_count,
+ UINT start_index_location,
+ INT base_vertex_location) override;
+ IFACEMETHODIMP_(void)
+ Draw(UINT vertex_count, UINT start_vertex_location) override;
+ IFACEMETHODIMP Map(ID3D11Resource* resource,
+ UINT subresource,
+ D3D11_MAP MapType,
+ UINT MapFlags,
+ D3D11_MAPPED_SUBRESOURCE* mapped_resource) override;
+ IFACEMETHODIMP_(void)
+ Unmap(ID3D11Resource* resource, UINT subresource) override;
+ IFACEMETHODIMP_(void)
+ PSSetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ IASetInputLayout(ID3D11InputLayout* input_layout) override;
+ IFACEMETHODIMP_(void)
+ IASetVertexBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* vertex_buffers,
+ const UINT* strides,
+ const UINT* offsets) override;
+ IFACEMETHODIMP_(void)
+ IASetIndexBuffer(ID3D11Buffer* index_buffer,
+ DXGI_FORMAT format,
+ UINT offset) override;
+ IFACEMETHODIMP_(void)
+ DrawIndexedInstanced(UINT index_count_per_instance,
+ UINT instance_count,
+ UINT start_index_location,
+ INT base_vertex_location,
+ UINT start_instance_location) override;
+ IFACEMETHODIMP_(void)
+ DrawInstanced(UINT vertex_count_per_instance,
+ UINT instance_count,
+ UINT start_vertex_location,
+ UINT start_instance_location) override;
+ IFACEMETHODIMP_(void)
+ GSSetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ GSSetShader(ID3D11GeometryShader* shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY topology) override;
+ IFACEMETHODIMP_(void)
+ VSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ VSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) override;
+ IFACEMETHODIMP_(void) Begin(ID3D11Asynchronous* async) override;
+ IFACEMETHODIMP_(void) End(ID3D11Asynchronous* async) override;
+ IFACEMETHODIMP GetData(ID3D11Asynchronous* async,
+ void* data,
+ UINT data_size,
+ UINT get_data_flags) override;
+ IFACEMETHODIMP_(void)
+ SetPredication(ID3D11Predicate* pPredicate, BOOL PredicateValue) override;
+ IFACEMETHODIMP_(void)
+ GSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ GSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) override;
+ IFACEMETHODIMP_(void)
+ OMSetRenderTargets(UINT num_views,
+ ID3D11RenderTargetView* const* render_target_views,
+ ID3D11DepthStencilView* depth_stencil_view) override;
+ IFACEMETHODIMP_(void)
+ OMSetRenderTargetsAndUnorderedAccessViews(
+ UINT num_rtvs,
+ ID3D11RenderTargetView* const* render_target_views,
+ ID3D11DepthStencilView* depth_stencil_view,
+ UINT uav_start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView* const* unordered_access_views,
+ const UINT* uav_initial_counts) override;
+ IFACEMETHODIMP_(void)
+ OMSetBlendState(ID3D11BlendState* blend_state,
+ const FLOAT blend_factor[4],
+ UINT sample_mask) override;
+ IFACEMETHODIMP_(void)
+ OMSetDepthStencilState(ID3D11DepthStencilState* depth_stencil_state,
+ UINT stencil_ref) override;
+ IFACEMETHODIMP_(void)
+ SOSetTargets(UINT num_buffers,
+ ID3D11Buffer* const* so_targets,
+ const UINT* offsets) override;
+ IFACEMETHODIMP_(void) DrawAuto() override;
+ IFACEMETHODIMP_(void)
+ DrawIndexedInstancedIndirect(ID3D11Buffer* buffer_for_args,
+ UINT aligned_byte_offset_for_args) override;
+ IFACEMETHODIMP_(void)
+ DrawInstancedIndirect(ID3D11Buffer* buffer_for_args,
+ UINT aligned_byte_offset_for_args) override;
+ IFACEMETHODIMP_(void)
+ Dispatch(UINT thread_group_count_x,
+ UINT thread_group_count_y,
+ UINT thread_group_count_z) override;
+ IFACEMETHODIMP_(void)
+ DispatchIndirect(ID3D11Buffer* buffer_for_args,
+ UINT aligned_byte_offset_for_args) override;
+ IFACEMETHODIMP_(void)
+ RSSetState(ID3D11RasterizerState* rasterizer_state) override;
+ IFACEMETHODIMP_(void)
+ RSSetViewports(UINT num_viewports, const D3D11_VIEWPORT* viewports) override;
+ IFACEMETHODIMP_(void)
+ RSSetScissorRects(UINT num_rects, const D3D11_RECT* rects) override;
+ IFACEMETHODIMP_(void)
+ CopySubresourceRegion(ID3D11Resource* dest_resource,
+ UINT dest_subresource,
+ UINT dest_x,
+ UINT dest_y,
+ UINT dest_z,
+ ID3D11Resource* source_resource,
+ UINT source_subresource,
+ const D3D11_BOX* source_box) override;
+ MOCK_METHOD8(OnCopySubresourceRegion,
+ void(ID3D11Resource*,
+ UINT,
+ UINT,
+ UINT,
+ UINT,
+ ID3D11Resource*,
+ UINT,
+ const D3D11_BOX*));
+ IFACEMETHODIMP_(void)
+ CopyResource(ID3D11Resource* dest_resource,
+ ID3D11Resource* source_resource) override;
+ IFACEMETHODIMP_(void)
+ UpdateSubresource(ID3D11Resource* dest_resource,
+ UINT dest_subresource,
+ const D3D11_BOX* dest_box,
+ const void* source_data,
+ UINT source_row_pitch,
+ UINT source_depth_pitch) override;
+ IFACEMETHODIMP_(void)
+ CopyStructureCount(ID3D11Buffer* dest_buffer,
+ UINT dest_aligned_byte_offset,
+ ID3D11UnorderedAccessView* source_view) override;
+ IFACEMETHODIMP_(void)
+ ClearRenderTargetView(ID3D11RenderTargetView* render_target_view,
+ const FLOAT color_rgba[4]) override;
+ IFACEMETHODIMP_(void)
+ ClearUnorderedAccessViewUint(ID3D11UnorderedAccessView* unordered_access_view,
+ const UINT values[4]) override;
+ IFACEMETHODIMP_(void)
+ ClearUnorderedAccessViewFloat(
+ ID3D11UnorderedAccessView* unordered_access_view,
+ const FLOAT values[4]) override;
+ IFACEMETHODIMP_(void)
+ ClearDepthStencilView(ID3D11DepthStencilView* depth_stencil_view,
+ UINT clear_flags,
+ FLOAT depth,
+ UINT8 stencil) override;
+ IFACEMETHODIMP_(void)
+ GenerateMips(ID3D11ShaderResourceView* shader_resource_view) override;
+ IFACEMETHODIMP_(void)
+ SetResourceMinLOD(ID3D11Resource* resource, FLOAT min_lod) override;
+ IFACEMETHODIMP_(FLOAT) GetResourceMinLOD(ID3D11Resource* resource) override;
+ IFACEMETHODIMP_(void)
+ ResolveSubresource(ID3D11Resource* dest_resource,
+ UINT dest_subresource,
+ ID3D11Resource* source_resource,
+ UINT source_subresource,
+ DXGI_FORMAT format) override;
+ IFACEMETHODIMP_(void)
+ ExecuteCommandList(ID3D11CommandList* command_list,
+ BOOL restore_context_state) override;
+ IFACEMETHODIMP_(void)
+ HSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ HSSetShader(ID3D11HullShader* hull_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ HSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) override;
+ IFACEMETHODIMP_(void)
+ HSSetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ DSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ DSSetShader(ID3D11DomainShader* domain_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ DSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) override;
+ IFACEMETHODIMP_(void)
+ DSSetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ CSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ CSSetUnorderedAccessViews(
+ UINT start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView* const* unordered_access_views,
+ const UINT* uav_initial_counts) override;
+ IFACEMETHODIMP_(void)
+ CSSetShader(ID3D11ComputeShader* computer_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ CSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) override;
+ IFACEMETHODIMP_(void)
+ CSSetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ VSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ PSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ PSGetShader(ID3D11PixelShader** pixel_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ PSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) override;
+ IFACEMETHODIMP_(void)
+ VSGetShader(ID3D11VertexShader** vertex_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ PSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ IAGetInputLayout(ID3D11InputLayout** input_layout) override;
+ IFACEMETHODIMP_(void)
+ IAGetVertexBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** vertex_buffers,
+ UINT* strides,
+ UINT* offsets) override;
+ IFACEMETHODIMP_(void)
+ IAGetIndexBuffer(ID3D11Buffer** index_buffer,
+ DXGI_FORMAT* format,
+ UINT* offset) override;
+ IFACEMETHODIMP_(void)
+ GSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ GSGetShader(ID3D11GeometryShader** geometry_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ IAGetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY* topology) override;
+ IFACEMETHODIMP_(void)
+ VSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ VSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) override;
+ IFACEMETHODIMP_(void)
+ GetPredication(ID3D11Predicate** predicate, BOOL* predicate_value) override;
+ IFACEMETHODIMP_(void)
+ GSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ GSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) override;
+ IFACEMETHODIMP_(void)
+ OMGetRenderTargets(UINT num_views,
+ ID3D11RenderTargetView** render_target_views,
+ ID3D11DepthStencilView** depth_stencil_view) override;
+ IFACEMETHODIMP_(void)
+ OMGetRenderTargetsAndUnorderedAccessViews(
+ UINT num_rtvs,
+ ID3D11RenderTargetView** render_target_views,
+ ID3D11DepthStencilView** depth_stencil_view,
+ UINT uav_start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView** unordered_access_views) override;
+ IFACEMETHODIMP_(void)
+ OMGetBlendState(ID3D11BlendState** blend_state,
+ FLOAT blend_factor[4],
+ UINT* sample_mask) override;
+ IFACEMETHODIMP_(void)
+ OMGetDepthStencilState(ID3D11DepthStencilState** depth_stencil_state,
+ UINT* stencil_ref) override;
+ IFACEMETHODIMP_(void)
+ SOGetTargets(UINT num_buffers, ID3D11Buffer** so_targets) override;
+ IFACEMETHODIMP_(void)
+ RSGetState(ID3D11RasterizerState** rasterizer_state) override;
+ IFACEMETHODIMP_(void)
+ RSGetViewports(UINT* num_viewports, D3D11_VIEWPORT* viewports) override;
+ IFACEMETHODIMP_(void)
+ RSGetScissorRects(UINT* num_rects, D3D11_RECT* rects) override;
+ IFACEMETHODIMP_(void)
+ HSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ HSGetShader(ID3D11HullShader** hull_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ HSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) override;
+ IFACEMETHODIMP_(void)
+ HSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ DSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ DSGetShader(ID3D11DomainShader** domain_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ DSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) override;
+ IFACEMETHODIMP_(void)
+ DSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ CSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ CSGetUnorderedAccessViews(
+ UINT start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView** unordered_access_views) override;
+ IFACEMETHODIMP_(void)
+ CSGetShader(ID3D11ComputeShader** pcomputer_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ CSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) override;
+ IFACEMETHODIMP_(void)
+ CSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) override;
+ IFACEMETHODIMP_(void) ClearState() override;
+ IFACEMETHODIMP_(void) Flush() override;
+ IFACEMETHODIMP_(D3D11_DEVICE_CONTEXT_TYPE) GetType() override;
+ IFACEMETHODIMP_(UINT) GetContextFlags() override;
+ IFACEMETHODIMP FinishCommandList(BOOL restore_deferred_context_state,
+ ID3D11CommandList** command_list) override;
+
+ // ID3D11DeviceChild
+ IFACEMETHODIMP_(void) GetDevice(ID3D11Device** device) override;
+ IFACEMETHODIMP GetPrivateData(REFGUID guid,
+ UINT* data_size,
+ void* data) override;
+ IFACEMETHODIMP SetPrivateData(REFGUID guid,
+ UINT data_size,
+ const void* data) override;
+ IFACEMETHODIMP SetPrivateDataInterface(REFGUID guid,
+ const IUnknown* data) override;
+
+ private:
+ ~MockD3D11DeviceContext() override;
+};
+
+class MockD3D11Device final : public MockInterface<ID3D11Device1> {
+ public:
+ MockD3D11Device();
+
+ IFACEMETHODIMP QueryInterface(REFIID riid, void** object) override {
+ if (riid == __uuidof(ID3D11Device)) {
+ this->AddRef();
+ *object = static_cast<ID3D11Device*>(this);
+ return S_OK;
+ }
+ return MockInterface::QueryInterface(riid, object);
+ }
+
+ // ID3D11Device
+ IFACEMETHODIMP CreateBuffer(const D3D11_BUFFER_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+ ID3D11Buffer** ppBuffer);
+ IFACEMETHODIMP CreateTexture1D(const D3D11_TEXTURE1D_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+ ID3D11Texture1D** texture1D);
+ IFACEMETHODIMP CreateTexture2D(const D3D11_TEXTURE2D_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+ ID3D11Texture2D** texture2D);
+ MOCK_METHOD3(OnCreateTexture2D,
+ HRESULT(const D3D11_TEXTURE2D_DESC*,
+ const D3D11_SUBRESOURCE_DATA*,
+ ID3D11Texture2D**));
+ IFACEMETHODIMP CreateTexture3D(const D3D11_TEXTURE3D_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+ ID3D11Texture3D** texture2D);
+ IFACEMETHODIMP CreateShaderResourceView(
+ ID3D11Resource* resource,
+ const D3D11_SHADER_RESOURCE_VIEW_DESC* desc,
+ ID3D11ShaderResourceView** srv);
+ IFACEMETHODIMP CreateUnorderedAccessView(
+ ID3D11Resource* resource,
+ const D3D11_UNORDERED_ACCESS_VIEW_DESC* desc,
+ ID3D11UnorderedAccessView** uaview);
+ IFACEMETHODIMP CreateRenderTargetView(
+ ID3D11Resource* resource,
+ const D3D11_RENDER_TARGET_VIEW_DESC* desc,
+ ID3D11RenderTargetView** rtv);
+ IFACEMETHODIMP CreateDepthStencilView(
+ ID3D11Resource* resource,
+ const D3D11_DEPTH_STENCIL_VIEW_DESC* desc,
+ ID3D11DepthStencilView** depth_stencil_view);
+ IFACEMETHODIMP CreateInputLayout(
+ const D3D11_INPUT_ELEMENT_DESC* input_element_descs,
+ UINT num_elements,
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11InputLayout** input_layout);
+ IFACEMETHODIMP CreateVertexShader(const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11VertexShader** vertex_shader);
+ IFACEMETHODIMP CreateGeometryShader(const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11GeometryShader** geometry_shader);
+ IFACEMETHODIMP CreateGeometryShaderWithStreamOutput(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ const D3D11_SO_DECLARATION_ENTRY* so_declaration,
+ UINT num_entries,
+ const UINT* buffer_strides,
+ UINT num_strides,
+ UINT rasterized_stream,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11GeometryShader** geometry_shader);
+ IFACEMETHODIMP CreatePixelShader(const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11PixelShader** pixel_shader);
+ IFACEMETHODIMP CreateHullShader(const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11HullShader** hull_shader);
+ IFACEMETHODIMP CreateDomainShader(const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11DomainShader** domain_shader);
+ IFACEMETHODIMP CreateComputeShader(const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11ComputeShader** compute_shader);
+ IFACEMETHODIMP CreateClassLinkage(ID3D11ClassLinkage** linkage);
+ IFACEMETHODIMP CreateBlendState(const D3D11_BLEND_DESC* blend_state_desc,
+ ID3D11BlendState** blend_state);
+ IFACEMETHODIMP CreateDepthStencilState(
+ const D3D11_DEPTH_STENCIL_DESC* depth_stencil_desc,
+ ID3D11DepthStencilState** depth_stencil_state);
+ IFACEMETHODIMP CreateRasterizerState(
+ const D3D11_RASTERIZER_DESC* rasterizer_desc,
+ ID3D11RasterizerState** rasterizer_state);
+ IFACEMETHODIMP CreateSamplerState(const D3D11_SAMPLER_DESC* sampler_desc,
+ ID3D11SamplerState** sampler_state);
+ IFACEMETHODIMP CreateQuery(const D3D11_QUERY_DESC* query_desc,
+ ID3D11Query** query);
+ IFACEMETHODIMP CreatePredicate(const D3D11_QUERY_DESC* predicate_desc,
+ ID3D11Predicate** predicate);
+ IFACEMETHODIMP CreateCounter(const D3D11_COUNTER_DESC* counter_desc,
+ ID3D11Counter** counter);
+ IFACEMETHODIMP CreateDeferredContext(UINT context_flags,
+ ID3D11DeviceContext** deferred_context);
+ IFACEMETHODIMP OpenSharedResource(HANDLE resource,
+ REFIID returned_interface,
+ void** resource_out);
+ IFACEMETHODIMP CheckFormatSupport(DXGI_FORMAT format, UINT* format_support);
+ IFACEMETHODIMP CheckMultisampleQualityLevels(DXGI_FORMAT format,
+ UINT sample_count,
+ UINT* num_quality_levels);
+ IFACEMETHODIMP_(void) CheckCounterInfo(D3D11_COUNTER_INFO* counter_info);
+ IFACEMETHODIMP CheckCounter(const D3D11_COUNTER_DESC* desc,
+ D3D11_COUNTER_TYPE* type,
+ UINT* active_counters,
+ LPSTR name,
+ UINT* name_length,
+ LPSTR units,
+ UINT* units_length,
+ LPSTR description,
+ UINT* description_length);
+ IFACEMETHODIMP CheckFeatureSupport(D3D11_FEATURE feature,
+ void* feature_support_data,
+ UINT feature_support_data_size);
+ IFACEMETHODIMP GetPrivateData(REFGUID guid, UINT* data_size, void* data);
+ IFACEMETHODIMP SetPrivateData(REFGUID guid, UINT data_size, const void* data);
+ IFACEMETHODIMP SetPrivateDataInterface(REFGUID guid, const IUnknown* data);
+ IFACEMETHODIMP_(D3D_FEATURE_LEVEL) GetFeatureLevel();
+ IFACEMETHODIMP_(UINT) GetCreationFlags();
+ IFACEMETHODIMP GetDeviceRemovedReason();
+ MOCK_METHOD0(OnGetDeviceRemovedReason, HRESULT());
+ IFACEMETHODIMP_(void)
+ GetImmediateContext(ID3D11DeviceContext** immediate_context);
+ IFACEMETHODIMP SetExceptionMode(UINT raise_flags);
+ IFACEMETHODIMP_(UINT) GetExceptionMode();
+
+ // ID3D11Device1
+ IFACEMETHODIMP_(void)
+ GetImmediateContext1(ID3D11DeviceContext1** immediate_context);
+ IFACEMETHODIMP CreateDeferredContext1(
+ UINT context_flags,
+ ID3D11DeviceContext1** deferred_context);
+ IFACEMETHODIMP CreateBlendState1(const D3D11_BLEND_DESC1* blend_state_desc,
+ ID3D11BlendState1** blend_state);
+ IFACEMETHODIMP CreateRasterizerState1(
+ const D3D11_RASTERIZER_DESC1* rasterizer_desc,
+ ID3D11RasterizerState1** rasterizer_state);
+ IFACEMETHODIMP CreateDeviceContextState(
+ UINT flags,
+ const D3D_FEATURE_LEVEL* feature_levels,
+ UINT feature_level_count,
+ UINT sdk_version,
+ REFIID emulated_interface,
+ D3D_FEATURE_LEVEL* chosen_feature_level,
+ ID3DDeviceContextState** context_state);
+ IFACEMETHODIMP OpenSharedResource1(HANDLE resource,
+ REFIID returned_interface,
+ void** resource_out);
+ MOCK_METHOD3(DoOpenSharedResource1, HRESULT(HANDLE, REFIID, void**));
+ IFACEMETHODIMP OpenSharedResourceByName(LPCWSTR name,
+ DWORD desired_access,
+ REFIID returned_interface,
+ void** resource_out);
+
+ void SetupDefaultMocks();
+
+ Microsoft::WRL::ComPtr<MockD3D11DeviceContext> mock_immediate_context_;
+
+ private:
+ ~MockD3D11Device();
+};
+
+class MockDXGIResource final
+ : public MockInterface<IDXGIResource1, IDXGIKeyedMutex> {
+ public:
+ // IDXGIResource1
+ IFACEMETHODIMP CreateSubresourceSurface(UINT index, IDXGISurface2** surface);
+ IFACEMETHODIMP CreateSharedHandle(const SECURITY_ATTRIBUTES* attributes,
+ DWORD access,
+ LPCWSTR name,
+ HANDLE* handle);
+ // IDXGIResource
+ IFACEMETHODIMP GetSharedHandle(HANDLE* shared_handle);
+ IFACEMETHODIMP GetUsage(DXGI_USAGE* usage);
+ IFACEMETHODIMP SetEvictionPriority(UINT eviction_priority);
+ IFACEMETHODIMP GetEvictionPriority(UINT* eviction_priority);
+ // IDXGIDeviceSubObject
+ IFACEMETHODIMP GetDevice(REFIID riid, void** device);
+ // IDXGIObject
+ IFACEMETHODIMP SetPrivateData(REFGUID name, UINT data_size, const void* data);
+ IFACEMETHODIMP SetPrivateDataInterface(REFGUID name, const IUnknown* unknown);
+ IFACEMETHODIMP GetPrivateData(REFGUID name, UINT* data_size, void* data);
+ IFACEMETHODIMP GetParent(REFIID riid, void** parent);
+ // IDXGIKeyedMutex
+ IFACEMETHODIMP AcquireSync(UINT64 key, DWORD milliseconds) override;
+ IFACEMETHODIMP ReleaseSync(UINT64 key) override;
+
+ private:
+ ~MockDXGIResource() override;
+};
+
+class MockD3D11Texture2D final : public MockInterface<ID3D11Texture2D> {
+ public:
+ MockD3D11Texture2D(D3D11_TEXTURE2D_DESC desc, ID3D11Device* device);
+ MockD3D11Texture2D();
+ // IUnknown
+ IFACEMETHODIMP QueryInterface(REFIID riid, void** object) override;
+ // ID3D11Texture2D
+ IFACEMETHODIMP_(void) GetDesc(D3D11_TEXTURE2D_DESC* desc);
+ // ID3D11Resource
+ IFACEMETHODIMP_(void) GetType(D3D11_RESOURCE_DIMENSION* resource_dimension);
+ IFACEMETHODIMP_(void) SetEvictionPriority(UINT eviction_priority);
+ IFACEMETHODIMP_(UINT) GetEvictionPriority();
+ // ID3D11DeviceChild
+ IFACEMETHODIMP_(void) GetDevice(ID3D11Device** device);
+ IFACEMETHODIMP GetPrivateData(REFGUID guid, UINT* data_size, void* data);
+ IFACEMETHODIMP SetPrivateData(REFGUID guid, UINT data_size, const void* data);
+ IFACEMETHODIMP SetPrivateDataInterface(REFGUID guid, const IUnknown* data);
+
+ Microsoft::WRL::ComPtr<MockDXGIResource> mock_resource_;
+
+ private:
+ ~MockD3D11Texture2D() override;
+ D3D11_TEXTURE2D_DESC desc_ = {};
+ Microsoft::WRL::ComPtr<ID3D11Device> device_;
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_WIN_D3D_CAPTURE_TEST_UTILS_H_ \ No newline at end of file
diff --git a/chromium/media/capture/video/win/gpu_memory_buffer_tracker.cc b/chromium/media/capture/video/win/gpu_memory_buffer_tracker.cc
new file mode 100644
index 00000000000..c0c4b84694d
--- /dev/null
+++ b/chromium/media/capture/video/win/gpu_memory_buffer_tracker.cc
@@ -0,0 +1,151 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/win/gpu_memory_buffer_tracker.h"
+
+#include "base/check.h"
+#include "base/notreached.h"
+#include "base/win/scoped_handle.h"
+#include "gpu/ipc/common/gpu_memory_buffer_impl_dxgi.h"
+#include "media/capture/video/video_capture_buffer_handle.h"
+#include "ui/gfx/geometry/size.h"
+
+#include <dxgi1_2.h>
+
+namespace media {
+
+namespace {
+
+base::win::ScopedHandle CreateNV12Texture(ID3D11Device* d3d11_device,
+ const gfx::Size& size) {
+ const DXGI_FORMAT dxgi_format = DXGI_FORMAT_NV12;
+ D3D11_TEXTURE2D_DESC desc = {
+ .Width = size.width(),
+ .Height = size.height(),
+ .MipLevels = 1,
+ .ArraySize = 1,
+ .Format = dxgi_format,
+ .SampleDesc = {1, 0},
+ .Usage = D3D11_USAGE_DEFAULT,
+ .BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET,
+ .CPUAccessFlags = 0,
+ .MiscFlags = D3D11_RESOURCE_MISC_SHARED_NTHANDLE |
+ D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX};
+
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> d3d11_texture;
+
+ HRESULT hr = d3d11_device->CreateTexture2D(&desc, nullptr, &d3d11_texture);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to create D3D11 texture: "
+ << logging::SystemErrorCodeToString(hr);
+ return base::win::ScopedHandle();
+ }
+
+ Microsoft::WRL::ComPtr<IDXGIResource1> dxgi_resource;
+ hr = d3d11_texture.As(&dxgi_resource);
+ CHECK(SUCCEEDED(hr));
+
+ HANDLE texture_handle;
+ hr = dxgi_resource->CreateSharedHandle(
+ nullptr, DXGI_SHARED_RESOURCE_READ | DXGI_SHARED_RESOURCE_WRITE, nullptr,
+ &texture_handle);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to create shared D3D11 texture handle: "
+ << logging::SystemErrorCodeToString(hr);
+ return base::win::ScopedHandle();
+ }
+ return base::win::ScopedHandle(texture_handle);
+}
+
+} // namespace
+
+GpuMemoryBufferTracker::GpuMemoryBufferTracker(
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager)
+ : dxgi_device_manager_(std::move(dxgi_device_manager)),
+ d3d_device_(dxgi_device_manager_->GetDevice()) {}
+
+GpuMemoryBufferTracker::~GpuMemoryBufferTracker() = default;
+
+bool GpuMemoryBufferTracker::Init(const gfx::Size& dimensions,
+ VideoPixelFormat format,
+ const mojom::PlaneStridesPtr& strides) {
+ // Only support NV12
+ if (format != PIXEL_FORMAT_NV12) {
+ NOTREACHED() << "Unsupported VideoPixelFormat " << format;
+ return false;
+ }
+
+ buffer_size_ = dimensions;
+
+ return CreateBufferInternal();
+}
+
+bool GpuMemoryBufferTracker::CreateBufferInternal() {
+ gfx::GpuMemoryBufferHandle buffer_handle;
+ buffer_handle.dxgi_handle =
+ CreateNV12Texture(d3d_device_.Get(), buffer_size_);
+
+ buffer_ = gpu::GpuMemoryBufferImplDXGI::CreateFromHandle(
+ std::move(buffer_handle), buffer_size_,
+ gfx::BufferFormat::YUV_420_BIPLANAR, gfx::BufferUsage::GPU_READ,
+ gpu::GpuMemoryBufferImpl::DestructionCallback());
+ if (!buffer_) {
+ NOTREACHED() << "Failed to create GPU memory buffer";
+ return false;
+ }
+ return true;
+}
+
+bool GpuMemoryBufferTracker::EnsureD3DDevice() {
+ // Check for and handle device loss by recreating the texture
+ if (FAILED(d3d_device_->GetDeviceRemovedReason())) {
+ DVLOG(1) << "Detected device loss.";
+ dxgi_device_manager_->ResetDevice();
+ d3d_device_ = dxgi_device_manager_->GetDevice();
+ if (!d3d_device_) {
+ return false;
+ }
+
+ return CreateBufferInternal();
+ }
+ return true;
+}
+
+bool GpuMemoryBufferTracker::IsReusableForFormat(
+ const gfx::Size& dimensions,
+ VideoPixelFormat format,
+ const mojom::PlaneStridesPtr& strides) {
+ return (format == PIXEL_FORMAT_NV12) && (dimensions == buffer_->GetSize());
+}
+
+std::unique_ptr<VideoCaptureBufferHandle>
+GpuMemoryBufferTracker::GetMemoryMappedAccess() {
+ NOTREACHED() << "Unsupported operation";
+ return std::make_unique<NullHandle>();
+}
+
+base::UnsafeSharedMemoryRegion
+GpuMemoryBufferTracker::DuplicateAsUnsafeRegion() {
+ NOTREACHED() << "Unsupported operation";
+ return base::UnsafeSharedMemoryRegion();
+}
+
+mojo::ScopedSharedBufferHandle GpuMemoryBufferTracker::DuplicateAsMojoBuffer() {
+ NOTREACHED() << "Unsupported operation";
+ return mojo::ScopedSharedBufferHandle();
+}
+
+gfx::GpuMemoryBufferHandle GpuMemoryBufferTracker::GetGpuMemoryBufferHandle() {
+ if (!EnsureD3DDevice()) {
+ return gfx::GpuMemoryBufferHandle();
+ }
+ return buffer_->CloneHandle();
+}
+
+uint32_t GpuMemoryBufferTracker::GetMemorySizeInBytes() {
+ DCHECK(buffer_);
+ return (buffer_->GetSize().width() * buffer_->GetSize().height() * 3) / 2;
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/win/gpu_memory_buffer_tracker.h b/chromium/media/capture/video/win/gpu_memory_buffer_tracker.h
new file mode 100644
index 00000000000..c960abc6596
--- /dev/null
+++ b/chromium/media/capture/video/win/gpu_memory_buffer_tracker.h
@@ -0,0 +1,54 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_WIN_GPU_MEMORY_BUFFER_TRACKER_H_
+#define MEDIA_CAPTURE_VIDEO_WIN_GPU_MEMORY_BUFFER_TRACKER_H_
+
+#include "media/base/win/dxgi_device_manager.h"
+#include "media/capture/video/video_capture_buffer_tracker.h"
+
+#include <d3d11.h>
+#include <wrl.h>
+
+namespace gfx {
+class Size;
+} // namespace gfx
+
+namespace media {
+
+// Tracker specifics for Windows GpuMemoryBuffer.
+class CAPTURE_EXPORT GpuMemoryBufferTracker final
+ : public VideoCaptureBufferTracker {
+ public:
+ explicit GpuMemoryBufferTracker(
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager);
+ ~GpuMemoryBufferTracker() override;
+
+ // Implementation of VideoCaptureBufferTracker:
+ bool Init(const gfx::Size& dimensions,
+ VideoPixelFormat format,
+ const mojom::PlaneStridesPtr& strides) override;
+ bool IsReusableForFormat(const gfx::Size& dimensions,
+ VideoPixelFormat format,
+ const mojom::PlaneStridesPtr& strides) override;
+ uint32_t GetMemorySizeInBytes() override;
+ std::unique_ptr<VideoCaptureBufferHandle> GetMemoryMappedAccess() override;
+ base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion() override;
+ mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer() override;
+ gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() override;
+
+ private:
+ std::unique_ptr<gfx::GpuMemoryBuffer> buffer_;
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager_;
+ Microsoft::WRL::ComPtr<ID3D11Device> d3d_device_;
+ gfx::Size buffer_size_;
+ bool CreateBufferInternal();
+ bool EnsureD3DDevice();
+
+ DISALLOW_COPY_AND_ASSIGN(GpuMemoryBufferTracker);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_WIN_GPU_MEMORY_BUFFER_TRACKER_H_ \ No newline at end of file
diff --git a/chromium/media/capture/video/win/gpu_memory_buffer_tracker_unittest.cc b/chromium/media/capture/video/win/gpu_memory_buffer_tracker_unittest.cc
new file mode 100644
index 00000000000..267cfc5e696
--- /dev/null
+++ b/chromium/media/capture/video/win/gpu_memory_buffer_tracker_unittest.cc
@@ -0,0 +1,167 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <mfidl.h>
+
+#include <dxgi1_2.h>
+#include <mfapi.h>
+#include <mferror.h>
+#include <wrl.h>
+#include <wrl/client.h>
+
+#include "base/memory/scoped_refptr.h"
+#include "base/test/task_environment.h"
+#include "base/win/windows_version.h"
+#include "media/capture/video/win/d3d_capture_test_utils.h"
+#include "media/capture/video/win/gpu_memory_buffer_tracker.h"
+#include "media/capture/video/win/video_capture_device_factory_win.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::AllOf;
+using ::testing::Field;
+using ::testing::Invoke;
+using ::testing::Mock;
+using ::testing::Pointee;
+
+namespace media {
+
+namespace {
+
+class MockDXGIDeviceManager : public DXGIDeviceManager {
+ public:
+ MockDXGIDeviceManager()
+ : DXGIDeviceManager(nullptr, 0),
+ mock_d3d_device_(new MockD3D11Device()) {}
+
+ // Associates a new D3D device with the DXGI Device Manager
+ HRESULT ResetDevice() override { return S_OK; }
+
+ // Directly access D3D device stored in DXGI device manager
+ Microsoft::WRL::ComPtr<ID3D11Device> GetDevice() override {
+ Microsoft::WRL::ComPtr<ID3D11Device> device;
+ mock_d3d_device_.As(&device);
+ return device;
+ }
+
+ Microsoft::WRL::ComPtr<MockD3D11Device> GetMockDevice() {
+ return mock_d3d_device_;
+ }
+
+ protected:
+ ~MockDXGIDeviceManager() override {}
+ Microsoft::WRL::ComPtr<MockD3D11Device> mock_d3d_device_;
+};
+
+} // namespace
+
+class GpuMemoryBufferTrackerTest : public ::testing::Test {
+ protected:
+ GpuMemoryBufferTrackerTest()
+ : media_foundation_supported_(
+ VideoCaptureDeviceFactoryWin::PlatformSupportsMediaFoundation()) {}
+
+ bool ShouldSkipTest() {
+ if (!media_foundation_supported_) {
+ DVLOG(1) << "Media foundation is not supported by the current platform. "
+ "Skipping test.";
+ return true;
+ }
+ // D3D11 is only supported with Media Foundation on Windows 8 or later
+ if (base::win::GetVersion() < base::win::Version::WIN8) {
+ DVLOG(1) << "D3D11 with Media foundation is not supported by the current "
+ "platform. "
+ "Skipping test.";
+ return true;
+ }
+ return false;
+ }
+
+ void SetUp() override {
+ if (ShouldSkipTest()) {
+ GTEST_SKIP();
+ }
+
+ dxgi_device_manager_ =
+ scoped_refptr<MockDXGIDeviceManager>(new MockDXGIDeviceManager());
+ }
+
+ base::test::TaskEnvironment task_environment_;
+ const bool media_foundation_supported_;
+ scoped_refptr<MockDXGIDeviceManager> dxgi_device_manager_;
+};
+
+TEST_F(GpuMemoryBufferTrackerTest, TextureCreation) {
+ // Verify that GpuMemoryBufferTracker creates a D3D11 texture with the correct
+ // properties
+ const gfx::Size expected_buffer_size = {1920, 1080};
+ const DXGI_FORMAT expected_buffer_format = DXGI_FORMAT_NV12;
+ dxgi_device_manager_->GetMockDevice()->SetupDefaultMocks();
+ EXPECT_CALL(*(dxgi_device_manager_->GetMockDevice().Get()),
+ OnCreateTexture2D(
+ Pointee(AllOf(Field(&D3D11_TEXTURE2D_DESC::Format,
+ expected_buffer_format),
+ Field(&D3D11_TEXTURE2D_DESC::Width,
+ static_cast<const unsigned int>(
+ expected_buffer_size.width())),
+ Field(&D3D11_TEXTURE2D_DESC::Height,
+ static_cast<const unsigned int>(
+ expected_buffer_size.height())))),
+ _, _));
+ std::unique_ptr<VideoCaptureBufferTracker> tracker =
+ std::make_unique<GpuMemoryBufferTracker>(dxgi_device_manager_);
+ EXPECT_EQ(tracker->Init(expected_buffer_size, PIXEL_FORMAT_NV12, nullptr),
+ true);
+}
+
+TEST_F(GpuMemoryBufferTrackerTest, TextureRecreationOnDeviceLoss) {
+ // Verify that GpuMemoryBufferTracker recreates a D3D11 texture with the
+ // correct properties when there is a device loss
+ const gfx::Size expected_buffer_size = {1920, 1080};
+ const DXGI_FORMAT expected_buffer_format = DXGI_FORMAT_NV12;
+ dxgi_device_manager_->GetMockDevice()->SetupDefaultMocks();
+ // Expect two texture creation calls (the second occurs on device loss
+ // recovery)
+ EXPECT_CALL(*(dxgi_device_manager_->GetMockDevice().Get()),
+ OnCreateTexture2D(
+ Pointee(AllOf(Field(&D3D11_TEXTURE2D_DESC::Format,
+ expected_buffer_format),
+ Field(&D3D11_TEXTURE2D_DESC::Width,
+ static_cast<const unsigned int>(
+ expected_buffer_size.width())),
+ Field(&D3D11_TEXTURE2D_DESC::Height,
+ static_cast<const unsigned int>(
+ expected_buffer_size.height())))),
+ _, _))
+ .Times(2);
+ // Mock device loss
+ EXPECT_CALL(*(dxgi_device_manager_->GetMockDevice().Get()),
+ OnGetDeviceRemovedReason())
+ .WillOnce(Invoke([]() { return DXGI_ERROR_DEVICE_REMOVED; }));
+ // Create and init tracker (causes initial texture creation)
+ std::unique_ptr<VideoCaptureBufferTracker> tracker =
+ std::make_unique<GpuMemoryBufferTracker>(dxgi_device_manager_);
+ EXPECT_EQ(tracker->Init(expected_buffer_size, PIXEL_FORMAT_NV12, nullptr),
+ true);
+ // Get GpuMemoryBufferHandle (should trigger device/texture recreation)
+ gfx::GpuMemoryBufferHandle gmb = tracker->GetGpuMemoryBufferHandle();
+}
+
+TEST_F(GpuMemoryBufferTrackerTest, GetMemorySizeInBytes) {
+ // Verify that GpuMemoryBufferTracker returns an expected value from
+ // GetMemorySizeInBytes
+ const gfx::Size expected_buffer_size = {1920, 1080};
+ dxgi_device_manager_->GetMockDevice()->SetupDefaultMocks();
+ std::unique_ptr<VideoCaptureBufferTracker> tracker =
+ std::make_unique<GpuMemoryBufferTracker>(dxgi_device_manager_);
+ EXPECT_EQ(tracker->Init(expected_buffer_size, PIXEL_FORMAT_NV12, nullptr),
+ true);
+
+ const uint32_t expectedSizeInBytes =
+ (expected_buffer_size.width() * expected_buffer_size.height() * 3) / 2;
+ EXPECT_EQ(tracker->GetMemorySizeInBytes(), expectedSizeInBytes);
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.cc b/chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.cc
new file mode 100644
index 00000000000..25f2207c464
--- /dev/null
+++ b/chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.cc
@@ -0,0 +1,37 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/win/video_capture_buffer_tracker_factory_win.h"
+
+#include <memory>
+
+#include "media/capture/video/shared_memory_buffer_tracker.h"
+#include "media/capture/video/win/gpu_memory_buffer_tracker.h"
+
+namespace media {
+
+VideoCaptureBufferTrackerFactoryWin::VideoCaptureBufferTrackerFactoryWin()
+ : dxgi_device_manager_(DXGIDeviceManager::Create()) {}
+
+VideoCaptureBufferTrackerFactoryWin::~VideoCaptureBufferTrackerFactoryWin() {}
+
+std::unique_ptr<VideoCaptureBufferTracker>
+VideoCaptureBufferTrackerFactoryWin::CreateTracker(
+ VideoCaptureBufferType buffer_type) {
+ switch (buffer_type) {
+ case VideoCaptureBufferType::kGpuMemoryBuffer:
+ return std::make_unique<GpuMemoryBufferTracker>(dxgi_device_manager_);
+ default:
+ return std::make_unique<SharedMemoryBufferTracker>();
+ }
+}
+
+std::unique_ptr<VideoCaptureBufferTracker>
+VideoCaptureBufferTrackerFactoryWin::CreateTrackerForExternalGpuMemoryBuffer(
+ const gfx::GpuMemoryBufferHandle& handle) {
+ // Not supported
+ return nullptr;
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.h b/chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.h
new file mode 100644
index 00000000000..d7d5958a261
--- /dev/null
+++ b/chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.h
@@ -0,0 +1,35 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_BUFFER_TRACKER_FACTORY_WIN_H_
+#define MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_BUFFER_TRACKER_FACTORY_WIN_H_
+
+#include <memory>
+
+#include "base/memory/weak_ptr.h"
+#include "media/base/win/dxgi_device_manager.h"
+#include "media/capture/capture_export.h"
+#include "media/capture/video/video_capture_buffer_tracker_factory.h"
+
+namespace media {
+
+class CAPTURE_EXPORT VideoCaptureBufferTrackerFactoryWin
+ : public VideoCaptureBufferTrackerFactory {
+ public:
+ VideoCaptureBufferTrackerFactoryWin();
+ ~VideoCaptureBufferTrackerFactoryWin() override;
+ std::unique_ptr<VideoCaptureBufferTracker> CreateTracker(
+ VideoCaptureBufferType buffer_type) override;
+ std::unique_ptr<VideoCaptureBufferTracker>
+ CreateTrackerForExternalGpuMemoryBuffer(
+ const gfx::GpuMemoryBufferHandle& handle) override;
+
+ private:
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager_;
+ base::WeakPtrFactory<VideoCaptureBufferTrackerFactoryWin> weak_factory_{this};
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_BUFFER_TRACKER_FACTORY_WIN_H_ \ No newline at end of file
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.cc b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
index fe412a57c28..c82499b8ae1 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
@@ -133,18 +133,30 @@ const char* const kDisplayNamesBlockedForMediaFoundation[] = {
const std::vector<
std::pair<VideoCaptureApi, std::vector<std::pair<GUID, GUID>>>>&
GetMFAttributes() {
+ if (base::FeatureList::IsEnabled(
+ media::kIncludeIRCamerasInDeviceEnumeration)) {
+ static const base::NoDestructor<std::vector<
+ std::pair<VideoCaptureApi, std::vector<std::pair<GUID, GUID>>>>>
+ mf_attributes({{{VideoCaptureApi::WIN_MEDIA_FOUNDATION,
+ {
+ {MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID},
+ }},
+ {VideoCaptureApi::WIN_MEDIA_FOUNDATION_SENSOR,
+ {{MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID},
+ {MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_CATEGORY,
+ KSCATEGORY_SENSOR_CAMERA}}}}});
+ return *mf_attributes;
+ }
+
static const base::NoDestructor<std::vector<
std::pair<VideoCaptureApi, std::vector<std::pair<GUID, GUID>>>>>
- mf_attributes({{{VideoCaptureApi::WIN_MEDIA_FOUNDATION,
- {
- {MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
- MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID},
- }},
- {VideoCaptureApi::WIN_MEDIA_FOUNDATION_SENSOR,
- {{MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
- MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID},
- {MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_CATEGORY,
- KSCATEGORY_SENSOR_CAMERA}}}}});
+ mf_attributes({{VideoCaptureApi::WIN_MEDIA_FOUNDATION,
+ {
+ {MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID},
+ }}});
return *mf_attributes;
}
@@ -312,6 +324,8 @@ bool VideoCaptureDeviceFactoryWin::PlatformSupportsMediaFoundation() {
VideoCaptureDeviceFactoryWin::VideoCaptureDeviceFactoryWin()
: use_media_foundation_(
base::FeatureList::IsEnabled(media::kMediaFoundationVideoCapture)),
+ use_d3d11_with_media_foundation_(base::FeatureList::IsEnabled(
+ media::kMediaFoundationD3D11VideoCapture)),
com_thread_("Windows Video Capture COM Thread") {
if (use_media_foundation_ && !PlatformSupportsMediaFoundation()) {
use_media_foundation_ = false;
@@ -705,7 +719,7 @@ DevicesInfo VideoCaptureDeviceFactoryWin::GetDevicesInfoMediaFoundation() {
DevicesInfo devices_info;
if (use_d3d11_with_media_foundation_ && !dxgi_device_manager_) {
- dxgi_device_manager_ = VideoCaptureDXGIDeviceManager::Create();
+ dxgi_device_manager_ = DXGIDeviceManager::Create();
}
// Recent non-RGB (depth, IR) cameras could be marked as sensor cameras in
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.h b/chromium/media/capture/video/win/video_capture_device_factory_win.h
index bd8b86ce161..43047d7b53f 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.h
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.h
@@ -17,9 +17,9 @@
#include "base/macros.h"
#include "base/threading/thread.h"
+#include "media/base/win/dxgi_device_manager.h"
#include "media/base/win/mf_initializer.h"
#include "media/capture/video/video_capture_device_factory.h"
-#include "media/capture/video/win/video_capture_dxgi_device_manager.h"
namespace media {
@@ -77,8 +77,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
return use_d3d11_with_media_foundation_;
}
- scoped_refptr<VideoCaptureDXGIDeviceManager>
- dxgi_device_manager_for_testing() {
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager_for_testing() {
return dxgi_device_manager_;
}
@@ -97,7 +96,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
std::vector<VideoCaptureDeviceInfo> GetDevicesInfoDirectShow();
bool use_media_foundation_;
- bool use_d3d11_with_media_foundation_ = false;
+ bool use_d3d11_with_media_foundation_;
MFSessionLifetime session_;
// For calling WinRT methods on a COM initiated thread.
@@ -105,7 +104,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_;
std::unordered_set<IAsyncOperation<DeviceInformationCollection*>*> async_ops_;
// For hardware acceleration in MediaFoundation capture engine
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager_;
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager_;
base::WeakPtrFactory<VideoCaptureDeviceFactoryWin> weak_ptr_factory_{this};
DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactoryWin);
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc b/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
index 7cd97c083b3..274bab4fdac 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
@@ -18,8 +18,10 @@
#include "base/run_loop.h"
#include "base/strings/sys_string_conversions.h"
#include "base/test/bind.h"
+#include "base/test/scoped_feature_list.h"
#include "base/test/task_environment.h"
#include "base/win/windows_version.h"
+#include "media/base/media_switches.h"
#include "media/capture/video/win/video_capture_device_factory_win.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -1353,6 +1355,104 @@ TEST_P(VideoCaptureDeviceFactoryMFWinTest, GetDevicesInfo) {
}));
run_loop.Run();
+ EXPECT_EQ(devices_info.size(), 6U);
+ for (auto it = devices_info.begin(); it != devices_info.end(); it++) {
+ // Verify that there are no duplicates.
+ EXPECT_EQ(
+ FindDeviceInRange(devices_info.begin(), it, it->descriptor.device_id),
+ it);
+ }
+ iterator it = FindDeviceInRange(devices_info.begin(), devices_info.end(),
+ base::SysWideToUTF8(kMFDeviceId0));
+ ASSERT_NE(it, devices_info.end());
+ EXPECT_EQ(it->descriptor.capture_api, VideoCaptureApi::WIN_MEDIA_FOUNDATION);
+ EXPECT_EQ(it->descriptor.display_name(), base::SysWideToUTF8(kMFDeviceName0));
+ // No IAMCameraControl and no IAMVideoProcAmp interfaces.
+ EXPECT_FALSE(it->descriptor.control_support().pan);
+ EXPECT_FALSE(it->descriptor.control_support().tilt);
+ EXPECT_FALSE(it->descriptor.control_support().zoom);
+
+ it = FindDeviceInRange(devices_info.begin(), devices_info.end(),
+ base::SysWideToUTF8(kMFDeviceId1));
+ ASSERT_NE(it, devices_info.end());
+ EXPECT_EQ(it->descriptor.capture_api, VideoCaptureApi::WIN_MEDIA_FOUNDATION);
+ EXPECT_EQ(it->descriptor.display_name(), base::SysWideToUTF8(kMFDeviceName1));
+ // No pan/tilt/zoom in IAMCameraControl interface.
+ EXPECT_FALSE(it->descriptor.control_support().pan);
+ EXPECT_FALSE(it->descriptor.control_support().tilt);
+ EXPECT_FALSE(it->descriptor.control_support().zoom);
+
+ it = FindDeviceInRange(devices_info.begin(), devices_info.end(),
+ base::SysWideToUTF8(kDirectShowDeviceId3));
+ ASSERT_NE(it, devices_info.end());
+ EXPECT_EQ(it->descriptor.capture_api, VideoCaptureApi::WIN_DIRECT_SHOW);
+ EXPECT_EQ(it->descriptor.display_name(),
+ base::SysWideToUTF8(kDirectShowDeviceName3));
+ // No ICameraControl interface.
+ EXPECT_FALSE(it->descriptor.control_support().pan);
+ EXPECT_FALSE(it->descriptor.control_support().tilt);
+ EXPECT_FALSE(it->descriptor.control_support().zoom);
+
+ it = FindDeviceInRange(devices_info.begin(), devices_info.end(),
+ base::SysWideToUTF8(kDirectShowDeviceId4));
+ ASSERT_NE(it, devices_info.end());
+ EXPECT_EQ(it->descriptor.capture_api, VideoCaptureApi::WIN_DIRECT_SHOW);
+ EXPECT_EQ(it->descriptor.display_name(),
+ base::SysWideToUTF8(kDirectShowDeviceName4));
+ // No IVideoProcAmp interface.
+ EXPECT_FALSE(it->descriptor.control_support().pan);
+ EXPECT_FALSE(it->descriptor.control_support().tilt);
+ EXPECT_FALSE(it->descriptor.control_support().zoom);
+
+ // Devices that are listed in MediaFoundation but only report supported
+ // formats in DirectShow are expected to get enumerated with
+ // VideoCaptureApi::WIN_DIRECT_SHOW
+ it = FindDeviceInRange(devices_info.begin(), devices_info.end(),
+ base::SysWideToUTF8(kDirectShowDeviceId5));
+ ASSERT_NE(it, devices_info.end());
+ EXPECT_EQ(it->descriptor.capture_api, VideoCaptureApi::WIN_DIRECT_SHOW);
+ EXPECT_EQ(it->descriptor.display_name(),
+ base::SysWideToUTF8(kDirectShowDeviceName5));
+ // No pan, tilt, or zoom ranges in ICameraControl interface.
+ EXPECT_FALSE(it->descriptor.control_support().pan);
+ EXPECT_FALSE(it->descriptor.control_support().tilt);
+ EXPECT_FALSE(it->descriptor.control_support().zoom);
+
+ // Devices that are listed in both MediaFoundation and DirectShow but are
+ // blocked for use with MediaFoundation are expected to get enumerated with
+ // VideoCaptureApi::WIN_DIRECT_SHOW.
+ it = FindDeviceInRange(devices_info.begin(), devices_info.end(),
+ base::SysWideToUTF8(kDirectShowDeviceId6));
+ ASSERT_NE(it, devices_info.end());
+ EXPECT_EQ(it->descriptor.capture_api, VideoCaptureApi::WIN_DIRECT_SHOW);
+ EXPECT_EQ(it->descriptor.display_name(),
+ base::SysWideToUTF8(kDirectShowDeviceName6));
+ EXPECT_TRUE(it->descriptor.control_support().pan);
+ EXPECT_TRUE(it->descriptor.control_support().tilt);
+ EXPECT_TRUE(it->descriptor.control_support().zoom);
+}
+
+TEST_P(VideoCaptureDeviceFactoryMFWinTest, GetDevicesInfo_IncludeIRCameras) {
+ base::test::ScopedFeatureList feature_list;
+ feature_list.InitAndEnableFeature(kIncludeIRCamerasInDeviceEnumeration);
+
+ if (ShouldSkipMFTest())
+ return;
+
+ const bool use_d3d11 = GetParam();
+ if (use_d3d11 && ShouldSkipD3D11Test())
+ return;
+ factory_.set_use_d3d11_with_media_foundation_for_testing(use_d3d11);
+
+ std::vector<VideoCaptureDeviceInfo> devices_info;
+ base::RunLoop run_loop;
+ factory_.GetDevicesInfo(base::BindLambdaForTesting(
+ [&devices_info, &run_loop](std::vector<VideoCaptureDeviceInfo> result) {
+ devices_info = std::move(result);
+ run_loop.Quit();
+ }));
+ run_loop.Run();
+
EXPECT_EQ(devices_info.size(), 7U);
for (auto it = devices_info.begin(); it != devices_info.end(); it++) {
// Verify that there are no duplicates.
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win.cc b/chromium/media/capture/video/win/video_capture_device_mf_win.cc
index 18c9acd16ee..f56c71b4c10 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win.cc
@@ -4,6 +4,7 @@
#include "media/capture/video/win/video_capture_device_mf_win.h"
+#include <d3d11_4.h>
#include <mfapi.h>
#include <mferror.h>
#include <stddef.h>
@@ -464,6 +465,88 @@ mojom::RangePtr RetrieveControlRangeAndCurrent(
},
supported_modes, current_mode, value_converter, step_converter);
}
+
+HRESULT GetTextureFromMFBuffer(IMFMediaBuffer* mf_buffer,
+ ID3D11Texture2D** texture_out) {
+ Microsoft::WRL::ComPtr<IMFDXGIBuffer> dxgi_buffer;
+ HRESULT hr = mf_buffer->QueryInterface(IID_PPV_ARGS(&dxgi_buffer));
+ DLOG_IF_FAILED_WITH_HRESULT("Failed to retrieve IMFDXGIBuffer", hr);
+
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> d3d_texture;
+ if (SUCCEEDED(hr)) {
+ hr = dxgi_buffer->GetResource(IID_PPV_ARGS(&d3d_texture));
+ DLOG_IF_FAILED_WITH_HRESULT("Failed to retrieve ID3D11Texture2D", hr);
+ }
+
+ *texture_out = d3d_texture.Detach();
+ if (SUCCEEDED(hr)) {
+ CHECK(*texture_out);
+ }
+ return hr;
+}
+
+void GetTextureSizeAndFormat(ID3D11Texture2D* texture,
+ gfx::Size& size,
+ VideoPixelFormat& format) {
+ D3D11_TEXTURE2D_DESC desc;
+ texture->GetDesc(&desc);
+ size.set_width(desc.Width);
+ size.set_height(desc.Height);
+
+ switch (desc.Format) {
+ // Only support NV12
+ case DXGI_FORMAT_NV12:
+ format = PIXEL_FORMAT_NV12;
+ break;
+ default:
+ DLOG(ERROR) << "Unsupported camera DXGI texture format: " << desc.Format;
+ format = PIXEL_FORMAT_UNKNOWN;
+ break;
+ }
+}
+
+HRESULT CopyTextureToGpuMemoryBuffer(ID3D11Texture2D* texture,
+ gfx::GpuMemoryBufferHandle gmb_handle) {
+ Microsoft::WRL::ComPtr<ID3D11Device> texture_device;
+ texture->GetDevice(&texture_device);
+
+ Microsoft::WRL::ComPtr<ID3D11Device1> device1;
+ HRESULT hr = texture_device.As(&device1);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to get ID3D11Device1: "
+ << logging::SystemErrorCodeToString(hr);
+ return hr;
+ }
+
+ // Open shared resource from GpuMemoryBuffer on source texture D3D11 device
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> target_texture;
+ hr = device1->OpenSharedResource1(gmb_handle.dxgi_handle.Get(),
+ IID_PPV_ARGS(&target_texture));
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to open shared camera target texture: "
+ << logging::SystemErrorCodeToString(hr);
+ return hr;
+ }
+
+ Microsoft::WRL::ComPtr<ID3D11DeviceContext> device_context;
+ texture_device->GetImmediateContext(&device_context);
+
+ Microsoft::WRL::ComPtr<IDXGIKeyedMutex> keyed_mutex;
+ hr = target_texture.As(&keyed_mutex);
+ CHECK(SUCCEEDED(hr));
+
+ keyed_mutex->AcquireSync(0, INFINITE);
+ device_context->CopySubresourceRegion(target_texture.Get(), 0, 0, 0, 0,
+ texture, 0, nullptr);
+ keyed_mutex->ReleaseSync(0);
+
+ // Need to flush context to ensure that other devices receive updated contents
+ // of shared resource
+ device_context->Flush();
+
+ return S_OK;
+}
+
} // namespace
class MFVideoCallback final
@@ -534,16 +617,8 @@ class MFVideoCallback final
ComPtr<IMFMediaBuffer> buffer;
sample->GetBufferByIndex(i, &buffer);
if (buffer) {
- ScopedBufferLock locked_buffer(buffer);
- if (locked_buffer.data()) {
- observer_->OnIncomingCapturedData(locked_buffer.data(),
- locked_buffer.length(),
- reference_time, timestamp);
- } else {
- observer_->OnFrameDropped(
- VideoCaptureFrameDropReason::
- kWinMediaFoundationLockingBufferDelieveredNullptr);
- }
+ observer_->OnIncomingCapturedData(buffer.Get(), reference_time,
+ timestamp);
} else {
observer_->OnFrameDropped(
VideoCaptureFrameDropReason::
@@ -738,7 +813,7 @@ HRESULT VideoCaptureDeviceMFWin::FillCapabilities(
VideoCaptureDeviceMFWin::VideoCaptureDeviceMFWin(
const VideoCaptureDeviceDescriptor& device_descriptor,
ComPtr<IMFMediaSource> source,
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager)
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager)
: VideoCaptureDeviceMFWin(device_descriptor,
source,
std::move(dxgi_device_manager),
@@ -747,7 +822,7 @@ VideoCaptureDeviceMFWin::VideoCaptureDeviceMFWin(
VideoCaptureDeviceMFWin::VideoCaptureDeviceMFWin(
const VideoCaptureDeviceDescriptor& device_descriptor,
ComPtr<IMFMediaSource> source,
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager,
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager,
ComPtr<IMFCaptureEngine> engine)
: facing_mode_(device_descriptor.facing),
create_mf_photo_callback_(base::BindRepeating(&CreateMFPhotoCallback)),
@@ -1365,17 +1440,105 @@ void VideoCaptureDeviceMFWin::SetPhotoOptions(
std::move(callback).Run(true);
}
-
void VideoCaptureDeviceMFWin::OnIncomingCapturedData(
- const uint8_t* data,
- int length,
+ IMFMediaBuffer* buffer,
base::TimeTicks reference_time,
base::TimeDelta timestamp) {
+ VideoCaptureFrameDropReason frame_drop_reason =
+ VideoCaptureFrameDropReason::kNone;
+ OnIncomingCapturedDataInternal(buffer, reference_time, timestamp,
+ frame_drop_reason);
+ if (frame_drop_reason != VideoCaptureFrameDropReason::kNone) {
+ OnFrameDropped(frame_drop_reason);
+ }
+}
+
+HRESULT VideoCaptureDeviceMFWin::DeliverTextureToClient(
+ ID3D11Texture2D* texture,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp) {
+ // Check for device loss
+ Microsoft::WRL::ComPtr<ID3D11Device> texture_device;
+ texture->GetDevice(&texture_device);
+
+ HRESULT hr = texture_device->GetDeviceRemovedReason();
+
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Camera texture device lost.";
+ DCHECK(dxgi_device_manager_->ResetDevice());
+ return hr;
+ }
+
+ gfx::Size texture_size;
+ VideoPixelFormat pixel_format;
+ GetTextureSizeAndFormat(texture, texture_size, pixel_format);
+
+ if (pixel_format != PIXEL_FORMAT_NV12) {
+ return MF_E_UNSUPPORTED_FORMAT;
+ }
+
+ VideoCaptureDevice::Client::Buffer capture_buffer;
+ constexpr int kDummyFrameFeedbackId = 0;
+ auto result = client_->ReserveOutputBuffer(
+ texture_size, pixel_format, kDummyFrameFeedbackId, &capture_buffer);
+ if (result != VideoCaptureDevice::Client::ReserveResult::kSucceeded) {
+ DLOG(ERROR) << "Failed to reserve output capture buffer: " << (int)result;
+ return MF_E_UNEXPECTED;
+ }
+
+ hr = CopyTextureToGpuMemoryBuffer(
+ texture, capture_buffer.handle_provider->GetGpuMemoryBufferHandle());
+
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to copy camera device texture to output texture: "
+ << logging::SystemErrorCodeToString(hr);
+ return hr;
+ }
+
+ VideoRotation frame_rotation = VIDEO_ROTATION_0;
+ DCHECK(camera_rotation_.has_value());
+ switch (camera_rotation_.value()) {
+ case 0:
+ frame_rotation = VIDEO_ROTATION_0;
+ break;
+ case 90:
+ frame_rotation = VIDEO_ROTATION_90;
+ break;
+ case 180:
+ frame_rotation = VIDEO_ROTATION_180;
+ break;
+ case 270:
+ frame_rotation = VIDEO_ROTATION_270;
+ break;
+ default:
+ break;
+ }
+
+ VideoFrameMetadata frame_metadata;
+ frame_metadata.transformation = VideoTransformation(frame_rotation);
+
+ client_->OnIncomingCapturedBufferExt(
+ std::move(capture_buffer),
+ VideoCaptureFormat(
+ texture_size, selected_video_capability_->supported_format.frame_rate,
+ pixel_format),
+ gfx::ColorSpace(), reference_time, timestamp, gfx::Rect(texture_size),
+ frame_metadata);
+
+ return hr;
+}
+
+void VideoCaptureDeviceMFWin::OnIncomingCapturedDataInternal(
+ IMFMediaBuffer* buffer,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ VideoCaptureFrameDropReason& frame_drop_reason) {
base::AutoLock lock(lock_);
- DCHECK(data);
SendOnStartedIfNotYetSent();
+ bool delivered_texture = false;
+
if (client_.get()) {
if (!has_sent_on_started_to_client_) {
has_sent_on_started_to_client_ = true;
@@ -1387,13 +1550,38 @@ void VideoCaptureDeviceMFWin::OnIncomingCapturedData(
if (!camera_rotation_.has_value() || IsAutoRotationEnabled())
camera_rotation_ = GetCameraRotation(facing_mode_);
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> texture;
+ if (dxgi_device_manager_ &&
+ SUCCEEDED(GetTextureFromMFBuffer(buffer, &texture))) {
+ HRESULT hr =
+ DeliverTextureToClient(texture.Get(), reference_time, timestamp);
+ DLOG_IF_FAILED_WITH_HRESULT("Failed to deliver D3D11 texture to client.",
+ hr);
+ delivered_texture = SUCCEEDED(hr);
+ }
+ }
+
+ if (delivered_texture && video_stream_take_photo_callbacks_.empty()) {
+ return;
+ }
+
+ ScopedBufferLock locked_buffer(buffer);
+ if (!locked_buffer.data()) {
+ DLOG(ERROR) << "Locked buffer delivered nullptr";
+ frame_drop_reason = VideoCaptureFrameDropReason::
+ kWinMediaFoundationLockingBufferDelieveredNullptr;
+ return;
+ }
+
+ if (!delivered_texture && client_.get()) {
// TODO(julien.isorce): retrieve the color space information using Media
// Foundation api, MFGetAttributeSize/MF_MT_VIDEO_PRIMARIES,in order to
// build a gfx::ColorSpace. See http://crbug.com/959988.
client_->OnIncomingCapturedData(
- data, length, selected_video_capability_->supported_format,
- gfx::ColorSpace(), camera_rotation_.value(), false /* flip_y */,
- reference_time, timestamp);
+ locked_buffer.data(), locked_buffer.length(),
+ selected_video_capability_->supported_format, gfx::ColorSpace(),
+ camera_rotation_.value(), false /* flip_y */, reference_time,
+ timestamp);
}
while (!video_stream_take_photo_callbacks_.empty()) {
@@ -1401,8 +1589,9 @@ void VideoCaptureDeviceMFWin::OnIncomingCapturedData(
std::move(video_stream_take_photo_callbacks_.front());
video_stream_take_photo_callbacks_.pop();
- mojom::BlobPtr blob = RotateAndBlobify(
- data, length, selected_video_capability_->supported_format, 0);
+ mojom::BlobPtr blob =
+ RotateAndBlobify(locked_buffer.data(), locked_buffer.length(),
+ selected_video_capability_->supported_format, 0);
if (!blob) {
LogWindowsImageCaptureOutcome(
VideoCaptureWinBackend::kMediaFoundation,
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win.h b/chromium/media/capture/video/win/video_capture_device_mf_win.h
index 116b6d964b2..817c697e93d 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win.h
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win.h
@@ -22,11 +22,11 @@
#include "base/macros.h"
#include "base/optional.h"
#include "base/sequence_checker.h"
+#include "media/base/win/dxgi_device_manager.h"
#include "media/capture/capture_export.h"
#include "media/capture/video/video_capture_device.h"
#include "media/capture/video/win/capability_list_win.h"
#include "media/capture/video/win/metrics.h"
-#include "media/capture/video/win/video_capture_dxgi_device_manager.h"
interface IMFSourceReader;
@@ -49,11 +49,11 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
explicit VideoCaptureDeviceMFWin(
const VideoCaptureDeviceDescriptor& device_descriptor,
Microsoft::WRL::ComPtr<IMFMediaSource> source,
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager);
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager);
explicit VideoCaptureDeviceMFWin(
const VideoCaptureDeviceDescriptor& device_descriptor,
Microsoft::WRL::ComPtr<IMFMediaSource> source,
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager,
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager,
Microsoft::WRL::ComPtr<IMFCaptureEngine> engine);
~VideoCaptureDeviceMFWin() override;
@@ -72,8 +72,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
SetPhotoOptionsCallback callback) override;
// Captured new video data.
- void OnIncomingCapturedData(const uint8_t* data,
- int length,
+ void OnIncomingCapturedData(IMFMediaBuffer* buffer,
base::TimeTicks reference_time,
base::TimeDelta timestamp);
void OnFrameDropped(VideoCaptureFrameDropReason reason);
@@ -101,7 +100,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
}
void set_dxgi_device_manager_for_testing(
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager) {
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager) {
dxgi_device_manager_ = std::move(dxgi_device_manager);
}
@@ -132,6 +131,14 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
const char* message);
void SendOnStartedIfNotYetSent();
HRESULT WaitOnCaptureEvent(GUID capture_event_guid);
+ HRESULT DeliverTextureToClient(ID3D11Texture2D* texture,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp);
+ void OnIncomingCapturedDataInternal(
+ IMFMediaBuffer* buffer,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ VideoCaptureFrameDropReason& frame_drop_reason);
VideoFacingMode facing_mode_;
CreateMFPhotoCallbackCB create_mf_photo_callback_;
@@ -162,7 +169,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
base::queue<TakePhotoCallback> video_stream_take_photo_callbacks_;
base::WaitableEvent capture_initialize_;
base::WaitableEvent capture_error_;
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager_;
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager_;
base::Optional<int> camera_rotation_;
SEQUENCE_CHECKER(sequence_checker_);
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
index d38980b0924..4e6fef1f5ce 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
@@ -13,12 +13,13 @@
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/test/task_environment.h"
+#include "base/win/scoped_handle.h"
#include "base/win/windows_version.h"
#include "media/base/win/mf_helpers.h"
+#include "media/capture/video/win/d3d_capture_test_utils.h"
#include "media/capture/video/win/sink_filter_win.h"
#include "media/capture/video/win/video_capture_device_factory_win.h"
#include "media/capture/video/win/video_capture_device_mf_win.h"
-#include "media/capture/video/win/video_capture_dxgi_device_manager.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -67,9 +68,8 @@ class MockClient : public VideoCaptureDevice::Client {
int frame_feedback_id = 0) override {}
void OnIncomingCapturedExternalBuffer(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
base::TimeTicks reference_time,
base::TimeDelta timestamp) override {}
@@ -81,14 +81,14 @@ class MockClient : public VideoCaptureDevice::Client {
base::TimeTicks reference_,
base::TimeDelta timestamp) override {}
- void OnIncomingCapturedBufferExt(
- Buffer buffer,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
- base::TimeTicks reference_time,
- base::TimeDelta timestamp,
- gfx::Rect visible_rect,
- const VideoFrameMetadata& additional_metadata) override {}
+ MOCK_METHOD7(OnIncomingCapturedBufferExt,
+ void(Buffer,
+ const VideoCaptureFormat&,
+ const gfx::ColorSpace&,
+ base::TimeTicks,
+ base::TimeDelta,
+ gfx::Rect,
+ const VideoFrameMetadata&));
MOCK_METHOD3(OnError,
void(VideoCaptureError,
@@ -970,6 +970,40 @@ struct DepthDeviceParams {
// Depth device sometimes provides multiple video streams.
bool additional_i420_video_stream;
};
+
+class MockCaptureHandleProvider
+ : public VideoCaptureDevice::Client::Buffer::HandleProvider {
+ public:
+ // Duplicate as an writable (unsafe) shared memory region.
+ base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion() override {
+ return base::UnsafeSharedMemoryRegion();
+ }
+
+ // Duplicate as a writable (unsafe) mojo buffer.
+ mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer() override {
+ return mojo::ScopedSharedBufferHandle();
+ }
+
+ // Access a |VideoCaptureBufferHandle| for local, writable memory.
+ std::unique_ptr<VideoCaptureBufferHandle> GetHandleForInProcessAccess()
+ override {
+ return nullptr;
+ }
+
+ // Clone a |GpuMemoryBufferHandle| for IPC.
+ gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() override {
+ // Create a fake DXGI buffer handle
+ // (ensure that the fake is still a valid NT handle by using an event
+ // handle)
+ base::win::ScopedHandle fake_dxgi_handle(
+ CreateEvent(nullptr, FALSE, FALSE, nullptr));
+ gfx::GpuMemoryBufferHandle handle;
+ handle.type = gfx::GpuMemoryBufferType::DXGI_SHARED_HANDLE;
+ handle.dxgi_handle = std::move(fake_dxgi_handle);
+ return handle;
+ }
+};
+
} // namespace
const int kArbitraryValidVideoWidth = 1920;
@@ -1222,7 +1256,7 @@ class VideoCaptureDeviceMFWinTest : public ::testing::Test {
scoped_refptr<MockMFCaptureSource> capture_source_;
scoped_refptr<MockCapturePreviewSink> capture_preview_sink_;
base::test::TaskEnvironment task_environment_;
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager_;
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager_;
private:
const bool media_foundation_supported_;
@@ -1731,7 +1765,7 @@ class VideoCaptureDeviceMFWinTestWithDXGI : public VideoCaptureDeviceMFWinTest {
if (ShouldSkipD3D11Test())
GTEST_SKIP();
- dxgi_device_manager_ = VideoCaptureDXGIDeviceManager::Create();
+ dxgi_device_manager_ = DXGIDeviceManager::Create();
VideoCaptureDeviceMFWinTest::SetUp();
}
};
@@ -1784,4 +1818,95 @@ TEST_F(VideoCaptureDeviceMFWinTestWithDXGI, EnsureNV12SinkSubtype) {
capture_preview_sink_->sample_callback->OnSample(nullptr);
}
+TEST_F(VideoCaptureDeviceMFWinTestWithDXGI, DeliverGMBCaptureBuffers) {
+ if (ShouldSkipTest())
+ return;
+
+ const GUID expected_subtype = MFVideoFormat_NV12;
+ PrepareMFDeviceWithOneVideoStream(expected_subtype);
+
+ const gfx::Size expected_size(640, 480);
+
+ // Verify that an output capture buffer is reserved from the client
+ EXPECT_CALL(*client_, ReserveOutputBuffer)
+ .WillOnce(Invoke(
+ [expected_size](const gfx::Size& size, VideoPixelFormat format,
+ int feedback_id,
+ VideoCaptureDevice::Client::Buffer* capture_buffer) {
+ EXPECT_EQ(size.width(), expected_size.width());
+ EXPECT_EQ(size.height(), expected_size.height());
+ EXPECT_EQ(format, PIXEL_FORMAT_NV12);
+ capture_buffer->handle_provider =
+ std::make_unique<MockCaptureHandleProvider>();
+ return VideoCaptureDevice::Client::ReserveResult::kSucceeded;
+ }));
+
+ Microsoft::WRL::ComPtr<MockD3D11Device> mock_device(new MockD3D11Device());
+
+ // Create mock source texture (to be provided to capture device from MF
+ // capture API)
+ D3D11_TEXTURE2D_DESC mock_desc = {};
+ mock_desc.Format = DXGI_FORMAT_NV12;
+ mock_desc.Width = expected_size.width();
+ mock_desc.Height = expected_size.height();
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> mock_source_texture_2d;
+ Microsoft::WRL::ComPtr<MockD3D11Texture2D> mock_source_texture(
+ new MockD3D11Texture2D(mock_desc, mock_device.Get()));
+ EXPECT_TRUE(SUCCEEDED(
+ mock_source_texture.CopyTo(IID_PPV_ARGS(&mock_source_texture_2d))));
+
+ // Create mock target texture with matching dimensions/format
+ // (to be provided from the capture device to the capture client)
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> mock_target_texture_2d;
+ Microsoft::WRL::ComPtr<MockD3D11Texture2D> mock_target_texture(
+ new MockD3D11Texture2D(mock_desc, mock_device.Get()));
+ EXPECT_TRUE(SUCCEEDED(
+ mock_target_texture.CopyTo(IID_PPV_ARGS(&mock_target_texture_2d))));
+ // Mock OpenSharedResource call on mock D3D device to return target texture
+ EXPECT_CALL(*mock_device.Get(), DoOpenSharedResource1)
+ .WillOnce(Invoke([&mock_target_texture_2d](HANDLE resource,
+ REFIID returned_interface,
+ void** resource_out) {
+ return mock_target_texture_2d.CopyTo(returned_interface, resource_out);
+ }));
+ // Expect call to copy source texture to target on immediate context
+ ID3D11Resource* expected_source =
+ static_cast<ID3D11Resource*>(mock_source_texture_2d.Get());
+ ID3D11Resource* expected_target =
+ static_cast<ID3D11Resource*>(mock_target_texture_2d.Get());
+ EXPECT_CALL(*mock_device->mock_immediate_context_.Get(),
+ OnCopySubresourceRegion(expected_target, _, _, _, _,
+ expected_source, _, _))
+ .Times(1);
+ // Expect the client to receive a buffer containing a GMB containing the
+ // expected fake DXGI handle
+ EXPECT_CALL(*client_, OnIncomingCapturedBufferExt)
+ .WillOnce(Invoke([](VideoCaptureDevice::Client::Buffer buffer,
+ const VideoCaptureFormat&, const gfx::ColorSpace&,
+ base::TimeTicks, base::TimeDelta, gfx::Rect,
+ const VideoFrameMetadata&) {
+ gfx::GpuMemoryBufferHandle gmb_handle =
+ buffer.handle_provider->GetGpuMemoryBufferHandle();
+ EXPECT_EQ(gmb_handle.type,
+ gfx::GpuMemoryBufferType::DXGI_SHARED_HANDLE);
+ }));
+
+ // Init capture
+ VideoCaptureFormat format(expected_size, 30, media::PIXEL_FORMAT_NV12);
+ VideoCaptureParams video_capture_params;
+ video_capture_params.requested_format = format;
+ device_->AllocateAndStart(video_capture_params, std::move(client_));
+
+ // Create MF sample and provide to sample callback on capture device
+ Microsoft::WRL::ComPtr<IMFSample> sample;
+ EXPECT_TRUE(SUCCEEDED(MFCreateSample(&sample)));
+ Microsoft::WRL::ComPtr<IMFMediaBuffer> dxgi_buffer;
+ EXPECT_TRUE(SUCCEEDED(MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D),
+ mock_source_texture_2d.Get(),
+ 0, FALSE, &dxgi_buffer)));
+ EXPECT_TRUE(SUCCEEDED(sample->AddBuffer(dxgi_buffer.Get())));
+
+ capture_preview_sink_->sample_callback->OnSample(sample.Get());
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/win/video_capture_dxgi_device_manager.cc b/chromium/media/capture/video/win/video_capture_dxgi_device_manager.cc
deleted file mode 100644
index 94650aabece..00000000000
--- a/chromium/media/capture/video/win/video_capture_dxgi_device_manager.cc
+++ /dev/null
@@ -1,92 +0,0 @@
-// Copyright (c) 2020 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/capture/video/win/video_capture_dxgi_device_manager.h"
-
-#include <d3d11.h>
-#include <mfcaptureengine.h>
-#include <mfreadwrite.h>
-#include "base/logging.h"
-
-using Microsoft::WRL::ComPtr;
-
-namespace media {
-
-scoped_refptr<VideoCaptureDXGIDeviceManager>
-VideoCaptureDXGIDeviceManager::Create() {
- ComPtr<IMFDXGIDeviceManager> mf_dxgi_device_manager;
- UINT d3d_device_reset_token = 0;
- HRESULT hr = MFCreateDXGIDeviceManager(&d3d_device_reset_token,
- &mf_dxgi_device_manager);
- if (FAILED(hr)) {
- DLOG(ERROR) << "Failed to create MF DXGI device manager: "
- << logging::SystemErrorCodeToString(hr);
- return scoped_refptr<VideoCaptureDXGIDeviceManager>();
- }
- scoped_refptr<VideoCaptureDXGIDeviceManager>
- video_capture_dxgi_device_manager(new VideoCaptureDXGIDeviceManager(
- std::move(mf_dxgi_device_manager), d3d_device_reset_token));
- if (!video_capture_dxgi_device_manager->ResetDevice()) {
- // If setting a device failed, ensure that an empty scoped_refptr is
- // returned so that we fall back to software mode
- return scoped_refptr<VideoCaptureDXGIDeviceManager>();
- }
- return video_capture_dxgi_device_manager;
-}
-
-VideoCaptureDXGIDeviceManager::VideoCaptureDXGIDeviceManager(
- Microsoft::WRL::ComPtr<IMFDXGIDeviceManager> mf_dxgi_device_manager,
- UINT d3d_device_reset_token)
- : mf_dxgi_device_manager_(std::move(mf_dxgi_device_manager)),
- d3d_device_reset_token_(d3d_device_reset_token) {}
-
-VideoCaptureDXGIDeviceManager::~VideoCaptureDXGIDeviceManager() {}
-
-bool VideoCaptureDXGIDeviceManager::ResetDevice() {
- Microsoft::WRL::ComPtr<ID3D11Device> d3d_device;
- constexpr uint32_t device_flags =
- (D3D11_CREATE_DEVICE_VIDEO_SUPPORT | D3D11_CREATE_DEVICE_BGRA_SUPPORT);
- HRESULT hr = D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr,
- device_flags, nullptr, 0, D3D11_SDK_VERSION,
- &d3d_device, nullptr, nullptr);
- if (FAILED(hr)) {
- DLOG(ERROR) << "D3D11 device creation failed: "
- << logging::SystemErrorCodeToString(hr);
- return false;
- }
- hr = mf_dxgi_device_manager_->ResetDevice(d3d_device.Get(),
- d3d_device_reset_token_);
- if (FAILED(hr)) {
- DLOG(ERROR) << "Failed to reset device on MF DXGI device manager: "
- << logging::SystemErrorCodeToString(hr);
- return false;
- }
- return true;
-}
-
-void VideoCaptureDXGIDeviceManager::RegisterInCaptureEngineAttributes(
- IMFAttributes* attributes) {
- HRESULT result = attributes->SetUnknown(MF_CAPTURE_ENGINE_D3D_MANAGER,
- mf_dxgi_device_manager_.Get());
- DCHECK(SUCCEEDED(result));
-}
-
-void VideoCaptureDXGIDeviceManager::RegisterInSourceReaderAttributes(
- IMFAttributes* attributes) {
- HRESULT result = attributes->SetUnknown(MF_SOURCE_READER_D3D_MANAGER,
- mf_dxgi_device_manager_.Get());
- DCHECK(SUCCEEDED(result));
-}
-
-void VideoCaptureDXGIDeviceManager::RegisterWithMediaSource(
- ComPtr<IMFMediaSource> media_source) {
- ComPtr<IMFMediaSourceEx> source_ext;
- if (FAILED(media_source.As(&source_ext))) {
- DCHECK(false);
- return;
- }
- source_ext->SetD3DManager(mf_dxgi_device_manager_.Get());
-}
-
-} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/win/video_capture_dxgi_device_manager.h b/chromium/media/capture/video/win/video_capture_dxgi_device_manager.h
deleted file mode 100644
index d4c1bde2d44..00000000000
--- a/chromium/media/capture/video/win/video_capture_dxgi_device_manager.h
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright (c) 2020 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_DXGI_DEVICE_MANAGER_H_
-#define MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_DXGI_DEVICE_MANAGER_H_
-
-#include <mfapi.h>
-#include <mfidl.h>
-#include <wrl/client.h>
-#include "base/memory/ref_counted.h"
-#include "base/memory/scoped_refptr.h"
-#include "media/capture/capture_export.h"
-
-namespace media {
-
-class CAPTURE_EXPORT VideoCaptureDXGIDeviceManager
- : public base::RefCounted<VideoCaptureDXGIDeviceManager> {
- public:
- // Returns a VideoCaptureDXGIDeviceManager with associated D3D device set, or
- // nullptr on failure.
- static scoped_refptr<VideoCaptureDXGIDeviceManager> Create();
-
- // Associates a new D3D device with the DXGI Device Manager
- bool ResetDevice();
-
- // Registers this manager in capture engine attributes.
- void RegisterInCaptureEngineAttributes(IMFAttributes* attributes);
-
- // Registers this manager in source reader attributes.
- void RegisterInSourceReaderAttributes(IMFAttributes* attributes);
-
- // Registers this manager with a media source
- void RegisterWithMediaSource(
- Microsoft::WRL::ComPtr<IMFMediaSource> media_source);
-
- protected:
- friend class base::RefCounted<VideoCaptureDXGIDeviceManager>;
- VideoCaptureDXGIDeviceManager(
- Microsoft::WRL::ComPtr<IMFDXGIDeviceManager> mf_dxgi_device_manager,
- UINT d3d_device_reset_token);
- virtual ~VideoCaptureDXGIDeviceManager();
-
- Microsoft::WRL::ComPtr<IMFDXGIDeviceManager> mf_dxgi_device_manager_;
- UINT d3d_device_reset_token_ = 0;
-};
-
-} // namespace media
-
-#endif // MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_DXGI_DEVICE_MANAGER_H_ \ No newline at end of file