summaryrefslogtreecommitdiff
path: root/chromium/media/capture
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@qt.io>2018-05-03 13:42:47 +0200
committerAllan Sandfeld Jensen <allan.jensen@qt.io>2018-05-15 10:27:51 +0000
commit8c5c43c7b138c9b4b0bf56d946e61d3bbc111bec (patch)
treed29d987c4d7b173cf853279b79a51598f104b403 /chromium/media/capture
parent830c9e163d31a9180fadca926b3e1d7dfffb5021 (diff)
downloadqtwebengine-chromium-8c5c43c7b138c9b4b0bf56d946e61d3bbc111bec.tar.gz
BASELINE: Update Chromium to 66.0.3359.156
Change-Id: I0c9831ad39911a086b6377b16f995ad75a51e441 Reviewed-by: Michal Klocek <michal.klocek@qt.io>
Diffstat (limited to 'chromium/media/capture')
-rw-r--r--chromium/media/capture/BUILD.gn44
-rw-r--r--chromium/media/capture/content/android/screen_capture_machine_android.cc3
-rw-r--r--chromium/media/capture/content/screen_capture_device_core.cc19
-rw-r--r--chromium/media/capture/content/screen_capture_device_core.h15
-rw-r--r--chromium/media/capture/content/thread_safe_capture_oracle.cc32
-rw-r--r--chromium/media/capture/content/thread_safe_capture_oracle.h6
-rw-r--r--chromium/media/capture/content/video_capture_oracle.cc27
-rw-r--r--chromium/media/capture/content/video_capture_oracle.h9
-rw-r--r--chromium/media/capture/content/video_capture_oracle_unittest.cc53
-rw-r--r--chromium/media/capture/mojo/BUILD.gn14
-rw-r--r--chromium/media/capture/mojo/image_capture_types.cc27
-rw-r--r--chromium/media/capture/mojo/image_capture_types.h16
-rw-r--r--chromium/media/capture/mojo/video_capture.mojom109
-rw-r--r--chromium/media/capture/mojo/video_capture_types.mojom1
-rw-r--r--chromium/media/capture/mojo/video_capture_types_struct_traits.cc9
-rw-r--r--chromium/media/capture/mojo/video_capture_types_struct_traits.h2
-rw-r--r--chromium/media/capture/video/android/video_capture_device_factory_android.cc11
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.cc6
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc9
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h7
-rw-r--r--chromium/media/capture/video/chromeos/camera_metadata_utils.h5
-rw-r--r--chromium/media/capture/video/chromeos/mojo/BUILD.gn4
-rw-r--r--chromium/media/capture/video/chromeos/mojo/arc_camera3_service.mojom5
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_arc_chromeos.cc12
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc22
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h5
-rw-r--r--chromium/media/capture/video/file_video_capture_device.cc66
-rw-r--r--chromium/media/capture/video/file_video_capture_device.h9
-rw-r--r--chromium/media/capture/video/file_video_capture_device_factory.cc4
-rw-r--r--chromium/media/capture/video/file_video_capture_device_unittest.cc143
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_factory_linux.cc3
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm21
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm9
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_factory_mac.mm7
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_mac.mm18
-rw-r--r--chromium/media/capture/video/video_capture_device.h7
-rw-r--r--chromium/media/capture/video/video_capture_device_client.cc3
-rw-r--r--chromium/media/capture/video/video_capture_device_descriptor.cc31
-rw-r--r--chromium/media/capture/video/video_capture_device_descriptor.h14
-rw-r--r--chromium/media/capture/video/video_capture_device_factory.cc9
-rw-r--r--chromium/media/capture/video/video_capture_device_factory.h10
-rw-r--r--chromium/media/capture/video/video_capture_device_unittest.cc186
-rw-r--r--chromium/media/capture/video/win/capability_list_win.h28
-rw-r--r--chromium/media/capture/video/win/metrics.cc82
-rw-r--r--chromium/media/capture/video/win/metrics.h49
-rw-r--r--chromium/media/capture/video/win/sink_filter_win.cc30
-rw-r--r--chromium/media/capture/video/win/sink_filter_win.h31
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.cc62
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.h8
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win.cc1015
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win.h92
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc1188
-rw-r--r--chromium/media/capture/video/win/video_capture_device_win.cc26
53 files changed, 3101 insertions, 522 deletions
diff --git a/chromium/media/capture/BUILD.gn b/chromium/media/capture/BUILD.gn
index c7d100e6a75..9b68d54cffe 100644
--- a/chromium/media/capture/BUILD.gn
+++ b/chromium/media/capture/BUILD.gn
@@ -9,12 +9,14 @@ import("//testing/test.gni")
group("capture") {
public_deps = [
":capture_lib",
- "//media/capture/mojo:capture_types",
+ "//media/capture/mojo:image_capture",
+ "//media/capture/mojo:image_capture_types",
+ "//media/capture/mojo:video_capture",
"//services/service_manager/public/cpp",
]
}
-# Things needed by //media/capture/mojo:capture_types.
+# Things needed by //media/capture/mojo/video_capture_types.mojom.
component("capture_base") {
defines = [ "CAPTURE_IMPLEMENTATION" ]
sources = [
@@ -59,6 +61,8 @@ source_set("capture_device_specific") {
"content/thread_safe_capture_oracle.h",
"content/video_capture_oracle.cc",
"content/video_capture_oracle.h",
+ "video/blob_utils.cc",
+ "video/blob_utils.h",
"video/fake_video_capture_device.cc",
"video/fake_video_capture_device.h",
"video/fake_video_capture_device_factory.cc",
@@ -73,6 +77,7 @@ source_set("capture_device_specific") {
"video/video_capture_device_factory.cc",
"video/video_capture_device_factory.h",
]
+
public_deps = [
":capture_base",
]
@@ -82,6 +87,9 @@ source_set("capture_device_specific") {
"//gpu/command_buffer/client",
"//media",
"//media/capture/mojo:image_capture",
+ "//media/capture/mojo:image_capture_types",
+ "//media/mojo/interfaces:interfaces",
+ "//third_party/libyuv",
"//ui/gfx",
]
}
@@ -132,6 +140,8 @@ component("capture_lib") {
"video/win/capability_list_win.h",
"video/win/filter_base_win.cc",
"video/win/filter_base_win.h",
+ "video/win/metrics.cc",
+ "video/win/metrics.h",
"video/win/pin_base_win.cc",
"video/win/pin_base_win.h",
"video/win/sink_filter_observer_win.h",
@@ -160,8 +170,9 @@ component("capture_lib") {
"//base",
"//base:i18n",
"//media",
- "//media/capture/mojo:capture_types",
"//media/capture/mojo:image_capture",
+ "//media/capture/mojo:image_capture_types",
+ "//media/capture/mojo:video_capture",
"//media/mojo/interfaces:interfaces",
"//services/service_manager/public/cpp",
"//third_party/libyuv",
@@ -210,13 +221,6 @@ component("capture_lib") {
configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
}
- if (is_linux || is_chromeos || is_win) {
- sources += [
- "video/blob_utils.cc",
- "video/blob_utils.h",
- ]
- }
-
if (is_chromeos) {
sources += [
"video/chromeos/camera_buffer_factory.cc",
@@ -251,6 +255,7 @@ source_set("test_support") {
"//base/test:test_support",
"//media:test_support",
"//media/capture/mojo:image_capture",
+ "//media/capture/mojo:image_capture_types",
"//testing/gmock",
"//ui/gfx:test_support",
]
@@ -266,6 +271,7 @@ test("capture_unittests") {
"content/video_capture_oracle_unittest.cc",
"run_all_unittests.cc",
"video/fake_video_capture_device_unittest.cc",
+ "video/file_video_capture_device_unittest.cc",
"video/linux/camera_config_chromeos_unittest.cc",
"video/linux/v4l2_capture_delegate_unittest.cc",
"video/mac/video_capture_device_factory_mac_unittest.mm",
@@ -275,12 +281,17 @@ test("capture_unittests") {
"video_capture_types_unittest.cc",
]
+ data = [
+ "//media/test/data/bear.mjpeg",
+ ]
+
deps = [
":capture",
":test_support",
"//base/test:test_support",
"//media:test_support",
"//media/capture/mojo:image_capture",
+ "//media/capture/mojo:image_capture_types",
"//mojo/edk/system",
"//testing/gmock",
"//testing/gtest",
@@ -295,6 +306,19 @@ test("capture_unittests") {
}
if (is_win) {
+ sources += [ "video/win/video_capture_device_mf_win_unittest.cc" ]
+ libs = [
+ "mf.lib",
+ "mfplat.lib",
+ "mfreadwrite.lib",
+ "mfuuid.lib",
+ ]
+ ldflags = [
+ "/DELAYLOAD:mf.dll",
+ "/DELAYLOAD:mfplat.dll",
+ "/DELAYLOAD:mfreadwrite.dll",
+ ]
+
# TODO(jschuh): https://crbug.com/167187 fix size_t to int truncations.
configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
}
diff --git a/chromium/media/capture/content/android/screen_capture_machine_android.cc b/chromium/media/capture/content/android/screen_capture_machine_android.cc
index 96b141c3a84..734048e68ac 100644
--- a/chromium/media/capture/content/android/screen_capture_machine_android.cc
+++ b/chromium/media/capture/content/android/screen_capture_machine_android.cc
@@ -278,8 +278,7 @@ void ScreenCaptureMachineAndroid::MaybeCaptureForRefresh() {
if (lastFrame_.get() == nullptr)
return;
- const VideoCaptureOracle::Event event =
- VideoCaptureOracle::kActiveRefreshRequest;
+ const VideoCaptureOracle::Event event = VideoCaptureOracle::kRefreshRequest;
const base::TimeTicks start_time = base::TimeTicks::Now();
scoped_refptr<VideoFrame> frame;
ThreadSafeCaptureOracle::CaptureFrameCallback capture_frame_cb;
diff --git a/chromium/media/capture/content/screen_capture_device_core.cc b/chromium/media/capture/content/screen_capture_device_core.cc
index 8fb1c885677..38a03882a58 100644
--- a/chromium/media/capture/content/screen_capture_device_core.cc
+++ b/chromium/media/capture/content/screen_capture_device_core.cc
@@ -70,18 +70,6 @@ void ScreenCaptureDeviceCore::RequestRefreshFrame() {
if (state_ != kCapturing)
return;
- // Try to use the less resource-intensive "passive" refresh mechanism, unless
- // this is the first refresh following a Resume().
- if (force_active_refresh_once_) {
- capture_machine_->MaybeCaptureForRefresh();
- force_active_refresh_once_ = false;
- return;
- }
-
- // Make a best-effort attempt at a passive refresh, but fall-back to an active
- // refresh if that fails.
- if (oracle_proxy_->AttemptPassiveRefresh())
- return;
capture_machine_->MaybeCaptureForRefresh();
}
@@ -102,7 +90,6 @@ void ScreenCaptureDeviceCore::Resume() {
if (state_ != kSuspended)
return;
- force_active_refresh_once_ = true;
TransitionStateTo(kCapturing);
capture_machine_->Resume();
@@ -119,7 +106,7 @@ void ScreenCaptureDeviceCore::StopAndDeAllocate() {
TransitionStateTo(kIdle);
- capture_machine_->Stop(base::Bind(&base::DoNothing));
+ capture_machine_->Stop(base::DoNothing());
}
void ScreenCaptureDeviceCore::OnConsumerReportingUtilization(
@@ -140,9 +127,7 @@ void ScreenCaptureDeviceCore::CaptureStarted(bool success) {
ScreenCaptureDeviceCore::ScreenCaptureDeviceCore(
std::unique_ptr<VideoCaptureMachine> capture_machine)
- : state_(kIdle),
- capture_machine_(std::move(capture_machine)),
- force_active_refresh_once_(false) {
+ : state_(kIdle), capture_machine_(std::move(capture_machine)) {
DCHECK(capture_machine_.get());
}
diff --git a/chromium/media/capture/content/screen_capture_device_core.h b/chromium/media/capture/content/screen_capture_device_core.h
index ac3f6ca9e27..5f5bdcbc32f 100644
--- a/chromium/media/capture/content/screen_capture_device_core.h
+++ b/chromium/media/capture/content/screen_capture_device_core.h
@@ -50,15 +50,8 @@ class CAPTURE_EXPORT VideoCaptureMachine {
// overloading or under-utilization.
virtual bool IsAutoThrottlingEnabled() const;
- // Called by ScreenCaptureDeviceCore when it failed to satisfy a "refresh
- // frame" request by attempting to resurrect the last video frame from the
- // buffer pool (this is referred to as the "passive" refresh approach). The
- // failure can happen for a number of reasons (e.g., the oracle decided to
- // change resolution, or consumers of the last video frame are not yet
- // finished with it).
- //
// The implementation of this method should consult the oracle, using the
- // kActiveRefreshRequest event type, to decide whether to initiate a new frame
+ // kRefreshRequest event type, to decide whether to initiate a new frame
// capture, and then do so if the oracle agrees.
virtual void MaybeCaptureForRefresh() = 0;
@@ -121,12 +114,6 @@ class CAPTURE_EXPORT ScreenCaptureDeviceCore
// component of the system with direct access to |client_|.
scoped_refptr<ThreadSafeCaptureOracle> oracle_proxy_;
- // After Resume(), some unknown amount of time has passed, and the content of
- // the capture source may have changed. This flag is used to ensure that the
- // passive refresh mechanism is not used for the first refresh frame following
- // a Resume().
- bool force_active_refresh_once_;
-
DISALLOW_COPY_AND_ASSIGN(ScreenCaptureDeviceCore);
};
diff --git a/chromium/media/capture/content/thread_safe_capture_oracle.cc b/chromium/media/capture/content/thread_safe_capture_oracle.cc
index cb11937dec4..81be8964c3f 100644
--- a/chromium/media/capture/content/thread_safe_capture_oracle.cc
+++ b/chromium/media/capture/content/thread_safe_capture_oracle.cc
@@ -100,20 +100,9 @@ bool ThreadSafeCaptureOracle::ObserveEventAndDecideCapture(
coded_size.SetSize(base::bits::Align(visible_size.width(), 16),
base::bits::Align(visible_size.height(), 16));
- if (event == VideoCaptureOracle::kPassiveRefreshRequest) {
- output_buffer = client_->ResurrectLastOutputBuffer(
- coded_size, params_.requested_format.pixel_format,
- params_.requested_format.pixel_storage, frame_number);
- if (!output_buffer.is_valid()) {
- TRACE_EVENT_INSTANT0("gpu.capture", "ResurrectionFailed",
- TRACE_EVENT_SCOPE_THREAD);
- return false;
- }
- } else {
- output_buffer = client_->ReserveOutputBuffer(
- coded_size, params_.requested_format.pixel_format,
- params_.requested_format.pixel_storage, frame_number);
- }
+ output_buffer = client_->ReserveOutputBuffer(
+ coded_size, params_.requested_format.pixel_format,
+ params_.requested_format.pixel_storage, frame_number);
// Get the current buffer pool utilization and attenuate it: The utilization
// reported to the oracle is in terms of a maximum sustainable amount (not
@@ -188,21 +177,6 @@ bool ThreadSafeCaptureOracle::ObserveEventAndDecideCapture(
return true;
}
-bool ThreadSafeCaptureOracle::AttemptPassiveRefresh() {
- const base::TimeTicks refresh_time = base::TimeTicks::Now();
-
- scoped_refptr<VideoFrame> frame;
- CaptureFrameCallback capture_callback;
- if (!ObserveEventAndDecideCapture(VideoCaptureOracle::kPassiveRefreshRequest,
- gfx::Rect(), refresh_time, &frame,
- &capture_callback)) {
- return false;
- }
-
- capture_callback.Run(std::move(frame), refresh_time, true);
- return true;
-}
-
gfx::Size ThreadSafeCaptureOracle::GetCaptureSize() const {
base::AutoLock guard(lock_);
return oracle_.capture_size();
diff --git a/chromium/media/capture/content/thread_safe_capture_oracle.h b/chromium/media/capture/content/thread_safe_capture_oracle.h
index f36da6ac318..cab73733cae 100644
--- a/chromium/media/capture/content/thread_safe_capture_oracle.h
+++ b/chromium/media/capture/content/thread_safe_capture_oracle.h
@@ -60,12 +60,6 @@ class CAPTURE_EXPORT ThreadSafeCaptureOracle
scoped_refptr<VideoFrame>* storage,
CaptureFrameCallback* callback);
- // Attempt to re-send the last frame to the VideoCaptureDevice::Client.
- // Returns true if successful. This can fail if the last frame is no longer
- // available in the buffer pool, or if the VideoCaptureOracle decides to
- // reject the "passive" refresh.
- bool AttemptPassiveRefresh();
-
base::TimeDelta min_capture_period() const {
return oracle_.min_capture_period();
}
diff --git a/chromium/media/capture/content/video_capture_oracle.cc b/chromium/media/capture/content/video_capture_oracle.cc
index b0ffd8b0be4..5e0469f45f3 100644
--- a/chromium/media/capture/content/video_capture_oracle.cc
+++ b/chromium/media/capture/content/video_capture_oracle.cc
@@ -101,7 +101,6 @@ constexpr base::TimeDelta VideoCaptureOracle::kDefaultMinCapturePeriod;
VideoCaptureOracle::VideoCaptureOracle(bool enable_auto_throttling)
: auto_throttling_enabled_(enable_auto_throttling),
next_frame_number_(0),
- source_is_dirty_(true),
last_successfully_delivered_frame_number_(-1),
num_frames_pending_(0),
smoothing_sampler_(kDefaultMinCapturePeriod),
@@ -151,11 +150,6 @@ bool VideoCaptureOracle::ObserveEventAndDecideCapture(
}
last_event_time_[event] = event_time;
- // If the event indicates a change to the source content, set a flag that will
- // prevent passive refresh requests until a capture is made.
- if (event != kActiveRefreshRequest && event != kPassiveRefreshRequest)
- source_is_dirty_ = true;
-
bool should_sample = false;
duration_of_next_frame_ = base::TimeDelta();
switch (event) {
@@ -178,12 +172,7 @@ bool VideoCaptureOracle::ObserveEventAndDecideCapture(
break;
}
- case kPassiveRefreshRequest:
- if (source_is_dirty_)
- break;
- // Intentional flow-through to next case here!
- case kActiveRefreshRequest:
- case kMouseCursorUpdate:
+ case kRefreshRequest:
// Only allow non-compositor samplings when content has not recently been
// animating, and only if there are no samplings currently in progress.
if (num_frames_pending_ == 0) {
@@ -237,8 +226,6 @@ bool VideoCaptureOracle::ObserveEventAndDecideCapture(
void VideoCaptureOracle::RecordCapture(double pool_utilization) {
DCHECK(std::isfinite(pool_utilization) && pool_utilization >= 0.0);
- source_is_dirty_ = false;
-
smoothing_sampler_.RecordSample();
const base::TimeTicks timestamp = GetFrameTimestamp(next_frame_number_);
content_sampler_.RecordSample(timestamp);
@@ -289,9 +276,6 @@ bool VideoCaptureOracle::CompleteCapture(int frame_number,
if (!capture_was_successful) {
VLOG(2) << "Capture of frame #" << frame_number << " was not successful.";
- // Since capture of this frame might have been required for capturing an
- // update to the source content, set the dirty flag.
- source_is_dirty_ = true;
return false;
}
@@ -343,7 +327,6 @@ void VideoCaptureOracle::CancelAllCaptures() {
//
// ...which simplifies to:
num_frames_pending_ = 0;
- source_is_dirty_ = true;
}
void VideoCaptureOracle::RecordConsumerFeedback(int frame_number,
@@ -380,12 +363,8 @@ const char* VideoCaptureOracle::EventAsString(Event event) {
switch (event) {
case kCompositorUpdate:
return "compositor";
- case kActiveRefreshRequest:
- return "active_refresh";
- case kPassiveRefreshRequest:
- return "passive_refresh";
- case kMouseCursorUpdate:
- return "mouse";
+ case kRefreshRequest:
+ return "refresh";
case kNumEvents:
break;
}
diff --git a/chromium/media/capture/content/video_capture_oracle.h b/chromium/media/capture/content/video_capture_oracle.h
index 92f82885d27..e6bbc0ebb75 100644
--- a/chromium/media/capture/content/video_capture_oracle.h
+++ b/chromium/media/capture/content/video_capture_oracle.h
@@ -24,9 +24,7 @@ class CAPTURE_EXPORT VideoCaptureOracle {
public:
enum Event {
kCompositorUpdate,
- kActiveRefreshRequest,
- kPassiveRefreshRequest,
- kMouseCursorUpdate,
+ kRefreshRequest,
kNumEvents,
};
@@ -169,11 +167,6 @@ class CAPTURE_EXPORT VideoCaptureOracle {
// sanity-check that event times are monotonically non-decreasing.
base::TimeTicks last_event_time_[kNumEvents];
- // Set to true if there have been updates to the source content that were not
- // sampled. This will prevent passive refresh requests from being satisfied
- // when an active refresh should be used instead.
- bool source_is_dirty_;
-
// Updated by the last call to ObserveEventAndDecideCapture() with the
// estimated duration of the next frame to sample. This is zero if the method
// returned false.
diff --git a/chromium/media/capture/content/video_capture_oracle_unittest.cc b/chromium/media/capture/content/video_capture_oracle_unittest.cc
index 873ed0515fb..912ab8888b6 100644
--- a/chromium/media/capture/content/video_capture_oracle_unittest.cc
+++ b/chromium/media/capture/content/video_capture_oracle_unittest.cc
@@ -257,7 +257,7 @@ TEST(VideoCaptureOracleTest, SamplesAtCorrectTimesAroundRefreshRequests) {
for (int i = 0; i < 10; ++i) {
t += refresh_interval;
ASSERT_FALSE(oracle.ObserveEventAndDecideCapture(
- VideoCaptureOracle::kPassiveRefreshRequest, gfx::Rect(), t));
+ VideoCaptureOracle::kRefreshRequest, gfx::Rect(), t));
}
// Now, complete the oustanding compositor-based capture and continue
@@ -267,8 +267,8 @@ TEST(VideoCaptureOracleTest, SamplesAtCorrectTimesAroundRefreshRequests) {
did_complete_a_capture = false;
for (int i = 0; i < 10; ++i) {
t += refresh_interval;
- if (oracle.ObserveEventAndDecideCapture(
- VideoCaptureOracle::kPassiveRefreshRequest, gfx::Rect(), t)) {
+ if (oracle.ObserveEventAndDecideCapture(VideoCaptureOracle::kRefreshRequest,
+ gfx::Rect(), t)) {
const int frame_number = oracle.next_frame_number();
oracle.RecordCapture(0.0);
ASSERT_TRUE(oracle.CompleteCapture(frame_number, true, &ignored));
@@ -281,8 +281,8 @@ TEST(VideoCaptureOracleTest, SamplesAtCorrectTimesAroundRefreshRequests) {
for (int i = 0; i <= 10; ++i) {
ASSERT_GT(10, i) << "BUG: Seems like it'll never happen!";
t += refresh_interval;
- if (oracle.ObserveEventAndDecideCapture(
- VideoCaptureOracle::kPassiveRefreshRequest, gfx::Rect(), t)) {
+ if (oracle.ObserveEventAndDecideCapture(VideoCaptureOracle::kRefreshRequest,
+ gfx::Rect(), t)) {
break;
}
}
@@ -294,14 +294,14 @@ TEST(VideoCaptureOracleTest, SamplesAtCorrectTimesAroundRefreshRequests) {
for (int i = 0; i < 10; ++i) {
t += refresh_interval;
ASSERT_FALSE(oracle.ObserveEventAndDecideCapture(
- VideoCaptureOracle::kPassiveRefreshRequest, gfx::Rect(), t));
+ VideoCaptureOracle::kRefreshRequest, gfx::Rect(), t));
}
ASSERT_TRUE(oracle.CompleteCapture(frame_number, true, &ignored));
for (int i = 0; i <= 10; ++i) {
ASSERT_GT(10, i) << "BUG: Seems like it'll never happen!";
t += refresh_interval;
- if (oracle.ObserveEventAndDecideCapture(
- VideoCaptureOracle::kPassiveRefreshRequest, gfx::Rect(), t)) {
+ if (oracle.ObserveEventAndDecideCapture(VideoCaptureOracle::kRefreshRequest,
+ gfx::Rect(), t)) {
break;
}
}
@@ -361,43 +361,6 @@ TEST(VideoCaptureOracleTest, DoesNotRapidlyChangeCaptureSize) {
}
}
-// Tests that un-sampled compositor update event will fail the next passive
-// refresh request, forcing an active refresh.
-TEST(VideoCaptureOracleTest, EnforceActiveRefreshForUnsampledCompositorUpdate) {
- const gfx::Rect damage_rect(Get720pSize());
- const base::TimeDelta event_increment = Get30HzPeriod() * 2;
- const base::TimeDelta short_event_increment = Get30HzPeriod() / 4;
-
- VideoCaptureOracle oracle(false);
- oracle.SetMinCapturePeriod(Get30HzPeriod());
- oracle.SetCaptureSizeConstraints(Get720pSize(), Get720pSize(), false);
-
- base::TimeTicks t = InitialTestTimeTicks();
- int last_frame_number;
- base::TimeTicks ignored;
-
- // CompositorUpdate is sampled normally.
- t += event_increment;
- ASSERT_TRUE(oracle.ObserveEventAndDecideCapture(
- VideoCaptureOracle::kCompositorUpdate, damage_rect, t));
- last_frame_number = oracle.next_frame_number();
- oracle.RecordCapture(0.0);
- ASSERT_TRUE(oracle.CompleteCapture(last_frame_number, true, &ignored));
-
- // Next CompositorUpdate comes too soon and won't be sampled.
- t += short_event_increment;
- ASSERT_FALSE(oracle.ObserveEventAndDecideCapture(
- VideoCaptureOracle::kCompositorUpdate, damage_rect, t));
-
- // Then the next valid PassiveRefreshRequest will fail to enforce an
- // ActiveRefreshRequest to capture the updated content.
- t += event_increment;
- ASSERT_FALSE(oracle.ObserveEventAndDecideCapture(
- VideoCaptureOracle::kPassiveRefreshRequest, damage_rect, t));
- ASSERT_TRUE(oracle.ObserveEventAndDecideCapture(
- VideoCaptureOracle::kActiveRefreshRequest, damage_rect, t));
-}
-
namespace {
// Tests that VideoCaptureOracle can auto-throttle by stepping the capture size
diff --git a/chromium/media/capture/mojo/BUILD.gn b/chromium/media/capture/mojo/BUILD.gn
index fc19ee1920d..1d8d22185f0 100644
--- a/chromium/media/capture/mojo/BUILD.gn
+++ b/chromium/media/capture/mojo/BUILD.gn
@@ -4,8 +4,9 @@
import("//mojo/public/tools/bindings/mojom.gni")
-mojom("capture_types") {
+mojom("video_capture") {
sources = [
+ "video_capture.mojom",
"video_capture_types.mojom",
]
@@ -21,3 +22,14 @@ mojom("image_capture") {
"image_capture.mojom",
]
}
+
+source_set("image_capture_types") {
+ sources = [
+ "image_capture_types.cc",
+ "image_capture_types.h",
+ ]
+
+ deps = [
+ "//media/capture/mojo:image_capture",
+ ]
+}
diff --git a/chromium/media/capture/mojo/image_capture_types.cc b/chromium/media/capture/mojo/image_capture_types.cc
new file mode 100644
index 00000000000..daec7bc003e
--- /dev/null
+++ b/chromium/media/capture/mojo/image_capture_types.cc
@@ -0,0 +1,27 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/mojo/image_capture_types.h"
+
+namespace mojo {
+
+media::mojom::PhotoStatePtr CreateEmptyPhotoState() {
+ media::mojom::PhotoStatePtr photo_capabilities =
+ media::mojom::PhotoState::New();
+ photo_capabilities->height = media::mojom::Range::New();
+ photo_capabilities->width = media::mojom::Range::New();
+ photo_capabilities->exposure_compensation = media::mojom::Range::New();
+ photo_capabilities->color_temperature = media::mojom::Range::New();
+ photo_capabilities->iso = media::mojom::Range::New();
+ photo_capabilities->brightness = media::mojom::Range::New();
+ photo_capabilities->contrast = media::mojom::Range::New();
+ photo_capabilities->saturation = media::mojom::Range::New();
+ photo_capabilities->sharpness = media::mojom::Range::New();
+ photo_capabilities->zoom = media::mojom::Range::New();
+ photo_capabilities->torch = false;
+ photo_capabilities->red_eye_reduction = media::mojom::RedEyeReduction::NEVER;
+ return photo_capabilities;
+}
+
+} // namespace mojo \ No newline at end of file
diff --git a/chromium/media/capture/mojo/image_capture_types.h b/chromium/media/capture/mojo/image_capture_types.h
new file mode 100644
index 00000000000..4f030f63a34
--- /dev/null
+++ b/chromium/media/capture/mojo/image_capture_types.h
@@ -0,0 +1,16 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_MOJO_IMAGE_CAPTURE_TYPES_H_
+#define MEDIA_CAPTURE_MOJO_IMAGE_CAPTURE_TYPES_H_
+
+#include "media/capture/mojo/image_capture.mojom.h"
+
+namespace mojo {
+
+media::mojom::PhotoStatePtr CreateEmptyPhotoState();
+
+} // namespace mojo
+
+#endif // MEDIA_CAPTURE_MOJO_IMAGE_CAPTURE_TYPES_H_ \ No newline at end of file
diff --git a/chromium/media/capture/mojo/video_capture.mojom b/chromium/media/capture/mojo/video_capture.mojom
new file mode 100644
index 00000000000..41531c59884
--- /dev/null
+++ b/chromium/media/capture/mojo/video_capture.mojom
@@ -0,0 +1,109 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+module media.mojom;
+
+import "media/mojo/interfaces/media_types.mojom";
+import "media/capture/mojo/video_capture_types.mojom";
+import "ui/gfx/geometry/mojo/geometry.mojom";
+
+// This file decribes the communication between a given Renderer Host interface
+// implementation (VideoCaptureHost) and a remote VideoCaptureObserver.
+// VideoCaptureHost offers a stateless part (GetDeviceSupportedFormats() and
+// GetDeviceFormatsInUse()) that can be invoked at any time, and a stateful part
+// sandwiched between Start() and Stop(). A Client's OnStateChanged() can be
+// notified any time during the stateful part. The stateful part is composed of
+// a preamble where a Renderer client sends a command to Start() the capture,
+// registering itself as the associated remote VideoCaptureObserver. The Host
+// will then create and pre- share a number of buffers:
+//
+// Observer VideoCaptureHost
+// | ---> StartCapture |
+// | OnStateChanged(STARTED) <--- |
+// | OnBufferCreated(1) <--- |
+// | OnBufferCreated(2) <--- |
+// = =
+// and capture will then refer to those preallocated buffers:
+// | OnBufferReady(1) <--- |
+// | OnBufferReady(2) <--- |
+// | ---> ReleaseBuffer(1) |
+// | OnBufferReady(1) <--- |
+// | ---> ReleaseBuffer(2) |
+// | OnBufferReady(2) <--- |
+// | ---> ReleaseBuffer(1) |
+// | ... |
+// = =
+// Buffers can be reallocated with a larger size, if e.g. resolution changes.
+// | (resolution change) |
+// | OnBufferDestroyed(1) <--- |
+// | OnBufferCreated(3) <--- |
+// | OnBufferReady(3) <--- |
+// | ---> ReleaseBuffer(2) |
+// | OnBufferDestroyed(2) <--- |
+// | OnBufferCreated(5) <--- |
+// | OnBufferReady(5) <--- |
+// = =
+// In the communication epilogue, the client Stop()s capture, receiving a last
+// status update:
+// | ---> StopCapture |
+// | OnStateChanged(STOPPED) <--- |
+
+enum VideoCaptureState {
+ STARTED,
+ PAUSED,
+ RESUMED,
+ STOPPED,
+ FAILED,
+ ENDED,
+};
+
+// Interface for notifications from Browser/Host back to Renderer/Client. This
+// interface is used between VideoCaptureHost.Start() and Stop().
+interface VideoCaptureObserver {
+ // Gets notified about a VideoCaptureState update.
+ OnStateChanged(VideoCaptureState state);
+
+ // A new buffer identified by |buffer_id| has been created for video capture.
+ OnBufferCreated(int32 buffer_id, handle<shared_buffer> handle_fd);
+
+ // |buffer_id| has video capture data with |info| containing the associated
+ // VideoFrame constituent parts.
+ OnBufferReady(int32 buffer_id, VideoFrameInfo info);
+
+ // |buffer_id| has been released by VideoCaptureHost and must not be used.
+ OnBufferDestroyed(int32 buffer_id);
+};
+
+interface VideoCaptureHost {
+ // Start the |session_id| session with |params|. The video capture will be
+ // identified as |device_id|, a new id picked by the renderer process.
+ // |observer| will be used for notifications.
+ Start(int32 device_id, int32 session_id, VideoCaptureParams params,
+ VideoCaptureObserver observer);
+
+ // Closes the video capture specified by |device_id|.
+ Stop(int32 device_id);
+
+ // Pauses the video capture specified by |device_id|.
+ Pause(int32 device_id);
+
+ // Resume |device_id| video capture, in |session_id| and with |params|.
+ Resume(int32 device_id, int32 session_id, VideoCaptureParams params);
+
+ // Requests that the video capturer send a frame "soon" (e.g., to resolve
+ // picture loss or quality issues).
+ RequestRefreshFrame(int32 device_id);
+
+ // Indicates that a renderer has finished using a previously shared buffer.
+ ReleaseBuffer(int32 device_id, int32 buffer_id,
+ double consumer_resource_utilization);
+
+ // Get the formats supported by a device referenced by |session_id|.
+ GetDeviceSupportedFormats(int32 device_id, int32 session_id)
+ => (array<VideoCaptureFormat> formats_supported);
+
+ // Get the format(s) in use by a device referenced by |session_id|.
+ GetDeviceFormatsInUse(int32 device_id, int32 session_id)
+ => (array<VideoCaptureFormat> formats_in_use);
+};
diff --git a/chromium/media/capture/mojo/video_capture_types.mojom b/chromium/media/capture/mojo/video_capture_types.mojom
index 0e85e64b5f2..68b4b19215c 100644
--- a/chromium/media/capture/mojo/video_capture_types.mojom
+++ b/chromium/media/capture/mojo/video_capture_types.mojom
@@ -36,7 +36,6 @@ enum VideoCaptureApi {
ANDROID_API2_LEGACY,
ANDROID_API2_FULL,
ANDROID_API2_LIMITED,
- ANDROID_TANGO,
UNKNOWN
};
diff --git a/chromium/media/capture/mojo/video_capture_types_struct_traits.cc b/chromium/media/capture/mojo/video_capture_types_struct_traits.cc
index 30f347c1d42..2849a812545 100644
--- a/chromium/media/capture/mojo/video_capture_types_struct_traits.cc
+++ b/chromium/media/capture/mojo/video_capture_types_struct_traits.cc
@@ -122,8 +122,6 @@ EnumTraits<media::mojom::VideoCaptureApi, media::VideoCaptureApi>::ToMojom(
return media::mojom::VideoCaptureApi::ANDROID_API2_FULL;
case media::VideoCaptureApi::ANDROID_API2_LIMITED:
return media::mojom::VideoCaptureApi::ANDROID_API2_LIMITED;
- case media::VideoCaptureApi::ANDROID_TANGO:
- return media::mojom::VideoCaptureApi::ANDROID_TANGO;
case media::VideoCaptureApi::UNKNOWN:
return media::mojom::VideoCaptureApi::UNKNOWN;
}
@@ -163,9 +161,6 @@ bool EnumTraits<media::mojom::VideoCaptureApi, media::VideoCaptureApi>::
case media::mojom::VideoCaptureApi::ANDROID_API2_LIMITED:
*output = media::VideoCaptureApi::ANDROID_API2_LIMITED;
return true;
- case media::mojom::VideoCaptureApi::ANDROID_TANGO:
- *output = media::VideoCaptureApi::ANDROID_TANGO;
- return true;
case media::mojom::VideoCaptureApi::UNKNOWN:
*output = media::VideoCaptureApi::UNKNOWN;
return true;
@@ -254,8 +249,10 @@ bool StructTraits<media::mojom::VideoCaptureDeviceDescriptorDataView,
media::VideoCaptureDeviceDescriptor>::
Read(media::mojom::VideoCaptureDeviceDescriptorDataView data,
media::VideoCaptureDeviceDescriptor* output) {
- if (!data.ReadDisplayName(&(output->display_name)))
+ std::string display_name;
+ if (!data.ReadDisplayName(&display_name))
return false;
+ output->set_display_name(display_name);
if (!data.ReadDeviceId(&(output->device_id)))
return false;
if (!data.ReadModelId(&(output->model_id)))
diff --git a/chromium/media/capture/mojo/video_capture_types_struct_traits.h b/chromium/media/capture/mojo/video_capture_types_struct_traits.h
index e78fb29c0d5..e85bbc739c7 100644
--- a/chromium/media/capture/mojo/video_capture_types_struct_traits.h
+++ b/chromium/media/capture/mojo/video_capture_types_struct_traits.h
@@ -137,7 +137,7 @@ struct StructTraits<media::mojom::VideoCaptureDeviceDescriptorDataView,
media::VideoCaptureDeviceDescriptor> {
static const std::string& display_name(
const media::VideoCaptureDeviceDescriptor& input) {
- return input.display_name;
+ return input.display_name();
}
static const std::string& device_id(
diff --git a/chromium/media/capture/video/android/video_capture_device_factory_android.cc b/chromium/media/capture/video/android/video_capture_device_factory_android.cc
index 8f576f17c86..300f7e437c3 100644
--- a/chromium/media/capture/video/android/video_capture_device_factory_android.cc
+++ b/chromium/media/capture/video/android/video_capture_device_factory_android.cc
@@ -116,9 +116,9 @@ void VideoCaptureDeviceFactoryAndroid::GetSupportedFormats(
pixel_format = PIXEL_FORMAT_I420;
break;
default:
- // TODO(mcasas): break here and let the enumeration continue with
- // UNKNOWN pixel format because the platform doesn't know until capture,
- // but some unrelated tests timeout https://crbug.com/644910.
+ // TODO(crbug.com/792260): break here and let the enumeration continue
+ // with UNKNOWN pixel format because the platform doesn't know until
+ // capture, but some unrelated tests timeout https://crbug.com/644910.
continue;
}
VideoCaptureFormat capture_format(
@@ -127,7 +127,7 @@ void VideoCaptureDeviceFactoryAndroid::GetSupportedFormats(
Java_VideoCaptureFactory_getCaptureFormatFramerate(env, format),
pixel_format);
capture_formats->push_back(capture_format);
- DVLOG(1) << device.display_name << " "
+ DVLOG(1) << device.display_name() << " "
<< VideoCaptureFormat::ToString(capture_format);
}
}
@@ -145,7 +145,8 @@ bool VideoCaptureDeviceFactoryAndroid::IsLegacyOrDeprecatedDevice(
VideoCaptureDeviceFactory*
VideoCaptureDeviceFactory::CreateVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager) {
+ gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager,
+ MojoJpegDecodeAcceleratorFactoryCB jda_factory) {
return new VideoCaptureDeviceFactoryAndroid();
}
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
index b139c7d9e7e..58b411749d2 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
@@ -177,15 +177,15 @@ void CameraHalDelegate::GetDeviceDescriptors(
switch (camera_info->facing) {
case arc::mojom::CameraFacing::CAMERA_FACING_BACK:
desc.facing = VideoFacingMode::MEDIA_VIDEO_FACING_ENVIRONMENT;
- desc.display_name = std::string("Back Camera");
+ desc.set_display_name("Back Camera");
break;
case arc::mojom::CameraFacing::CAMERA_FACING_FRONT:
desc.facing = VideoFacingMode::MEDIA_VIDEO_FACING_USER;
- desc.display_name = std::string("Front Camera");
+ desc.set_display_name("Front Camera");
break;
case arc::mojom::CameraFacing::CAMERA_FACING_EXTERNAL:
desc.facing = VideoFacingMode::MEDIA_VIDEO_FACING_NONE;
- desc.display_name = std::string("External Camera");
+ desc.set_display_name("External Camera");
break;
// Mojo validates the input parameters for us so we don't need to worry
// about malformed values.
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
index 690a47a0ec7..2b8dc26c951 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
@@ -113,10 +113,12 @@ bool CameraHalDispatcherImpl::StartThreads() {
return true;
}
-bool CameraHalDispatcherImpl::Start() {
+bool CameraHalDispatcherImpl::Start(
+ MojoJpegDecodeAcceleratorFactoryCB jda_factory) {
if (!StartThreads()) {
return false;
}
+ jda_factory_ = jda_factory;
base::WaitableEvent started(base::WaitableEvent::ResetPolicy::MANUAL,
base::WaitableEvent::InitialState::NOT_SIGNALED);
blocking_io_task_runner_->PostTask(
@@ -192,6 +194,11 @@ void CameraHalDispatcherImpl::RegisterClient(
VLOG(1) << "Camera HAL client registered";
}
+void CameraHalDispatcherImpl::GetJpegDecodeAccelerator(
+ media::mojom::JpegDecodeAcceleratorRequest jda_request) {
+ jda_factory_.Run(std::move(jda_request));
+}
+
void CameraHalDispatcherImpl::CreateSocket(base::WaitableEvent* started) {
DCHECK(blocking_io_task_runner_->BelongsToCurrentThread());
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
index f5da5a51f26..da3591c4a74 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
+++ b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
@@ -12,6 +12,7 @@
#include "base/threading/thread.h"
#include "media/capture/capture_export.h"
#include "media/capture/video/chromeos/mojo/arc_camera3_service.mojom.h"
+#include "media/capture/video/video_capture_device_factory.h"
#include "mojo/edk/embedder/scoped_platform_handle.h"
#include "mojo/public/cpp/bindings/binding_set.h"
#include "mojo/public/cpp/bindings/interface_ptr_set.h"
@@ -46,7 +47,7 @@ class CAPTURE_EXPORT CameraHalDispatcherImpl final
public:
static CameraHalDispatcherImpl* GetInstance();
- bool Start();
+ bool Start(MojoJpegDecodeAcceleratorFactoryCB jda_factory);
void AddClientObserver(std::unique_ptr<CameraClientObserver> observer);
@@ -55,6 +56,8 @@ class CAPTURE_EXPORT CameraHalDispatcherImpl final
// CameraHalDispatcher implementations.
void RegisterServer(arc::mojom::CameraHalServerPtr server) final;
void RegisterClient(arc::mojom::CameraHalClientPtr client) final;
+ void GetJpegDecodeAccelerator(
+ media::mojom::JpegDecodeAcceleratorRequest jda_request) final;
private:
friend struct base::DefaultSingletonTraits<CameraHalDispatcherImpl>;
@@ -103,6 +106,8 @@ class CAPTURE_EXPORT CameraHalDispatcherImpl final
std::set<std::unique_ptr<CameraClientObserver>> client_observers_;
+ MojoJpegDecodeAcceleratorFactoryCB jda_factory_;
+
DISALLOW_COPY_AND_ASSIGN(CameraHalDispatcherImpl);
};
diff --git a/chromium/media/capture/video/chromeos/camera_metadata_utils.h b/chromium/media/capture/video/chromeos/camera_metadata_utils.h
index a2d27dd227f..f7e38373e4a 100644
--- a/chromium/media/capture/video/chromeos/camera_metadata_utils.h
+++ b/chromium/media/capture/video/chromeos/camera_metadata_utils.h
@@ -2,6 +2,9 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_METADATA_UTILS_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_METADATA_UTILS_H_
+
#include "media/capture/video/chromeos/mojo/camera_metadata.mojom.h"
namespace media {
@@ -14,3 +17,5 @@ void MergeMetadata(arc::mojom::CameraMetadataPtr* to,
const arc::mojom::CameraMetadataPtr& from);
} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_METADATA_UTILS_H_
diff --git a/chromium/media/capture/video/chromeos/mojo/BUILD.gn b/chromium/media/capture/video/chromeos/mojo/BUILD.gn
index b81323bb436..667353c938d 100644
--- a/chromium/media/capture/video/chromeos/mojo/BUILD.gn
+++ b/chromium/media/capture/video/chromeos/mojo/BUILD.gn
@@ -12,4 +12,8 @@ mojom("arc_camera3") {
"camera_metadata.mojom",
"camera_metadata_tags.mojom",
]
+
+ deps = [
+ "//media/mojo/interfaces",
+ ]
}
diff --git a/chromium/media/capture/video/chromeos/mojo/arc_camera3_service.mojom b/chromium/media/capture/video/chromeos/mojo/arc_camera3_service.mojom
index 4c3056d57cd..dfef34d912d 100644
--- a/chromium/media/capture/video/chromeos/mojo/arc_camera3_service.mojom
+++ b/chromium/media/capture/video/chromeos/mojo/arc_camera3_service.mojom
@@ -7,6 +7,7 @@
module arc.mojom;
import "media/capture/video/chromeos/mojo/camera_common.mojom";
+import "media/mojo/interfaces/jpeg_decode_accelerator.mojom";
// The ARC++ camera HAL v3 Mojo dispatcher. The dispatcher acts as a proxy and
// waits for the server and the clients to register. There can only be one
@@ -24,6 +25,10 @@ interface CameraHalDispatcher {
// A CameraHalClient calls RegisterClient to register itself with the
// dispatcher.
RegisterClient@1(CameraHalClient client);
+
+ // Get JpegDecodeAccelerator from dispatcher.
+ [MinVersion=1] GetJpegDecodeAccelerator@2(
+ media.mojom.JpegDecodeAccelerator& jda_request);
};
// The ARC++ camera HAL v3 Mojo server.
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_arc_chromeos.cc b/chromium/media/capture/video/chromeos/video_capture_device_arc_chromeos.cc
index 934c117cc25..b37a934812d 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_arc_chromeos.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_device_arc_chromeos.cc
@@ -9,6 +9,7 @@
#include <utility>
#include "base/bind_helpers.h"
+#include "base/location.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/platform_thread.h"
#include "chromeos/dbus/dbus_thread_manager.h"
@@ -119,11 +120,12 @@ void VideoCaptureDeviceArcChromeOS::SuspendImminent(
power_manager::SuspendImminent::Reason reason) {
capture_task_runner_->PostTask(
FROM_HERE,
- base::BindOnce(&VideoCaptureDeviceArcChromeOS::CloseDevice,
- weak_ptr_factory_.GetWeakPtr(),
- BindToCurrentLoop(chromeos::DBusThreadManager::Get()
- ->GetPowerManagerClient()
- ->GetSuspendReadinessCallback())));
+ base::BindOnce(
+ &VideoCaptureDeviceArcChromeOS::CloseDevice,
+ weak_ptr_factory_.GetWeakPtr(),
+ BindToCurrentLoop(chromeos::DBusThreadManager::Get()
+ ->GetPowerManagerClient()
+ ->GetSuspendReadinessCallback(FROM_HERE))));
}
void VideoCaptureDeviceArcChromeOS::SuspendDone(
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
index 9e56361a875..3e3bec8a4ca 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
@@ -19,10 +19,11 @@ gpu::GpuMemoryBufferManager* g_gpu_buffer_manager = nullptr;
VideoCaptureDeviceFactoryChromeOS::VideoCaptureDeviceFactoryChromeOS(
scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer,
- gpu::GpuMemoryBufferManager* gpu_buffer_manager)
+ gpu::GpuMemoryBufferManager* gpu_buffer_manager,
+ MojoJpegDecodeAcceleratorFactoryCB jda_factory)
: task_runner_for_screen_observer_(task_runner_for_screen_observer),
camera_hal_ipc_thread_("CameraHalIpcThread"),
- initialized_(Init()) {
+ initialized_(Init(jda_factory)) {
g_gpu_buffer_manager = gpu_buffer_manager;
}
@@ -68,8 +69,13 @@ bool VideoCaptureDeviceFactoryChromeOS::ShouldEnable() {
// Checks whether the Chrome OS binary which provides the HAL v3 camera
// service is installed on the device. If the binary exists we assume the
// device is using the new camera HAL v3 stack.
+ //
+ // TODO(jcliang): Remove kArcCamera3Service once we've fully transitioned all
+ // boards to the new package.
const base::FilePath kArcCamera3Service("/usr/bin/arc_camera3_service");
- return base::PathExists(kArcCamera3Service);
+ const base::FilePath kCrosCameraService("/usr/bin/cros_camera_service");
+ return base::PathExists(kArcCamera3Service) ||
+ base::PathExists(kCrosCameraService);
}
// static
@@ -84,14 +90,15 @@ void VideoCaptureDeviceFactoryChromeOS::SetBufferManagerForTesting(
g_gpu_buffer_manager = buffer_manager;
}
-bool VideoCaptureDeviceFactoryChromeOS::Init() {
+bool VideoCaptureDeviceFactoryChromeOS::Init(
+ MojoJpegDecodeAcceleratorFactoryCB jda_factory) {
if (!camera_hal_ipc_thread_.Start()) {
LOG(ERROR) << "Module thread failed to start";
return false;
}
if (!CameraHalDispatcherImpl::GetInstance()->IsStarted() &&
- !CameraHalDispatcherImpl::GetInstance()->Start()) {
+ !CameraHalDispatcherImpl::GetInstance()->Start(jda_factory)) {
LOG(ERROR) << "Failed to start CameraHalDispatcherImpl";
return false;
}
@@ -107,7 +114,8 @@ bool VideoCaptureDeviceFactoryChromeOS::Init() {
VideoCaptureDeviceFactory*
VideoCaptureDeviceFactory::CreateVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer,
- gpu::GpuMemoryBufferManager* gpu_buffer_manager) {
+ gpu::GpuMemoryBufferManager* gpu_buffer_manager,
+ MojoJpegDecodeAcceleratorFactoryCB jda_factory) {
// On Chrome OS we have to support two use cases:
//
// 1. For devices that have the camera HAL v3 service running on Chrome OS,
@@ -119,7 +127,7 @@ VideoCaptureDeviceFactory::CreateVideoCaptureDeviceFactory(
// v3.
if (VideoCaptureDeviceFactoryChromeOS::ShouldEnable()) {
return new VideoCaptureDeviceFactoryChromeOS(
- task_runner_for_screen_observer, gpu_buffer_manager);
+ task_runner_for_screen_observer, gpu_buffer_manager, jda_factory);
} else {
return new VideoCaptureDeviceFactoryLinux(task_runner_for_screen_observer);
}
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
index 2887f49670b..d621591358f 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
+++ b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
@@ -19,7 +19,8 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
explicit VideoCaptureDeviceFactoryChromeOS(
scoped_refptr<base::SingleThreadTaskRunner>
task_runner_for_screen_observer,
- gpu::GpuMemoryBufferManager* gpu_buffer_manager);
+ gpu::GpuMemoryBufferManager* gpu_buffer_manager,
+ MojoJpegDecodeAcceleratorFactoryCB jda_factory);
~VideoCaptureDeviceFactoryChromeOS() override;
@@ -45,7 +46,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
private:
// Initializes the factory. The factory is functional only after this call
// succeeds.
- bool Init();
+ bool Init(MojoJpegDecodeAcceleratorFactoryCB jda_factory);
const scoped_refptr<base::SingleThreadTaskRunner>
task_runner_for_screen_observer_;
diff --git a/chromium/media/capture/video/file_video_capture_device.cc b/chromium/media/capture/video/file_video_capture_device.cc
index 57f4b58db6a..b047113d800 100644
--- a/chromium/media/capture/video/file_video_capture_device.cc
+++ b/chromium/media/capture/video/file_video_capture_device.cc
@@ -15,6 +15,8 @@
#include "base/strings/string_piece.h"
#include "base/strings/string_util.h"
#include "base/threading/thread_task_runner_handle.h"
+#include "media/capture/mojo/image_capture_types.h"
+#include "media/capture/video/blob_utils.h"
#include "media/capture/video_capture_types.h"
#include "media/filters/jpeg_parser.h"
@@ -331,6 +333,56 @@ void FileVideoCaptureDevice::StopAndDeAllocate() {
capture_thread_.Stop();
}
+void FileVideoCaptureDevice::GetPhotoState(GetPhotoStateCallback callback) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ auto photo_capabilities = mojo::CreateEmptyPhotoState();
+
+ int height = capture_format_.frame_size.height();
+ photo_capabilities->height = mojom::Range::New(height, height, height, 0);
+ int width = capture_format_.frame_size.width();
+ photo_capabilities->width = mojom::Range::New(width, width, width, 0);
+
+ std::move(callback).Run(std::move(photo_capabilities));
+}
+
+void FileVideoCaptureDevice::SetPhotoOptions(mojom::PhotoSettingsPtr settings,
+ SetPhotoOptionsCallback callback) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ if (settings->has_height &&
+ settings->height != capture_format_.frame_size.height()) {
+ return;
+ }
+
+ if (settings->has_width &&
+ settings->width != capture_format_.frame_size.width()) {
+ return;
+ }
+
+ if (settings->has_torch && settings->torch)
+ return;
+
+ if (settings->has_red_eye_reduction && settings->red_eye_reduction)
+ return;
+
+ if (settings->has_exposure_compensation || settings->has_color_temperature ||
+ settings->has_iso || settings->has_brightness || settings->has_contrast ||
+ settings->has_saturation || settings->has_sharpness ||
+ settings->has_zoom || settings->has_fill_light_mode) {
+ return;
+ }
+
+ std::move(callback).Run(true);
+}
+
+void FileVideoCaptureDevice::TakePhoto(TakePhotoCallback callback) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ base::AutoLock lock(lock_);
+
+ take_photo_callbacks_.push(std::move(callback));
+}
+
void FileVideoCaptureDevice::OnAllocateAndStart(
const VideoCaptureParams& params,
std::unique_ptr<VideoCaptureDevice::Client> client) {
@@ -365,6 +417,7 @@ void FileVideoCaptureDevice::OnCaptureTask() {
DCHECK(capture_thread_.task_runner()->BelongsToCurrentThread());
if (!client_)
return;
+ base::AutoLock lock(lock_);
// Give the captured frame to the client.
int frame_size = 0;
@@ -376,6 +429,19 @@ void FileVideoCaptureDevice::OnCaptureTask() {
first_ref_time_ = current_time;
client_->OnIncomingCapturedData(frame_ptr, frame_size, capture_format_, 0,
current_time, current_time - first_ref_time_);
+
+ // Process waiting photo callbacks
+ while (!take_photo_callbacks_.empty()) {
+ auto cb = std::move(take_photo_callbacks_.front());
+ take_photo_callbacks_.pop();
+
+ mojom::BlobPtr blob = Blobify(frame_ptr, frame_size, capture_format_);
+ if (!blob)
+ continue;
+
+ std::move(cb).Run(std::move(blob));
+ }
+
// Reschedule next CaptureTask.
const base::TimeDelta frame_interval =
base::TimeDelta::FromMicroseconds(1E6 / capture_format_.frame_rate);
diff --git a/chromium/media/capture/video/file_video_capture_device.h b/chromium/media/capture/video/file_video_capture_device.h
index 161efd59616..7149498fa40 100644
--- a/chromium/media/capture/video/file_video_capture_device.h
+++ b/chromium/media/capture/video/file_video_capture_device.h
@@ -51,6 +51,10 @@ class CAPTURE_EXPORT FileVideoCaptureDevice : public VideoCaptureDevice {
const VideoCaptureParams& params,
std::unique_ptr<VideoCaptureDevice::Client> client) override;
void StopAndDeAllocate() override;
+ void GetPhotoState(GetPhotoStateCallback callback) override;
+ void SetPhotoOptions(mojom::PhotoSettingsPtr settings,
+ SetPhotoOptionsCallback callback) override;
+ void TakePhoto(TakePhotoCallback callback) override;
private:
// Opens a given file |file_path| for reading, and stores collected format
@@ -84,6 +88,11 @@ class CAPTURE_EXPORT FileVideoCaptureDevice : public VideoCaptureDevice {
// The system time when we receive the first frame.
base::TimeTicks first_ref_time_;
+ // Guards the below variables from concurrent access between methods running
+ // on the main thread and |capture_thread_|.
+ base::Lock lock_;
+ base::queue<TakePhotoCallback> take_photo_callbacks_;
+
DISALLOW_COPY_AND_ASSIGN(FileVideoCaptureDevice);
};
diff --git a/chromium/media/capture/video/file_video_capture_device_factory.cc b/chromium/media/capture/video/file_video_capture_device_factory.cc
index ea361b243ef..ff643d8488a 100644
--- a/chromium/media/capture/video/file_video_capture_device_factory.cc
+++ b/chromium/media/capture/video/file_video_capture_device_factory.cc
@@ -32,10 +32,10 @@ std::unique_ptr<VideoCaptureDevice> FileVideoCaptureDeviceFactory::CreateDevice(
base::AssertBlockingAllowed();
#if defined(OS_WIN)
return std::unique_ptr<VideoCaptureDevice>(new FileVideoCaptureDevice(
- base::FilePath(base::SysUTF8ToWide(device_descriptor.display_name))));
+ base::FilePath(base::SysUTF8ToWide(device_descriptor.display_name()))));
#else
return std::unique_ptr<VideoCaptureDevice>(new FileVideoCaptureDevice(
- base::FilePath(device_descriptor.display_name)));
+ base::FilePath(device_descriptor.display_name())));
#endif
}
diff --git a/chromium/media/capture/video/file_video_capture_device_unittest.cc b/chromium/media/capture/video/file_video_capture_device_unittest.cc
new file mode 100644
index 00000000000..17df56f94ff
--- /dev/null
+++ b/chromium/media/capture/video/file_video_capture_device_unittest.cc
@@ -0,0 +1,143 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+#include <utility>
+
+#include "base/test/scoped_task_environment.h"
+#include "media/base/bind_to_current_loop.h"
+#include "media/base/test_data_util.h"
+#include "media/capture/video/file_video_capture_device.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::InvokeWithoutArgs;
+
+namespace media {
+
+namespace {
+
+class MockClient : public VideoCaptureDevice::Client {
+ public:
+ void OnIncomingCapturedData(const uint8_t* data,
+ int length,
+ const VideoCaptureFormat& frame_format,
+ int clockwise_rotation,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ int frame_feedback_id = 0) {}
+
+ MOCK_METHOD4(
+ ReserveOutputBuffer,
+ Buffer(const gfx::Size&, VideoPixelFormat, VideoPixelStorage, int));
+
+ void OnIncomingCapturedBuffer(Buffer buffer,
+ const VideoCaptureFormat& format,
+ base::TimeTicks reference_,
+ base::TimeDelta timestamp) override {}
+
+ void OnIncomingCapturedBufferExt(
+ Buffer buffer,
+ const VideoCaptureFormat& format,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ gfx::Rect visible_rect,
+ const VideoFrameMetadata& additional_metadata) override {}
+
+ MOCK_METHOD4(
+ ResurrectLastOutputBuffer,
+ Buffer(const gfx::Size&, VideoPixelFormat, VideoPixelStorage, int));
+
+ MOCK_METHOD2(OnError, void(const base::Location&, const std::string&));
+
+ double GetBufferPoolUtilization() const override { return 0.0; }
+
+ MOCK_METHOD0(OnStarted, void());
+};
+
+class MockImageCaptureClient {
+ public:
+ // GMock doesn't support move-only arguments, so we use this forward method.
+ void DoOnGetPhotoState(mojom::PhotoStatePtr state) {
+ state_ = std::move(state);
+ }
+
+ const mojom::PhotoState* state() { return state_.get(); }
+
+ MOCK_METHOD1(OnCorrectSetPhotoOptions, void(bool));
+
+ // GMock doesn't support move-only arguments, so we use this forward method.
+ void DoOnPhotoTaken(mojom::BlobPtr blob) {
+ EXPECT_TRUE(blob);
+ OnCorrectPhotoTaken();
+ }
+ MOCK_METHOD0(OnCorrectPhotoTaken, void(void));
+
+ private:
+ mojom::PhotoStatePtr state_;
+};
+
+} // namespace
+
+class FileVideoCaptureDeviceTest : public ::testing::Test {
+ protected:
+ FileVideoCaptureDeviceTest() : client_(new MockClient()) {}
+
+ void SetUp() override {
+ EXPECT_CALL(*client_, OnError(_, _)).Times(0);
+ EXPECT_CALL(*client_, OnStarted());
+ device_ = std::make_unique<FileVideoCaptureDevice>(
+ GetTestDataFilePath("bear.mjpeg"));
+ device_->AllocateAndStart(VideoCaptureParams(), std::move(client_));
+ }
+
+ void TearDown() override { device_->StopAndDeAllocate(); }
+
+ std::unique_ptr<MockClient> client_;
+ MockImageCaptureClient image_capture_client_;
+ std::unique_ptr<VideoCaptureDevice> device_;
+ VideoCaptureFormat last_format_;
+ base::test::ScopedTaskEnvironment scoped_task_environment_;
+};
+
+TEST_F(FileVideoCaptureDeviceTest, GetPhotoState) {
+ VideoCaptureDevice::GetPhotoStateCallback scoped_get_callback =
+ base::BindOnce(&MockImageCaptureClient::DoOnGetPhotoState,
+ base::Unretained(&image_capture_client_));
+
+ device_->GetPhotoState(std::move(scoped_get_callback));
+
+ const mojom::PhotoState* state = image_capture_client_.state();
+ EXPECT_TRUE(state);
+}
+
+TEST_F(FileVideoCaptureDeviceTest, SetPhotoOptions) {
+ mojom::PhotoSettingsPtr photo_settings = mojom::PhotoSettings::New();
+ VideoCaptureDevice::SetPhotoOptionsCallback scoped_set_callback =
+ base::BindOnce(&MockImageCaptureClient::OnCorrectSetPhotoOptions,
+ base::Unretained(&image_capture_client_));
+ EXPECT_CALL(image_capture_client_, OnCorrectSetPhotoOptions(true)).Times(1);
+ device_->SetPhotoOptions(std::move(photo_settings),
+ std::move(scoped_set_callback));
+}
+
+TEST_F(FileVideoCaptureDeviceTest, TakePhoto) {
+ VideoCaptureDevice::TakePhotoCallback scoped_callback =
+ base::BindOnce(&MockImageCaptureClient::DoOnPhotoTaken,
+ base::Unretained(&image_capture_client_));
+
+ base::RunLoop run_loop;
+ base::Closure quit_closure = BindToCurrentLoop(run_loop.QuitClosure());
+ EXPECT_CALL(image_capture_client_, OnCorrectPhotoTaken())
+ .Times(1)
+ .WillOnce(InvokeWithoutArgs([quit_closure]() { quit_closure.Run(); }));
+ device_->TakePhoto(std::move(scoped_callback));
+ run_loop.Run();
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc b/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
index d363659916d..9dd68b780e0 100644
--- a/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
+++ b/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
@@ -292,7 +292,8 @@ void VideoCaptureDeviceFactoryLinux::GetSupportedFormats(
VideoCaptureDeviceFactory*
VideoCaptureDeviceFactory::CreateVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager) {
+ gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager,
+ MojoJpegDecodeAcceleratorFactoryCB jda_factory) {
return new VideoCaptureDeviceFactoryLinux(ui_task_runner);
}
#endif
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
index 761338bf68f..6395d015f37 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
@@ -190,7 +190,7 @@ void ExtractBaseAddressAndLength(char** base_address,
gfx::Size(dimensions.width, dimensions.height),
frameRate.maxFrameRate, pixelFormat);
formats->push_back(format);
- DVLOG(2) << descriptor.display_name << " "
+ DVLOG(2) << descriptor.display_name() << " "
<< media::VideoCaptureFormat::ToString(format);
}
}
@@ -440,16 +440,22 @@ void ExtractBaseAddressAndLength(char** base_address,
gfx::Size(dimensions.width, dimensions.height), frameRate_,
FourCCToChromiumPixelFormat(fourcc));
+ // We have certain format expectation for capture output:
+ // For MJPEG, |sampleBuffer| is expected to always be a CVBlockBuffer.
+ // For other formats, |sampleBuffer| may be either CVBlockBuffer or
+ // CVImageBuffer. CVBlockBuffer seems to be used in the context of CoreMedia
+ // plugins/virtual cameras. In order to find out whether it is CVBlockBuffer
+ // or CVImageBuffer we call CMSampleBufferGetImageBuffer() and check if the
+ // return value is nil.
char* baseAddress = 0;
size_t frameSize = 0;
CVImageBufferRef videoFrame = nil;
- if (fourcc == kCMVideoCodecType_JPEG_OpenDML) {
- ExtractBaseAddressAndLength(&baseAddress, &frameSize, sampleBuffer);
- } else {
+ if (fourcc != kCMVideoCodecType_JPEG_OpenDML) {
videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the frame and calculate frame size.
- if (CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) ==
- kCVReturnSuccess) {
+ if (videoFrame &&
+ CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly) ==
+ kCVReturnSuccess) {
baseAddress = static_cast<char*>(CVPixelBufferGetBaseAddress(videoFrame));
frameSize = CVPixelBufferGetHeight(videoFrame) *
CVPixelBufferGetBytesPerRow(videoFrame);
@@ -457,6 +463,9 @@ void ExtractBaseAddressAndLength(char** base_address,
videoFrame = nil;
}
}
+ if (!videoFrame) {
+ ExtractBaseAddressAndLength(&baseAddress, &frameSize, sampleBuffer);
+ }
{
base::AutoLock lock(lock_);
diff --git a/chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm b/chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm
index 10d598c0f0b..3e9a602ac5c 100644
--- a/chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm
@@ -386,14 +386,15 @@ void VideoCaptureDeviceDeckLinkMac::EnumerateDevices(
CFStringRef format_name = NULL;
if (display_mode->GetName(&format_name) == S_OK) {
VideoCaptureDeviceDescriptor descriptor;
- descriptor.display_name =
- JoinDeviceNameAndFormat(device_display_name, format_name);
+ descriptor.set_display_name(
+ JoinDeviceNameAndFormat(device_display_name, format_name));
descriptor.device_id =
JoinDeviceNameAndFormat(device_model_name, format_name);
descriptor.capture_api = VideoCaptureApi::MACOSX_DECKLINK;
descriptor.transport_type = VideoCaptureTransportType::OTHER_TRANSPORT;
device_descriptors->push_back(descriptor);
- DVLOG(1) << "Blackmagic camera enumerated: " << descriptor.display_name;
+ DVLOG(1) << "Blackmagic camera enumerated: "
+ << descriptor.display_name();
}
display_mode.Release();
}
@@ -448,7 +449,7 @@ void VideoCaptureDeviceDeckLinkMac::EnumerateDeviceCapabilities(
gfx::Size(display_mode->GetWidth(), display_mode->GetHeight()),
GetDisplayModeFrameRate(display_mode), PIXEL_FORMAT_UNKNOWN);
supported_formats->push_back(format);
- DVLOG(2) << device.display_name << " "
+ DVLOG(2) << device.display_name() << " "
<< VideoCaptureFormat::ToString(format);
display_mode.Release();
}
diff --git a/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm b/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm
index d3ff0a405d9..7ce93c4aaec 100644
--- a/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm
@@ -36,7 +36,7 @@ static bool IsDeviceBlacklisted(
base::CompareCase::INSENSITIVE_ASCII);
}
DVLOG_IF(2, is_device_blacklisted)
- << "Blacklisted camera: " << descriptor.display_name
+ << "Blacklisted camera: " << descriptor.display_name()
<< ", id: " << descriptor.device_id;
return is_device_blacklisted;
}
@@ -111,7 +111,7 @@ void VideoCaptureDeviceFactoryMac::GetSupportedFormats(
break;
case VideoCaptureApi::MACOSX_DECKLINK:
DVLOG(1) << "Enumerating video capture capabilities "
- << device.display_name;
+ << device.display_name();
VideoCaptureDeviceDeckLinkMac::EnumerateDeviceCapabilities(
device, supported_formats);
break;
@@ -124,7 +124,8 @@ void VideoCaptureDeviceFactoryMac::GetSupportedFormats(
VideoCaptureDeviceFactory*
VideoCaptureDeviceFactory::CreateVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager) {
+ gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager,
+ MojoJpegDecodeAcceleratorFactoryCB jda_factory) {
return new VideoCaptureDeviceFactoryMac();
}
diff --git a/chromium/media/capture/video/mac/video_capture_device_mac.mm b/chromium/media/capture/video/mac/video_capture_device_mac.mm
index bbba1817dc9..f79ff509ab7 100644
--- a/chromium/media/capture/video/mac/video_capture_device_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_mac.mm
@@ -25,6 +25,7 @@
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/time.h"
#include "media/base/timestamp_constants.h"
+#include "media/capture/mojo/image_capture_types.h"
#import "media/capture/video/mac/video_capture_device_avfoundation_mac.h"
#include "ui/gfx/geometry/size.h"
@@ -315,7 +316,7 @@ void VideoCaptureDeviceMac::AllocateAndStart(
client_ = std::move(client);
if (device_descriptor_.capture_api == VideoCaptureApi::MACOSX_AVFOUNDATION)
LogMessage("Using AVFoundation for device: " +
- device_descriptor_.display_name);
+ device_descriptor_.display_name());
NSString* deviceId =
[NSString stringWithUTF8String:device_descriptor_.device_id.c_str()];
@@ -392,27 +393,14 @@ void VideoCaptureDeviceMac::TakePhoto(TakePhotoCallback callback) {
void VideoCaptureDeviceMac::GetPhotoState(GetPhotoStateCallback callback) {
DCHECK(task_runner_->BelongsToCurrentThread());
- auto photo_state = mojom::PhotoState::New();
+ auto photo_state = mojo::CreateEmptyPhotoState();
- photo_state->exposure_compensation = mojom::Range::New();
- photo_state->color_temperature = mojom::Range::New();
- photo_state->iso = mojom::Range::New();
-
- photo_state->brightness = mojom::Range::New();
- photo_state->contrast = mojom::Range::New();
- photo_state->saturation = mojom::Range::New();
- photo_state->sharpness = mojom::Range::New();
-
- photo_state->zoom = mojom::Range::New();
-
- photo_state->red_eye_reduction = mojom::RedEyeReduction::NEVER;
photo_state->height = mojom::Range::New(
capture_format_.frame_size.height(), capture_format_.frame_size.height(),
capture_format_.frame_size.height(), 0 /* step */);
photo_state->width = mojom::Range::New(
capture_format_.frame_size.width(), capture_format_.frame_size.width(),
capture_format_.frame_size.width(), 0 /* step */);
- photo_state->torch = false;
std::move(callback).Run(std::move(photo_state));
}
diff --git a/chromium/media/capture/video/video_capture_device.h b/chromium/media/capture/video/video_capture_device.h
index c26e5976b5d..e0855d3204c 100644
--- a/chromium/media/capture/video/video_capture_device.h
+++ b/chromium/media/capture/video/video_capture_device.h
@@ -283,9 +283,10 @@ class CAPTURE_EXPORT VideoCaptureDevice
SetPhotoOptionsCallback callback);
// Asynchronously takes a photo, possibly reconfiguring the capture objects
- // and/or interrupting the capture flow. Runs |callback| on the thread
- // where TakePhoto() is called, if the photo was successfully taken. On
- // failure, drops callback without invoking it.
+ // and/or interrupting the capture flow. Runs |callback|, if the photo was
+ // successfully taken. On failure, drops callback without invoking it.
+ // Note that |callback| may be run on a thread different from the thread
+ // where TakePhoto() was called.
using TakePhotoCallback = base::OnceCallback<void(mojom::BlobPtr blob)>;
virtual void TakePhoto(TakePhotoCallback callback);
diff --git a/chromium/media/capture/video/video_capture_device_client.cc b/chromium/media/capture/video/video_capture_device_client.cc
index 3b0063e2ce9..4a064af3df1 100644
--- a/chromium/media/capture/video/video_capture_device_client.cc
+++ b/chromium/media/capture/video/video_capture_device_client.cc
@@ -104,7 +104,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
base::TimeTicks reference_time,
base::TimeDelta timestamp,
int frame_feedback_id) {
- TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData");
+ TRACE_EVENT0("media", "VideoCaptureDeviceClient::OnIncomingCapturedData");
DCHECK_EQ(VideoPixelStorage::CPU, format.pixel_storage);
if (last_captured_pixel_format_ != format.pixel_format) {
@@ -235,6 +235,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
// platforms.
#if defined(OS_WIN)
flip = true;
+ FALLTHROUGH;
#endif
case PIXEL_FORMAT_ARGB:
origin_colorspace = libyuv::FOURCC_ARGB;
diff --git a/chromium/media/capture/video/video_capture_device_descriptor.cc b/chromium/media/capture/video/video_capture_device_descriptor.cc
index b5a34f34e35..8c320b02935 100644
--- a/chromium/media/capture/video/video_capture_device_descriptor.cc
+++ b/chromium/media/capture/video/video_capture_device_descriptor.cc
@@ -5,8 +5,17 @@
#include "media/capture/video/video_capture_device_descriptor.h"
#include "base/logging.h"
+#include "base/strings/string_util.h"
namespace media {
+namespace {
+std::string TrimDisplayName(const std::string& display_name) {
+ std::string trimmed_name;
+ base::TrimWhitespaceASCII(display_name, base::TrimPositions::TRIM_TRAILING,
+ &trimmed_name);
+ return trimmed_name;
+}
+} // namespace
VideoCaptureDeviceDescriptor::VideoCaptureDeviceDescriptor()
: facing(VideoFacingMode::MEDIA_VIDEO_FACING_NONE),
@@ -18,11 +27,11 @@ VideoCaptureDeviceDescriptor::VideoCaptureDeviceDescriptor(
const std::string& device_id,
VideoCaptureApi capture_api,
VideoCaptureTransportType transport_type)
- : display_name(display_name),
- device_id(device_id),
+ : device_id(device_id),
facing(VideoFacingMode::MEDIA_VIDEO_FACING_NONE),
capture_api(capture_api),
- transport_type(transport_type) {}
+ transport_type(transport_type),
+ display_name_(TrimDisplayName(display_name)) {}
VideoCaptureDeviceDescriptor::VideoCaptureDeviceDescriptor(
const std::string& display_name,
@@ -31,12 +40,12 @@ VideoCaptureDeviceDescriptor::VideoCaptureDeviceDescriptor(
VideoCaptureApi capture_api,
VideoCaptureTransportType transport_type,
VideoFacingMode facing)
- : display_name(display_name),
- device_id(device_id),
+ : device_id(device_id),
model_id(model_id),
facing(facing),
capture_api(capture_api),
- transport_type(transport_type) {}
+ transport_type(transport_type),
+ display_name_(TrimDisplayName(display_name)) {}
VideoCaptureDeviceDescriptor::~VideoCaptureDeviceDescriptor() = default;
@@ -79,8 +88,6 @@ const char* VideoCaptureDeviceDescriptor::GetCaptureApiTypeString() const {
return "Camera API2 Full";
case VideoCaptureApi::ANDROID_API2_LIMITED:
return "Camera API2 Limited";
- case VideoCaptureApi::ANDROID_TANGO:
- return "Tango API";
default:
NOTREACHED() << "Unknown Video Capture API type: "
<< static_cast<int>(capture_api);
@@ -90,8 +97,12 @@ const char* VideoCaptureDeviceDescriptor::GetCaptureApiTypeString() const {
std::string VideoCaptureDeviceDescriptor::GetNameAndModel() const {
if (model_id.empty())
- return display_name;
- return display_name + " (" + model_id + ")";
+ return display_name_;
+ return display_name_ + " (" + model_id + ')';
+}
+
+void VideoCaptureDeviceDescriptor::set_display_name(const std::string& name) {
+ display_name_ = TrimDisplayName(name);
}
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_device_descriptor.h b/chromium/media/capture/video/video_capture_device_descriptor.h
index 465c5f6bf3e..51cd5bb906e 100644
--- a/chromium/media/capture/video/video_capture_device_descriptor.h
+++ b/chromium/media/capture/video/video_capture_device_descriptor.h
@@ -26,7 +26,6 @@ enum class VideoCaptureApi {
ANDROID_API2_LEGACY,
ANDROID_API2_FULL,
ANDROID_API2_LIMITED,
- ANDROID_TANGO,
UNKNOWN
};
@@ -41,6 +40,11 @@ enum class VideoCaptureTransportType {
// |device_id| represents a unique id of a physical device. Since the same
// physical device may be accessible through different APIs |capture_api|
// disambiguates the API.
+// TODO(tommi): Given that this struct has become more complex with private
+// members, methods that are not just direct getters/setters
+// (e.g., GetNameAndModel), let's turn it into a class in order to properly
+// conform with the style guide and protect the integrity of the data that the
+// class owns.
struct CAPTURE_EXPORT VideoCaptureDeviceDescriptor {
public:
VideoCaptureDeviceDescriptor();
@@ -73,7 +77,10 @@ struct CAPTURE_EXPORT VideoCaptureDeviceDescriptor {
// Friendly name of a device, plus the model identifier in parentheses.
std::string GetNameAndModel() const;
- std::string display_name; // Name that is intended for display in the UI
+ // Name that is intended for display in the UI.
+ const std::string& display_name() const { return display_name_; }
+ void set_display_name(const std::string& name);
+
std::string device_id;
// A unique hardware identifier of the capture device.
// It is of the form "[vid]:[pid]" when a USB device is detected, and empty
@@ -99,6 +106,9 @@ struct CAPTURE_EXPORT VideoCaptureDeviceDescriptor {
};
base::Optional<CameraCalibration> camera_calibration;
+
+ private:
+ std::string display_name_; // Name that is intended for display in the UI
};
using VideoCaptureDeviceDescriptors = std::vector<VideoCaptureDeviceDescriptor>;
diff --git a/chromium/media/capture/video/video_capture_device_factory.cc b/chromium/media/capture/video/video_capture_device_factory.cc
index 370189783ab..63e03bcae13 100644
--- a/chromium/media/capture/video/video_capture_device_factory.cc
+++ b/chromium/media/capture/video/video_capture_device_factory.cc
@@ -18,7 +18,8 @@ namespace media {
std::unique_ptr<VideoCaptureDeviceFactory>
VideoCaptureDeviceFactory::CreateFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_buffer_manager) {
+ gpu::GpuMemoryBufferManager* gpu_buffer_manager,
+ MojoJpegDecodeAcceleratorFactoryCB jda_factory) {
const base::CommandLine* command_line =
base::CommandLine::ForCurrentProcess();
// Use a Fake or File Video Device Factory if the command line flags are
@@ -41,7 +42,8 @@ VideoCaptureDeviceFactory::CreateFactory(
// |ui_task_runner| is needed for the Linux ChromeOS factory to retrieve
// screen rotations.
return std::unique_ptr<VideoCaptureDeviceFactory>(
- CreateVideoCaptureDeviceFactory(ui_task_runner, gpu_buffer_manager));
+ CreateVideoCaptureDeviceFactory(ui_task_runner, gpu_buffer_manager,
+ jda_factory));
}
}
@@ -57,7 +59,8 @@ VideoCaptureDeviceFactory::~VideoCaptureDeviceFactory() = default;
VideoCaptureDeviceFactory*
VideoCaptureDeviceFactory::CreateVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_buffer_manager) {
+ gpu::GpuMemoryBufferManager* gpu_buffer_manager,
+ MojoJpegDecodeAcceleratorFactoryCB jda_factory) {
NOTIMPLEMENTED();
return NULL;
}
diff --git a/chromium/media/capture/video/video_capture_device_factory.h b/chromium/media/capture/video/video_capture_device_factory.h
index 6eb11d1e7e0..74768524a58 100644
--- a/chromium/media/capture/video/video_capture_device_factory.h
+++ b/chromium/media/capture/video/video_capture_device_factory.h
@@ -10,9 +10,13 @@
#include "base/threading/thread_checker.h"
#include "gpu/command_buffer/client/gpu_memory_buffer_manager.h"
#include "media/capture/video/video_capture_device.h"
+#include "media/mojo/interfaces/jpeg_decode_accelerator.mojom.h"
namespace media {
+using MojoJpegDecodeAcceleratorFactoryCB =
+ base::RepeatingCallback<void(media::mojom::JpegDecodeAcceleratorRequest)>;
+
// VideoCaptureDeviceFactory is the base class for creation of video capture
// devices in the different platforms. VCDFs are created by MediaStreamManager
// on UI thread and plugged into VideoCaptureManager, who owns and operates them
@@ -29,7 +33,8 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactory {
public:
static std::unique_ptr<VideoCaptureDeviceFactory> CreateFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_buffer_manager);
+ gpu::GpuMemoryBufferManager* gpu_buffer_manager,
+ MojoJpegDecodeAcceleratorFactoryCB jpeg_decoder_factory);
VideoCaptureDeviceFactory();
virtual ~VideoCaptureDeviceFactory();
@@ -59,7 +64,8 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactory {
private:
static VideoCaptureDeviceFactory* CreateVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_buffer_manager);
+ gpu::GpuMemoryBufferManager* gpu_buffer_manager,
+ MojoJpegDecodeAcceleratorFactoryCB jda_factory);
DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactory);
};
diff --git a/chromium/media/capture/video/video_capture_device_unittest.cc b/chromium/media/capture/video/video_capture_device_unittest.cc
index 92c30161778..9e9f87440f8 100644
--- a/chromium/media/capture/video/video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_unittest.cc
@@ -28,9 +28,11 @@
#include "testing/gtest/include/gtest/gtest.h"
#if defined(OS_WIN)
+#include <mfcaptureengine.h>
#include "base/win/scoped_com_initializer.h"
#include "base/win/windows_version.h" // For fine-grained suppression.
#include "media/capture/video/win/video_capture_device_factory_win.h"
+#include "media/capture/video/win/video_capture_device_mf_win.h"
#endif
#if defined(OS_MACOSX)
@@ -86,9 +88,16 @@
#define MAYBE_GetPhotoState DISABLED_GetPhotoState
#endif
+// Wrap the TEST_P macro into another one to allow to preprocess |test_name|
+// macros. Needed until https://github.com/google/googletest/issues/389 is
+// fixed.
+#define WRAPPED_TEST_P(test_case_name, test_name) \
+ TEST_P(test_case_name, test_name)
+
using ::testing::_;
using ::testing::Invoke;
using ::testing::SaveArg;
+using ::testing::Return;
namespace media {
namespace {
@@ -101,16 +110,32 @@ void DumpError(const base::Location& location, const std::string& message) {
DPLOG(ERROR) << location.ToString() << " " << message;
}
-#if defined(OS_ANDROID)
-static bool IsDeviceUsableForTesting(
- const VideoCaptureDeviceDescriptor& descriptor) {
- // Android deprecated/legacy devices and Tango cameras capture on a single
- // thread, which is occupied by the tests, so nothing gets actually delivered.
- // TODO(mcasas): use those devices' test mode to deliver frames in a
- // background thread, https://crbug.com/626857
- return !VideoCaptureDeviceFactoryAndroid::IsLegacyOrDeprecatedDevice(
- descriptor.device_id) &&
- descriptor.capture_api != VideoCaptureApi::ANDROID_TANGO;
+enum VideoCaptureImplementationTweak {
+ NONE,
+#if defined(OS_WIN)
+ WIN_MEDIA_FOUNDATION
+#endif
+};
+
+#if defined(OS_WIN)
+class MockMFPhotoCallback final : public IMFCaptureEngineOnSampleCallback {
+ public:
+ ~MockMFPhotoCallback() {}
+
+ MOCK_METHOD2(DoQueryInterface, HRESULT(REFIID, void**));
+ MOCK_METHOD0(DoAddRef, ULONG(void));
+ MOCK_METHOD0(DoRelease, ULONG(void));
+ MOCK_METHOD1(DoOnSample, HRESULT(IMFSample*));
+
+ STDMETHOD(QueryInterface)(REFIID riid, void** object) override {
+ return DoQueryInterface(riid, object);
+ }
+
+ STDMETHOD_(ULONG, AddRef)() override { return DoAddRef(); }
+
+ STDMETHOD_(ULONG, Release)() override { return DoRelease(); }
+
+ STDMETHOD(OnSample)(IMFSample* sample) override { return DoOnSample(sample); }
};
#endif
@@ -141,7 +166,7 @@ class MockVideoCaptureClient : public VideoCaptureDevice::Client {
int frame_feedback_id) override {
ASSERT_GT(length, 0);
ASSERT_TRUE(data);
- main_thread_->PostTask(FROM_HERE, base::Bind(frame_cb_, format));
+ main_thread_->PostTask(FROM_HERE, base::BindOnce(frame_cb_, format));
}
// Trampoline methods to workaround GMOCK problems with std::unique_ptr<>.
@@ -226,7 +251,19 @@ class MockImageCaptureClient
} // namespace
-class VideoCaptureDeviceTest : public testing::TestWithParam<gfx::Size> {
+class VideoCaptureDeviceTest
+ : public testing::TestWithParam<
+ std::tuple<gfx::Size, VideoCaptureImplementationTweak>> {
+ public:
+#if defined(OS_WIN)
+ scoped_refptr<IMFCaptureEngineOnSampleCallback> CreateMockPhotoCallback(
+ MockMFPhotoCallback* mock_photo_callback,
+ VideoCaptureDevice::TakePhotoCallback callback,
+ VideoCaptureFormat format) {
+ return scoped_refptr<IMFCaptureEngineOnSampleCallback>(mock_photo_callback);
+ }
+#endif
+
protected:
typedef VideoCaptureDevice::Client Client;
@@ -243,11 +280,12 @@ class VideoCaptureDeviceTest : public testing::TestWithParam<gfx::Size> {
video_capture_device_factory_(VideoCaptureDeviceFactory::CreateFactory(
base::ThreadTaskRunnerHandle::Get(),
#if defined(OS_CHROMEOS)
- local_gpu_memory_buffer_manager_.get()
+ local_gpu_memory_buffer_manager_.get(),
#else
- nullptr
+ nullptr,
#endif
- )) {
+ base::BindRepeating(
+ [](media::mojom::JpegDecodeAcceleratorRequest) {}))) {
}
void SetUp() override {
@@ -259,6 +297,10 @@ class VideoCaptureDeviceTest : public testing::TestWithParam<gfx::Size> {
static_cast<VideoCaptureDeviceFactoryAndroid*>(
video_capture_device_factory_.get())
->ConfigureForTesting();
+#elif defined(OS_WIN)
+ static_cast<VideoCaptureDeviceFactoryWin*>(
+ video_capture_device_factory_.get())
+ ->set_use_media_foundation_for_testing(UseWinMediaFoundation());
#endif
EXPECT_CALL(*video_capture_client_, DoReserveOutputBuffer()).Times(0);
EXPECT_CALL(*video_capture_client_, DoOnIncomingCapturedBuffer()).Times(0);
@@ -266,6 +308,12 @@ class VideoCaptureDeviceTest : public testing::TestWithParam<gfx::Size> {
.Times(0);
}
+#if defined(OS_WIN)
+ bool UseWinMediaFoundation() {
+ return std::get<1>(GetParam()) == WIN_MEDIA_FOUNDATION;
+ }
+#endif
+
void ResetWithNewClient() {
video_capture_client_.reset(new MockVideoCaptureClient(base::Bind(
&VideoCaptureDeviceTest::OnFrameCaptured, base::Unretained(this))));
@@ -288,7 +336,12 @@ class VideoCaptureDeviceTest : public testing::TestWithParam<gfx::Size> {
#if defined(OS_ANDROID)
for (const auto& descriptor : *device_descriptors_) {
- if (IsDeviceUsableForTesting(descriptor)) {
+ // Android deprecated/legacy devices capture on a single thread, which is
+ // occupied by the tests, so nothing gets actually delivered.
+ // TODO(mcasas): use those devices' test mode to deliver frames in a
+ // background thread, https://crbug.com/626857
+ if (!VideoCaptureDeviceFactoryAndroid::IsLegacyOrDeprecatedDevice(
+ descriptor.device_id)) {
DLOG(INFO) << "Using camera " << descriptor.GetNameAndModel();
return std::make_unique<VideoCaptureDeviceDescriptor>(descriptor);
}
@@ -380,10 +433,10 @@ class VideoCaptureDeviceTest : public testing::TestWithParam<gfx::Size> {
#define MAYBE_OpenInvalidDevice OpenInvalidDevice
#endif
// Tries to allocate an invalid device and verifies it doesn't work.
-TEST_F(VideoCaptureDeviceTest, MAYBE_OpenInvalidDevice) {
+WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_OpenInvalidDevice) {
VideoCaptureDeviceDescriptor invalid_descriptor;
invalid_descriptor.device_id = "jibberish";
- invalid_descriptor.display_name = "jibberish";
+ invalid_descriptor.set_display_name("jibberish");
#if defined(OS_WIN)
invalid_descriptor.capture_api =
VideoCaptureDeviceFactoryWin::PlatformSupportsMediaFoundation()
@@ -411,13 +464,20 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_OpenInvalidDevice) {
#endif
}
+// See crbug.com/805411.
+TEST(VideoCaptureDeviceDescriptor, RemoveTrailingWhitespaceFromDisplayName) {
+ VideoCaptureDeviceDescriptor descriptor;
+ descriptor.set_display_name("My WebCam\n");
+ EXPECT_EQ(descriptor.display_name(), "My WebCam");
+}
+
// Allocates the first enumerated device, and expects a frame.
-TEST_P(VideoCaptureDeviceTest, CaptureWithSize) {
+WRAPPED_TEST_P(VideoCaptureDeviceTest, CaptureWithSize) {
const auto descriptor = FindUsableDeviceDescriptor();
if (!descriptor)
return;
- const gfx::Size& size = GetParam();
+ const gfx::Size& size = std::get<0>(GetParam());
if (!IsCaptureSizeSupported(*descriptor, size))
return;
const int width = size.width();
@@ -447,14 +507,22 @@ TEST_P(VideoCaptureDeviceTest, CaptureWithSize) {
}
const gfx::Size kCaptureSizes[] = {gfx::Size(640, 480), gfx::Size(1280, 720)};
+const VideoCaptureImplementationTweak kCaptureImplementationTweaks[] = {
+ NONE,
+#if defined(OS_WIN)
+ WIN_MEDIA_FOUNDATION
+#endif
+};
-INSTANTIATE_TEST_CASE_P(VideoCaptureDeviceTests,
- VideoCaptureDeviceTest,
- testing::ValuesIn(kCaptureSizes));
+INSTANTIATE_TEST_CASE_P(
+ VideoCaptureDeviceTests,
+ VideoCaptureDeviceTest,
+ testing::Combine(testing::ValuesIn(kCaptureSizes),
+ testing::ValuesIn(kCaptureImplementationTweaks)));
// Allocates a device with an uncommon resolution and verifies frames are
// captured in a close, much more typical one.
-TEST_F(VideoCaptureDeviceTest, MAYBE_AllocateBadSize) {
+WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_AllocateBadSize) {
const auto descriptor = FindUsableDeviceDescriptor();
if (!descriptor)
return;
@@ -481,7 +549,7 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_AllocateBadSize) {
}
// Cause hangs on Windows, Linux. Fails Android. https://crbug.com/417824
-TEST_F(VideoCaptureDeviceTest, DISABLED_ReAllocateCamera) {
+WRAPPED_TEST_P(VideoCaptureDeviceTest, DISABLED_ReAllocateCamera) {
const auto descriptor = FindUsableDeviceDescriptor();
if (!descriptor)
return;
@@ -525,7 +593,7 @@ TEST_F(VideoCaptureDeviceTest, DISABLED_ReAllocateCamera) {
}
// Starts the camera in 720p to try and capture MJPEG format.
-TEST_F(VideoCaptureDeviceTest, MAYBE_CaptureMjpeg) {
+WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_CaptureMjpeg) {
std::unique_ptr<VideoCaptureDeviceDescriptor> device_descriptor =
GetFirstDeviceDescriptorSupportingPixelFormat(PIXEL_FORMAT_MJPEG);
if (!device_descriptor) {
@@ -562,7 +630,7 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_CaptureMjpeg) {
device->StopAndDeAllocate();
}
-TEST_F(VideoCaptureDeviceTest, NoCameraSupportsPixelFormatMax) {
+WRAPPED_TEST_P(VideoCaptureDeviceTest, NoCameraSupportsPixelFormatMax) {
// Use PIXEL_FORMAT_MAX to iterate all device names for testing
// GetDeviceSupportedFormats().
std::unique_ptr<VideoCaptureDeviceDescriptor> device_descriptor =
@@ -574,7 +642,7 @@ TEST_F(VideoCaptureDeviceTest, NoCameraSupportsPixelFormatMax) {
// Starts the camera and verifies that a photo can be taken. The correctness of
// the photo is enforced by MockImageCaptureClient.
-TEST_F(VideoCaptureDeviceTest, MAYBE_TakePhoto) {
+WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_TakePhoto) {
const auto descriptor = FindUsableDeviceDescriptor();
if (!descriptor)
return;
@@ -623,7 +691,7 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_TakePhoto) {
}
// Starts the camera and verifies that the photo capabilities can be retrieved.
-TEST_F(VideoCaptureDeviceTest, MAYBE_GetPhotoState) {
+WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_GetPhotoState) {
const auto descriptor = FindUsableDeviceDescriptor();
if (!descriptor)
return;
@@ -674,4 +742,64 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_GetPhotoState) {
device->StopAndDeAllocate();
}
+#if defined(OS_WIN)
+// Verifies that the photo callback is correctly released by MediaFoundation
+WRAPPED_TEST_P(VideoCaptureDeviceTest, CheckPhotoCallbackRelease) {
+ if (!UseWinMediaFoundation())
+ return;
+
+ std::unique_ptr<VideoCaptureDeviceDescriptor> descriptor =
+ GetFirstDeviceDescriptorSupportingPixelFormat(PIXEL_FORMAT_MJPEG);
+ if (!descriptor) {
+ DVLOG(1) << "No usable media foundation device descriptor. Exiting test.";
+ return;
+ }
+
+ EXPECT_CALL(*video_capture_client_, OnError(_, _)).Times(0);
+ EXPECT_CALL(*video_capture_client_, OnStarted());
+
+ std::unique_ptr<VideoCaptureDevice> device(
+ video_capture_device_factory_->CreateDevice(*descriptor));
+ ASSERT_TRUE(device);
+
+ VideoCaptureParams capture_params;
+ capture_params.requested_format.frame_size.SetSize(320, 240);
+ capture_params.requested_format.frame_rate = 30;
+ capture_params.requested_format.pixel_format = PIXEL_FORMAT_MJPEG;
+ device->AllocateAndStart(capture_params, std::move(video_capture_client_));
+
+ if (!static_cast<VideoCaptureDeviceMFWin*>(device.get())
+ ->get_use_photo_stream_to_take_photo_for_testing()) {
+ DVLOG(1) << "The device is not using the MediaFoundation photo callback. "
+ "Exiting test.";
+ device->StopAndDeAllocate();
+ return;
+ }
+
+ MockMFPhotoCallback* callback = new MockMFPhotoCallback();
+ EXPECT_CALL(*callback, DoQueryInterface(_, _)).WillRepeatedly(Return(S_OK));
+ EXPECT_CALL(*callback, DoAddRef()).WillOnce(Return(1U));
+ EXPECT_CALL(*callback, DoRelease()).WillOnce(Return(1U));
+ EXPECT_CALL(*callback, DoOnSample(_)).WillOnce(Return(S_OK));
+ static_cast<VideoCaptureDeviceMFWin*>(device.get())
+ ->set_create_mf_photo_callback_for_testing(base::BindRepeating(
+ &VideoCaptureDeviceTest::CreateMockPhotoCallback,
+ base::Unretained(this), base::Unretained(callback)));
+
+ VideoCaptureDevice::TakePhotoCallback scoped_callback = base::BindOnce(
+ &MockImageCaptureClient::DoOnPhotoTaken, image_capture_client_);
+
+ base::RunLoop run_loop;
+ base::RepeatingClosure quit_closure =
+ BindToCurrentLoop(run_loop.QuitClosure());
+ EXPECT_CALL(*image_capture_client_.get(), OnCorrectPhotoTaken())
+ .WillOnce(RunClosure(quit_closure));
+
+ device->TakePhoto(std::move(scoped_callback));
+ run_loop.Run();
+
+ device->StopAndDeAllocate();
+}
+#endif
+
}; // namespace media
diff --git a/chromium/media/capture/video/win/capability_list_win.h b/chromium/media/capture/video/win/capability_list_win.h
index 678ad432fc1..f873251d6a6 100644
--- a/chromium/media/capture/video/win/capability_list_win.h
+++ b/chromium/media/capture/video/win/capability_list_win.h
@@ -17,22 +17,38 @@
namespace media {
struct CapabilityWin {
- CapabilityWin(int index, const VideoCaptureFormat& format)
- : stream_index(index), supported_format(format), info_header() {}
+ CapabilityWin(int media_type_index, const VideoCaptureFormat& format)
+ : media_type_index(media_type_index),
+ supported_format(format),
+ info_header(),
+ stream_index(0) {}
// Used by VideoCaptureDeviceWin.
- CapabilityWin(int index,
+ CapabilityWin(int media_type_index,
const VideoCaptureFormat& format,
const BITMAPINFOHEADER& info_header)
- : stream_index(index),
+ : media_type_index(media_type_index),
supported_format(format),
- info_header(info_header) {}
+ info_header(info_header),
+ stream_index(0) {}
- const int stream_index;
+ // Used by VideoCaptureDeviceMFWin.
+ CapabilityWin(int media_type_index,
+ const VideoCaptureFormat& format,
+ int stream_index)
+ : media_type_index(media_type_index),
+ supported_format(format),
+ info_header(),
+ stream_index(stream_index) {}
+
+ const int media_type_index;
const VideoCaptureFormat supported_format;
// |info_header| is only valid if DirectShow is used.
const BITMAPINFOHEADER info_header;
+
+ // |stream_index| is only valid if MediaFoundation is used.
+ const int stream_index;
};
typedef std::list<CapabilityWin> CapabilityList;
diff --git a/chromium/media/capture/video/win/metrics.cc b/chromium/media/capture/video/win/metrics.cc
new file mode 100644
index 00000000000..f128ee18e0b
--- /dev/null
+++ b/chromium/media/capture/video/win/metrics.cc
@@ -0,0 +1,82 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/win/metrics.h"
+
+#include "base/metrics/histogram_functions.h"
+#include "base/metrics/histogram_macros.h"
+
+namespace media {
+
+namespace {
+std::string VideoCaptureWinBackendEnumToString(
+ VideoCaptureWinBackend backend_type) {
+ switch (backend_type) {
+ case VideoCaptureWinBackend::kDirectShow:
+ return "DirectShow";
+ case VideoCaptureWinBackend::kMediaFoundation:
+ return "MediaFoundation";
+ default:
+ // The default case is only needed to avoid a compiler warning.
+ NOTREACHED();
+ return "Unknown";
+ }
+}
+
+} // anonymous namespace
+
+bool IsHighResolution(const VideoCaptureFormat& format) {
+ return format.frame_size.width() > 1920;
+}
+
+void LogVideoCaptureWinBackendUsed(VideoCaptureWinBackendUsed value) {
+ base::UmaHistogramEnumeration("Media.VideoCapture.Windows.BackendUsed", value,
+ VideoCaptureWinBackendUsed::kCount);
+}
+
+void LogWindowsImageCaptureOutcome(VideoCaptureWinBackend backend_type,
+ ImageCaptureOutcome value,
+ bool is_high_res) {
+ static const std::string kHistogramPrefix(
+ "Media.VideoCapture.Windows.ImageCaptureOutcome.");
+ static const std::string kAnyResSuffix("AnyRes");
+ static const std::string kHighResSuffix("HighRes");
+ const std::string backend_string =
+ VideoCaptureWinBackendEnumToString(backend_type);
+ base::UmaHistogramEnumeration(
+ kHistogramPrefix + backend_string + kAnyResSuffix, value,
+ ImageCaptureOutcome::kCount);
+ if (is_high_res) {
+ base::UmaHistogramEnumeration(
+ kHistogramPrefix + backend_string + kHighResSuffix, value,
+ ImageCaptureOutcome::kCount);
+ }
+}
+
+void LogNumberOfRetriesNeededToWorkAroundMFInvalidRequest(
+ MediaFoundationFunctionRequiringRetry function,
+ int retry_count) {
+ switch (function) {
+ case MediaFoundationFunctionRequiringRetry::kGetDeviceStreamCount:
+ UMA_HISTOGRAM_COUNTS_1000(
+ "Media.VideoCapture.Windows."
+ "NumberOfRetriesNeededForMFGetDeviceStreamCount",
+ retry_count);
+ break;
+ case MediaFoundationFunctionRequiringRetry::kGetDeviceStreamCategory:
+ UMA_HISTOGRAM_COUNTS_1000(
+ "Media.VideoCapture.Windows."
+ "NumberOfRetriesNeededForMFGetDeviceStreamCategory",
+ retry_count);
+ break;
+ case MediaFoundationFunctionRequiringRetry::kGetAvailableDeviceMediaType:
+ UMA_HISTOGRAM_COUNTS_1000(
+ "Media.VideoCapture.Windows."
+ "NumberOfRetriesNeededForMFGetAvailableDeviceMediaType",
+ retry_count);
+ break;
+ }
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/win/metrics.h b/chromium/media/capture/video/win/metrics.h
new file mode 100644
index 00000000000..5b3163523f3
--- /dev/null
+++ b/chromium/media/capture/video/win/metrics.h
@@ -0,0 +1,49 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_WIN_METRICS_H_
+#define MEDIA_CAPTURE_VIDEO_WIN_METRICS_H_
+
+#include "media/capture/video_capture_types.h"
+
+namespace media {
+
+enum class VideoCaptureWinBackend { kDirectShow, kMediaFoundation };
+
+// These values are persisted to logs.
+enum class VideoCaptureWinBackendUsed : int {
+ kUsingDirectShowAsDefault = 0,
+ kUsingMediaFoundationAsDefault = 1,
+ kUsingDirectShowAsFallback = 2,
+ kCount
+};
+
+// These values are persisted to logs.
+enum class ImageCaptureOutcome : int {
+ kSucceededUsingVideoStream = 0,
+ kSucceededUsingPhotoStream = 1,
+ kFailedUsingVideoStream = 2,
+ kFailedUsingPhotoStream = 3,
+ kCount
+};
+
+enum class MediaFoundationFunctionRequiringRetry {
+ kGetDeviceStreamCount,
+ kGetDeviceStreamCategory,
+ kGetAvailableDeviceMediaType
+};
+
+bool IsHighResolution(const VideoCaptureFormat& format);
+
+void LogVideoCaptureWinBackendUsed(VideoCaptureWinBackendUsed value);
+void LogWindowsImageCaptureOutcome(VideoCaptureWinBackend backend_type,
+ ImageCaptureOutcome value,
+ bool is_high_res);
+void LogNumberOfRetriesNeededToWorkAroundMFInvalidRequest(
+ MediaFoundationFunctionRequiringRetry function,
+ int retry_count);
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_WIN_METRICS_H_
diff --git a/chromium/media/capture/video/win/sink_filter_win.cc b/chromium/media/capture/video/win/sink_filter_win.cc
index 0fb9184a68f..3fd2309d067 100644
--- a/chromium/media/capture/video/win/sink_filter_win.cc
+++ b/chromium/media/capture/video/win/sink_filter_win.cc
@@ -9,36 +9,6 @@
namespace media {
-// Define GUID for I420. This is the color format we would like to support but
-// it is not defined in the DirectShow SDK.
-// http://msdn.microsoft.com/en-us/library/dd757532.aspx
-// 30323449-0000-0010-8000-00AA00389B71.
-GUID kMediaSubTypeI420 = {0x30323449,
- 0x0000,
- 0x0010,
- {0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71}};
-
-// UYVY synonym with BT709 color components, used in HD video. This variation
-// might appear in non-USB capture cards and it's implemented as a normal YUV
-// pixel format with the characters HDYC encoded in the first array word.
-GUID kMediaSubTypeHDYC = {0x43594448,
- 0x0000,
- 0x0010,
- {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
-
-GUID kMediaSubTypeZ16 = {0x2036315a,
- 0x0000,
- 0x0010,
- {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
-GUID kMediaSubTypeINVZ = {0x5a564e49,
- 0x2d90,
- 0x4a58,
- {0x92, 0x0b, 0x77, 0x3f, 0x1f, 0x2c, 0x55, 0x6b}};
-GUID kMediaSubTypeY16 = {0x20363159,
- 0x0000,
- 0x0010,
- {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
-
SinkFilterObserver::~SinkFilterObserver() {
}
diff --git a/chromium/media/capture/video/win/sink_filter_win.h b/chromium/media/capture/video/win/sink_filter_win.h
index 2c38b6b4e0b..e99ed213303 100644
--- a/chromium/media/capture/video/win/sink_filter_win.h
+++ b/chromium/media/capture/video/win/sink_filter_win.h
@@ -24,17 +24,38 @@ namespace media {
// it is not defined in the DirectShow SDK.
// http://msdn.microsoft.com/en-us/library/dd757532.aspx
// 30323449-0000-0010-8000-00AA00389B71.
-extern GUID kMediaSubTypeI420;
+const GUID kMediaSubTypeI420 = {
+ 0x30323449,
+ 0x0000,
+ 0x0010,
+ {0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71}};
// UYVY synonym with BT709 color components, used in HD video. This variation
// might appear in non-USB capture cards and it's implemented as a normal YUV
// pixel format with the characters HDYC encoded in the first array word.
-extern GUID kMediaSubTypeHDYC;
+const GUID kMediaSubTypeHDYC = {
+ 0x43594448,
+ 0x0000,
+ 0x0010,
+ {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
+;
// 16-bit grey-scale single plane formats provided by some depth cameras.
-extern GUID kMediaSubTypeZ16;
-extern GUID kMediaSubTypeINVZ;
-extern GUID kMediaSubTypeY16;
+const GUID kMediaSubTypeZ16 = {
+ 0x2036315a,
+ 0x0000,
+ 0x0010,
+ {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
+const GUID kMediaSubTypeINVZ = {
+ 0x5a564e49,
+ 0x2d90,
+ 0x4a58,
+ {0x92, 0x0b, 0x77, 0x3f, 0x1f, 0x2c, 0x55, 0x6b}};
+const GUID kMediaSubTypeY16 = {
+ 0x20363159,
+ 0x0000,
+ 0x0010,
+ {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
class SinkInputPin;
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.cc b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
index fec2ed9b27f..ef3749d657c 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
@@ -11,6 +11,7 @@
#include <wrl/client.h>
#include "base/command_line.h"
+#include "base/feature_list.h"
#include "base/macros.h"
#include "base/metrics/histogram_macros.h"
#include "base/single_thread_task_runner.h"
@@ -20,14 +21,15 @@
#include "base/win/scoped_variant.h"
#include "media/base/media_switches.h"
#include "media/base/win/mf_initializer.h"
+#include "media/capture/video/win/metrics.h"
#include "media/capture/video/win/video_capture_device_mf_win.h"
#include "media/capture/video/win/video_capture_device_win.h"
using Descriptor = media::VideoCaptureDeviceDescriptor;
using Descriptors = media::VideoCaptureDeviceDescriptors;
-using Microsoft::WRL::ComPtr;
using base::win::ScopedCoMem;
using base::win::ScopedVariant;
+using Microsoft::WRL::ComPtr;
namespace media {
@@ -69,10 +71,9 @@ static bool IsDeviceBlacklistedForQueryingDetailedFrameRates(
static bool LoadMediaFoundationDlls() {
static const wchar_t* const kMfDLLs[] = {
- L"%WINDIR%\\system32\\mf.dll",
- L"%WINDIR%\\system32\\mfplat.dll",
+ L"%WINDIR%\\system32\\mf.dll", L"%WINDIR%\\system32\\mfplat.dll",
L"%WINDIR%\\system32\\mfreadwrite.dll",
- };
+ L"%WINDIR%\\system32\\MFCaptureEngine.dll"};
for (const wchar_t* kMfDLL : kMfDLLs) {
wchar_t path[MAX_PATH] = {0};
@@ -86,8 +87,13 @@ static bool LoadMediaFoundationDlls() {
static bool PrepareVideoCaptureAttributesMediaFoundation(
IMFAttributes** attributes,
int count) {
- if (!InitializeMediaFoundation())
+ // Once https://bugs.chromium.org/p/chromium/issues/detail?id=791615 is fixed,
+ // we must make sure that this method succeeds in capture_unittests context
+ // when MediaFoundation is enabled.
+ if (!VideoCaptureDeviceFactoryWin::PlatformSupportsMediaFoundation() ||
+ !InitializeMediaFoundation()) {
return false;
+ }
if (FAILED(MFCreateAttributes(attributes, count)))
return false;
@@ -250,16 +256,16 @@ static void GetDeviceDescriptorsMediaFoundation(
static void GetDeviceSupportedFormatsDirectShow(const Descriptor& descriptor,
VideoCaptureFormats* formats) {
DVLOG(1) << "GetDeviceSupportedFormatsDirectShow for "
- << descriptor.display_name;
+ << descriptor.display_name();
bool query_detailed_frame_rates =
!IsDeviceBlacklistedForQueryingDetailedFrameRates(
- descriptor.display_name);
+ descriptor.display_name());
CapabilityList capability_list;
VideoCaptureDeviceWin::GetDeviceCapabilityList(
descriptor.device_id, query_detailed_frame_rates, &capability_list);
for (const auto& entry : capability_list) {
formats->emplace_back(entry.supported_format);
- DVLOG(1) << descriptor.display_name << " "
+ DVLOG(1) << descriptor.display_name() << " "
<< VideoCaptureFormat::ToString(entry.supported_format);
}
}
@@ -268,7 +274,7 @@ static void GetDeviceSupportedFormatsMediaFoundation(
const Descriptor& descriptor,
VideoCaptureFormats* formats) {
DVLOG(1) << "GetDeviceSupportedFormatsMediaFoundation for "
- << descriptor.display_name;
+ << descriptor.display_name();
ComPtr<IMFMediaSource> source;
if (!CreateVideoCaptureDeviceMediaFoundation(descriptor.device_id.c_str(),
source.GetAddressOf())) {
@@ -286,8 +292,9 @@ static void GetDeviceSupportedFormatsMediaFoundation(
DWORD stream_index = 0;
ComPtr<IMFMediaType> type;
- while (SUCCEEDED(reader->GetNativeMediaType(kFirstVideoStream, stream_index,
- type.GetAddressOf()))) {
+ while (SUCCEEDED(hr = reader->GetNativeMediaType(
+ static_cast<DWORD>(MF_SOURCE_READER_FIRST_VIDEO_STREAM),
+ stream_index, type.GetAddressOf()))) {
UINT32 width, height;
hr = MFGetAttributeSize(type.Get(), MF_MT_FRAME_SIZE, &width, &height);
if (FAILED(hr)) {
@@ -315,15 +322,15 @@ static void GetDeviceSupportedFormatsMediaFoundation(
DLOG(ERROR) << "GetGUID failed: " << logging::SystemErrorCodeToString(hr);
return;
}
- VideoCaptureDeviceMFWin::FormatFromGuid(type_guid,
- &capture_format.pixel_format);
+ VideoCaptureDeviceMFWin::GetPixelFormatFromMFSourceMediaSubtype(
+ type_guid, &capture_format.pixel_format);
type.Reset();
++stream_index;
if (capture_format.pixel_format == PIXEL_FORMAT_UNKNOWN)
continue;
formats->push_back(capture_format);
- DVLOG(1) << descriptor.display_name << " "
+ DVLOG(1) << descriptor.display_name() << " "
<< VideoCaptureFormat::ToString(capture_format);
}
}
@@ -340,8 +347,20 @@ bool VideoCaptureDeviceFactoryWin::PlatformSupportsMediaFoundation() {
}
VideoCaptureDeviceFactoryWin::VideoCaptureDeviceFactoryWin()
- : use_media_foundation_(base::CommandLine::ForCurrentProcess()->HasSwitch(
- switches::kForceMediaFoundationVideoCapture)) {}
+ : use_media_foundation_(
+ base::FeatureList::IsEnabled(media::kMediaFoundationVideoCapture)) {
+ if (!PlatformSupportsMediaFoundation()) {
+ use_media_foundation_ = false;
+ LogVideoCaptureWinBackendUsed(
+ VideoCaptureWinBackendUsed::kUsingDirectShowAsFallback);
+ } else if (use_media_foundation_) {
+ LogVideoCaptureWinBackendUsed(
+ VideoCaptureWinBackendUsed::kUsingMediaFoundationAsDefault);
+ } else {
+ LogVideoCaptureWinBackendUsed(
+ VideoCaptureWinBackendUsed::kUsingDirectShowAsDefault);
+ }
+}
std::unique_ptr<VideoCaptureDevice> VideoCaptureDeviceFactoryWin::CreateDevice(
const Descriptor& device_descriptor) {
@@ -349,19 +368,19 @@ std::unique_ptr<VideoCaptureDevice> VideoCaptureDeviceFactoryWin::CreateDevice(
std::unique_ptr<VideoCaptureDevice> device;
if (device_descriptor.capture_api == VideoCaptureApi::WIN_MEDIA_FOUNDATION) {
DCHECK(PlatformSupportsMediaFoundation());
- device.reset(new VideoCaptureDeviceMFWin(device_descriptor));
- DVLOG(1) << " MediaFoundation Device: " << device_descriptor.display_name;
ComPtr<IMFMediaSource> source;
if (!CreateVideoCaptureDeviceMediaFoundation(
device_descriptor.device_id.c_str(), source.GetAddressOf())) {
return std::unique_ptr<VideoCaptureDevice>();
}
- if (!static_cast<VideoCaptureDeviceMFWin*>(device.get())->Init(source))
+ device.reset(new VideoCaptureDeviceMFWin(source));
+ DVLOG(1) << " MediaFoundation Device: " << device_descriptor.display_name();
+ if (!static_cast<VideoCaptureDeviceMFWin*>(device.get())->Init())
device.reset();
} else if (device_descriptor.capture_api ==
VideoCaptureApi::WIN_DIRECT_SHOW) {
device.reset(new VideoCaptureDeviceWin(device_descriptor));
- DVLOG(1) << " DirectShow Device: " << device_descriptor.display_name;
+ DVLOG(1) << " DirectShow Device: " << device_descriptor.display_name();
if (!static_cast<VideoCaptureDeviceWin*>(device.get())->Init())
device.reset();
} else {
@@ -393,7 +412,8 @@ void VideoCaptureDeviceFactoryWin::GetSupportedFormats(
VideoCaptureDeviceFactory*
VideoCaptureDeviceFactory::CreateVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager) {
+ gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager,
+ MojoJpegDecodeAcceleratorFactoryCB jda_factory) {
return new VideoCaptureDeviceFactoryWin();
}
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.h b/chromium/media/capture/video/win/video_capture_device_factory_win.h
index 9afcc644000..a7d367838a4 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.h
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.h
@@ -30,10 +30,12 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
const VideoCaptureDeviceDescriptor& device_descriptor,
VideoCaptureFormats* supported_formats) override;
+ void set_use_media_foundation_for_testing(bool use) {
+ use_media_foundation_ = use;
+ }
+
private:
- // Media Foundation is available in Win7 and later, use it if explicitly
- // forced via flag, else use DirectShow.
- const bool use_media_foundation_;
+ bool use_media_foundation_;
DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactoryWin);
};
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win.cc b/chromium/media/capture/video/win/video_capture_device_mf_win.cc
index c9ab85a000c..9ddff6fa11c 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win.cc
@@ -7,7 +7,9 @@
#include <mfapi.h>
#include <mferror.h>
#include <stddef.h>
+#include <wincodec.h>
+#include <thread>
#include <utility>
#include "base/location.h"
@@ -17,15 +19,115 @@
#include "base/synchronization/waitable_event.h"
#include "base/win/scoped_co_mem.h"
#include "base/win/windows_version.h"
+#include "media/capture/mojo/image_capture_types.h"
+#include "media/capture/video/blob_utils.h"
#include "media/capture/video/win/capability_list_win.h"
#include "media/capture/video/win/sink_filter_win.h"
#include "media/capture/video/win/video_capture_device_utils_win.h"
+using base::Location;
using base::win::ScopedCoMem;
+using Microsoft::WRL::ComPtr;
namespace media {
-static bool GetFrameSize(IMFMediaType* type, gfx::Size* frame_size) {
+namespace {
+
+class MFPhotoCallback final
+ : public base::RefCountedThreadSafe<MFPhotoCallback>,
+ public IMFCaptureEngineOnSampleCallback {
+ public:
+ MFPhotoCallback(VideoCaptureDevice::TakePhotoCallback callback,
+ VideoCaptureFormat format)
+ : callback_(std::move(callback)), format_(format) {}
+
+ STDMETHOD(QueryInterface)(REFIID riid, void** object) override {
+ if (riid == IID_IUnknown || riid == IID_IMFCaptureEngineOnSampleCallback) {
+ AddRef();
+ *object = static_cast<IMFCaptureEngineOnSampleCallback*>(this);
+ return S_OK;
+ }
+ return E_NOINTERFACE;
+ }
+
+ STDMETHOD_(ULONG, AddRef)() override {
+ base::RefCountedThreadSafe<MFPhotoCallback>::AddRef();
+ return 1U;
+ }
+
+ STDMETHOD_(ULONG, Release)() override {
+ base::RefCountedThreadSafe<MFPhotoCallback>::Release();
+ return 1U;
+ }
+
+ STDMETHOD(OnSample)(IMFSample* sample) override {
+ if (!sample)
+ return S_OK;
+
+ DWORD buffer_count = 0;
+ sample->GetBufferCount(&buffer_count);
+
+ for (DWORD i = 0; i < buffer_count; ++i) {
+ ComPtr<IMFMediaBuffer> buffer;
+ sample->GetBufferByIndex(i, buffer.GetAddressOf());
+ if (!buffer)
+ continue;
+
+ BYTE* data = nullptr;
+ DWORD max_length = 0;
+ DWORD length = 0;
+ buffer->Lock(&data, &max_length, &length);
+ mojom::BlobPtr blob = Blobify(data, length, format_);
+ buffer->Unlock();
+ if (blob) {
+ std::move(callback_).Run(std::move(blob));
+ LogWindowsImageCaptureOutcome(
+ VideoCaptureWinBackend::kMediaFoundation,
+ ImageCaptureOutcome::kSucceededUsingPhotoStream,
+ IsHighResolution(format_));
+
+ // What is it supposed to mean if there is more than one buffer sent to
+ // us as a response to requesting a single still image? Are we supposed
+ // to somehow concatenate the buffers? Or is it safe to ignore extra
+ // buffers? For now, we ignore extra buffers.
+ break;
+ }
+ }
+ return S_OK;
+ }
+
+ private:
+ friend class base::RefCountedThreadSafe<MFPhotoCallback>;
+ ~MFPhotoCallback() {
+ if (callback_) {
+ LogWindowsImageCaptureOutcome(
+ VideoCaptureWinBackend::kMediaFoundation,
+ ImageCaptureOutcome::kFailedUsingPhotoStream,
+ IsHighResolution(format_));
+ }
+ }
+
+ VideoCaptureDevice::TakePhotoCallback callback_;
+ const VideoCaptureFormat format_;
+
+ DISALLOW_COPY_AND_ASSIGN(MFPhotoCallback);
+};
+
+scoped_refptr<IMFCaptureEngineOnSampleCallback> CreateMFPhotoCallback(
+ VideoCaptureDevice::TakePhotoCallback callback,
+ VideoCaptureFormat format) {
+ return scoped_refptr<IMFCaptureEngineOnSampleCallback>(
+ new MFPhotoCallback(std::move(callback), format));
+}
+} // namespace
+
+void LogError(const Location& from_here, HRESULT hr) {
+ DPLOG(ERROR) << from_here.ToString()
+ << " hr = " << logging::SystemErrorCodeToString(hr);
+}
+
+static bool GetFrameSizeFromMediaType(IMFMediaType* type,
+ gfx::Size* frame_size) {
UINT32 width32, height32;
if (FAILED(MFGetAttributeSize(type, MF_MT_FRAME_SIZE, &width32, &height32)))
return false;
@@ -33,7 +135,7 @@ static bool GetFrameSize(IMFMediaType* type, gfx::Size* frame_size) {
return true;
}
-static bool GetFrameRate(IMFMediaType* type, float* frame_rate) {
+static bool GetFrameRateFromMediaType(IMFMediaType* type, float* frame_rate) {
UINT32 numerator, denominator;
if (FAILED(MFGetAttributeRatio(type, MF_MT_FRAME_RATE, &numerator,
&denominator)) ||
@@ -44,73 +146,242 @@ static bool GetFrameRate(IMFMediaType* type, float* frame_rate) {
return true;
}
-static bool FillFormat(IMFMediaType* type, VideoCaptureFormat* format) {
- GUID type_guid;
- if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &type_guid)) ||
- !GetFrameSize(type, &format->frame_size) ||
- !GetFrameRate(type, &format->frame_rate) ||
- !VideoCaptureDeviceMFWin::FormatFromGuid(type_guid,
- &format->pixel_format)) {
+static bool GetFormatFromSourceMediaType(IMFMediaType* source_media_type,
+ bool photo,
+ VideoCaptureFormat* format) {
+ GUID major_type_guid;
+ if (FAILED(source_media_type->GetGUID(MF_MT_MAJOR_TYPE, &major_type_guid)) ||
+ (major_type_guid != MFMediaType_Image &&
+ (photo ||
+ !GetFrameRateFromMediaType(source_media_type, &format->frame_rate)))) {
+ return false;
+ }
+
+ GUID sub_type_guid;
+ if (FAILED(source_media_type->GetGUID(MF_MT_SUBTYPE, &sub_type_guid)) ||
+ !GetFrameSizeFromMediaType(source_media_type, &format->frame_size) ||
+ !VideoCaptureDeviceMFWin::GetPixelFormatFromMFSourceMediaSubtype(
+ sub_type_guid, &format->pixel_format)) {
return false;
}
return true;
}
-HRESULT FillCapabilities(IMFSourceReader* source,
- CapabilityList* capabilities) {
- DWORD stream_index = 0;
- Microsoft::WRL::ComPtr<IMFMediaType> type;
- HRESULT hr;
- while (SUCCEEDED(hr = source->GetNativeMediaType(
- kFirstVideoStream, stream_index, type.GetAddressOf()))) {
- VideoCaptureFormat format;
- if (FillFormat(type.Get(), &format))
- capabilities->emplace_back(stream_index, format);
- type.Reset();
- ++stream_index;
+static HRESULT CopyAttribute(IMFAttributes* source_attributes,
+ IMFAttributes* destination_attributes,
+ const GUID& key) {
+ PROPVARIANT var;
+ PropVariantInit(&var);
+ HRESULT hr = source_attributes->GetItem(key, &var);
+ if (FAILED(hr))
+ return hr;
+
+ hr = destination_attributes->SetItem(key, var);
+ PropVariantClear(&var);
+ return hr;
+}
+
+struct MediaFormatConfiguration {
+ GUID mf_source_media_subtype;
+ GUID mf_sink_media_subtype;
+ VideoPixelFormat pixel_format;
+};
+
+static bool GetMediaFormatConfigurationFromMFSourceMediaSubtype(
+ const GUID& mf_source_media_subtype,
+ MediaFormatConfiguration* media_format_configuration) {
+ static const MediaFormatConfiguration kMediaFormatConfigurationMap[] = {
+ // IMFCaptureEngine inevitably performs the video frame decoding itself.
+ // This means that the sink must always be set to an uncompressed video
+ // format.
+
+ // Since chromium uses I420 at the other end of the pipe, MF known video
+ // output formats are always set to I420.
+ {MFVideoFormat_I420, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+ {MFVideoFormat_YUY2, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+ {MFVideoFormat_UYVY, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+ {MFVideoFormat_RGB24, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+ {MFVideoFormat_RGB32, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+ {MFVideoFormat_ARGB32, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+ {MFVideoFormat_MJPG, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+ {MFVideoFormat_NV12, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+ {MFVideoFormat_YV12, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+
+ // Depth cameras use specific uncompressed video formats unknown to
+ // IMFCaptureEngine.
+ // Therefore, IMFCaptureEngine cannot perform any transcoding on these.
+ // So we ask IMFCaptureEngine to let the frame pass through, without
+ // transcoding.
+ {kMediaSubTypeY16, kMediaSubTypeY16, PIXEL_FORMAT_Y16},
+ {kMediaSubTypeZ16, kMediaSubTypeZ16, PIXEL_FORMAT_Y16},
+ {kMediaSubTypeINVZ, kMediaSubTypeINVZ, PIXEL_FORMAT_Y16},
+
+ // Photo type
+ {GUID_ContainerFormatJpeg, GUID_ContainerFormatJpeg, PIXEL_FORMAT_MJPEG}};
+
+ for (const auto& kMediaFormatConfiguration : kMediaFormatConfigurationMap) {
+ if (kMediaFormatConfiguration.mf_source_media_subtype ==
+ mf_source_media_subtype) {
+ *media_format_configuration = kMediaFormatConfiguration;
+ return true;
+ }
}
- if (capabilities->empty() && (SUCCEEDED(hr) || hr == MF_E_NO_MORE_TYPES))
- hr = HRESULT_FROM_WIN32(ERROR_EMPTY);
+ return false;
+}
- return (hr == MF_E_NO_MORE_TYPES) ? S_OK : hr;
+static HRESULT GetMFSinkMediaSubtype(IMFMediaType* source_media_type,
+ GUID* mf_sink_media_subtype) {
+ GUID source_subtype;
+ HRESULT hr = source_media_type->GetGUID(MF_MT_SUBTYPE, &source_subtype);
+ if (FAILED(hr))
+ return hr;
+ MediaFormatConfiguration media_format_configuration;
+ if (!GetMediaFormatConfigurationFromMFSourceMediaSubtype(
+ source_subtype, &media_format_configuration))
+ return E_FAIL;
+ *mf_sink_media_subtype = media_format_configuration.mf_sink_media_subtype;
+ return S_OK;
}
-class MFReaderCallback final
- : public base::RefCountedThreadSafe<MFReaderCallback>,
- public IMFSourceReaderCallback {
- public:
- MFReaderCallback(VideoCaptureDeviceMFWin* observer)
- : observer_(observer), wait_event_(NULL) {}
+static HRESULT ConvertToPhotoSinkMediaType(
+ IMFMediaType* source_media_type,
+ IMFMediaType* destination_media_type) {
+ HRESULT hr =
+ destination_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Image);
+ if (FAILED(hr))
+ return hr;
+
+ GUID mf_sink_media_subtype;
+ hr = GetMFSinkMediaSubtype(source_media_type, &mf_sink_media_subtype);
+ if (FAILED(hr))
+ return hr;
+
+ hr = destination_media_type->SetGUID(MF_MT_SUBTYPE, mf_sink_media_subtype);
+ if (FAILED(hr))
+ return hr;
+
+ return CopyAttribute(source_media_type, destination_media_type,
+ MF_MT_FRAME_SIZE);
+}
+
+static HRESULT ConvertToVideoSinkMediaType(IMFMediaType* source_media_type,
+ IMFMediaType* sink_media_type) {
+ HRESULT hr = sink_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+ if (FAILED(hr))
+ return hr;
+
+ GUID mf_sink_media_subtype;
+ hr = GetMFSinkMediaSubtype(source_media_type, &mf_sink_media_subtype);
+ if (FAILED(hr))
+ return hr;
+
+ hr = sink_media_type->SetGUID(MF_MT_SUBTYPE, mf_sink_media_subtype);
+ if (FAILED(hr))
+ return hr;
+
+ hr = CopyAttribute(source_media_type, sink_media_type, MF_MT_FRAME_SIZE);
+ if (FAILED(hr))
+ return hr;
+
+ hr = CopyAttribute(source_media_type, sink_media_type, MF_MT_FRAME_RATE);
+ if (FAILED(hr))
+ return hr;
+
+ hr = CopyAttribute(source_media_type, sink_media_type,
+ MF_MT_PIXEL_ASPECT_RATIO);
+ if (FAILED(hr))
+ return hr;
+
+ return CopyAttribute(source_media_type, sink_media_type,
+ MF_MT_INTERLACE_MODE);
+}
+
+static const CapabilityWin& GetBestMatchedPhotoCapability(
+ ComPtr<IMFMediaType> current_media_type,
+ gfx::Size requested_size,
+ const CapabilityList& capabilities) {
+ gfx::Size current_size;
+ GetFrameSizeFromMediaType(current_media_type.Get(), &current_size);
+
+ int requested_height = requested_size.height() > 0 ? requested_size.height()
+ : current_size.height();
+ int requested_width = requested_size.width() > 0 ? requested_size.width()
+ : current_size.width();
+
+ const CapabilityWin* best_match = &(*capabilities.begin());
+ for (const CapabilityWin& capability : capabilities) {
+ int height = capability.supported_format.frame_size.height();
+ int width = capability.supported_format.frame_size.width();
+ int best_height = best_match->supported_format.frame_size.height();
+ int best_width = best_match->supported_format.frame_size.width();
+
+ if (std::abs(height - requested_height) <= std::abs(height - best_height) &&
+ std::abs(width - requested_width) <= std::abs(width - best_width)) {
+ best_match = &capability;
+ }
+ }
+ return *best_match;
+}
+
+HRESULT CreateCaptureEngine(IMFCaptureEngine** engine) {
+ ComPtr<IMFCaptureEngineClassFactory> capture_engine_class_factory;
+ HRESULT hr = CoCreateInstance(
+ CLSID_MFCaptureEngineClassFactory, NULL, CLSCTX_INPROC_SERVER,
+ IID_PPV_ARGS(capture_engine_class_factory.GetAddressOf()));
+ if (FAILED(hr))
+ return hr;
- void SetSignalOnFlush(base::WaitableEvent* event) { wait_event_ = event; }
+ return capture_engine_class_factory->CreateInstance(CLSID_MFCaptureEngine,
+ IID_PPV_ARGS(engine));
+}
+
+class MFVideoCallback final
+ : public base::RefCountedThreadSafe<MFVideoCallback>,
+ public IMFCaptureEngineOnSampleCallback,
+ public IMFCaptureEngineOnEventCallback {
+ public:
+ MFVideoCallback(VideoCaptureDeviceMFWin* observer) : observer_(observer) {}
STDMETHOD(QueryInterface)(REFIID riid, void** object) override {
- if (riid != IID_IUnknown && riid != IID_IMFSourceReaderCallback)
- return E_NOINTERFACE;
- *object = static_cast<IMFSourceReaderCallback*>(this);
- AddRef();
- return S_OK;
+ HRESULT hr = E_NOINTERFACE;
+ if (riid == IID_IUnknown) {
+ *object = this;
+ hr = S_OK;
+ } else if (riid == IID_IMFCaptureEngineOnSampleCallback) {
+ *object = static_cast<IMFCaptureEngineOnSampleCallback*>(this);
+ hr = S_OK;
+ } else if (riid == IID_IMFCaptureEngineOnEventCallback) {
+ *object = static_cast<IMFCaptureEngineOnEventCallback*>(this);
+ hr = S_OK;
+ }
+ if (SUCCEEDED(hr))
+ AddRef();
+
+ return hr;
}
STDMETHOD_(ULONG, AddRef)() override {
- base::RefCountedThreadSafe<MFReaderCallback>::AddRef();
+ base::RefCountedThreadSafe<MFVideoCallback>::AddRef();
return 1U;
}
STDMETHOD_(ULONG, Release)() override {
- base::RefCountedThreadSafe<MFReaderCallback>::Release();
+ base::RefCountedThreadSafe<MFVideoCallback>::Release();
return 1U;
}
- STDMETHOD(OnReadSample)
- (HRESULT status,
- DWORD stream_index,
- DWORD stream_flags,
- LONGLONG raw_time_stamp,
- IMFSample* sample) override {
+ STDMETHOD(OnEvent)(IMFMediaEvent* media_event) override {
+ observer_->OnEvent(media_event);
+ return S_OK;
+ }
+
+ STDMETHOD(OnSample)(IMFSample* sample) override {
base::TimeTicks reference_time(base::TimeTicks::Now());
+
+ LONGLONG raw_time_stamp = 0;
+ sample->GetSampleTime(&raw_time_stamp);
base::TimeDelta timestamp =
base::TimeDelta::FromMicroseconds(raw_time_stamp / 10);
if (!sample) {
@@ -122,9 +393,9 @@ class MFReaderCallback final
sample->GetBufferCount(&count);
for (DWORD i = 0; i < count; ++i) {
- Microsoft::WRL::ComPtr<IMFMediaBuffer> buffer;
+ ComPtr<IMFMediaBuffer> buffer;
sample->GetBufferByIndex(i, buffer.GetAddressOf());
- if (buffer.Get()) {
+ if (buffer) {
DWORD length = 0, max_length = 0;
BYTE* data = NULL;
buffer->Lock(&data, &max_length, &length);
@@ -136,80 +407,199 @@ class MFReaderCallback final
return S_OK;
}
- STDMETHOD(OnFlush)(DWORD stream_index) override {
- if (wait_event_) {
- wait_event_->Signal();
- wait_event_ = NULL;
- }
- return S_OK;
- }
-
- STDMETHOD(OnEvent)(DWORD stream_index, IMFMediaEvent* event) override {
- NOTIMPLEMENTED();
- return S_OK;
- }
-
private:
- friend class base::RefCountedThreadSafe<MFReaderCallback>;
- ~MFReaderCallback() {}
-
+ friend class base::RefCountedThreadSafe<MFVideoCallback>;
+ ~MFVideoCallback() {}
VideoCaptureDeviceMFWin* observer_;
- base::WaitableEvent* wait_event_;
};
// static
-bool VideoCaptureDeviceMFWin::FormatFromGuid(const GUID& guid,
- VideoPixelFormat* format) {
- struct {
- const GUID& guid;
- const VideoPixelFormat format;
- } static const kFormatMap[] = {
- {MFVideoFormat_I420, PIXEL_FORMAT_I420},
- {MFVideoFormat_YUY2, PIXEL_FORMAT_YUY2},
- {MFVideoFormat_UYVY, PIXEL_FORMAT_UYVY},
- {MFVideoFormat_RGB24, PIXEL_FORMAT_RGB24},
- {MFVideoFormat_ARGB32, PIXEL_FORMAT_ARGB},
- {MFVideoFormat_MJPG, PIXEL_FORMAT_MJPEG},
- {MFVideoFormat_YV12, PIXEL_FORMAT_YV12},
- {kMediaSubTypeY16, PIXEL_FORMAT_Y16},
- {kMediaSubTypeZ16, PIXEL_FORMAT_Y16},
- {kMediaSubTypeINVZ, PIXEL_FORMAT_Y16},
- };
-
- for (const auto& kFormat : kFormatMap) {
- if (kFormat.guid == guid) {
- *format = kFormat.format;
- return true;
+bool VideoCaptureDeviceMFWin::GetPixelFormatFromMFSourceMediaSubtype(
+ const GUID& mf_source_media_subtype,
+ VideoPixelFormat* pixel_format) {
+ MediaFormatConfiguration media_format_configuration;
+ if (!GetMediaFormatConfigurationFromMFSourceMediaSubtype(
+ mf_source_media_subtype, &media_format_configuration))
+ return false;
+
+ *pixel_format = media_format_configuration.pixel_format;
+ return true;
+}
+
+HRESULT VideoCaptureDeviceMFWin::ExecuteHresultCallbackWithRetries(
+ base::RepeatingCallback<HRESULT()> callback,
+ MediaFoundationFunctionRequiringRetry which_function) {
+ // Retry callback execution on MF_E_INVALIDREQUEST.
+ // MF_E_INVALIDREQUEST is not documented in MediaFoundation documentation.
+ // It could mean that MediaFoundation or the underlying device can be in a
+ // state that reject these calls. Since MediaFoundation gives no intel about
+ // that state beginning and ending (i.e. via some kind of event), we retry the
+ // call until it succeed.
+ HRESULT hr;
+ int retry_count = 0;
+ do {
+ hr = callback.Run();
+ if (FAILED(hr))
+ base::PlatformThread::Sleep(
+ base::TimeDelta::FromMilliseconds(retry_delay_in_ms_));
+
+ // Give up after some amount of time
+ } while (hr == MF_E_INVALIDREQUEST && retry_count++ < max_retry_count_);
+ LogNumberOfRetriesNeededToWorkAroundMFInvalidRequest(which_function,
+ retry_count);
+
+ return hr;
+}
+
+HRESULT VideoCaptureDeviceMFWin::GetDeviceStreamCount(IMFCaptureSource* source,
+ DWORD* count) {
+ // Sometimes, GetDeviceStreamCount returns an
+ // undocumented MF_E_INVALIDREQUEST. Retrying solves the issue.
+ return ExecuteHresultCallbackWithRetries(
+ base::BindRepeating(
+ [](IMFCaptureSource* source, DWORD* count) {
+ return source->GetDeviceStreamCount(count);
+ },
+ base::Unretained(source), count),
+ MediaFoundationFunctionRequiringRetry::kGetDeviceStreamCount);
+}
+
+HRESULT VideoCaptureDeviceMFWin::GetDeviceStreamCategory(
+ IMFCaptureSource* source,
+ DWORD stream_index,
+ MF_CAPTURE_ENGINE_STREAM_CATEGORY* stream_category) {
+ // We believe that GetDeviceStreamCategory could be affected by the same
+ // behaviour of GetDeviceStreamCount and GetAvailableDeviceMediaType
+ return ExecuteHresultCallbackWithRetries(
+ base::BindRepeating(
+ [](IMFCaptureSource* source, DWORD stream_index,
+ MF_CAPTURE_ENGINE_STREAM_CATEGORY* stream_category) {
+ return source->GetDeviceStreamCategory(stream_index,
+ stream_category);
+ },
+ base::Unretained(source), stream_index, stream_category),
+ MediaFoundationFunctionRequiringRetry::kGetDeviceStreamCategory);
+}
+
+HRESULT VideoCaptureDeviceMFWin::GetAvailableDeviceMediaType(
+ IMFCaptureSource* source,
+ DWORD stream_index,
+ DWORD media_type_index,
+ IMFMediaType** type) {
+ // Rarely, for some unknown reason, GetAvailableDeviceMediaType returns an
+ // undocumented MF_E_INVALIDREQUEST. Retrying solves the issue.
+ return ExecuteHresultCallbackWithRetries(
+ base::BindRepeating(
+ [](IMFCaptureSource* source, DWORD stream_index,
+ DWORD media_type_index, IMFMediaType** type) {
+ return source->GetAvailableDeviceMediaType(stream_index,
+ media_type_index, type);
+ },
+ base::Unretained(source), stream_index, media_type_index, type),
+ MediaFoundationFunctionRequiringRetry::kGetAvailableDeviceMediaType);
+}
+
+HRESULT VideoCaptureDeviceMFWin::FillCapabilities(
+ IMFCaptureSource* source,
+ bool photo,
+ CapabilityList* capabilities) {
+ DWORD stream_count = 0;
+ HRESULT hr = GetDeviceStreamCount(source, &stream_count);
+ if (FAILED(hr))
+ return hr;
+
+ for (DWORD stream_index = 0; stream_index < stream_count; stream_index++) {
+ MF_CAPTURE_ENGINE_STREAM_CATEGORY stream_category;
+ hr = GetDeviceStreamCategory(source, stream_index, &stream_category);
+ if (FAILED(hr))
+ return hr;
+
+ if ((photo && stream_category !=
+ MF_CAPTURE_ENGINE_STREAM_CATEGORY_PHOTO_INDEPENDENT) ||
+ (!photo &&
+ stream_category != MF_CAPTURE_ENGINE_STREAM_CATEGORY_VIDEO_PREVIEW &&
+ stream_category != MF_CAPTURE_ENGINE_STREAM_CATEGORY_VIDEO_CAPTURE)) {
+ continue;
+ }
+
+ DWORD media_type_index = 0;
+ ComPtr<IMFMediaType> type;
+ while (SUCCEEDED(hr = GetAvailableDeviceMediaType(source, stream_index,
+ media_type_index,
+ type.GetAddressOf()))) {
+ VideoCaptureFormat format;
+ if (GetFormatFromSourceMediaType(type.Get(), photo, &format))
+ capabilities->emplace_back(media_type_index, format, stream_index);
+ type.Reset();
+ ++media_type_index;
+ }
+ if (hr == MF_E_NO_MORE_TYPES) {
+ hr = S_OK;
+ }
+ if (FAILED(hr)) {
+ return hr;
}
}
- return false;
+ return hr;
}
+VideoCaptureDeviceMFWin::VideoCaptureDeviceMFWin(ComPtr<IMFMediaSource> source)
+ : VideoCaptureDeviceMFWin(source, nullptr) {}
+
VideoCaptureDeviceMFWin::VideoCaptureDeviceMFWin(
- const VideoCaptureDeviceDescriptor& device_descriptor)
- : descriptor_(device_descriptor), capture_(0) {
+ ComPtr<IMFMediaSource> source,
+ ComPtr<IMFCaptureEngine> engine)
+ : create_mf_photo_callback_(base::BindRepeating(&CreateMFPhotoCallback)),
+ is_initialized_(false),
+ max_retry_count_(200),
+ retry_delay_in_ms_(50),
+ source_(source),
+ engine_(engine),
+ is_started_(false) {
DETACH_FROM_SEQUENCE(sequence_checker_);
}
VideoCaptureDeviceMFWin::~VideoCaptureDeviceMFWin() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ if (!video_stream_take_photo_callbacks_.empty()) {
+ for (size_t k = 0; k < video_stream_take_photo_callbacks_.size(); k++) {
+ LogWindowsImageCaptureOutcome(
+ VideoCaptureWinBackend::kMediaFoundation,
+ ImageCaptureOutcome::kFailedUsingVideoStream,
+ selected_video_capability_
+ ? IsHighResolution(selected_video_capability_->supported_format)
+ : false);
+ }
+ }
}
-bool VideoCaptureDeviceMFWin::Init(
- const Microsoft::WRL::ComPtr<IMFMediaSource>& source) {
+bool VideoCaptureDeviceMFWin::Init() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- DCHECK(!reader_.Get());
+ DCHECK(!is_initialized_);
- Microsoft::WRL::ComPtr<IMFAttributes> attributes;
- MFCreateAttributes(attributes.GetAddressOf(), 1);
- DCHECK(attributes.Get());
+ HRESULT hr = S_OK;
+ if (!engine_)
+ hr = CreateCaptureEngine(engine_.GetAddressOf());
- callback_ = new MFReaderCallback(this);
- attributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, callback_.get());
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return false;
+ }
+
+ ComPtr<IMFAttributes> attributes;
+ MFCreateAttributes(attributes.GetAddressOf(), 1);
+ DCHECK(attributes);
- return SUCCEEDED(MFCreateSourceReaderFromMediaSource(
- source.Get(), attributes.Get(), reader_.GetAddressOf()));
+ video_callback_ = new MFVideoCallback(this);
+ hr = engine_->Initialize(video_callback_.get(), attributes.Get(), nullptr,
+ source_.Get());
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return false;
+ }
+ is_initialized_ = true;
+ return true;
}
void VideoCaptureDeviceMFWin::AllocateAndStart(
@@ -220,65 +610,327 @@ void VideoCaptureDeviceMFWin::AllocateAndStart(
base::AutoLock lock(lock_);
client_ = std::move(client);
- DCHECK_EQ(capture_, false);
+ DCHECK_EQ(false, is_started_);
- CapabilityList capabilities;
- HRESULT hr = S_OK;
- if (reader_.Get()) {
- hr = FillCapabilities(reader_.Get(), &capabilities);
- if (SUCCEEDED(hr)) {
- const CapabilityWin found_capability =
- GetBestMatchedCapability(params.requested_format, capabilities);
- Microsoft::WRL::ComPtr<IMFMediaType> type;
- hr = reader_->GetNativeMediaType(kFirstVideoStream,
- found_capability.stream_index,
- type.GetAddressOf());
- if (SUCCEEDED(hr)) {
- hr = reader_->SetCurrentMediaType(kFirstVideoStream, NULL, type.Get());
- if (SUCCEEDED(hr)) {
- hr =
- reader_->ReadSample(kFirstVideoStream, 0, NULL, NULL, NULL, NULL);
- if (SUCCEEDED(hr)) {
- capture_format_ = found_capability.supported_format;
- client_->OnStarted();
- capture_ = true;
- return;
- }
- }
- }
- }
+ if (!engine_) {
+ OnError(FROM_HERE, E_FAIL);
+ return;
+ }
+
+ ComPtr<IMFCaptureSource> source;
+ HRESULT hr = engine_->GetSource(source.GetAddressOf());
+ if (FAILED(hr)) {
+ OnError(FROM_HERE, hr);
+ return;
+ }
+
+ hr = FillCapabilities(source.Get(), true, &photo_capabilities_);
+ if (FAILED(hr)) {
+ OnError(FROM_HERE, hr);
+ return;
+ }
+
+ if (!photo_capabilities_.empty()) {
+ selected_photo_capability_.reset(
+ new CapabilityWin(photo_capabilities_.front()));
+ }
+
+ CapabilityList video_capabilities;
+ hr = FillCapabilities(source.Get(), false, &video_capabilities);
+ if (FAILED(hr)) {
+ OnError(FROM_HERE, hr);
+ return;
+ }
+
+ if (video_capabilities.empty()) {
+ OnError(FROM_HERE, "No video capability found");
+ return;
+ }
+
+ const CapabilityWin best_match_video_capability =
+ GetBestMatchedCapability(params.requested_format, video_capabilities);
+ ComPtr<IMFMediaType> source_video_media_type;
+ hr = GetAvailableDeviceMediaType(source.Get(),
+ best_match_video_capability.stream_index,
+ best_match_video_capability.media_type_index,
+ source_video_media_type.GetAddressOf());
+ if (FAILED(hr)) {
+ OnError(FROM_HERE, hr);
+ return;
+ }
+
+ hr = source->SetCurrentDeviceMediaType(
+ best_match_video_capability.stream_index, source_video_media_type.Get());
+ if (FAILED(hr)) {
+ OnError(FROM_HERE, hr);
+ return;
+ }
+
+ ComPtr<IMFCaptureSink> sink;
+ hr = engine_->GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_PREVIEW,
+ sink.GetAddressOf());
+ if (FAILED(hr)) {
+ OnError(FROM_HERE, hr);
+ return;
+ }
+
+ ComPtr<IMFCapturePreviewSink> preview_sink;
+ hr = sink->QueryInterface(IID_PPV_ARGS(preview_sink.GetAddressOf()));
+ if (FAILED(hr)) {
+ OnError(FROM_HERE, hr);
+ return;
+ }
+
+ hr = preview_sink->RemoveAllStreams();
+ if (FAILED(hr)) {
+ OnError(FROM_HERE, hr);
+ return;
+ }
+
+ ComPtr<IMFMediaType> sink_video_media_type;
+ hr = MFCreateMediaType(sink_video_media_type.GetAddressOf());
+ if (FAILED(hr)) {
+ OnError(FROM_HERE, hr);
+ return;
+ }
+
+ hr = ConvertToVideoSinkMediaType(source_video_media_type.Get(),
+ sink_video_media_type.Get());
+ if (FAILED(hr)) {
+ OnError(FROM_HERE, hr);
+ return;
+ }
+
+ DWORD dw_sink_stream_index = 0;
+ hr = preview_sink->AddStream(best_match_video_capability.stream_index,
+ sink_video_media_type.Get(), NULL,
+ &dw_sink_stream_index);
+ if (FAILED(hr)) {
+ OnError(FROM_HERE, hr);
+ return;
}
- OnError(FROM_HERE, hr);
+ hr = preview_sink->SetSampleCallback(dw_sink_stream_index,
+ video_callback_.get());
+ if (FAILED(hr)) {
+ OnError(FROM_HERE, hr);
+ return;
+ }
+
+ hr = engine_->StartPreview();
+ if (FAILED(hr)) {
+ OnError(FROM_HERE, hr);
+ return;
+ }
+
+ selected_video_capability_.reset(
+ new CapabilityWin(best_match_video_capability));
+
+ client_->OnStarted();
+ is_started_ = true;
}
void VideoCaptureDeviceMFWin::StopAndDeAllocate() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- base::WaitableEvent flushed(base::WaitableEvent::ResetPolicy::AUTOMATIC,
- base::WaitableEvent::InitialState::NOT_SIGNALED);
- const int kFlushTimeOutInMs = 1000;
- bool wait = false;
- {
- base::AutoLock lock(lock_);
- if (capture_) {
- capture_ = false;
- callback_->SetSignalOnFlush(&flushed);
- wait = SUCCEEDED(
- reader_->Flush(static_cast<DWORD>(MF_SOURCE_READER_ALL_STREAMS)));
- if (!wait) {
- callback_->SetSignalOnFlush(NULL);
- }
+ base::AutoLock lock(lock_);
+
+ if (is_started_ && engine_)
+ engine_->StopPreview();
+ is_started_ = false;
+
+ client_.reset();
+}
+
+void VideoCaptureDeviceMFWin::TakePhoto(TakePhotoCallback callback) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ base::AutoLock lock(lock_);
+
+ if (!is_started_)
+ return;
+
+ if (!selected_photo_capability_) {
+ video_stream_take_photo_callbacks_.push(std::move(callback));
+ return;
+ }
+
+ ComPtr<IMFCaptureSource> source;
+ HRESULT hr = engine_->GetSource(source.GetAddressOf());
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ ComPtr<IMFMediaType> source_media_type;
+ hr = GetAvailableDeviceMediaType(source.Get(),
+ selected_photo_capability_->stream_index,
+ selected_photo_capability_->media_type_index,
+ source_media_type.GetAddressOf());
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ hr = source->SetCurrentDeviceMediaType(
+ selected_photo_capability_->stream_index, source_media_type.Get());
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ ComPtr<IMFMediaType> sink_media_type;
+ hr = MFCreateMediaType(sink_media_type.GetAddressOf());
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ hr = ConvertToPhotoSinkMediaType(source_media_type.Get(),
+ sink_media_type.Get());
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ VideoCaptureFormat format;
+ hr = GetFormatFromSourceMediaType(sink_media_type.Get(), true, &format)
+ ? S_OK
+ : E_FAIL;
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ ComPtr<IMFCaptureSink> sink;
+ hr = engine_->GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_PHOTO, sink.GetAddressOf());
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ ComPtr<IMFCapturePhotoSink> photo_sink;
+ hr = sink->QueryInterface(IID_PPV_ARGS(photo_sink.GetAddressOf()));
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ hr = photo_sink->RemoveAllStreams();
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ DWORD dw_sink_stream_index = 0;
+ hr =
+ photo_sink->AddStream(selected_photo_capability_->stream_index,
+ sink_media_type.Get(), NULL, &dw_sink_stream_index);
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ scoped_refptr<IMFCaptureEngineOnSampleCallback> photo_callback =
+ create_mf_photo_callback_.Run(std::move(callback), format);
+ hr = photo_sink->SetSampleCallback(photo_callback.get());
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ hr = engine_->TakePhoto();
+ if (FAILED(hr))
+ LogError(FROM_HERE, hr);
+}
+
+void VideoCaptureDeviceMFWin::GetPhotoState(GetPhotoStateCallback callback) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ if (!is_started_)
+ return;
+
+ ComPtr<IMFCaptureSource> source;
+ HRESULT hr = engine_->GetSource(source.GetAddressOf());
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ ComPtr<IMFMediaType> current_media_type;
+ hr = source->GetCurrentDeviceMediaType(
+ selected_photo_capability_ ? selected_photo_capability_->stream_index
+ : selected_video_capability_->stream_index,
+ current_media_type.GetAddressOf());
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ auto photo_capabilities = mojo::CreateEmptyPhotoState();
+ gfx::Size current_size;
+ GetFrameSizeFromMediaType(current_media_type.Get(), &current_size);
+
+ gfx::Size min_size = gfx::Size(current_size.width(), current_size.height());
+ gfx::Size max_size = gfx::Size(current_size.width(), current_size.height());
+ for (const CapabilityWin& capability : photo_capabilities_) {
+ min_size.SetToMin(capability.supported_format.frame_size);
+ max_size.SetToMax(capability.supported_format.frame_size);
+ }
+
+ photo_capabilities->height = mojom::Range::New(
+ max_size.height(), min_size.height(), current_size.height(), 1);
+ photo_capabilities->width = mojom::Range::New(
+ max_size.width(), min_size.width(), current_size.width(), 1);
+
+ std::move(callback).Run(std::move(photo_capabilities));
+}
+
+void VideoCaptureDeviceMFWin::SetPhotoOptions(
+ mojom::PhotoSettingsPtr settings,
+ SetPhotoOptionsCallback callback) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ if (!is_started_)
+ return;
+
+ HRESULT hr = S_OK;
+ ComPtr<IMFCaptureSource> source;
+ hr = engine_->GetSource(source.GetAddressOf());
+
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ if (!photo_capabilities_.empty() &&
+ (settings->has_height || settings->has_width)) {
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
+ }
+
+ ComPtr<IMFMediaType> current_source_media_type;
+ hr = source->GetCurrentDeviceMediaType(
+ selected_photo_capability_->stream_index,
+ current_source_media_type.GetAddressOf());
+
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return;
}
- client_.reset();
+
+ gfx::Size requested_size = gfx::Size();
+ if (settings->has_height)
+ requested_size.set_height(settings->height);
+
+ if (settings->has_width)
+ requested_size.set_width(settings->width);
+
+ const CapabilityWin best_match = GetBestMatchedPhotoCapability(
+ current_source_media_type, requested_size, photo_capabilities_);
+ selected_photo_capability_.reset(new CapabilityWin(best_match));
}
- // If the device has been unplugged, the Flush() won't trigger the event
- // and a timeout will happen.
- // TODO(tommi): Hook up the IMFMediaEventGenerator notifications API and
- // do not wait at all after getting MEVideoCaptureDeviceRemoved event.
- // See issue/226396.
- if (wait)
- flushed.TimedWait(base::TimeDelta::FromMilliseconds(kFlushTimeOutInMs));
+ std::move(callback).Run(true);
}
void VideoCaptureDeviceMFWin::OnIncomingCapturedData(
@@ -288,33 +940,60 @@ void VideoCaptureDeviceMFWin::OnIncomingCapturedData(
base::TimeTicks reference_time,
base::TimeDelta timestamp) {
base::AutoLock lock(lock_);
- if (data && client_.get()) {
- client_->OnIncomingCapturedData(data, length, capture_format_, rotation,
- reference_time, timestamp);
+
+ if (!data)
+ return;
+
+ if (client_.get()) {
+ client_->OnIncomingCapturedData(
+ data, length, selected_video_capability_->supported_format, rotation,
+ reference_time, timestamp);
}
- if (capture_) {
- HRESULT hr =
- reader_->ReadSample(kFirstVideoStream, 0, NULL, NULL, NULL, NULL);
- if (FAILED(hr)) {
- // If running the *VideoCap* unit tests on repeat, this can sometimes
- // fail with HRESULT_FROM_WINHRESULT_FROM_WIN32(ERROR_INVALID_FUNCTION).
- // It's not clear to me why this is, but it is possible that it has
- // something to do with this bug:
- // http://support.microsoft.com/kb/979567
- OnError(FROM_HERE, hr);
+ while (!video_stream_take_photo_callbacks_.empty()) {
+ TakePhotoCallback cb =
+ std::move(video_stream_take_photo_callbacks_.front());
+ video_stream_take_photo_callbacks_.pop();
+
+ mojom::BlobPtr blob =
+ Blobify(data, length, selected_video_capability_->supported_format);
+ if (!blob) {
+ LogWindowsImageCaptureOutcome(
+ VideoCaptureWinBackend::kMediaFoundation,
+ ImageCaptureOutcome::kFailedUsingVideoStream,
+ IsHighResolution(selected_video_capability_->supported_format));
+ continue;
}
+
+ std::move(cb).Run(std::move(blob));
+ LogWindowsImageCaptureOutcome(
+ VideoCaptureWinBackend::kMediaFoundation,
+ ImageCaptureOutcome::kSucceededUsingVideoStream,
+ IsHighResolution(selected_video_capability_->supported_format));
}
}
-void VideoCaptureDeviceMFWin::OnError(const base::Location& from_here,
- HRESULT hr) {
- if (client_.get()) {
- client_->OnError(
- from_here,
- base::StringPrintf("VideoCaptureDeviceMFWin: %s",
- logging::SystemErrorCodeToString(hr).c_str()));
- }
+void VideoCaptureDeviceMFWin::OnEvent(IMFMediaEvent* media_event) {
+ base::AutoLock lock(lock_);
+
+ HRESULT hr;
+ media_event->GetStatus(&hr);
+
+ if (FAILED(hr))
+ OnError(FROM_HERE, hr);
+}
+
+void VideoCaptureDeviceMFWin::OnError(const Location& from_here, HRESULT hr) {
+ OnError(from_here, logging::SystemErrorCodeToString(hr).c_str());
+}
+
+void VideoCaptureDeviceMFWin::OnError(const Location& from_here,
+ const char* message) {
+ if (!client_.get())
+ return;
+
+ client_->OnError(from_here,
+ base::StringPrintf("VideoCaptureDeviceMFWin: %s", message));
}
} // namespace media
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win.h b/chromium/media/capture/video/win/video_capture_device_mf_win.h
index 93534f9dfb4..c43b143b6d9 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win.h
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win.h
@@ -3,12 +3,13 @@
// found in the LICENSE file.
// Windows specific implementation of VideoCaptureDevice.
-// DirectShow is used for capturing. DirectShow provide its own threads
-// for capturing.
+// MediaFoundation is used for capturing. MediaFoundation provides its own
+// threads for capturing.
#ifndef MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_DEVICE_MF_WIN_H_
#define MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_DEVICE_MF_WIN_H_
+#include <mfcaptureengine.h>
#include <mfidl.h>
#include <mfreadwrite.h>
#include <stdint.h>
@@ -16,11 +17,13 @@
#include <vector>
+#include "base/callback_forward.h"
#include "base/macros.h"
#include "base/sequence_checker.h"
-#include "base/synchronization/lock.h"
#include "media/capture/capture_export.h"
#include "media/capture/video/video_capture_device.h"
+#include "media/capture/video/win/capability_list_win.h"
+#include "media/capture/video/win/metrics.h"
interface IMFSourceReader;
@@ -30,27 +33,33 @@ class Location;
namespace media {
-class MFReaderCallback;
-
-const DWORD kFirstVideoStream =
- static_cast<DWORD>(MF_SOURCE_READER_FIRST_VIDEO_STREAM);
+class MFVideoCallback;
class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
public:
- static bool FormatFromGuid(const GUID& guid, VideoPixelFormat* format);
+ static bool GetPixelFormatFromMFSourceMediaSubtype(const GUID& guid,
+ VideoPixelFormat* format);
explicit VideoCaptureDeviceMFWin(
- const VideoCaptureDeviceDescriptor& device_descriptor);
+ Microsoft::WRL::ComPtr<IMFMediaSource> source);
+ explicit VideoCaptureDeviceMFWin(
+ Microsoft::WRL::ComPtr<IMFMediaSource> source,
+ Microsoft::WRL::ComPtr<IMFCaptureEngine> engine);
+
~VideoCaptureDeviceMFWin() override;
// Opens the device driver for this device.
- bool Init(const Microsoft::WRL::ComPtr<IMFMediaSource>& source);
+ bool Init();
// VideoCaptureDevice implementation.
void AllocateAndStart(
const VideoCaptureParams& params,
std::unique_ptr<VideoCaptureDevice::Client> client) override;
void StopAndDeAllocate() override;
+ void TakePhoto(TakePhotoCallback callback) override;
+ void GetPhotoState(GetPhotoStateCallback callback) override;
+ void SetPhotoOptions(mojom::PhotoSettingsPtr settings,
+ SetPhotoOptionsCallback callback) override;
// Captured new video data.
void OnIncomingCapturedData(const uint8_t* data,
@@ -58,19 +67,68 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
int rotation,
base::TimeTicks reference_time,
base::TimeDelta timestamp);
+ void OnEvent(IMFMediaEvent* media_event);
+
+ using CreateMFPhotoCallbackCB =
+ base::RepeatingCallback<scoped_refptr<IMFCaptureEngineOnSampleCallback>(
+ VideoCaptureDevice::TakePhotoCallback callback,
+ VideoCaptureFormat format)>;
+
+ bool get_use_photo_stream_to_take_photo_for_testing() {
+ return !photo_capabilities_.empty();
+ }
+
+ void set_create_mf_photo_callback_for_testing(CreateMFPhotoCallbackCB cb) {
+ create_mf_photo_callback_ = cb;
+ }
+
+ void set_max_retry_count_for_testing(int max_retry_count) {
+ max_retry_count_ = max_retry_count;
+ }
+
+ void set_retry_delay_in_ms_for_testing(int retry_delay_in_ms) {
+ retry_delay_in_ms_ = retry_delay_in_ms;
+ }
private:
+ HRESULT ExecuteHresultCallbackWithRetries(
+ base::RepeatingCallback<HRESULT()> callback,
+ MediaFoundationFunctionRequiringRetry which_function);
+ HRESULT GetDeviceStreamCount(IMFCaptureSource* source, DWORD* count);
+ HRESULT GetDeviceStreamCategory(
+ IMFCaptureSource* source,
+ DWORD stream_index,
+ MF_CAPTURE_ENGINE_STREAM_CATEGORY* stream_category);
+ HRESULT GetAvailableDeviceMediaType(IMFCaptureSource* source,
+ DWORD stream_index,
+ DWORD media_type_index,
+ IMFMediaType** type);
+
+ HRESULT FillCapabilities(IMFCaptureSource* source,
+ bool photo,
+ CapabilityList* capabilities);
void OnError(const base::Location& from_here, HRESULT hr);
+ void OnError(const base::Location& from_here, const char* message);
+
+ CreateMFPhotoCallbackCB create_mf_photo_callback_;
+ scoped_refptr<MFVideoCallback> video_callback_;
+ bool is_initialized_;
+ int max_retry_count_;
+ int retry_delay_in_ms_;
- VideoCaptureDeviceDescriptor descriptor_;
- Microsoft::WRL::ComPtr<IMFActivate> device_;
- scoped_refptr<MFReaderCallback> callback_;
+ // Guards the below variables from concurrent access between methods running
+ // on |sequence_checker_| and calls to OnIncomingCapturedData() and OnEvent()
+ // made by MediaFoundation on threads outside of our control.
+ base::Lock lock_;
- base::Lock lock_; // Used to guard the below variables.
std::unique_ptr<VideoCaptureDevice::Client> client_;
- Microsoft::WRL::ComPtr<IMFSourceReader> reader_;
- VideoCaptureFormat capture_format_;
- bool capture_;
+ const Microsoft::WRL::ComPtr<IMFMediaSource> source_;
+ Microsoft::WRL::ComPtr<IMFCaptureEngine> engine_;
+ std::unique_ptr<CapabilityWin> selected_video_capability_;
+ CapabilityList photo_capabilities_;
+ std::unique_ptr<CapabilityWin> selected_photo_capability_;
+ bool is_started_;
+ base::queue<TakePhotoCallback> video_stream_take_photo_callbacks_;
SEQUENCE_CHECKER(sequence_checker_);
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
new file mode 100644
index 00000000000..8b6b1e03595
--- /dev/null
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
@@ -0,0 +1,1188 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <mfapi.h>
+#include <mferror.h>
+#include <stddef.h>
+#include <wincodec.h>
+
+#include "media/capture/video/win/sink_filter_win.h"
+#include "media/capture/video/win/video_capture_device_factory_win.h"
+#include "media/capture/video/win/video_capture_device_mf_win.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::Invoke;
+using ::testing::InvokeWithoutArgs;
+using ::testing::Return;
+using ::testing::AtLeast;
+using ::testing::Mock;
+using Microsoft::WRL::ComPtr;
+
+namespace media {
+
+namespace {
+class MockClient : public VideoCaptureDevice::Client {
+ public:
+ void OnIncomingCapturedData(const uint8_t* data,
+ int length,
+ const VideoCaptureFormat& frame_format,
+ int clockwise_rotation,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ int frame_feedback_id = 0) override {}
+
+ MOCK_METHOD4(
+ ReserveOutputBuffer,
+ Buffer(const gfx::Size&, VideoPixelFormat, VideoPixelStorage, int));
+
+ void OnIncomingCapturedBuffer(Buffer buffer,
+ const VideoCaptureFormat& format,
+ base::TimeTicks reference_,
+ base::TimeDelta timestamp) override {}
+
+ void OnIncomingCapturedBufferExt(
+ Buffer buffer,
+ const VideoCaptureFormat& format,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ gfx::Rect visible_rect,
+ const VideoFrameMetadata& additional_metadata) override {}
+
+ MOCK_METHOD4(
+ ResurrectLastOutputBuffer,
+ Buffer(const gfx::Size&, VideoPixelFormat, VideoPixelStorage, int));
+
+ MOCK_METHOD2(OnError, void(const base::Location&, const std::string&));
+
+ double GetBufferPoolUtilization() const override { return 0.0; }
+
+ MOCK_METHOD0(OnStarted, void());
+};
+
+class MockImageCaptureClient
+ : public base::RefCountedThreadSafe<MockImageCaptureClient> {
+ public:
+ // GMock doesn't support move-only arguments, so we use this forward method.
+ void DoOnGetPhotoState(mojom::PhotoStatePtr received_state) {
+ state = std::move(received_state);
+ }
+
+ MOCK_METHOD1(OnCorrectSetPhotoOptions, void(bool));
+
+ // GMock doesn't support move-only arguments, so we use this forward method.
+ void DoOnPhotoTaken(mojom::BlobPtr blob) {
+ EXPECT_TRUE(blob);
+ OnCorrectPhotoTaken();
+ }
+ MOCK_METHOD0(OnCorrectPhotoTaken, void(void));
+
+ mojom::PhotoStatePtr state;
+
+ private:
+ friend class base::RefCountedThreadSafe<MockImageCaptureClient>;
+ virtual ~MockImageCaptureClient() = default;
+};
+
+class MockMFMediaSource : public base::RefCountedThreadSafe<MockMFMediaSource>,
+ public IMFMediaSource {
+ public:
+ STDMETHOD(QueryInterface)(REFIID riid, void** ppvObject) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD_(ULONG, AddRef)() override {
+ base::RefCountedThreadSafe<MockMFMediaSource>::AddRef();
+ return 1U;
+ }
+
+ STDMETHOD_(ULONG, Release)() override {
+ base::RefCountedThreadSafe<MockMFMediaSource>::Release();
+ return 1U;
+ }
+ STDMETHOD(GetEvent)(DWORD dwFlags, IMFMediaEvent** ppEvent) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(BeginGetEvent)
+ (IMFAsyncCallback* pCallback, IUnknown* punkState) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(EndGetEvent)
+ (IMFAsyncResult* pResult, IMFMediaEvent** ppEvent) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(QueueEvent)
+ (MediaEventType met,
+ REFGUID guidExtendedType,
+ HRESULT hrStatus,
+ const PROPVARIANT* pvValue) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetCharacteristics)(DWORD* pdwCharacteristics) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(CreatePresentationDescriptor)
+ (IMFPresentationDescriptor** ppPresentationDescriptor) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(Start)
+ (IMFPresentationDescriptor* pPresentationDescriptor,
+ const GUID* pguidTimeFormat,
+ const PROPVARIANT* pvarStartPosition) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(Stop)(void) override { return E_NOTIMPL; }
+ STDMETHOD(Pause)(void) override { return E_NOTIMPL; }
+ STDMETHOD(Shutdown)(void) override { return E_NOTIMPL; }
+
+ private:
+ friend class base::RefCountedThreadSafe<MockMFMediaSource>;
+ virtual ~MockMFMediaSource() = default;
+};
+
+class MockMFCaptureSource
+ : public base::RefCountedThreadSafe<MockMFCaptureSource>,
+ public IMFCaptureSource {
+ public:
+ STDMETHOD(QueryInterface)(REFIID riid, void** ppvObject) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD_(ULONG, AddRef)() override {
+ base::RefCountedThreadSafe<MockMFCaptureSource>::AddRef();
+ return 1U;
+ }
+
+ STDMETHOD_(ULONG, Release)() override {
+ base::RefCountedThreadSafe<MockMFCaptureSource>::Release();
+ return 1U;
+ }
+ STDMETHOD(GetCaptureDeviceSource)
+ (MF_CAPTURE_ENGINE_DEVICE_TYPE mfCaptureEngineDeviceType,
+ IMFMediaSource** ppMediaSource) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetCaptureDeviceActivate)
+ (MF_CAPTURE_ENGINE_DEVICE_TYPE mfCaptureEngineDeviceType,
+ IMFActivate** ppActivate) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetService)
+ (REFIID rguidService, REFIID riid, IUnknown** ppUnknown) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(AddEffect)(DWORD dwSourceStreamIndex, IUnknown* pUnknown) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(RemoveEffect)
+ (DWORD dwSourceStreamIndex, IUnknown* pUnknown) override { return E_NOTIMPL; }
+ STDMETHOD(RemoveAllEffects)(DWORD dwSourceStreamIndex) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(GetAvailableDeviceMediaType)
+ (DWORD stream_index,
+ DWORD media_type_index,
+ IMFMediaType** media_type) override {
+ return DoGetAvailableDeviceMediaType(stream_index, media_type_index,
+ media_type);
+ }
+
+ MOCK_METHOD3(DoGetAvailableDeviceMediaType,
+ HRESULT(DWORD, DWORD, IMFMediaType**));
+
+ STDMETHOD(SetCurrentDeviceMediaType)
+ (DWORD dwSourceStreamIndex, IMFMediaType* pMediaType) override {
+ return DoSetCurrentDeviceMediaType(dwSourceStreamIndex, pMediaType);
+ }
+
+ MOCK_METHOD2(DoSetCurrentDeviceMediaType, HRESULT(DWORD, IMFMediaType*));
+
+ STDMETHOD(GetCurrentDeviceMediaType)
+ (DWORD stream_index, IMFMediaType** media_type) {
+ return DoGetCurrentDeviceMediaType(stream_index, media_type);
+ }
+ MOCK_METHOD2(DoGetCurrentDeviceMediaType, HRESULT(DWORD, IMFMediaType**));
+
+ STDMETHOD(GetDeviceStreamCount)(DWORD* count) {
+ return DoGetDeviceStreamCount(count);
+ }
+ MOCK_METHOD1(DoGetDeviceStreamCount, HRESULT(DWORD*));
+
+ STDMETHOD(GetDeviceStreamCategory)
+ (DWORD stream_index, MF_CAPTURE_ENGINE_STREAM_CATEGORY* category) {
+ return DoGetDeviceStreamCategory(stream_index, category);
+ }
+ MOCK_METHOD2(DoGetDeviceStreamCategory,
+ HRESULT(DWORD, MF_CAPTURE_ENGINE_STREAM_CATEGORY*));
+
+ STDMETHOD(GetMirrorState)(DWORD dwStreamIndex, BOOL* pfMirrorState) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(SetMirrorState)(DWORD dwStreamIndex, BOOL fMirrorState) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetStreamIndexFromFriendlyName)
+ (UINT32 uifriendlyName, DWORD* pdwActualStreamIndex) override {
+ return E_NOTIMPL;
+ }
+
+ private:
+ friend class base::RefCountedThreadSafe<MockMFCaptureSource>;
+ virtual ~MockMFCaptureSource() = default;
+};
+
+class MockCapturePreviewSink
+ : public base::RefCountedThreadSafe<MockCapturePreviewSink>,
+ public IMFCapturePreviewSink {
+ public:
+ STDMETHOD(QueryInterface)(REFIID riid, void** object) override {
+ if (riid == IID_IUnknown || riid == IID_IMFCapturePreviewSink) {
+ AddRef();
+ *object = this;
+ return S_OK;
+ }
+ return E_NOINTERFACE;
+ }
+ STDMETHOD_(ULONG, AddRef)() override {
+ base::RefCountedThreadSafe<MockCapturePreviewSink>::AddRef();
+ return 1U;
+ }
+
+ STDMETHOD_(ULONG, Release)() override {
+ base::RefCountedThreadSafe<MockCapturePreviewSink>::Release();
+ return 1U;
+ }
+ STDMETHOD(GetOutputMediaType)
+ (DWORD dwSinkStreamIndex, IMFMediaType** ppMediaType) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetService)
+ (DWORD dwSinkStreamIndex,
+ REFGUID rguidService,
+ REFIID riid,
+ IUnknown** ppUnknown) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(AddStream)
+ (DWORD stream_index,
+ IMFMediaType* media_type,
+ IMFAttributes* attributes,
+ DWORD* sink_stream_index) override {
+ return DoAddStream(stream_index, media_type, attributes, sink_stream_index);
+ }
+
+ MOCK_METHOD4(DoAddStream,
+ HRESULT(DWORD, IMFMediaType*, IMFAttributes*, DWORD*));
+
+ STDMETHOD(Prepare)(void) override { return E_NOTIMPL; }
+ STDMETHOD(RemoveAllStreams)(void) override { return S_OK; }
+ STDMETHOD(SetRenderHandle)(HANDLE handle) override { return E_NOTIMPL; }
+ STDMETHOD(SetRenderSurface)(IUnknown* pSurface) override { return E_NOTIMPL; }
+ STDMETHOD(UpdateVideo)
+ (const MFVideoNormalizedRect* pSrc,
+ const RECT* pDst,
+ const COLORREF* pBorderClr) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(SetSampleCallback)
+ (DWORD dwStreamSinkIndex,
+ IMFCaptureEngineOnSampleCallback* pCallback) override {
+ sample_callback = pCallback;
+ return S_OK;
+ }
+ STDMETHOD(GetMirrorState)(BOOL* pfMirrorState) override { return E_NOTIMPL; }
+ STDMETHOD(SetMirrorState)(BOOL fMirrorState) override { return E_NOTIMPL; }
+ STDMETHOD(GetRotation)
+ (DWORD dwStreamIndex, DWORD* pdwRotationValue) override { return E_NOTIMPL; }
+ STDMETHOD(SetRotation)(DWORD dwStreamIndex, DWORD dwRotationValue) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(SetCustomSink)(IMFMediaSink* pMediaSink) override {
+ return E_NOTIMPL;
+ }
+
+ scoped_refptr<IMFCaptureEngineOnSampleCallback> sample_callback;
+
+ private:
+ friend class base::RefCountedThreadSafe<MockCapturePreviewSink>;
+ virtual ~MockCapturePreviewSink() = default;
+};
+
+class MockCapturePhotoSink
+ : public base::RefCountedThreadSafe<MockCapturePhotoSink>,
+ public IMFCapturePhotoSink {
+ public:
+ STDMETHOD(QueryInterface)(REFIID riid, void** object) override {
+ if (riid == IID_IUnknown || riid == IID_IMFCapturePhotoSink) {
+ AddRef();
+ *object = this;
+ return S_OK;
+ }
+ return E_NOINTERFACE;
+ }
+ STDMETHOD_(ULONG, AddRef)() override {
+ base::RefCountedThreadSafe<MockCapturePhotoSink>::AddRef();
+ return 1U;
+ }
+
+ STDMETHOD_(ULONG, Release)() override {
+ base::RefCountedThreadSafe<MockCapturePhotoSink>::Release();
+ return 1U;
+ }
+ STDMETHOD(GetOutputMediaType)
+ (DWORD dwSinkStreamIndex, IMFMediaType** ppMediaType) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetService)
+ (DWORD dwSinkStreamIndex,
+ REFGUID rguidService,
+ REFIID riid,
+ IUnknown** ppUnknown) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(AddStream)
+ (DWORD dwSourceStreamIndex,
+ IMFMediaType* pMediaType,
+ IMFAttributes* pAttributes,
+ DWORD* pdwSinkStreamIndex) override {
+ return S_OK;
+ }
+ STDMETHOD(Prepare)(void) override { return E_NOTIMPL; }
+ STDMETHOD(RemoveAllStreams)(void) override { return S_OK; }
+
+ STDMETHOD(SetOutputFileName)(LPCWSTR fileName) override { return E_NOTIMPL; }
+ STDMETHOD(SetSampleCallback)
+ (IMFCaptureEngineOnSampleCallback* pCallback) override {
+ sample_callback = pCallback;
+ return S_OK;
+ }
+ STDMETHOD(SetOutputByteStream)(IMFByteStream* pByteStream) override {
+ return E_NOTIMPL;
+ }
+
+ scoped_refptr<IMFCaptureEngineOnSampleCallback> sample_callback;
+
+ private:
+ friend class base::RefCountedThreadSafe<MockCapturePhotoSink>;
+ virtual ~MockCapturePhotoSink() = default;
+};
+
+class MockMFCaptureEngine
+ : public base::RefCountedThreadSafe<MockMFCaptureEngine>,
+ public IMFCaptureEngine {
+ public:
+ STDMETHOD(QueryInterface)(REFIID riid, void** ppvObject) { return S_OK; }
+ STDMETHOD_(ULONG, AddRef)() override {
+ base::RefCountedThreadSafe<MockMFCaptureEngine>::AddRef();
+ return 1U;
+ }
+
+ STDMETHOD_(ULONG, Release)() override {
+ base::RefCountedThreadSafe<MockMFCaptureEngine>::Release();
+ return 1U;
+ }
+ STDMETHOD(Initialize)
+ (IMFCaptureEngineOnEventCallback* pEventCallback,
+ IMFAttributes* pAttributes,
+ IUnknown* pAudioSource,
+ IUnknown* pVideoSource) override {
+ EXPECT_TRUE(pEventCallback);
+ EXPECT_TRUE(pAttributes);
+ EXPECT_TRUE(pVideoSource);
+ event_callback = pEventCallback;
+ OnCorrectInitialize();
+ return S_OK;
+ }
+
+ MOCK_METHOD0(OnCorrectInitialize, void(void));
+
+ STDMETHOD(StartPreview)(void) override {
+ OnStartPreview();
+ return S_OK;
+ }
+
+ MOCK_METHOD0(OnStartPreview, void(void));
+
+ STDMETHOD(StopPreview)(void) override {
+ OnStopPreview();
+ return S_OK;
+ }
+
+ MOCK_METHOD0(OnStopPreview, void(void));
+
+ STDMETHOD(StartRecord)(void) override { return E_NOTIMPL; }
+ STDMETHOD(StopRecord)(BOOL bFinalize, BOOL bFlushUnprocessedSamples) {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(TakePhoto)(void) override {
+ OnTakePhoto();
+ return S_OK;
+ }
+ MOCK_METHOD0(OnTakePhoto, void(void));
+
+ STDMETHOD(GetSink)(MF_CAPTURE_ENGINE_SINK_TYPE type, IMFCaptureSink** sink) {
+ return DoGetSink(type, sink);
+ }
+ MOCK_METHOD2(DoGetSink,
+ HRESULT(MF_CAPTURE_ENGINE_SINK_TYPE, IMFCaptureSink**));
+
+ STDMETHOD(GetSource)(IMFCaptureSource** source) {
+ *source = DoGetSource();
+ return source ? S_OK : E_FAIL;
+ }
+ MOCK_METHOD0(DoGetSource, IMFCaptureSource*());
+
+ scoped_refptr<IMFCaptureEngineOnEventCallback> event_callback;
+
+ private:
+ friend class base::RefCountedThreadSafe<MockMFCaptureEngine>;
+ virtual ~MockMFCaptureEngine() = default;
+};
+
+class StubMFMediaType : public base::RefCountedThreadSafe<StubMFMediaType>,
+ public IMFMediaType {
+ public:
+ StubMFMediaType(GUID major_type,
+ GUID sub_type,
+ int frame_width,
+ int frame_height,
+ int frame_rate)
+ : major_type_(major_type),
+ sub_type_(sub_type),
+ frame_width_(frame_width),
+ frame_height_(frame_height),
+ frame_rate_(frame_rate) {}
+
+ STDMETHOD(QueryInterface)(REFIID riid, void** ppvObject) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD_(ULONG, AddRef)() override {
+ base::RefCountedThreadSafe<StubMFMediaType>::AddRef();
+ return 1U;
+ }
+
+ STDMETHOD_(ULONG, Release)() override {
+ base::RefCountedThreadSafe<StubMFMediaType>::Release();
+ return 1U;
+ }
+ STDMETHOD(GetItem)(REFGUID key, PROPVARIANT* value) override {
+ if (key == MF_MT_FRAME_SIZE) {
+ value->vt = VT_UI8;
+ value->uhVal.QuadPart = Pack2UINT32AsUINT64(frame_width_, frame_height_);
+ return S_OK;
+ }
+ if (key == MF_MT_FRAME_RATE) {
+ value->vt = VT_UI8;
+ value->uhVal.QuadPart = Pack2UINT32AsUINT64(frame_rate_, 1);
+ return S_OK;
+ }
+ if (key == MF_MT_PIXEL_ASPECT_RATIO) {
+ value->vt = VT_UI8;
+ value->uhVal.QuadPart = Pack2UINT32AsUINT64(1, 1);
+ return S_OK;
+ }
+ if (key == MF_MT_INTERLACE_MODE) {
+ value->vt = VT_UI4;
+ value->uintVal = MFVideoInterlace_Progressive;
+ return S_OK;
+ }
+ return E_FAIL;
+ }
+ STDMETHOD(GetItemType)(REFGUID guidKey, MF_ATTRIBUTE_TYPE* pType) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(CompareItem)
+ (REFGUID guidKey, REFPROPVARIANT Value, BOOL* pbResult) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(Compare)
+ (IMFAttributes* pTheirs,
+ MF_ATTRIBUTES_MATCH_TYPE MatchType,
+ BOOL* pbResult) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetUINT32)(REFGUID key, UINT32* value) override {
+ if (key == MF_MT_INTERLACE_MODE) {
+ *value = MFVideoInterlace_Progressive;
+ return S_OK;
+ }
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetUINT64)(REFGUID key, UINT64* value) override {
+ if (key == MF_MT_FRAME_SIZE) {
+ *value = (long long)frame_width_ << 32 | frame_height_;
+ return S_OK;
+ }
+ if (key == MF_MT_FRAME_RATE) {
+ *value = (long long)frame_rate_ << 32 | 1;
+ return S_OK;
+ }
+ if (key == MF_MT_PIXEL_ASPECT_RATIO) {
+ *value = (long long)1 << 32 | 1;
+ return S_OK;
+ }
+ return E_FAIL;
+ }
+ STDMETHOD(GetDouble)(REFGUID guidKey, double* pfValue) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetGUID)(REFGUID key, GUID* value) override {
+ if (key == MF_MT_MAJOR_TYPE) {
+ *value = major_type_;
+ return S_OK;
+ }
+ if (key == MF_MT_SUBTYPE) {
+ *value = sub_type_;
+ return S_OK;
+ }
+ return E_FAIL;
+ }
+ STDMETHOD(GetStringLength)(REFGUID guidKey, UINT32* pcchLength) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetString)
+ (REFGUID guidKey,
+ LPWSTR pwszValue,
+ UINT32 cchBufSize,
+ UINT32* pcchLength) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetAllocatedString)
+ (REFGUID guidKey, LPWSTR* ppwszValue, UINT32* pcchLength) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetBlobSize)(REFGUID guidKey, UINT32* pcbBlobSize) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetBlob)
+ (REFGUID guidKey,
+ UINT8* pBuf,
+ UINT32 cbBufSize,
+ UINT32* pcbBlobSize) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetAllocatedBlob)
+ (REFGUID guidKey, UINT8** ppBuf, UINT32* pcbSize) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetUnknown)(REFGUID guidKey, REFIID riid, LPVOID* ppv) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(SetItem)(REFGUID guidKey, REFPROPVARIANT Value) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(DeleteItem)(REFGUID guidKey) override { return E_NOTIMPL; }
+ STDMETHOD(DeleteAllItems)(void) override { return E_NOTIMPL; }
+ STDMETHOD(SetUINT32)(REFGUID guidKey, UINT32 unValue) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(SetUINT64)(REFGUID guidKey, UINT64 unValue) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(SetDouble)(REFGUID guidKey, double fValue) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(SetGUID)(REFGUID guidKey, REFGUID guidValue) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(SetString)(REFGUID guidKey, LPCWSTR wszValue) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(SetBlob)
+ (REFGUID guidKey, const UINT8* pBuf, UINT32 cbBufSize) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(SetUnknown)(REFGUID guidKey, IUnknown* pUnknown) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(LockStore)(void) override { return E_NOTIMPL; }
+ STDMETHOD(UnlockStore)(void) override { return E_NOTIMPL; }
+ STDMETHOD(GetCount)(UINT32* pcItems) override { return E_NOTIMPL; }
+ STDMETHOD(GetItemByIndex)
+ (UINT32 unIndex, GUID* pguidKey, PROPVARIANT* pValue) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(CopyAllItems)(IMFAttributes* pDest) override { return E_NOTIMPL; }
+ STDMETHOD(GetMajorType)(GUID* pguidMajorType) override { return E_NOTIMPL; }
+ STDMETHOD(IsCompressedFormat)(BOOL* pfCompressed) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(IsEqual)(IMFMediaType* pIMediaType, DWORD* pdwFlags) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(GetRepresentation)
+ (GUID guidRepresentation, LPVOID* ppvRepresentation) override {
+ return E_NOTIMPL;
+ }
+ STDMETHOD(FreeRepresentation)
+ (GUID guidRepresentation, LPVOID pvRepresentation) override {
+ return E_NOTIMPL;
+ }
+
+ private:
+ friend class base::RefCountedThreadSafe<StubMFMediaType>;
+ virtual ~StubMFMediaType() = default;
+
+ const GUID major_type_;
+ const GUID sub_type_;
+ const int frame_width_;
+ const int frame_height_;
+ const int frame_rate_;
+};
+
+class MockMFMediaEvent : public base::RefCountedThreadSafe<MockMFMediaEvent>,
+ public IMFMediaEvent {
+ public:
+ STDMETHOD(QueryInterface)(REFIID riid, void** ppvObject) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD_(ULONG, AddRef)() override {
+ base::RefCountedThreadSafe<MockMFMediaEvent>::AddRef();
+ return 1U;
+ }
+
+ STDMETHOD_(ULONG, Release)() override {
+ base::RefCountedThreadSafe<MockMFMediaEvent>::Release();
+ return 1U;
+ }
+
+ STDMETHOD(GetItem)(REFGUID guidKey, PROPVARIANT* pValue) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(GetItemType)(REFGUID guidKey, MF_ATTRIBUTE_TYPE* pType) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(CompareItem)
+ (REFGUID guidKey, REFPROPVARIANT Value, BOOL* pbResult) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(Compare)
+ (IMFAttributes* pTheirs,
+ MF_ATTRIBUTES_MATCH_TYPE MatchType,
+ BOOL* pbResult) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(GetUINT32)(REFGUID guidKey, UINT32* punValue) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(GetUINT64)(REFGUID guidKey, UINT64* punValue) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(GetDouble)(REFGUID guidKey, double* pfValue) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(GetGUID)(REFGUID guidKey, GUID* pguidValue) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(GetStringLength)(REFGUID guidKey, UINT32* pcchLength) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(GetString)
+ (REFGUID guidKey,
+ LPWSTR pwszValue,
+ UINT32 cchBufSize,
+ UINT32* pcchLength) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(GetAllocatedString)
+ (REFGUID guidKey, LPWSTR* ppwszValue, UINT32* pcchLength) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(GetBlobSize)(REFGUID guidKey, UINT32* pcbBlobSize) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(GetBlob)
+ (REFGUID guidKey,
+ UINT8* pBuf,
+ UINT32 cbBufSize,
+ UINT32* pcbBlobSize) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(GetAllocatedBlob)
+ (REFGUID guidKey, UINT8** ppBuf, UINT32* pcbSize) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(GetUnknown)(REFGUID guidKey, REFIID riid, LPVOID* ppv) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(SetItem)(REFGUID guidKey, REFPROPVARIANT Value) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(DeleteItem)(REFGUID guidKey) override { return E_NOTIMPL; }
+
+ STDMETHOD(DeleteAllItems)(void) override { return E_NOTIMPL; }
+
+ STDMETHOD(SetUINT32)(REFGUID guidKey, UINT32 unValue) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(SetUINT64)(REFGUID guidKey, UINT64 unValue) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(SetDouble)(REFGUID guidKey, double fValue) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(SetGUID)(REFGUID guidKey, REFGUID guidValue) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(SetString)(REFGUID guidKey, LPCWSTR wszValue) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(SetBlob)
+ (REFGUID guidKey, const UINT8* pBuf, UINT32 cbBufSize) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(SetUnknown)(REFGUID guidKey, IUnknown* pUnknown) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(LockStore)(void) override { return E_NOTIMPL; }
+
+ STDMETHOD(UnlockStore)(void) override { return E_NOTIMPL; }
+
+ STDMETHOD(GetCount)(UINT32* pcItems) override { return E_NOTIMPL; }
+
+ STDMETHOD(GetItemByIndex)
+ (UINT32 unIndex, GUID* pguidKey, PROPVARIANT* pValue) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(CopyAllItems)(IMFAttributes* pDest) override { return E_NOTIMPL; }
+
+ STDMETHOD(GetType)(MediaEventType* pmet) override { return E_NOTIMPL; }
+
+ STDMETHOD(GetExtendedType)(GUID* pguidExtendedType) override {
+ return E_NOTIMPL;
+ }
+
+ STDMETHOD(GetStatus)(HRESULT* status) override {
+ *status = DoGetStatus();
+ return S_OK;
+ }
+ MOCK_METHOD0(DoGetStatus, HRESULT());
+
+ STDMETHOD(GetValue)(PROPVARIANT* pvValue) override { return E_NOTIMPL; }
+
+ private:
+ friend class base::RefCountedThreadSafe<MockMFMediaEvent>;
+ virtual ~MockMFMediaEvent() = default;
+};
+
+} // namespace
+
+const int kArbitraryValidVideoWidth = 1920;
+const int kArbitraryValidVideoHeight = 1080;
+
+const int kArbitraryValidPhotoWidth = 3264;
+const int kArbitraryValidPhotoHeight = 2448;
+
+class VideoCaptureDeviceMFWinTest : public ::testing::Test {
+ protected:
+ VideoCaptureDeviceMFWinTest()
+ : media_source_(new MockMFMediaSource()),
+ engine_(new MockMFCaptureEngine()),
+ client_(new MockClient()),
+ image_capture_client_(new MockImageCaptureClient()),
+ device_(new VideoCaptureDeviceMFWin(media_source_, engine_)),
+ capture_source_(new MockMFCaptureSource()),
+ capture_preview_sink_(new MockCapturePreviewSink()),
+ media_foundation_supported_(
+ VideoCaptureDeviceFactoryWin::PlatformSupportsMediaFoundation()) {}
+
+ void SetUp() override {
+ if (!media_foundation_supported_)
+ return;
+ device_->set_max_retry_count_for_testing(3);
+ device_->set_retry_delay_in_ms_for_testing(1);
+
+ EXPECT_CALL(*(engine_.Get()), OnCorrectInitialize());
+ EXPECT_TRUE(device_->Init());
+ EXPECT_CALL(*(engine_.Get()), DoGetSource())
+ .WillRepeatedly(Invoke([this]() {
+ this->capture_source_->AddRef();
+ return this->capture_source_.get();
+ }));
+ }
+
+ bool ShouldSkipTest() {
+ if (media_foundation_supported_)
+ return false;
+ DVLOG(1) << "Media foundation is not supported by the current platform. "
+ "Skipping test.";
+ return true;
+ }
+
+ void PrepareMFDeviceWithOneVideoStream(GUID mf_video_subtype) {
+ EXPECT_CALL(*capture_source_, DoGetDeviceStreamCount(_))
+ .WillRepeatedly(Invoke([](DWORD* stream_count) {
+ *stream_count = 1;
+ return S_OK;
+ }));
+ EXPECT_CALL(*capture_source_, DoGetDeviceStreamCategory(0, _))
+ .WillRepeatedly(Invoke([](DWORD stream_index,
+ MF_CAPTURE_ENGINE_STREAM_CATEGORY* category) {
+ *category = MF_CAPTURE_ENGINE_STREAM_CATEGORY_VIDEO_PREVIEW;
+ return S_OK;
+ }));
+
+ EXPECT_CALL(*capture_source_, DoGetAvailableDeviceMediaType(0, _, _))
+ .WillRepeatedly(Invoke([mf_video_subtype](DWORD stream_index,
+ DWORD media_type_index,
+ IMFMediaType** media_type) {
+ if (media_type_index != 0)
+ return MF_E_NO_MORE_TYPES;
+
+ *media_type = new StubMFMediaType(MFMediaType_Video, mf_video_subtype,
+ kArbitraryValidVideoWidth,
+ kArbitraryValidVideoHeight, 30);
+ (*media_type)->AddRef();
+
+ return S_OK;
+ }));
+
+ EXPECT_CALL(*(engine_.Get()),
+ DoGetSink(MF_CAPTURE_ENGINE_SINK_TYPE_PREVIEW, _))
+ .WillRepeatedly(Invoke([this](MF_CAPTURE_ENGINE_SINK_TYPE sink_type,
+ IMFCaptureSink** sink) {
+ *sink = this->capture_preview_sink_.get();
+ this->capture_preview_sink_->AddRef();
+ return S_OK;
+ }));
+
+ EXPECT_CALL(*capture_source_, DoGetCurrentDeviceMediaType(_, _))
+ .WillRepeatedly(Invoke([mf_video_subtype](DWORD stream_index,
+ IMFMediaType** media_type) {
+ *media_type = new StubMFMediaType(MFMediaType_Video, mf_video_subtype,
+ kArbitraryValidVideoWidth,
+ kArbitraryValidVideoHeight, 30);
+ (*media_type)->AddRef();
+ return S_OK;
+ }));
+ }
+
+ void PrepareMFDeviceWithOneVideoStreamAndOnePhotoStream(
+ GUID mf_video_subtype) {
+ EXPECT_CALL(*capture_source_, DoGetDeviceStreamCount(_))
+ .WillRepeatedly(Invoke([](DWORD* stream_count) {
+ *stream_count = 2;
+ return S_OK;
+ }));
+ EXPECT_CALL(*capture_source_, DoGetDeviceStreamCategory(_, _))
+ .WillRepeatedly(Invoke([](DWORD stream_index,
+ MF_CAPTURE_ENGINE_STREAM_CATEGORY* category) {
+ if (stream_index == 0) {
+ *category = MF_CAPTURE_ENGINE_STREAM_CATEGORY_VIDEO_PREVIEW;
+ return S_OK;
+ } else if (stream_index == 1) {
+ *category = MF_CAPTURE_ENGINE_STREAM_CATEGORY_PHOTO_INDEPENDENT;
+ return S_OK;
+ }
+ return E_FAIL;
+ }));
+
+ auto get_device_media_type = [mf_video_subtype](DWORD stream_index,
+ IMFMediaType** media_type) {
+ if (stream_index == 0) {
+ *media_type = new StubMFMediaType(MFMediaType_Video, mf_video_subtype,
+ kArbitraryValidVideoWidth,
+ kArbitraryValidVideoHeight, 30);
+ (*media_type)->AddRef();
+ return S_OK;
+ } else if (stream_index == 1) {
+ *media_type = new StubMFMediaType(
+ MFMediaType_Image, GUID_ContainerFormatJpeg,
+ kArbitraryValidPhotoWidth, kArbitraryValidPhotoHeight, 0);
+ (*media_type)->AddRef();
+ return S_OK;
+ }
+ return E_FAIL;
+ };
+
+ EXPECT_CALL(*capture_source_, DoGetAvailableDeviceMediaType(_, _, _))
+ .WillRepeatedly(Invoke(
+ [get_device_media_type](DWORD stream_index, DWORD media_type_index,
+ IMFMediaType** media_type) {
+ if (media_type_index != 0)
+ return MF_E_NO_MORE_TYPES;
+ return get_device_media_type(stream_index, media_type);
+ }));
+
+ EXPECT_CALL(*(engine_.Get()), DoGetSink(_, _))
+ .WillRepeatedly(Invoke([this](MF_CAPTURE_ENGINE_SINK_TYPE sink_type,
+ IMFCaptureSink** sink) {
+ if (sink_type == MF_CAPTURE_ENGINE_SINK_TYPE_PREVIEW) {
+ *sink = this->capture_preview_sink_.get();
+ this->capture_preview_sink_->AddRef();
+ return S_OK;
+ } else if (sink_type == MF_CAPTURE_ENGINE_SINK_TYPE_PHOTO) {
+ *sink = new MockCapturePhotoSink();
+ (*sink)->AddRef();
+ return S_OK;
+ }
+ return E_FAIL;
+ }));
+
+ EXPECT_CALL(*capture_source_, DoGetCurrentDeviceMediaType(_, _))
+ .WillRepeatedly(Invoke(get_device_media_type));
+ }
+
+ Microsoft::WRL::ComPtr<MockMFMediaSource> media_source_;
+ Microsoft::WRL::ComPtr<MockMFCaptureEngine> engine_;
+ std::unique_ptr<MockClient> client_;
+ scoped_refptr<MockImageCaptureClient> image_capture_client_;
+ std::unique_ptr<VideoCaptureDeviceMFWin> device_;
+ VideoCaptureFormat last_format_;
+
+ scoped_refptr<MockMFCaptureSource> capture_source_;
+ scoped_refptr<MockCapturePreviewSink> capture_preview_sink_;
+
+ private:
+ const bool media_foundation_supported_;
+};
+
+// Expects StartPreview() to be called on AllocateAndStart()
+TEST_F(VideoCaptureDeviceMFWinTest, StartPreviewOnAllocateAndStart) {
+ if (ShouldSkipTest())
+ return;
+
+ PrepareMFDeviceWithOneVideoStream(MFVideoFormat_MJPG);
+
+ EXPECT_CALL(*(engine_.Get()), OnStartPreview());
+ EXPECT_CALL(*client_, OnStarted());
+ EXPECT_CALL(*(engine_.Get()), OnStopPreview());
+
+ device_->AllocateAndStart(VideoCaptureParams(), std::move(client_));
+
+ device_->StopAndDeAllocate();
+}
+
+// Expects OnError() to be called on an errored IMFMediaEvent
+TEST_F(VideoCaptureDeviceMFWinTest, CallClientOnErrorMediaEvent) {
+ if (ShouldSkipTest())
+ return;
+
+ PrepareMFDeviceWithOneVideoStream(MFVideoFormat_MJPG);
+
+ EXPECT_CALL(*(engine_.Get()), OnStartPreview());
+ EXPECT_CALL(*client_, OnStarted());
+ EXPECT_CALL(*client_, OnError(_, _));
+ scoped_refptr<MockMFMediaEvent> media_event_error = new MockMFMediaEvent();
+ EXPECT_CALL(*media_event_error, DoGetStatus()).WillRepeatedly(Return(E_FAIL));
+
+ device_->AllocateAndStart(VideoCaptureParams(), std::move(client_));
+ engine_->event_callback->OnEvent(media_event_error.get());
+}
+
+// Allocates device with flaky methods failing with MF_E_INVALIDREQUEST and
+// expects the device to retry and start correctly
+TEST_F(VideoCaptureDeviceMFWinTest, AllocateAndStartWithFlakyInvalidRequest) {
+ if (ShouldSkipTest())
+ return;
+
+ EXPECT_CALL(*capture_source_, DoGetDeviceStreamCount(_))
+ .Times(AtLeast(2))
+ .WillOnce(Return(MF_E_INVALIDREQUEST))
+ .WillRepeatedly(Invoke([](DWORD* stream_count) {
+ *stream_count = 1;
+ return S_OK;
+ }));
+ EXPECT_CALL(*capture_source_, DoGetDeviceStreamCategory(0, _))
+ .Times(AtLeast(2))
+ .WillOnce(Return(MF_E_INVALIDREQUEST))
+ .WillRepeatedly(Invoke(
+ [](DWORD stream_index, MF_CAPTURE_ENGINE_STREAM_CATEGORY* category) {
+ *category = MF_CAPTURE_ENGINE_STREAM_CATEGORY_VIDEO_PREVIEW;
+ return S_OK;
+ }));
+
+ EXPECT_CALL(*capture_source_, DoGetAvailableDeviceMediaType(0, _, _))
+ .Times(AtLeast(2))
+ .WillOnce(Return(MF_E_INVALIDREQUEST))
+ .WillRepeatedly(Invoke([](DWORD stream_index, DWORD media_type_index,
+ IMFMediaType** media_type) {
+ if (media_type_index != 0)
+ return MF_E_NO_MORE_TYPES;
+
+ *media_type = new StubMFMediaType(MFMediaType_Video, MFVideoFormat_MJPG,
+ kArbitraryValidVideoWidth,
+ kArbitraryValidVideoHeight, 30);
+ (*media_type)->AddRef();
+
+ return S_OK;
+ }));
+
+ EXPECT_CALL(*(engine_.Get()),
+ DoGetSink(MF_CAPTURE_ENGINE_SINK_TYPE_PREVIEW, _))
+ .WillRepeatedly(Invoke(
+ [](MF_CAPTURE_ENGINE_SINK_TYPE sink_type, IMFCaptureSink** sink) {
+ *sink = new MockCapturePreviewSink();
+ (*sink)->AddRef();
+ return S_OK;
+ }));
+
+ EXPECT_CALL(*(engine_.Get()), OnStartPreview());
+ EXPECT_CALL(*client_, OnStarted());
+ device_->AllocateAndStart(VideoCaptureParams(), std::move(client_));
+}
+
+// Allocates device with methods always failing with MF_E_INVALIDREQUEST and
+// expects the device to give up and call OnError()
+TEST_F(VideoCaptureDeviceMFWinTest, AllocateAndStartWithFailingInvalidRequest) {
+ if (ShouldSkipTest())
+ return;
+
+ EXPECT_CALL(*capture_source_, DoGetDeviceStreamCount(_))
+ .WillRepeatedly(Return(MF_E_INVALIDREQUEST));
+
+ EXPECT_CALL(*client_, OnError(_, _));
+ device_->AllocateAndStart(VideoCaptureParams(), std::move(client_));
+}
+
+// Given an |IMFCaptureSource| offering a video stream without photo stream to
+// |VideoCaptureDevice|, when asking the photo state from |VideoCaptureDevice|
+// then expect the returned state to match the video resolution
+TEST_F(VideoCaptureDeviceMFWinTest, GetPhotoStateViaVideoStream) {
+ if (ShouldSkipTest())
+ return;
+
+ PrepareMFDeviceWithOneVideoStream(MFVideoFormat_MJPG);
+
+ EXPECT_CALL(*(engine_.Get()), OnStartPreview());
+ EXPECT_CALL(*client_, OnStarted());
+
+ device_->AllocateAndStart(VideoCaptureParams(), std::move(client_));
+
+ VideoCaptureDevice::GetPhotoStateCallback get_photo_state_callback =
+ base::BindOnce(&MockImageCaptureClient::DoOnGetPhotoState,
+ image_capture_client_);
+ device_->GetPhotoState(std::move(get_photo_state_callback));
+
+ mojom::PhotoState* state = image_capture_client_->state.get();
+ ASSERT_EQ(state->width->min, kArbitraryValidVideoWidth);
+ ASSERT_EQ(state->width->current, kArbitraryValidVideoWidth);
+ ASSERT_EQ(state->width->max, kArbitraryValidVideoWidth);
+
+ ASSERT_EQ(state->height->min, kArbitraryValidVideoHeight);
+ ASSERT_EQ(state->height->current, kArbitraryValidVideoHeight);
+ ASSERT_EQ(state->height->max, kArbitraryValidVideoHeight);
+}
+
+// Given an |IMFCaptureSource| offering a video stream and a photo stream to
+// |VideoCaptureDevice|, when asking the photo state from |VideoCaptureDevice|
+// then expect the returned state to match the available photo resolution
+TEST_F(VideoCaptureDeviceMFWinTest, GetPhotoStateViaPhotoStream) {
+ if (ShouldSkipTest())
+ return;
+
+ PrepareMFDeviceWithOneVideoStreamAndOnePhotoStream(MFVideoFormat_MJPG);
+
+ EXPECT_CALL(*(engine_.Get()), OnStartPreview());
+ EXPECT_CALL(*client_, OnStarted());
+
+ device_->AllocateAndStart(VideoCaptureParams(), std::move(client_));
+ VideoCaptureDevice::GetPhotoStateCallback get_photo_state_callback =
+ base::BindOnce(&MockImageCaptureClient::DoOnGetPhotoState,
+ image_capture_client_);
+ device_->GetPhotoState(std::move(get_photo_state_callback));
+
+ mojom::PhotoState* state = image_capture_client_->state.get();
+ ASSERT_EQ(state->width->min, kArbitraryValidPhotoWidth);
+ ASSERT_EQ(state->width->current, kArbitraryValidPhotoWidth);
+ ASSERT_EQ(state->width->max, kArbitraryValidPhotoWidth);
+
+ ASSERT_EQ(state->height->min, kArbitraryValidPhotoHeight);
+ ASSERT_EQ(state->height->current, kArbitraryValidPhotoHeight);
+ ASSERT_EQ(state->height->max, kArbitraryValidPhotoHeight);
+}
+
+// Given an |IMFCaptureSource| offering a video stream and a photo stream to
+// |VideoCaptureDevice|, when taking photo from |VideoCaptureDevice| then
+// expect IMFCaptureEngine::TakePhoto() to be called
+TEST_F(VideoCaptureDeviceMFWinTest, TakePhotoViaPhotoStream) {
+ if (ShouldSkipTest())
+ return;
+
+ PrepareMFDeviceWithOneVideoStreamAndOnePhotoStream(MFVideoFormat_MJPG);
+
+ EXPECT_CALL(*(engine_.Get()), OnStartPreview());
+ EXPECT_CALL(*client_, OnStarted());
+
+ EXPECT_CALL(*(engine_.Get()), OnTakePhoto());
+
+ device_->AllocateAndStart(VideoCaptureParams(), std::move(client_));
+ VideoCaptureDevice::TakePhotoCallback take_photo_callback = base::BindOnce(
+ &MockImageCaptureClient::DoOnPhotoTaken, image_capture_client_);
+ device_->TakePhoto(std::move(take_photo_callback));
+}
+
+class DepthCameraDeviceMFWinTest : public VideoCaptureDeviceMFWinTest,
+ public testing::WithParamInterface<GUID> {};
+
+const GUID kDepthCameraOfferedVideoMediaSubtype[] = {
+ kMediaSubTypeY16, kMediaSubTypeZ16, kMediaSubTypeINVZ};
+
+INSTANTIATE_TEST_CASE_P(
+ DepthCameraDeviceMFWinTests,
+ DepthCameraDeviceMFWinTest,
+ testing::ValuesIn(kDepthCameraOfferedVideoMediaSubtype));
+
+// Given an |IMFCaptureSource| offering a video stream with subtype Y16, Z16 or
+// INVZ , when allocating and starting |VideoCaptureDevice| then expect the MF
+// source and the MF sink to be set to the same media subtype
+TEST_P(DepthCameraDeviceMFWinTest, AllocateAndStartDepthCamera) {
+ if (ShouldSkipTest())
+ return;
+
+ GUID offered_video_media_subtype = GetParam();
+ PrepareMFDeviceWithOneVideoStream(offered_video_media_subtype);
+
+ EXPECT_CALL(*(engine_.Get()), OnStartPreview());
+ EXPECT_CALL(*client_, OnStarted());
+
+ EXPECT_CALL(*(capture_source_.get()), DoSetCurrentDeviceMediaType(0, _))
+ .WillOnce(Invoke([offered_video_media_subtype](DWORD stream_index,
+ IMFMediaType* media_type) {
+ GUID source_video_media_subtype;
+ media_type->GetGUID(MF_MT_SUBTYPE, &source_video_media_subtype);
+ EXPECT_EQ(source_video_media_subtype, offered_video_media_subtype);
+ return S_OK;
+ }));
+
+ EXPECT_CALL(*(capture_preview_sink_.get()), DoAddStream(0, _, _, _))
+ .WillOnce(Invoke([offered_video_media_subtype](DWORD stream_index,
+ IMFMediaType* media_type,
+ IMFAttributes* attributes,
+ DWORD* sink_stream_index) {
+ GUID sink_video_media_subtype;
+ media_type->GetGUID(MF_MT_SUBTYPE, &sink_video_media_subtype);
+ EXPECT_EQ(sink_video_media_subtype, offered_video_media_subtype);
+ return S_OK;
+ }));
+
+ device_->AllocateAndStart(VideoCaptureParams(), std::move(client_));
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/win/video_capture_device_win.cc b/chromium/media/capture/video/win/video_capture_device_win.cc
index 83b1b641ab4..b016957082f 100644
--- a/chromium/media/capture/video/win/video_capture_device_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_win.cc
@@ -18,11 +18,12 @@
#include "base/win/scoped_variant.h"
#include "media/base/timestamp_constants.h"
#include "media/capture/video/blob_utils.h"
+#include "media/capture/video/win/metrics.h"
#include "media/capture/video/win/video_capture_device_utils_win.h"
-using Microsoft::WRL::ComPtr;
using base::win::ScopedCoMem;
using base::win::ScopedVariant;
+using Microsoft::WRL::ComPtr;
namespace media {
@@ -407,6 +408,15 @@ VideoCaptureDeviceWin::~VideoCaptureDeviceWin() {
if (capture_graph_builder_.Get())
capture_graph_builder_.Reset();
+
+ if (!take_photo_callbacks_.empty()) {
+ for (size_t k = 0; k < take_photo_callbacks_.size(); k++) {
+ LogWindowsImageCaptureOutcome(
+ VideoCaptureWinBackend::kDirectShow,
+ ImageCaptureOutcome::kFailedUsingVideoStream,
+ IsHighResolution(capture_format_));
+ }
+ }
}
bool VideoCaptureDeviceWin::Init() {
@@ -526,7 +536,7 @@ void VideoCaptureDeviceWin::AllocateAndStart(
// Get the windows capability from the capture device.
// GetStreamCaps can return S_FALSE which we consider an error. Therefore the
// FAILED macro can't be used.
- hr = stream_config->GetStreamCaps(found_capability.stream_index,
+ hr = stream_config->GetStreamCaps(found_capability.media_type_index,
media_type.Receive(), caps.get());
if (hr != S_OK) {
SetErrorState(FROM_HERE, "Failed to get capture device capabilities", hr);
@@ -856,8 +866,18 @@ void VideoCaptureDeviceWin::FrameReceived(const uint8_t* buffer,
take_photo_callbacks_.pop();
mojom::BlobPtr blob = Blobify(buffer, length, format);
- if (blob)
+ if (blob) {
std::move(cb).Run(std::move(blob));
+ LogWindowsImageCaptureOutcome(
+ VideoCaptureWinBackend::kDirectShow,
+ ImageCaptureOutcome::kSucceededUsingVideoStream,
+ IsHighResolution(format));
+ } else {
+ LogWindowsImageCaptureOutcome(
+ VideoCaptureWinBackend::kDirectShow,
+ ImageCaptureOutcome::kFailedUsingVideoStream,
+ IsHighResolution(format));
+ }
}
}