summaryrefslogtreecommitdiff
path: root/chromium/media/capture/video
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@qt.io>2019-05-24 11:40:17 +0200
committerAllan Sandfeld Jensen <allan.jensen@qt.io>2019-05-24 12:42:11 +0000
commit5d87695f37678f96492b258bbab36486c59866b4 (patch)
treebe9783bbaf04fb930c4d74ca9c00b5e7954c8bc6 /chromium/media/capture/video
parent6c11fb357ec39bf087b8b632e2b1e375aef1b38b (diff)
downloadqtwebengine-chromium-5d87695f37678f96492b258bbab36486c59866b4.tar.gz
BASELINE: Update Chromium to 75.0.3770.56
Change-Id: I86d2007fd27a45d5797eee06f4c9369b8b50ac4f Reviewed-by: Alexandru Croitor <alexandru.croitor@qt.io>
Diffstat (limited to 'chromium/media/capture/video')
-rw-r--r--chromium/media/capture/video/OWNERS6
-rw-r--r--chromium/media/capture/video/android/BUILD.gn2
-rw-r--r--chromium/media/capture/video/android/java/src/org/chromium/media/OWNERS6
-rw-r--r--chromium/media/capture/video/android/java/src/org/chromium/media/PhotoCapabilities.java628
-rw-r--r--chromium/media/capture/video/android/java/src/org/chromium/media/VideoCapture.java2
-rw-r--r--chromium/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera.java84
-rw-r--r--chromium/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera2.java132
-rw-r--r--chromium/media/capture/video/android/photo_capabilities.cc289
-rw-r--r--chromium/media/capture/video/android/photo_capabilities.h129
-rw-r--r--chromium/media/capture/video/android/video_capture_device_android.cc146
-rw-r--r--chromium/media/capture/video/chromeos/DEPS1
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.cc167
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.h17
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc2
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.cc38
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.h14
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc48
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc33
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h14
-rw-r--r--chromium/media/capture/video/chromeos/cros_image_capture_impl.cc12
-rw-r--r--chromium/media/capture/video/chromeos/cros_image_capture_impl.h8
-rw-r--r--chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc22
-rw-r--r--chromium/media/capture/video/chromeos/mock_camera_module.cc7
-rw-r--r--chromium/media/capture/video/chromeos/mock_camera_module.h6
-rw-r--r--chromium/media/capture/video/chromeos/mock_vendor_tag_ops.cc67
-rw-r--r--chromium/media/capture/video/chromeos/mock_vendor_tag_ops.h57
-rw-r--r--chromium/media/capture/video/chromeos/mock_video_capture_client.cc1
-rw-r--r--chromium/media/capture/video/chromeos/mock_video_capture_client.h1
-rw-r--r--chromium/media/capture/video/chromeos/mojo/BUILD.gn1
-rw-r--r--chromium/media/capture/video/chromeos/mojo/camera_common.mojom36
-rw-r--r--chromium/media/capture/video/chromeos/mojo/cros_camera_service.mojom6
-rw-r--r--chromium/media/capture/video/chromeos/mojo/cros_image_capture.mojom19
-rw-r--r--chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.cc63
-rw-r--r--chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.h64
-rw-r--r--chromium/media/capture/video/chromeos/reprocess_manager.cc46
-rw-r--r--chromium/media/capture/video/chromeos/reprocess_manager.h41
-rw-r--r--chromium/media/capture/video/chromeos/request_builder.cc32
-rw-r--r--chromium/media/capture/video/chromeos/request_builder.h13
-rw-r--r--chromium/media/capture/video/chromeos/request_manager.cc269
-rw-r--r--chromium/media/capture/video/chromeos/request_manager.h116
-rw-r--r--chromium/media/capture/video/chromeos/request_manager_unittest.cc28
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.cc83
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.h17
-rw-r--r--chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.cc154
-rw-r--r--chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h68
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc1
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h2
-rw-r--r--chromium/media/capture/video/fake_video_capture_device.h12
-rw-r--r--chromium/media/capture/video/fake_video_capture_device_factory.cc14
-rw-r--r--chromium/media/capture/video/fake_video_capture_device_unittest.cc27
-rw-r--r--chromium/media/capture/video/mac/DEPS1
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm66
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_factory_mac.h3
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_factory_mac.mm19
-rw-r--r--chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc17
-rw-r--r--chromium/media/capture/video/mock_video_capture_device_client.cc5
-rw-r--r--chromium/media/capture/video/mock_video_capture_device_client.h4
-rw-r--r--chromium/media/capture/video/video_capture_device.h1
-rw-r--r--chromium/media/capture/video/video_capture_device_client.cc45
-rw-r--r--chromium/media/capture/video/video_capture_device_client.h1
-rw-r--r--chromium/media/capture/video/video_capture_device_descriptor.h16
-rw-r--r--chromium/media/capture/video/video_capture_device_factory.h9
-rw-r--r--chromium/media/capture/video/video_capture_device_unittest.cc19
-rw-r--r--chromium/media/capture/video/video_capture_jpeg_decoder_impl.cc14
-rw-r--r--chromium/media/capture/video/video_capture_jpeg_decoder_impl.h16
-rw-r--r--chromium/media/capture/video/video_frame_receiver.h13
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.cc5
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc1
68 files changed, 1941 insertions, 1365 deletions
diff --git a/chromium/media/capture/video/OWNERS b/chromium/media/capture/video/OWNERS
index ea5ac3b3660..0a79c53912d 100644
--- a/chromium/media/capture/video/OWNERS
+++ b/chromium/media/capture/video/OWNERS
@@ -1,7 +1,9 @@
emircan@chromium.org
chfremer@chromium.org
-mcasas@chromium.org
tommi@chromium.org
-# TEAM: media-capture-and-streams@grotations.appspotmail.com
+# Original (legacy) owner.
+mcasas@chromium.org
+
+# TEAM: webrtc-dev@chromium.org
# COMPONENT: Blink>GetUserMedia>WebCam
diff --git a/chromium/media/capture/video/android/BUILD.gn b/chromium/media/capture/video/android/BUILD.gn
index 046b6462922..1148a14e129 100644
--- a/chromium/media/capture/video/android/BUILD.gn
+++ b/chromium/media/capture/video/android/BUILD.gn
@@ -52,7 +52,7 @@ java_cpp_enum("media_java_enums_srcjar") {
android_library("capture_java") {
deps = [
"//base:base_java",
- "//third_party/android_deps:android_support_annotations_java",
+ "//third_party/android_deps:com_android_support_support_annotations_java",
]
srcjar_deps = [ ":media_java_enums_srcjar" ]
diff --git a/chromium/media/capture/video/android/java/src/org/chromium/media/OWNERS b/chromium/media/capture/video/android/java/src/org/chromium/media/OWNERS
index e34b150738c..18bc5cb562a 100644
--- a/chromium/media/capture/video/android/java/src/org/chromium/media/OWNERS
+++ b/chromium/media/capture/video/android/java/src/org/chromium/media/OWNERS
@@ -1,5 +1,7 @@
-mcasas@chromium.org
qinmin@chromium.org
-# media-capture-and-streams@grotations.appspotmail.com
+# Original (legacy) owner.
+mcasas@chromium.org
+
+# webrtc-dev@chromium.org
# COMPONENT: Blink>GetUserMedia>WebCam
diff --git a/chromium/media/capture/video/android/java/src/org/chromium/media/PhotoCapabilities.java b/chromium/media/capture/video/android/java/src/org/chromium/media/PhotoCapabilities.java
index 0e9fe49dba1..3ec06e9afc3 100644
--- a/chromium/media/capture/video/android/java/src/org/chromium/media/PhotoCapabilities.java
+++ b/chromium/media/capture/video/android/java/src/org/chromium/media/PhotoCapabilities.java
@@ -12,581 +12,149 @@ import org.chromium.base.annotations.JNINamespace;
**/
@JNINamespace("media")
class PhotoCapabilities {
- public final int maxIso;
- public final int minIso;
- public final int currentIso;
- public final int stepIso;
- public final int maxHeight;
- public final int minHeight;
- public final int currentHeight;
- public final int stepHeight;
- public final int maxWidth;
- public final int minWidth;
- public final int currentWidth;
- public final int stepWidth;
- public final double maxZoom;
- public final double minZoom;
- public final double currentZoom;
- public final double stepZoom;
- public final double currentFocusDistance;
- public final double maxFocusDistance;
- public final double minFocusDistance;
- public final double stepFocusDistance;
- public final int focusMode;
- public final int[] focusModes;
- public final int exposureMode;
- public final int[] exposureModes;
- public final double maxExposureCompensation;
- public final double minExposureCompensation;
- public final double currentExposureCompensation;
- public final double stepExposureCompensation;
- public final double maxExposureTime;
- public final double minExposureTime;
- public final double currentExposureTime;
- public final double stepExposureTime;
- public final int whiteBalanceMode;
- public final int[] whiteBalanceModes;
- public final int[] fillLightModes;
- public final boolean supportsTorch;
- public final boolean torch;
- public final boolean redEyeReduction;
- public final int maxColorTemperature;
- public final int minColorTemperature;
- public final int currentColorTemperature;
- public final int stepColorTemperature;
+ public boolean mBoolCapability[]; // boolean values, indexed by PhotoCapabilityBool
+ public double mDoubleCapability[]; // double values, indexed by PhotoCapabilityDouble
+ public int mIntCapability[]; // int values, indexed by PhotoCapabilityInt
+ public int mFillLightModeArray[]; // list of AndroidFillLightMode values
+ public int mMeteringMode[]; // AndroidMeteringMode values, indexed
+ // by MeteringModeType
+ public int mMeteringModeArray[][]; // lists of AndroidMeteringMode values,
+ // indexed by MeteringModeType
- PhotoCapabilities(int maxIso, int minIso, int currentIso, int stepIso, int maxHeight,
- int minHeight, int currentHeight, int stepHeight, int maxWidth, int minWidth,
- int currentWidth, int stepWidth, double maxZoom, double minZoom, double currentZoom,
- double stepZoom, double currentFocusDistance, double maxFocusDistance,
- double minFocusDistance, double stepFocusDistance, int focusMode, int[] focusModes,
- int exposureMode, int[] exposureModes, double maxExposureCompensation,
- double minExposureCompensation, double currentExposureCompensation,
- double stepExposureCompensation, double maxExposureTime, double minExposureTime,
- double currentExposureTime, double stepExposureTime, int whiteBalanceMode,
- int[] whiteBalanceModes, int[] fillLightModes, boolean supportsTorch, boolean torch,
- boolean redEyeReduction, int maxColorTemperature, int minColorTemperature,
- int currentColorTemperature, int stepColorTemperature) {
- this.maxIso = maxIso;
- this.minIso = minIso;
- this.currentIso = currentIso;
- this.stepIso = stepIso;
- this.maxHeight = maxHeight;
- this.minHeight = minHeight;
- this.currentHeight = currentHeight;
- this.stepHeight = stepHeight;
- this.maxWidth = maxWidth;
- this.minWidth = minWidth;
- this.currentWidth = currentWidth;
- this.stepWidth = stepWidth;
- this.maxZoom = maxZoom;
- this.minZoom = minZoom;
- this.currentZoom = currentZoom;
- this.stepZoom = stepZoom;
- this.currentFocusDistance = currentFocusDistance;
- this.maxFocusDistance = maxFocusDistance;
- this.minFocusDistance = minFocusDistance;
- this.stepFocusDistance = stepFocusDistance;
- this.focusMode = focusMode;
- this.focusModes = focusModes;
- this.exposureMode = exposureMode;
- this.exposureModes = exposureModes;
- this.maxExposureCompensation = maxExposureCompensation;
- this.minExposureCompensation = minExposureCompensation;
- this.currentExposureCompensation = currentExposureCompensation;
- this.stepExposureCompensation = stepExposureCompensation;
- this.maxExposureTime = maxExposureTime;
- this.minExposureTime = minExposureTime;
- this.currentExposureTime = currentExposureTime;
- this.stepExposureTime = stepExposureTime;
- this.whiteBalanceMode = whiteBalanceMode;
- this.whiteBalanceModes = whiteBalanceModes;
- this.fillLightModes = fillLightModes;
- this.supportsTorch = supportsTorch;
- this.torch = torch;
- this.redEyeReduction = redEyeReduction;
- this.maxColorTemperature = maxColorTemperature;
- this.minColorTemperature = minColorTemperature;
- this.currentColorTemperature = currentColorTemperature;
- this.stepColorTemperature = stepColorTemperature;
- }
-
- @CalledByNative
- public int getMinIso() {
- return minIso;
- }
-
- @CalledByNative
- public int getMaxIso() {
- return maxIso;
- }
-
- @CalledByNative
- public int getCurrentIso() {
- return currentIso;
- }
-
- @CalledByNative
- public int getStepIso() {
- return stepIso;
- }
-
- @CalledByNative
- public int getMinHeight() {
- return minHeight;
- }
-
- @CalledByNative
- public int getMaxHeight() {
- return maxHeight;
- }
-
- @CalledByNative
- public int getCurrentHeight() {
- return currentHeight;
- }
-
- @CalledByNative
- public int getStepHeight() {
- return stepHeight;
- }
-
- @CalledByNative
- public int getMinWidth() {
- return minWidth;
- }
-
- @CalledByNative
- public int getMaxWidth() {
- return maxWidth;
- }
-
- @CalledByNative
- public int getCurrentWidth() {
- return currentWidth;
- }
-
- @CalledByNative
- public int getStepWidth() {
- return stepWidth;
- }
-
- @CalledByNative
- public double getMinZoom() {
- return minZoom;
- }
-
- @CalledByNative
- public double getMaxZoom() {
- return maxZoom;
- }
-
- @CalledByNative
- public double getCurrentZoom() {
- return currentZoom;
- }
-
- @CalledByNative
- public double getStepZoom() {
- return stepZoom;
- }
-
- @CalledByNative
- public double getCurrentFocusDistance() {
- return currentFocusDistance;
- }
-
- @CalledByNative
- public double getMaxFocusDistance() {
- return maxFocusDistance;
- }
-
- @CalledByNative
- public double getMinFocusDistance() {
- return minFocusDistance;
- }
-
- @CalledByNative
- public double getStepFocusDistance() {
- return stepFocusDistance;
- }
-
- @CalledByNative
- public int getFocusMode() {
- return focusMode;
- }
-
- @CalledByNative
- public int[] getFocusModes() {
- return focusModes != null ? focusModes.clone() : new int[0];
- }
-
- @CalledByNative
- public int getExposureMode() {
- return exposureMode;
- }
-
- @CalledByNative
- public int[] getExposureModes() {
- return exposureModes != null ? exposureModes.clone() : new int[0];
- }
-
- @CalledByNative
- public double getMinExposureCompensation() {
- return minExposureCompensation;
- }
-
- @CalledByNative
- public double getMaxExposureCompensation() {
- return maxExposureCompensation;
- }
-
- @CalledByNative
- public double getCurrentExposureCompensation() {
- return currentExposureCompensation;
- }
-
- @CalledByNative
- public double getStepExposureCompensation() {
- return stepExposureCompensation;
- }
- @CalledByNative
- public double getMinExposureTime() {
- return minExposureTime;
- }
-
- @CalledByNative
- public double getMaxExposureTime() {
- return maxExposureTime;
- }
-
- @CalledByNative
- public double getCurrentExposureTime() {
- return currentExposureTime;
- }
-
- @CalledByNative
- public double getStepExposureTime() {
- return stepExposureTime;
- }
-
- @CalledByNative
- public int getWhiteBalanceMode() {
- return whiteBalanceMode;
- }
-
- @CalledByNative
- public int[] getWhiteBalanceModes() {
- return whiteBalanceModes != null ? whiteBalanceModes.clone() : new int[0];
- }
-
- @CalledByNative
- public int[] getFillLightModes() {
- return fillLightModes != null ? fillLightModes.clone() : new int[0];
- }
+ PhotoCapabilities(boolean[] boolCapability, double[] doubleCapability, int[] intCapability,
+ int[] fillLightModeArray, int[] meteringMode, int[][] meteringModeArray) {
+ if (boolCapability.length != PhotoCapabilityBool.NUM_ENTRIES
+ || doubleCapability.length != PhotoCapabilityDouble.NUM_ENTRIES
+ || intCapability.length != PhotoCapabilityInt.NUM_ENTRIES
+ || meteringMode.length != MeteringModeType.NUM_ENTRIES
+ || meteringModeArray.length != MeteringModeType.NUM_ENTRIES) {
+ throw new IllegalArgumentException();
+ }
+ if (fillLightModeArray != null) {
+ for (int i = 0; i < fillLightModeArray.length; i++) {
+ if (fillLightModeArray[i] < 0
+ || fillLightModeArray[i] >= AndroidFillLightMode.NUM_ENTRIES) {
+ throw new IllegalArgumentException();
+ }
+ }
+ }
+ for (int i = 0; i < meteringMode.length; i++) {
+ if (meteringMode[i] < 0 || meteringMode[i] >= AndroidMeteringMode.NUM_ENTRIES) {
+ throw new IllegalArgumentException();
+ }
+ }
+ for (int i = 0; i < meteringModeArray.length; i++) {
+ if (meteringModeArray[i] == null) continue;
+ for (int j = 0; j < meteringModeArray[i].length; j++) {
+ if (meteringModeArray[i][j] < 0
+ || meteringModeArray[i][j] >= AndroidMeteringMode.NUM_ENTRIES) {
+ throw new IllegalArgumentException();
+ }
+ }
+ }
- @CalledByNative
- public boolean getSupportsTorch() {
- return supportsTorch;
+ mBoolCapability = boolCapability.clone();
+ mDoubleCapability = doubleCapability.clone();
+ mIntCapability = intCapability.clone();
+ mFillLightModeArray = fillLightModeArray == null ? null : fillLightModeArray.clone();
+ mMeteringMode = meteringMode.clone();
+ mMeteringModeArray = new int[MeteringModeType.NUM_ENTRIES][];
+ for (int i = 0; i < meteringModeArray.length; i++) {
+ mMeteringModeArray[i] =
+ meteringModeArray[i] == null ? null : meteringModeArray[i].clone();
+ }
}
@CalledByNative
- public boolean getTorch() {
- return torch;
+ public boolean getBool(@PhotoCapabilityBool int capability) {
+ if (capability < 0 || capability >= PhotoCapabilityBool.NUM_ENTRIES) {
+ throw new IllegalArgumentException();
+ }
+ return mBoolCapability[capability];
}
@CalledByNative
- public boolean getRedEyeReduction() {
- return redEyeReduction;
+ public double getDouble(@PhotoCapabilityDouble int capability) {
+ if (capability < 0 || capability >= PhotoCapabilityDouble.NUM_ENTRIES) {
+ throw new IllegalArgumentException();
+ }
+ return mDoubleCapability[capability];
}
@CalledByNative
- public int getMinColorTemperature() {
- return minColorTemperature;
+ public int getInt(@PhotoCapabilityInt int capability) {
+ if (capability < 0 || capability >= PhotoCapabilityInt.NUM_ENTRIES) {
+ throw new IllegalArgumentException();
+ }
+ return mIntCapability[capability];
}
@CalledByNative
- public int getMaxColorTemperature() {
- return maxColorTemperature;
+ public int[] getFillLightModeArray() {
+ assert AndroidFillLightMode.NOT_SET == 0;
+ return mFillLightModeArray != null ? mFillLightModeArray.clone() : new int[0];
}
@CalledByNative
- public int getCurrentColorTemperature() {
- return currentColorTemperature;
+ public @AndroidMeteringMode int getMeteringMode(@MeteringModeType int type) {
+ if (type < 0 || type >= MeteringModeType.NUM_ENTRIES) {
+ throw new IllegalArgumentException();
+ }
+ return mMeteringMode[type];
}
@CalledByNative
- public int getStepColorTemperature() {
- return stepColorTemperature;
+ public int[] getMeteringModeArray(@MeteringModeType int type) {
+ if (type < 0 || type >= MeteringModeType.NUM_ENTRIES) {
+ throw new IllegalArgumentException();
+ }
+ assert AndroidMeteringMode.NOT_SET == 0;
+ return mMeteringModeArray[type] != null ? mMeteringModeArray[type].clone() : new int[0];
}
public static class Builder {
- public int maxIso;
- public int minIso;
- public int currentIso;
- public int stepIso;
- public int maxHeight;
- public int minHeight;
- public int currentHeight;
- public int stepHeight;
- public int maxWidth;
- public int minWidth;
- public int currentWidth;
- public int stepWidth;
- public double maxZoom;
- public double minZoom;
- public double currentZoom;
- public double stepZoom;
- public double currentFocusDistance;
- public double maxFocusDistance;
- public double minFocusDistance;
- public double stepFocusDistance;
- public int focusMode;
- public int[] focusModes;
- public int exposureMode;
- public int[] exposureModes;
- public double maxExposureCompensation;
- public double minExposureCompensation;
- public double currentExposureCompensation;
- public double stepExposureCompensation;
- public double maxExposureTime;
- public double minExposureTime;
- public double currentExposureTime;
- public double stepExposureTime;
- public int whiteBalanceMode;
- public int[] whiteBalanceModes;
- public int[] fillLightModes;
- public boolean supportsTorch;
- public boolean torch;
- public boolean redEyeReduction;
- public int maxColorTemperature;
- public int minColorTemperature;
- public int currentColorTemperature;
- public int stepColorTemperature;
+ public boolean mBoolCapability[] = new boolean[PhotoCapabilityBool.NUM_ENTRIES];
+ public double mDoubleCapability[] = new double[PhotoCapabilityDouble.NUM_ENTRIES];
+ public int mIntCapability[] = new int[PhotoCapabilityInt.NUM_ENTRIES];
+ public int mFillLightModeArray[];
+ public int mMeteringMode[] = new int[MeteringModeType.NUM_ENTRIES];
+ public int mMeteringModeArray[][] = new int[MeteringModeType.NUM_ENTRIES][];
public Builder() {}
- public Builder setMaxIso(int maxIso) {
- this.maxIso = maxIso;
- return this;
- }
-
- public Builder setMinIso(int minIso) {
- this.minIso = minIso;
- return this;
- }
-
- public Builder setCurrentIso(int currentIso) {
- this.currentIso = currentIso;
- return this;
- }
-
- public Builder setStepIso(int stepIso) {
- this.stepIso = stepIso;
- return this;
- }
-
- public Builder setMaxHeight(int maxHeight) {
- this.maxHeight = maxHeight;
- return this;
- }
-
- public Builder setMinHeight(int minHeight) {
- this.minHeight = minHeight;
- return this;
- }
-
- public Builder setCurrentHeight(int currentHeight) {
- this.currentHeight = currentHeight;
- return this;
- }
-
- public Builder setStepHeight(int stepHeight) {
- this.stepHeight = stepHeight;
- return this;
- }
-
- public Builder setMaxWidth(int maxWidth) {
- this.maxWidth = maxWidth;
- return this;
- }
-
- public Builder setMinWidth(int minWidth) {
- this.minWidth = minWidth;
- return this;
- }
-
- public Builder setCurrentWidth(int currentWidth) {
- this.currentWidth = currentWidth;
- return this;
- }
-
- public Builder setStepWidth(int stepWidth) {
- this.stepWidth = stepWidth;
- return this;
- }
-
- public Builder setMaxZoom(double maxZoom) {
- this.maxZoom = maxZoom;
- return this;
- }
-
- public Builder setMinZoom(double minZoom) {
- this.minZoom = minZoom;
- return this;
- }
-
- public Builder setCurrentZoom(double currentZoom) {
- this.currentZoom = currentZoom;
- return this;
- }
-
- public Builder setStepZoom(double stepZoom) {
- this.stepZoom = stepZoom;
- return this;
- }
-
- public Builder setCurrentFocusDistance(double currentFocusDistance) {
- this.currentFocusDistance = currentFocusDistance;
- return this;
- }
-
- public Builder setMaxFocusDistance(double maxFocusDistance) {
- this.maxFocusDistance = maxFocusDistance;
- return this;
- }
-
- public Builder setMinFocusDistance(double minFocusDistance) {
- this.minFocusDistance = minFocusDistance;
- return this;
- }
-
- public Builder setStepFocusDistance(double stepFocusDistance) {
- this.stepFocusDistance = stepFocusDistance;
- return this;
- }
-
- public Builder setFocusMode(int focusMode) {
- this.focusMode = focusMode;
- return this;
- }
-
- public Builder setFocusModes(int[] focusModes) {
- this.focusModes = focusModes.clone();
- return this;
- }
-
- public Builder setExposureMode(int exposureMode) {
- this.exposureMode = exposureMode;
- return this;
- }
-
- public Builder setExposureModes(int[] exposureModes) {
- this.exposureModes = exposureModes.clone();
- return this;
- }
-
- public Builder setMaxExposureCompensation(double maxExposureCompensation) {
- this.maxExposureCompensation = maxExposureCompensation;
- return this;
- }
-
- public Builder setMinExposureCompensation(double minExposureCompensation) {
- this.minExposureCompensation = minExposureCompensation;
- return this;
- }
-
- public Builder setCurrentExposureCompensation(double currentExposureCompensation) {
- this.currentExposureCompensation = currentExposureCompensation;
- return this;
- }
-
- public Builder setStepExposureCompensation(double stepExposureCompensation) {
- this.stepExposureCompensation = stepExposureCompensation;
- return this;
- }
-
- public Builder setMaxExposureTime(double maxExposureTime) {
- this.maxExposureTime = maxExposureTime;
- return this;
- }
-
- public Builder setMinExposureTime(double minExposureTime) {
- this.minExposureTime = minExposureTime;
- return this;
- }
-
- public Builder setCurrentExposureTime(double currentExposureTime) {
- this.currentExposureTime = currentExposureTime;
- return this;
- }
-
- public Builder setStepExposureTime(double stepExposureTime) {
- this.stepExposureTime = stepExposureTime;
- return this;
- }
-
- public Builder setWhiteBalanceMode(int whiteBalanceMode) {
- this.whiteBalanceMode = whiteBalanceMode;
- return this;
- }
-
- public Builder setWhiteBalanceModes(int[] whiteBalanceModes) {
- this.whiteBalanceModes = whiteBalanceModes.clone();
- return this;
- }
-
- public Builder setFillLightModes(int[] fillLightModes) {
- this.fillLightModes = fillLightModes.clone();
- return this;
- }
-
- public Builder setSupportsTorch(boolean supportsTorch) {
- this.supportsTorch = supportsTorch;
- return this;
- }
-
- public Builder setTorch(boolean torch) {
- this.torch = torch;
+ public Builder setBool(@PhotoCapabilityBool int capability, boolean value) {
+ this.mBoolCapability[capability] = value;
return this;
}
- public Builder setRedEyeReduction(boolean redEyeReduction) {
- this.redEyeReduction = redEyeReduction;
+ public Builder setDouble(@PhotoCapabilityDouble int capability, double value) {
+ this.mDoubleCapability[capability] = value;
return this;
}
- public Builder setMaxColorTemperature(int maxColorTemperature) {
- this.maxColorTemperature = maxColorTemperature;
+ public Builder setInt(@PhotoCapabilityInt int capability, int value) {
+ this.mIntCapability[capability] = value;
return this;
}
- public Builder setMinColorTemperature(int minColorTemperature) {
- this.minColorTemperature = minColorTemperature;
+ public Builder setFillLightModeArray(int[] value) {
+ this.mFillLightModeArray = value.clone();
return this;
}
- public Builder setCurrentColorTemperature(int currentColorTemperature) {
- this.currentColorTemperature = currentColorTemperature;
+ public Builder setMeteringMode(@MeteringModeType int type, int value) {
+ this.mMeteringMode[type] = value;
return this;
}
- public Builder setStepColorTemperature(int stepColorTemperature) {
- this.stepColorTemperature = stepColorTemperature;
+ public Builder setMeteringModeArray(@MeteringModeType int type, int[] value) {
+ this.mMeteringModeArray[type] = value.clone();
return this;
}
public PhotoCapabilities build() {
- return new PhotoCapabilities(maxIso, minIso, currentIso, stepIso, maxHeight, minHeight,
- currentHeight, stepHeight, maxWidth, minWidth, currentWidth, stepWidth, maxZoom,
- minZoom, currentZoom, stepZoom, currentFocusDistance, maxFocusDistance,
- minFocusDistance, stepFocusDistance, focusMode, focusModes, exposureMode,
- exposureModes, maxExposureCompensation, minExposureCompensation,
- currentExposureCompensation, stepExposureCompensation, maxExposureTime,
- minExposureTime, currentExposureTime, stepExposureTime, whiteBalanceMode,
- whiteBalanceModes, fillLightModes, supportsTorch, torch, redEyeReduction,
- maxColorTemperature, minColorTemperature, currentColorTemperature,
- stepColorTemperature);
+ return new PhotoCapabilities(mBoolCapability, mDoubleCapability, mIntCapability,
+ mFillLightModeArray, mMeteringMode, mMeteringModeArray);
}
}
}
diff --git a/chromium/media/capture/video/android/java/src/org/chromium/media/VideoCapture.java b/chromium/media/capture/video/android/java/src/org/chromium/media/VideoCapture.java
index 4f346ade5a5..9853d9c1328 100644
--- a/chromium/media/capture/video/android/java/src/org/chromium/media/VideoCapture.java
+++ b/chromium/media/capture/video/android/java/src/org/chromium/media/VideoCapture.java
@@ -102,7 +102,7 @@ public abstract class VideoCapture {
*/
@CalledByNative
public abstract void setPhotoOptions(double zoom, int focusMode, double focusDistance,
- int exposureMode, double width, double height, float[] pointsOfInterest2D,
+ int exposureMode, double width, double height, double[] pointsOfInterest2D,
boolean hasExposureCompensation, double exposureCompensation, double exposureTime,
int whiteBalanceMode, double iso, boolean hasRedEyeReduction, boolean redEyeReduction,
int fillLightMode, boolean hasTorch, boolean torch, double colorTemperature);
diff --git a/chromium/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera.java b/chromium/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera.java
index c0c17abaffb..0e725215636 100644
--- a/chromium/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera.java
+++ b/chromium/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera.java
@@ -494,7 +494,10 @@ public class VideoCaptureCamera
// Before the Camera2 API there was no official way to retrieve the supported, if any, ISO
// values from |parameters|; some platforms had "iso-values", others "iso-mode-values" etc.
// Ignore them.
- builder.setMinIso(0).setMaxIso(0).setCurrentIso(0).setStepIso(0);
+ builder.setInt(PhotoCapabilityInt.MIN_ISO, 0)
+ .setInt(PhotoCapabilityInt.MAX_ISO, 0)
+ .setInt(PhotoCapabilityInt.CURRENT_ISO, 0)
+ .setInt(PhotoCapabilityInt.STEP_ISO, 0);
List<android.hardware.Camera.Size> supportedSizes = parameters.getSupportedPictureSizes();
int minWidth = Integer.MAX_VALUE;
@@ -507,10 +510,15 @@ public class VideoCaptureCamera
if (size.width > maxWidth) maxWidth = size.width;
if (size.height > maxHeight) maxHeight = size.height;
}
- builder.setMinHeight(minHeight).setMaxHeight(maxHeight).setStepHeight(1);
- builder.setMinWidth(minWidth).setMaxWidth(maxWidth).setStepWidth(1);
final android.hardware.Camera.Size currentSize = parameters.getPreviewSize();
- builder.setCurrentHeight(currentSize.height).setCurrentWidth(currentSize.width);
+ builder.setInt(PhotoCapabilityInt.MIN_HEIGHT, minHeight)
+ .setInt(PhotoCapabilityInt.MAX_HEIGHT, maxHeight)
+ .setInt(PhotoCapabilityInt.STEP_HEIGHT, 1)
+ .setInt(PhotoCapabilityInt.CURRENT_HEIGHT, currentSize.height)
+ .setInt(PhotoCapabilityInt.MIN_WIDTH, minWidth)
+ .setInt(PhotoCapabilityInt.MAX_WIDTH, maxWidth)
+ .setInt(PhotoCapabilityInt.STEP_WIDTH, 1)
+ .setInt(PhotoCapabilityInt.CURRENT_WIDTH, currentSize.width);
int maxZoom = 0;
int currentZoom = 0;
@@ -524,8 +532,10 @@ public class VideoCaptureCamera
stepZoom = parameters.getZoomRatios().get(1) - parameters.getZoomRatios().get(0);
}
}
- builder.setMinZoom(minZoom).setMaxZoom(maxZoom);
- builder.setCurrentZoom(currentZoom).setStepZoom(stepZoom);
+ builder.setDouble(PhotoCapabilityDouble.MIN_ZOOM, minZoom)
+ .setDouble(PhotoCapabilityDouble.MAX_ZOOM, maxZoom)
+ .setDouble(PhotoCapabilityDouble.CURRENT_ZOOM, currentZoom)
+ .setDouble(PhotoCapabilityDouble.STEP_ZOOM, stepZoom);
// Classify the Focus capabilities and state. In CONTINUOUS and SINGLE_SHOT, we can call
// autoFocus(AutoFocusCallback) to configure region(s) to focus onto.
@@ -547,7 +557,8 @@ public class VideoCaptureCamera
|| focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_FIXED)) {
jniFocusModes.add(Integer.valueOf(AndroidMeteringMode.FIXED));
}
- builder.setFocusModes(integerArrayListToArray(jniFocusModes));
+ builder.setMeteringModeArray(
+ MeteringModeType.FOCUS, integerArrayListToArray(jniFocusModes));
final String focusMode = parameters.getFocusMode();
int jniFocusMode = AndroidMeteringMode.NONE;
@@ -563,7 +574,7 @@ public class VideoCaptureCamera
|| focusMode.equals(android.hardware.Camera.Parameters.FOCUS_MODE_FIXED)) {
jniFocusMode = AndroidMeteringMode.FIXED;
}
- builder.setFocusMode(jniFocusMode);
+ builder.setMeteringMode(MeteringModeType.FOCUS, jniFocusMode);
// Auto Exposure is understood to be supported always; besides that, only "locked"
// (equivalent to AndroidMeteringMode.FIXED) may be supported and/or configured.
@@ -572,19 +583,23 @@ public class VideoCaptureCamera
if (parameters.isAutoExposureLockSupported()) {
jniExposureModes.add(AndroidMeteringMode.FIXED);
}
- builder.setExposureModes(integerArrayListToArray(jniExposureModes));
+ builder.setMeteringModeArray(
+ MeteringModeType.EXPOSURE, integerArrayListToArray(jniExposureModes));
int jniExposureMode = AndroidMeteringMode.CONTINUOUS;
if (parameters.isAutoExposureLockSupported() && parameters.getAutoExposureLock()) {
jniExposureMode = AndroidMeteringMode.FIXED;
}
- builder.setExposureMode(jniExposureMode);
+ builder.setMeteringMode(MeteringModeType.EXPOSURE, jniExposureMode);
final float step = parameters.getExposureCompensationStep();
- builder.setStepExposureCompensation(step);
- builder.setMinExposureCompensation(parameters.getMinExposureCompensation() * step);
- builder.setMaxExposureCompensation(parameters.getMaxExposureCompensation() * step);
- builder.setCurrentExposureCompensation(parameters.getExposureCompensation() * step);
+ builder.setDouble(PhotoCapabilityDouble.STEP_EXPOSURE_COMPENSATION, step)
+ .setDouble(PhotoCapabilityDouble.MIN_EXPOSURE_COMPENSATION,
+ parameters.getMinExposureCompensation() * step)
+ .setDouble(PhotoCapabilityDouble.MAX_EXPOSURE_COMPENSATION,
+ parameters.getMaxExposureCompensation() * step)
+ .setDouble(PhotoCapabilityDouble.CURRENT_EXPOSURE_COMPENSATION,
+ parameters.getExposureCompensation() * step);
ArrayList<Integer> jniWhiteBalanceModes = new ArrayList<Integer>(2);
List<String> whiteBalanceModes = parameters.getSupportedWhiteBalance();
@@ -598,32 +613,36 @@ public class VideoCaptureCamera
jniWhiteBalanceModes.add(AndroidMeteringMode.FIXED);
}
}
- builder.setWhiteBalanceModes(integerArrayListToArray(jniWhiteBalanceModes));
+ builder.setMeteringModeArray(
+ MeteringModeType.WHITE_BALANCE, integerArrayListToArray(jniWhiteBalanceModes));
int jniWhiteBalanceMode = AndroidMeteringMode.CONTINUOUS;
if (parameters.isAutoWhiteBalanceLockSupported() && parameters.getAutoWhiteBalanceLock()) {
jniWhiteBalanceMode = AndroidMeteringMode.FIXED;
}
- builder.setWhiteBalanceMode(jniWhiteBalanceMode);
+ builder.setMeteringMode(MeteringModeType.WHITE_BALANCE, jniWhiteBalanceMode);
- builder.setMinColorTemperature(COLOR_TEMPERATURES_MAP.keyAt(0));
- builder.setMaxColorTemperature(
- COLOR_TEMPERATURES_MAP.keyAt(COLOR_TEMPERATURES_MAP.size() - 1));
+ builder.setInt(PhotoCapabilityInt.MIN_COLOR_TEMPERATURE, COLOR_TEMPERATURES_MAP.keyAt(0))
+ .setInt(PhotoCapabilityInt.MAX_COLOR_TEMPERATURE,
+ COLOR_TEMPERATURES_MAP.keyAt(COLOR_TEMPERATURES_MAP.size() - 1))
+ .setInt(PhotoCapabilityInt.STEP_COLOR_TEMPERATURE, 50);
if (jniWhiteBalanceMode == AndroidMeteringMode.FIXED) {
final int index = COLOR_TEMPERATURES_MAP.indexOfValue(parameters.getWhiteBalance());
- if (index >= 0) builder.setCurrentColorTemperature(COLOR_TEMPERATURES_MAP.keyAt(index));
+ if (index >= 0)
+ builder.setInt(PhotoCapabilityInt.CURRENT_COLOR_TEMPERATURE,
+ COLOR_TEMPERATURES_MAP.keyAt(index));
}
- builder.setStepColorTemperature(50);
final List<String> flashModes = parameters.getSupportedFlashModes();
if (flashModes != null) {
- builder.setSupportsTorch(
- flashModes.contains(android.hardware.Camera.Parameters.FLASH_MODE_TORCH));
- builder.setTorch(android.hardware.Camera.Parameters.FLASH_MODE_TORCH.equals(
- parameters.getFlashMode()));
-
- builder.setRedEyeReduction(
- flashModes.contains(android.hardware.Camera.Parameters.FLASH_MODE_RED_EYE));
+ builder.setBool(PhotoCapabilityBool.SUPPORTS_TORCH,
+ flashModes.contains(android.hardware.Camera.Parameters.FLASH_MODE_TORCH))
+ .setBool(PhotoCapabilityBool.TORCH,
+ android.hardware.Camera.Parameters.FLASH_MODE_TORCH.equals(
+ parameters.getFlashMode()))
+ .setBool(PhotoCapabilityBool.RED_EYE_REDUCTION,
+ flashModes.contains(
+ android.hardware.Camera.Parameters.FLASH_MODE_RED_EYE));
ArrayList<Integer> modes = new ArrayList<Integer>(0);
if (flashModes.contains(android.hardware.Camera.Parameters.FLASH_MODE_OFF)) {
@@ -635,8 +654,7 @@ public class VideoCaptureCamera
if (flashModes.contains(android.hardware.Camera.Parameters.FLASH_MODE_ON)) {
modes.add(Integer.valueOf(AndroidFillLightMode.FLASH));
}
-
- builder.setFillLightModes(integerArrayListToArray(modes));
+ builder.setFillLightModeArray(integerArrayListToArray(modes));
}
nativeOnGetPhotoCapabilitiesReply(
@@ -645,7 +663,7 @@ public class VideoCaptureCamera
@Override
public void setPhotoOptions(double zoom, int focusMode, double focusDistance, int exposureMode,
- double width, double height, float[] pointsOfInterest2D,
+ double width, double height, double[] pointsOfInterest2D,
boolean hasExposureCompensation, double exposureCompensation, double exposureTime,
int whiteBalanceMode, double iso, boolean hasRedEyeReduction, boolean redEyeReduction,
int fillLightMode, boolean hasTorch, boolean torch, double colorTemperature) {
@@ -703,8 +721,8 @@ public class VideoCaptureCamera
assert pointsOfInterest2D[1] <= 1.0 && pointsOfInterest2D[1] >= 0.0;
// Calculate a Rect of 1/8 the canvas, which is fixed to Rect(-1000, -1000, 1000, 1000),
// see https://developer.android.com/reference/android/hardware/Camera.Area.html
- final int centerX = Math.round(pointsOfInterest2D[0] * 2000) - 1000;
- final int centerY = Math.round(pointsOfInterest2D[1] * 2000) - 1000;
+ final int centerX = (int) (Math.round(pointsOfInterest2D[0] * 2000) - 1000);
+ final int centerY = (int) (Math.round(pointsOfInterest2D[1] * 2000) - 1000);
final int regionWidth = 2000 / 8;
final int regionHeight = 2000 / 8;
final int weight = 1000;
diff --git a/chromium/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera2.java b/chromium/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera2.java
index aadc984ce12..123493dfa36 100644
--- a/chromium/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera2.java
+++ b/chromium/media/capture/video/android/java/src/org/chromium/media/VideoCaptureCamera2.java
@@ -368,9 +368,12 @@ public class VideoCaptureCamera2 extends VideoCapture {
minIso = iso_range.getLower();
maxIso = iso_range.getUpper();
}
- builder.setMinIso(minIso).setMaxIso(maxIso).setStepIso(1);
+ builder.setInt(PhotoCapabilityInt.MIN_ISO, minIso)
+ .setInt(PhotoCapabilityInt.MAX_ISO, maxIso)
+ .setInt(PhotoCapabilityInt.STEP_ISO, 1);
if (mPreviewRequest.get(CaptureRequest.SENSOR_SENSITIVITY) != null) {
- builder.setCurrentIso(mPreviewRequest.get(CaptureRequest.SENSOR_SENSITIVITY));
+ builder.setInt(PhotoCapabilityInt.CURRENT_ISO,
+ mPreviewRequest.get(CaptureRequest.SENSOR_SENSITIVITY));
}
final StreamConfigurationMap streamMap = cameraCharacteristics.get(
@@ -386,11 +389,16 @@ public class VideoCaptureCamera2 extends VideoCapture {
if (size.getWidth() > maxWidth) maxWidth = size.getWidth();
if (size.getHeight() > maxHeight) maxHeight = size.getHeight();
}
- builder.setMinHeight(minHeight).setMaxHeight(maxHeight).setStepHeight(1);
- builder.setMinWidth(minWidth).setMaxWidth(maxWidth).setStepWidth(1);
- builder.setCurrentHeight(
- (mPhotoHeight > 0) ? mPhotoHeight : mCaptureFormat.getHeight());
- builder.setCurrentWidth((mPhotoWidth > 0) ? mPhotoWidth : mCaptureFormat.getWidth());
+ builder.setInt(PhotoCapabilityInt.MIN_HEIGHT, minHeight)
+ .setInt(PhotoCapabilityInt.MAX_HEIGHT, maxHeight)
+ .setInt(PhotoCapabilityInt.STEP_HEIGHT, 1)
+ .setInt(PhotoCapabilityInt.CURRENT_HEIGHT,
+ (mPhotoHeight > 0) ? mPhotoHeight : mCaptureFormat.getHeight())
+ .setInt(PhotoCapabilityInt.MIN_WIDTH, minWidth)
+ .setInt(PhotoCapabilityInt.MAX_WIDTH, maxWidth)
+ .setInt(PhotoCapabilityInt.STEP_WIDTH, 1)
+ .setInt(PhotoCapabilityInt.CURRENT_WIDTH,
+ (mPhotoWidth > 0) ? mPhotoWidth : mCaptureFormat.getWidth());
float currentZoom = 1.0f;
if (cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE)
@@ -402,8 +410,10 @@ public class VideoCaptureCamera2 extends VideoCapture {
/ (float) mPreviewRequest.get(CaptureRequest.SCALER_CROP_REGION).width();
}
// There is no min-zoom per se, so clamp it to always 1.
- builder.setMinZoom(1.0).setMaxZoom(mMaxZoom);
- builder.setCurrentZoom(currentZoom).setStepZoom(0.1);
+ builder.setDouble(PhotoCapabilityDouble.MIN_ZOOM, 1.0)
+ .setDouble(PhotoCapabilityDouble.MAX_ZOOM, mMaxZoom)
+ .setDouble(PhotoCapabilityDouble.CURRENT_ZOOM, currentZoom)
+ .setDouble(PhotoCapabilityDouble.STEP_ZOOM, 0.1);
// Classify the Focus capabilities. In CONTINUOUS and SINGLE_SHOT, we can call
// autoFocus(AutoFocusCallback) to configure region(s) to focus onto.
@@ -454,7 +464,8 @@ public class VideoCaptureCamera2 extends VideoCapture {
Log.d(TAG, "infinity focus.");
mCurrentFocusDistance = (long) Double.POSITIVE_INFINITY;
} else if (mCurrentFocusDistance > 0)
- builder.setCurrentFocusDistance(1 / mCurrentFocusDistance);
+ builder.setDouble(PhotoCapabilityDouble.CURRENT_FOCUS_DISTANCE,
+ 1 / mCurrentFocusDistance);
} else { // null value
Log.d(TAG, "LENS_FOCUS_DISTANCE is null");
}
@@ -462,12 +473,13 @@ public class VideoCaptureCamera2 extends VideoCapture {
for (int mode : jniFocusModes) {
if (mode == CameraMetadata.CONTROL_AF_MODE_OFF) {
focusModes.add(Integer.valueOf(AndroidMeteringMode.FIXED));
- builder.setMinFocusDistance(minFocusDistance);
- builder.setMaxFocusDistance(maxFocusDistance);
// Smallest step by which focus distance can be changed. This value is not
// exposed by Android.
float mStepFocusDistance = 0.01f;
- builder.setStepFocusDistance(mStepFocusDistance);
+ builder.setDouble(PhotoCapabilityDouble.MIN_FOCUS_DISTANCE, minFocusDistance)
+ .setDouble(PhotoCapabilityDouble.MAX_FOCUS_DISTANCE, maxFocusDistance)
+ .setDouble(
+ PhotoCapabilityDouble.STEP_FOCUS_DISTANCE, mStepFocusDistance);
} else if (mode == CameraMetadata.CONTROL_AF_MODE_AUTO
|| mode == CameraMetadata.CONTROL_AF_MODE_MACRO) {
// CONTROL_AF_MODE_{AUTO,MACRO} do not imply continuously focusing.
@@ -482,7 +494,8 @@ public class VideoCaptureCamera2 extends VideoCapture {
}
}
}
- builder.setFocusModes(integerArrayListToArray(focusModes));
+ builder.setMeteringModeArray(
+ MeteringModeType.FOCUS, integerArrayListToArray(focusModes));
int jniFocusMode = AndroidMeteringMode.NONE;
if (mPreviewRequest.get(CaptureRequest.CONTROL_AF_MODE) != null) {
@@ -497,12 +510,13 @@ public class VideoCaptureCamera2 extends VideoCapture {
jniFocusMode = AndroidMeteringMode.FIXED;
// Set focus distance here.
if (mCurrentFocusDistance > 0)
- builder.setCurrentFocusDistance(1 / mCurrentFocusDistance);
+ builder.setDouble(PhotoCapabilityDouble.CURRENT_FOCUS_DISTANCE,
+ 1 / mCurrentFocusDistance);
} else {
assert jniFocusMode == CameraMetadata.CONTROL_AF_MODE_EDOF;
}
}
- builder.setFocusMode(jniFocusMode);
+ builder.setMeteringMode(MeteringModeType.FOCUS, jniFocusMode);
// Auto Exposure is the usual capability and state, unless AE is not available at all,
// which is signalled by an empty CONTROL_AE_AVAILABLE_MODES list. Exposure Compensation
@@ -536,16 +550,17 @@ public class VideoCaptureCamera2 extends VideoCapture {
final long maxExposureTime = range.getUpper();
if (minExposureTime != 0 && maxExposureTime != 0) {
- builder.setMaxExposureTime(
- maxExposureTime / kNanosecondsPer100Microsecond);
- builder.setMinExposureTime(
- minExposureTime / kNanosecondsPer100Microsecond);
+ builder.setDouble(PhotoCapabilityDouble.MAX_EXPOSURE_TIME,
+ maxExposureTime / kNanosecondsPer100Microsecond)
+ .setDouble(PhotoCapabilityDouble.MIN_EXPOSURE_TIME,
+ minExposureTime / kNanosecondsPer100Microsecond);
}
// Smallest step by which exposure time can be changed. This value is not
// exposed by Android.
- builder.setStepExposureTime(10000 / kNanosecondsPer100Microsecond);
- builder.setCurrentExposureTime(
- mLastExposureTimeNs / kNanosecondsPer100Microsecond);
+ builder.setDouble(PhotoCapabilityDouble.STEP_EXPOSURE_TIME,
+ 10000 / kNanosecondsPer100Microsecond)
+ .setDouble(PhotoCapabilityDouble.CURRENT_EXPOSURE_TIME,
+ mLastExposureTimeNs / kNanosecondsPer100Microsecond);
}
}
}
@@ -556,7 +571,8 @@ public class VideoCaptureCamera2 extends VideoCapture {
} catch (NoSuchFieldError e) {
// Ignore this exception, it means CONTROL_AE_LOCK_AVAILABLE is not known.
}
- builder.setExposureModes(integerArrayListToArray(exposureModes));
+ builder.setMeteringModeArray(
+ MeteringModeType.EXPOSURE, integerArrayListToArray(exposureModes));
int jniExposureMode = AndroidMeteringMode.CONTINUOUS;
if ((mPreviewRequest.get(CaptureRequest.CONTROL_AE_MODE) != null)
@@ -567,20 +583,22 @@ public class VideoCaptureCamera2 extends VideoCapture {
if (mPreviewRequest.get(CaptureRequest.CONTROL_AE_LOCK)) {
jniExposureMode = AndroidMeteringMode.FIXED;
}
- builder.setExposureMode(jniExposureMode);
+ builder.setMeteringMode(MeteringModeType.EXPOSURE, jniExposureMode);
final float step =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP)
.floatValue();
- builder.setStepExposureCompensation(step);
+ builder.setDouble(PhotoCapabilityDouble.STEP_EXPOSURE_COMPENSATION, step);
final Range<Integer> exposureCompensationRange =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
- builder.setMinExposureCompensation(exposureCompensationRange.getLower() * step);
- builder.setMaxExposureCompensation(exposureCompensationRange.getUpper() * step);
+ builder.setDouble(PhotoCapabilityDouble.MIN_EXPOSURE_COMPENSATION,
+ exposureCompensationRange.getLower() * step)
+ .setDouble(PhotoCapabilityDouble.MAX_EXPOSURE_COMPENSATION,
+ exposureCompensationRange.getUpper() * step);
if (mPreviewRequest.get(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION) != null) {
- builder.setCurrentExposureCompensation(
+ builder.setDouble(PhotoCapabilityDouble.CURRENT_EXPOSURE_COMPENSATION,
mPreviewRequest.get(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION)
- * step);
+ * step);
}
final int[] jniWhiteBalanceMode =
@@ -599,42 +617,48 @@ public class VideoCaptureCamera2 extends VideoCapture {
} catch (NoSuchFieldError e) {
// Ignore this exception, it means CONTROL_AWB_LOCK_AVAILABLE is not known.
}
- builder.setWhiteBalanceModes(integerArrayListToArray(whiteBalanceModes));
+ builder.setMeteringModeArray(
+ MeteringModeType.WHITE_BALANCE, integerArrayListToArray(whiteBalanceModes));
int whiteBalanceMode = CameraMetadata.CONTROL_AWB_MODE_AUTO;
if (mPreviewRequest.get(CaptureRequest.CONTROL_AWB_MODE) != null) {
whiteBalanceMode = mPreviewRequest.get(CaptureRequest.CONTROL_AWB_MODE);
if (whiteBalanceMode == CameraMetadata.CONTROL_AWB_MODE_OFF) {
- builder.setWhiteBalanceMode(AndroidMeteringMode.NONE);
- } else if (whiteBalanceMode == CameraMetadata.CONTROL_AWB_MODE_AUTO) {
- builder.setWhiteBalanceMode(AndroidMeteringMode.CONTINUOUS);
+ builder.setMeteringMode(
+ MeteringModeType.WHITE_BALANCE, AndroidMeteringMode.NONE);
} else {
- builder.setWhiteBalanceMode(AndroidMeteringMode.FIXED);
+ builder.setMeteringMode(MeteringModeType.WHITE_BALANCE,
+ whiteBalanceMode == CameraMetadata.CONTROL_AWB_MODE_AUTO
+ ? AndroidMeteringMode.CONTINUOUS
+ : AndroidMeteringMode.FIXED);
}
}
- builder.setMinColorTemperature(COLOR_TEMPERATURES_MAP.keyAt(0));
- builder.setMaxColorTemperature(
- COLOR_TEMPERATURES_MAP.keyAt(COLOR_TEMPERATURES_MAP.size() - 1));
+ builder.setInt(PhotoCapabilityInt.MIN_COLOR_TEMPERATURE,
+ COLOR_TEMPERATURES_MAP.keyAt(0))
+ .setInt(PhotoCapabilityInt.MAX_COLOR_TEMPERATURE,
+ COLOR_TEMPERATURES_MAP.keyAt(COLOR_TEMPERATURES_MAP.size() - 1))
+ .setInt(PhotoCapabilityInt.STEP_COLOR_TEMPERATURE, 50);
final int index = COLOR_TEMPERATURES_MAP.indexOfValue(whiteBalanceMode);
if (index >= 0) {
- builder.setCurrentColorTemperature(COLOR_TEMPERATURES_MAP.keyAt(index));
+ builder.setInt(PhotoCapabilityInt.CURRENT_COLOR_TEMPERATURE,
+ COLOR_TEMPERATURES_MAP.keyAt(index));
}
- builder.setStepColorTemperature(50);
if (!cameraCharacteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)) {
- builder.setSupportsTorch(false);
- builder.setRedEyeReduction(false);
+ builder.setBool(PhotoCapabilityBool.SUPPORTS_TORCH, false)
+ .setBool(PhotoCapabilityBool.RED_EYE_REDUCTION, false);
} else {
// There's no way to query if torch and/or red eye reduction modes are available
// using Camera2 API but since there's a Flash unit, we assume so.
- builder.setSupportsTorch(true);
+ builder.setBool(PhotoCapabilityBool.SUPPORTS_TORCH, true)
+ .setBool(PhotoCapabilityBool.RED_EYE_REDUCTION, true);
+
if (mPreviewRequest.get(CaptureRequest.FLASH_MODE) != null) {
- builder.setTorch(mPreviewRequest.get(CaptureRequest.FLASH_MODE)
- == CameraMetadata.FLASH_MODE_TORCH);
+ builder.setBool(PhotoCapabilityBool.TORCH,
+ mPreviewRequest.get(CaptureRequest.FLASH_MODE)
+ == CameraMetadata.FLASH_MODE_TORCH);
}
- builder.setRedEyeReduction(true);
-
final int[] flashModes =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
ArrayList<Integer> modes = new ArrayList<Integer>(0);
@@ -647,7 +671,7 @@ public class VideoCaptureCamera2 extends VideoCapture {
modes.add(Integer.valueOf(AndroidFillLightMode.FLASH));
}
}
- builder.setFillLightModes(integerArrayListToArray(modes));
+ builder.setFillLightModeArray(integerArrayListToArray(modes));
}
nativeOnGetPhotoCapabilitiesReply(
@@ -662,7 +686,7 @@ public class VideoCaptureCamera2 extends VideoCapture {
public final int exposureMode;
public final double width;
public final double height;
- public final float[] pointsOfInterest2D;
+ public final double[] pointsOfInterest2D;
public final boolean hasExposureCompensation;
public final double exposureCompensation;
public final double exposureTime;
@@ -676,7 +700,7 @@ public class VideoCaptureCamera2 extends VideoCapture {
public final double colorTemperature;
public PhotoOptions(double zoom, int focusMode, double currentFocusDistance,
- int exposureMode, double width, double height, float[] pointsOfInterest2D,
+ int exposureMode, double width, double height, double[] pointsOfInterest2D,
boolean hasExposureCompensation, double exposureCompensation, double exposureTime,
int whiteBalanceMode, double iso, boolean hasRedEyeReduction,
boolean redEyeReduction, int fillLightMode, boolean hasTorch, boolean torch,
@@ -766,8 +790,10 @@ public class VideoCaptureCamera2 extends VideoCapture {
// Calculate a Rect of 1/8 the |visibleRect| dimensions, and center it w.r.t.
// |canvas|.
final Rect visibleRect = (mCropRect.isEmpty()) ? canvas : mCropRect;
- int centerX = Math.round(mOptions.pointsOfInterest2D[0] * visibleRect.width());
- int centerY = Math.round(mOptions.pointsOfInterest2D[1] * visibleRect.height());
+ int centerX =
+ (int) Math.round(mOptions.pointsOfInterest2D[0] * visibleRect.width());
+ int centerY =
+ (int) Math.round(mOptions.pointsOfInterest2D[1] * visibleRect.height());
if (visibleRect.equals(mCropRect)) {
centerX += (canvas.width() - visibleRect.width()) / 2;
centerY += (canvas.height() - visibleRect.height()) / 2;
@@ -1517,7 +1543,7 @@ public class VideoCaptureCamera2 extends VideoCapture {
@Override
public void setPhotoOptions(double zoom, int focusMode, double currentFocusDistance,
- int exposureMode, double width, double height, float[] pointsOfInterest2D,
+ int exposureMode, double width, double height, double[] pointsOfInterest2D,
boolean hasExposureCompensation, double exposureCompensation, double exposureTime,
int whiteBalanceMode, double iso, boolean hasRedEyeReduction, boolean redEyeReduction,
int fillLightMode, boolean hasTorch, boolean torch, double colorTemperature) {
diff --git a/chromium/media/capture/video/android/photo_capabilities.cc b/chromium/media/capture/video/android/photo_capabilities.cc
index 8a04c207b81..5fd9510ae99 100644
--- a/chromium/media/capture/video/android/photo_capabilities.cc
+++ b/chromium/media/capture/video/android/photo_capabilities.cc
@@ -13,247 +13,83 @@ using base::android::AttachCurrentThread;
namespace media {
-namespace {
-
-static_assert(
- std::is_same<int,
- std::underlying_type<
- PhotoCapabilities::AndroidMeteringMode>::type>::value,
- "AndroidMeteringMode underlying type should be int");
-
-std::vector<PhotoCapabilities::AndroidMeteringMode> ToAndroidMeteringModes(
- base::android::ScopedJavaLocalRef<jintArray> jni_modes) {
- JNIEnv* env = AttachCurrentThread();
- std::vector<PhotoCapabilities::AndroidMeteringMode> modes;
- if (jni_modes.obj()) {
- base::android::JavaIntArrayToIntVector(
- env, jni_modes, reinterpret_cast<std::vector<int>*>(&modes));
- }
- return modes;
-}
-
-} // anonymous namespace
-
PhotoCapabilities::PhotoCapabilities(
base::android::ScopedJavaLocalRef<jobject> object)
: object_(object) {}
PhotoCapabilities::~PhotoCapabilities() {}
-int PhotoCapabilities::getMinIso() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMinIso(AttachCurrentThread(), object_);
-}
-
-int PhotoCapabilities::getMaxIso() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMaxIso(AttachCurrentThread(), object_);
-}
-
-int PhotoCapabilities::getCurrentIso() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getCurrentIso(AttachCurrentThread(), object_);
-}
-
-int PhotoCapabilities::getStepIso() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getStepIso(AttachCurrentThread(), object_);
-}
-
-int PhotoCapabilities::getMinHeight() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMinHeight(AttachCurrentThread(), object_);
-}
-
-int PhotoCapabilities::getMaxHeight() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMaxHeight(AttachCurrentThread(), object_);
-}
-
-int PhotoCapabilities::getCurrentHeight() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getCurrentHeight(AttachCurrentThread(),
- object_);
-}
-
-int PhotoCapabilities::getStepHeight() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getStepHeight(AttachCurrentThread(), object_);
-}
-
-int PhotoCapabilities::getMinWidth() const {
+bool PhotoCapabilities::getBool(PhotoCapabilityBool capability) const {
DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMinWidth(AttachCurrentThread(), object_);
+ DCHECK(capability != PhotoCapabilityBool::NUM_ENTRIES);
+ return Java_PhotoCapabilities_getBool(
+ AttachCurrentThread(), object_,
+ JniIntWrapper(static_cast<int>(capability)));
}
-int PhotoCapabilities::getMaxWidth() const {
+double PhotoCapabilities::getDouble(PhotoCapabilityDouble capability) const {
DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMaxWidth(AttachCurrentThread(), object_);
+ DCHECK(capability != PhotoCapabilityDouble::NUM_ENTRIES);
+ return Java_PhotoCapabilities_getDouble(
+ AttachCurrentThread(), object_,
+ JniIntWrapper(static_cast<int>(capability)));
}
-int PhotoCapabilities::getCurrentWidth() const {
+int PhotoCapabilities::getInt(PhotoCapabilityInt capability) const {
DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getCurrentWidth(AttachCurrentThread(), object_);
+ DCHECK(capability != PhotoCapabilityInt::NUM_ENTRIES);
+ return Java_PhotoCapabilities_getInt(
+ AttachCurrentThread(), object_,
+ JniIntWrapper(static_cast<int>(capability)));
}
-int PhotoCapabilities::getStepWidth() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getStepWidth(AttachCurrentThread(), object_);
-}
-
-double PhotoCapabilities::getMinZoom() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMinZoom(AttachCurrentThread(), object_);
-}
-
-double PhotoCapabilities::getMaxZoom() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMaxZoom(AttachCurrentThread(), object_);
-}
-
-double PhotoCapabilities::getCurrentZoom() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getCurrentZoom(AttachCurrentThread(), object_);
-}
-
-double PhotoCapabilities::getStepZoom() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getStepZoom(AttachCurrentThread(), object_);
-}
-
-double PhotoCapabilities::getCurrentFocusDistance() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getCurrentFocusDistance(AttachCurrentThread(),
- object_);
-}
-double PhotoCapabilities::getMaxFocusDistance() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMaxFocusDistance(AttachCurrentThread(),
- object_);
-}
-double PhotoCapabilities::getMinFocusDistance() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMinFocusDistance(AttachCurrentThread(),
- object_);
-}
-double PhotoCapabilities::getStepFocusDistance() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getStepFocusDistance(AttachCurrentThread(),
- object_);
-}
-
-PhotoCapabilities::AndroidMeteringMode PhotoCapabilities::getFocusMode() const {
- DCHECK(!object_.is_null());
- return static_cast<AndroidMeteringMode>(
- Java_PhotoCapabilities_getFocusMode(AttachCurrentThread(), object_));
-}
-
-std::vector<PhotoCapabilities::AndroidMeteringMode>
-PhotoCapabilities::getFocusModes() const {
+std::vector<PhotoCapabilities::AndroidFillLightMode>
+PhotoCapabilities::getFillLightModeArray() const {
DCHECK(!object_.is_null());
JNIEnv* env = AttachCurrentThread();
- base::android::ScopedJavaLocalRef<jintArray> jni_modes =
- Java_PhotoCapabilities_getFocusModes(env, object_);
- return ToAndroidMeteringModes(jni_modes);
-}
-PhotoCapabilities::AndroidMeteringMode PhotoCapabilities::getExposureMode()
- const {
- DCHECK(!object_.is_null());
- return static_cast<AndroidMeteringMode>(
- Java_PhotoCapabilities_getExposureMode(AttachCurrentThread(), object_));
-}
-
-std::vector<PhotoCapabilities::AndroidMeteringMode>
-PhotoCapabilities::getExposureModes() const {
- DCHECK(!object_.is_null());
+ std::vector<AndroidFillLightMode> modes;
+ static_assert(
+ std::is_same<int,
+ std::underlying_type<AndroidFillLightMode>::type>::value,
+ "AndroidFillLightMode underlying type should be int");
- JNIEnv* env = AttachCurrentThread();
base::android::ScopedJavaLocalRef<jintArray> jni_modes =
- Java_PhotoCapabilities_getExposureModes(env, object_);
- return ToAndroidMeteringModes(jni_modes);
-}
-
-double PhotoCapabilities::getMinExposureCompensation() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMinExposureCompensation(
- AttachCurrentThread(), object_);
-}
-
-double PhotoCapabilities::getMaxExposureCompensation() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMaxExposureCompensation(
- AttachCurrentThread(), object_);
-}
-
-double PhotoCapabilities::getCurrentExposureCompensation() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getCurrentExposureCompensation(
- AttachCurrentThread(), object_);
-}
-
-double PhotoCapabilities::getStepExposureCompensation() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getStepExposureCompensation(
- AttachCurrentThread(), object_);
-}
-
-double PhotoCapabilities::getMinExposureTime() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMinExposureTime(AttachCurrentThread(),
- object_);
-}
-
-double PhotoCapabilities::getMaxExposureTime() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMaxExposureTime(AttachCurrentThread(),
- object_);
-}
-
-double PhotoCapabilities::getCurrentExposureTime() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getCurrentExposureTime(AttachCurrentThread(),
- object_);
-}
-
-double PhotoCapabilities::getStepExposureTime() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getStepExposureTime(AttachCurrentThread(),
- object_);
+ Java_PhotoCapabilities_getFillLightModeArray(env, object_);
+ if (jni_modes.obj()) {
+ base::android::JavaIntArrayToIntVector(
+ env, jni_modes, reinterpret_cast<std::vector<int>*>(&modes));
+ }
+ return modes;
}
-PhotoCapabilities::AndroidMeteringMode PhotoCapabilities::getWhiteBalanceMode()
- const {
+PhotoCapabilities::AndroidMeteringMode PhotoCapabilities::getMeteringMode(
+ MeteringModeType type) const {
DCHECK(!object_.is_null());
+ DCHECK(type != MeteringModeType::NUM_ENTRIES);
return static_cast<AndroidMeteringMode>(
- Java_PhotoCapabilities_getWhiteBalanceMode(AttachCurrentThread(),
- object_));
+ Java_PhotoCapabilities_getMeteringMode(
+ AttachCurrentThread(), object_,
+ JniIntWrapper(static_cast<int>(type))));
}
std::vector<PhotoCapabilities::AndroidMeteringMode>
-PhotoCapabilities::getWhiteBalanceModes() const {
- DCHECK(!object_.is_null());
-
- JNIEnv* env = AttachCurrentThread();
- base::android::ScopedJavaLocalRef<jintArray> jni_modes =
- Java_PhotoCapabilities_getWhiteBalanceModes(env, object_);
- return ToAndroidMeteringModes(jni_modes);
-}
-
-std::vector<PhotoCapabilities::AndroidFillLightMode>
-PhotoCapabilities::getFillLightModes() const {
+PhotoCapabilities::getMeteringModeArray(MeteringModeType type) const {
DCHECK(!object_.is_null());
+ DCHECK(type != MeteringModeType::NUM_ENTRIES);
JNIEnv* env = AttachCurrentThread();
- std::vector<AndroidFillLightMode> modes;
+ std::vector<PhotoCapabilities::AndroidMeteringMode> modes;
static_assert(
std::is_same<int,
- std::underlying_type<AndroidFillLightMode>::type>::value,
- "AndroidFillLightMode underlying type should be int");
+ std::underlying_type<
+ PhotoCapabilities::AndroidMeteringMode>::type>::value,
+ "AndroidMeteringMode underlying type should be int");
base::android::ScopedJavaLocalRef<jintArray> jni_modes =
- Java_PhotoCapabilities_getFillLightModes(env, object_);
+ Java_PhotoCapabilities_getMeteringModeArray(
+ env, object_, JniIntWrapper(static_cast<int>(type)));
if (jni_modes.obj()) {
base::android::JavaIntArrayToIntVector(
env, jni_modes, reinterpret_cast<std::vector<int>*>(&modes));
@@ -261,45 +97,4 @@ PhotoCapabilities::getFillLightModes() const {
return modes;
}
-bool PhotoCapabilities::getSupportsTorch() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getSupportsTorch(AttachCurrentThread(),
- object_);
-}
-
-bool PhotoCapabilities::getTorch() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getTorch(AttachCurrentThread(), object_);
-}
-
-bool PhotoCapabilities::getRedEyeReduction() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getRedEyeReduction(AttachCurrentThread(),
- object_);
-}
-
-int PhotoCapabilities::getMinColorTemperature() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMinColorTemperature(AttachCurrentThread(),
- object_);
-}
-
-int PhotoCapabilities::getMaxColorTemperature() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getMaxColorTemperature(AttachCurrentThread(),
- object_);
-}
-
-int PhotoCapabilities::getCurrentColorTemperature() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getCurrentColorTemperature(
- AttachCurrentThread(), object_);
-}
-
-int PhotoCapabilities::getStepColorTemperature() const {
- DCHECK(!object_.is_null());
- return Java_PhotoCapabilities_getStepColorTemperature(AttachCurrentThread(),
- object_);
-}
-
} // namespace media
diff --git a/chromium/media/capture/video/android/photo_capabilities.h b/chromium/media/capture/video/android/photo_capabilities.h
index 0e5d509adac..49e711586b1 100644
--- a/chromium/media/capture/video/android/photo_capabilities.h
+++ b/chromium/media/capture/video/android/photo_capabilities.h
@@ -20,11 +20,23 @@ class PhotoCapabilities {
// A Java counterpart will be generated for this enum.
// GENERATED_JAVA_ENUM_PACKAGE: org.chromium.media
enum class AndroidMeteringMode {
- NOT_SET,
+ NOT_SET, // Update Java code if this value is not equal 0.
NONE,
FIXED,
SINGLE_SHOT,
CONTINUOUS,
+
+ NUM_ENTRIES
+ };
+
+ // A Java counterpart will be generated for this enum.
+ // GENERATED_JAVA_ENUM_PACKAGE: org.chromium.media
+ enum class MeteringModeType {
+ FOCUS,
+ EXPOSURE,
+ WHITE_BALANCE,
+
+ NUM_ENTRIES
};
// Fill light modes from Java side, equivalent to media.mojom::FillLightMode,
@@ -32,57 +44,86 @@ class PhotoCapabilities {
// A Java counterpart will be generated for this enum.
// GENERATED_JAVA_ENUM_PACKAGE: org.chromium.media
enum class AndroidFillLightMode {
- NOT_SET,
+ NOT_SET, // Update Java code when this value is not equal 0.
OFF,
AUTO,
FLASH,
+
+ NUM_ENTRIES
+ };
+
+ // A Java counterpart will be generated for this enum.
+ // GENERATED_JAVA_ENUM_PACKAGE: org.chromium.media
+ enum class PhotoCapabilityBool {
+ SUPPORTS_TORCH,
+ TORCH,
+ RED_EYE_REDUCTION,
+
+ NUM_ENTRIES
+ };
+
+ // A Java counterpart will be generated for this enum.
+ // GENERATED_JAVA_ENUM_PACKAGE: org.chromium.media
+ enum class PhotoCapabilityDouble {
+ MIN_ZOOM,
+ MAX_ZOOM,
+ CURRENT_ZOOM,
+ STEP_ZOOM,
+
+ MIN_FOCUS_DISTANCE,
+ MAX_FOCUS_DISTANCE,
+ CURRENT_FOCUS_DISTANCE,
+ STEP_FOCUS_DISTANCE,
+
+ MIN_EXPOSURE_COMPENSATION,
+ MAX_EXPOSURE_COMPENSATION,
+ CURRENT_EXPOSURE_COMPENSATION,
+ STEP_EXPOSURE_COMPENSATION,
+
+ MIN_EXPOSURE_TIME,
+ MAX_EXPOSURE_TIME,
+ CURRENT_EXPOSURE_TIME,
+ STEP_EXPOSURE_TIME,
+
+ NUM_ENTRIES
+ };
+
+ // A Java counterpart will be generated for this enum.
+ // GENERATED_JAVA_ENUM_PACKAGE: org.chromium.media
+ enum class PhotoCapabilityInt {
+ MIN_ISO,
+ MAX_ISO,
+ CURRENT_ISO,
+ STEP_ISO,
+
+ MIN_HEIGHT,
+ MAX_HEIGHT,
+ CURRENT_HEIGHT,
+ STEP_HEIGHT,
+
+ MIN_WIDTH,
+ MAX_WIDTH,
+ CURRENT_WIDTH,
+ STEP_WIDTH,
+
+ MIN_COLOR_TEMPERATURE,
+ MAX_COLOR_TEMPERATURE,
+ CURRENT_COLOR_TEMPERATURE,
+ STEP_COLOR_TEMPERATURE,
+
+ NUM_ENTRIES
};
explicit PhotoCapabilities(base::android::ScopedJavaLocalRef<jobject> object);
~PhotoCapabilities();
- int getMinIso() const;
- int getMaxIso() const;
- int getCurrentIso() const;
- int getStepIso() const;
- int getMinHeight() const;
- int getMaxHeight() const;
- int getCurrentHeight() const;
- int getStepHeight() const;
- int getMinWidth() const;
- int getMaxWidth() const;
- int getCurrentWidth() const;
- int getStepWidth() const;
- double getMinZoom() const;
- double getMaxZoom() const;
- double getCurrentZoom() const;
- double getStepZoom() const;
- double getCurrentFocusDistance() const;
- double getMaxFocusDistance() const;
- double getMinFocusDistance() const;
- double getStepFocusDistance() const;
- AndroidMeteringMode getFocusMode() const;
- std::vector<AndroidMeteringMode> getFocusModes() const;
- AndroidMeteringMode getExposureMode() const;
- std::vector<AndroidMeteringMode> getExposureModes() const;
- double getMinExposureCompensation() const;
- double getMaxExposureCompensation() const;
- double getCurrentExposureCompensation() const;
- double getStepExposureCompensation() const;
- double getMinExposureTime() const;
- double getMaxExposureTime() const;
- double getCurrentExposureTime() const;
- double getStepExposureTime() const;
- AndroidMeteringMode getWhiteBalanceMode() const;
- std::vector<AndroidMeteringMode> getWhiteBalanceModes() const;
- std::vector<AndroidFillLightMode> getFillLightModes() const;
- bool getSupportsTorch() const;
- bool getTorch() const;
- bool getRedEyeReduction() const;
- int getMinColorTemperature() const;
- int getMaxColorTemperature() const;
- int getCurrentColorTemperature() const;
- int getStepColorTemperature() const;
+ int getInt(PhotoCapabilityInt capability) const;
+ double getDouble(PhotoCapabilityDouble capability) const;
+ bool getBool(PhotoCapabilityBool capability) const;
+ std::vector<AndroidFillLightMode> getFillLightModeArray() const;
+ AndroidMeteringMode getMeteringMode(MeteringModeType type) const;
+ std::vector<AndroidMeteringMode> getMeteringModeArray(
+ MeteringModeType type) const;
private:
const base::android::ScopedJavaLocalRef<jobject> object_;
diff --git a/chromium/media/capture/video/android/video_capture_device_android.cc b/chromium/media/capture/video/android/video_capture_device_android.cc
index c40804b2af1..777e8229e36 100644
--- a/chromium/media/capture/video/android/video_capture_device_android.cc
+++ b/chromium/media/capture/video/android/video_capture_device_android.cc
@@ -45,6 +45,7 @@ mojom::MeteringMode ToMojomMeteringMode(
case PhotoCapabilities::AndroidMeteringMode::NONE:
return mojom::MeteringMode::NONE;
case PhotoCapabilities::AndroidMeteringMode::NOT_SET:
+ case PhotoCapabilities::AndroidMeteringMode::NUM_ENTRIES:
NOTREACHED();
}
return mojom::MeteringMode::NONE;
@@ -76,6 +77,7 @@ mojom::FillLightMode ToMojomFillLightMode(
case PhotoCapabilities::AndroidFillLightMode::OFF:
return mojom::FillLightMode::OFF;
case PhotoCapabilities::AndroidFillLightMode::NOT_SET:
+ case PhotoCapabilities::AndroidFillLightMode::NUM_ENTRIES:
NOTREACHED();
}
NOTREACHED();
@@ -407,63 +409,82 @@ void VideoCaptureDeviceAndroid::OnGetPhotoCapabilitiesReply(
// PhotoCapabilities to mojom::PhotoStatePtr, https://crbug.com/622002.
mojom::PhotoStatePtr photo_capabilities = mojo::CreateEmptyPhotoState();
- const auto jni_white_balance_modes = caps.getWhiteBalanceModes();
+ const auto jni_white_balance_modes = caps.getMeteringModeArray(
+ PhotoCapabilities::MeteringModeType::WHITE_BALANCE);
std::vector<mojom::MeteringMode> white_balance_modes;
for (const auto& white_balance_mode : jni_white_balance_modes)
white_balance_modes.push_back(ToMojomMeteringMode(white_balance_mode));
photo_capabilities->supported_white_balance_modes = white_balance_modes;
- photo_capabilities->current_white_balance_mode =
- ToMojomMeteringMode(caps.getWhiteBalanceMode());
+ photo_capabilities->current_white_balance_mode = ToMojomMeteringMode(
+ caps.getMeteringMode(PhotoCapabilities::MeteringModeType::WHITE_BALANCE));
- const auto jni_exposure_modes = caps.getExposureModes();
+ const auto jni_exposure_modes =
+ caps.getMeteringModeArray(PhotoCapabilities::MeteringModeType::EXPOSURE);
std::vector<mojom::MeteringMode> exposure_modes;
for (const auto& exposure_mode : jni_exposure_modes)
exposure_modes.push_back(ToMojomMeteringMode(exposure_mode));
photo_capabilities->supported_exposure_modes = exposure_modes;
- photo_capabilities->current_exposure_mode =
- ToMojomMeteringMode(caps.getExposureMode());
+ photo_capabilities->current_exposure_mode = ToMojomMeteringMode(
+ caps.getMeteringMode(PhotoCapabilities::MeteringModeType::EXPOSURE));
- const auto jni_focus_modes = caps.getFocusModes();
+ const auto jni_focus_modes =
+ caps.getMeteringModeArray(PhotoCapabilities::MeteringModeType::FOCUS);
std::vector<mojom::MeteringMode> focus_modes;
for (const auto& focus_mode : jni_focus_modes)
focus_modes.push_back(ToMojomMeteringMode(focus_mode));
photo_capabilities->supported_focus_modes = focus_modes;
- photo_capabilities->current_focus_mode =
- ToMojomMeteringMode(caps.getFocusMode());
+ photo_capabilities->current_focus_mode = ToMojomMeteringMode(
+ caps.getMeteringMode(PhotoCapabilities::MeteringModeType::FOCUS));
photo_capabilities->focus_distance = mojom::Range::New();
- photo_capabilities->focus_distance->current = caps.getCurrentFocusDistance();
- photo_capabilities->focus_distance->max = caps.getMaxFocusDistance();
- photo_capabilities->focus_distance->min = caps.getMinFocusDistance();
- photo_capabilities->focus_distance->step = caps.getStepFocusDistance();
+ photo_capabilities->focus_distance->current = caps.getDouble(
+ PhotoCapabilities::PhotoCapabilityDouble::CURRENT_FOCUS_DISTANCE);
+ photo_capabilities->focus_distance->max = caps.getDouble(
+ PhotoCapabilities::PhotoCapabilityDouble::MAX_FOCUS_DISTANCE);
+ photo_capabilities->focus_distance->min = caps.getDouble(
+ PhotoCapabilities::PhotoCapabilityDouble::MIN_FOCUS_DISTANCE);
+ photo_capabilities->focus_distance->step = caps.getDouble(
+ PhotoCapabilities::PhotoCapabilityDouble::STEP_FOCUS_DISTANCE);
photo_capabilities->exposure_compensation = mojom::Range::New();
- photo_capabilities->exposure_compensation->current =
- caps.getCurrentExposureCompensation();
- photo_capabilities->exposure_compensation->max =
- caps.getMaxExposureCompensation();
- photo_capabilities->exposure_compensation->min =
- caps.getMinExposureCompensation();
- photo_capabilities->exposure_compensation->step =
- caps.getStepExposureCompensation();
+ photo_capabilities->exposure_compensation->current = caps.getDouble(
+ PhotoCapabilities::PhotoCapabilityDouble::CURRENT_EXPOSURE_COMPENSATION);
+ photo_capabilities->exposure_compensation->max = caps.getDouble(
+ PhotoCapabilities::PhotoCapabilityDouble::MAX_EXPOSURE_COMPENSATION);
+ photo_capabilities->exposure_compensation->min = caps.getDouble(
+ PhotoCapabilities::PhotoCapabilityDouble::MIN_EXPOSURE_COMPENSATION);
+ photo_capabilities->exposure_compensation->step = caps.getDouble(
+ PhotoCapabilities::PhotoCapabilityDouble::STEP_EXPOSURE_COMPENSATION);
photo_capabilities->exposure_time = mojom::Range::New();
- photo_capabilities->exposure_time->current = caps.getCurrentExposureTime();
- photo_capabilities->exposure_time->max = caps.getMaxExposureTime();
- photo_capabilities->exposure_time->min = caps.getMinExposureTime();
- photo_capabilities->exposure_time->step = caps.getStepExposureTime();
+ photo_capabilities->exposure_time->current = caps.getDouble(
+ PhotoCapabilities::PhotoCapabilityDouble::CURRENT_EXPOSURE_TIME);
+ photo_capabilities->exposure_time->max = caps.getDouble(
+ PhotoCapabilities::PhotoCapabilityDouble::MAX_EXPOSURE_TIME);
+ photo_capabilities->exposure_time->min = caps.getDouble(
+ PhotoCapabilities::PhotoCapabilityDouble::MIN_EXPOSURE_TIME);
+ photo_capabilities->exposure_time->step = caps.getDouble(
+ PhotoCapabilities::PhotoCapabilityDouble::STEP_EXPOSURE_TIME);
photo_capabilities->color_temperature = mojom::Range::New();
- photo_capabilities->color_temperature->current =
- caps.getCurrentColorTemperature();
- photo_capabilities->color_temperature->max = caps.getMaxColorTemperature();
- photo_capabilities->color_temperature->min = caps.getMinColorTemperature();
- photo_capabilities->color_temperature->step = caps.getStepColorTemperature();
+ photo_capabilities->color_temperature->current = caps.getInt(
+ PhotoCapabilities::PhotoCapabilityInt::CURRENT_COLOR_TEMPERATURE);
+ photo_capabilities->color_temperature->max =
+ caps.getInt(PhotoCapabilities::PhotoCapabilityInt::MAX_COLOR_TEMPERATURE);
+ photo_capabilities->color_temperature->min =
+ caps.getInt(PhotoCapabilities::PhotoCapabilityInt::MIN_COLOR_TEMPERATURE);
+ photo_capabilities->color_temperature->step = caps.getInt(
+ PhotoCapabilities::PhotoCapabilityInt::STEP_COLOR_TEMPERATURE);
+
photo_capabilities->iso = mojom::Range::New();
- photo_capabilities->iso->current = caps.getCurrentIso();
- photo_capabilities->iso->max = caps.getMaxIso();
- photo_capabilities->iso->min = caps.getMinIso();
- photo_capabilities->iso->step = caps.getStepIso();
+ photo_capabilities->iso->current =
+ caps.getInt(PhotoCapabilities::PhotoCapabilityInt::CURRENT_ISO);
+ photo_capabilities->iso->max =
+ caps.getInt(PhotoCapabilities::PhotoCapabilityInt::MAX_ISO);
+ photo_capabilities->iso->min =
+ caps.getInt(PhotoCapabilities::PhotoCapabilityInt::MIN_ISO);
+ photo_capabilities->iso->step =
+ caps.getInt(PhotoCapabilities::PhotoCapabilityInt::STEP_ISO);
photo_capabilities->brightness = mojom::Range::New();
photo_capabilities->contrast = mojom::Range::New();
@@ -471,28 +492,43 @@ void VideoCaptureDeviceAndroid::OnGetPhotoCapabilitiesReply(
photo_capabilities->sharpness = mojom::Range::New();
photo_capabilities->zoom = mojom::Range::New();
- photo_capabilities->zoom->current = caps.getCurrentZoom();
- photo_capabilities->zoom->max = caps.getMaxZoom();
- photo_capabilities->zoom->min = caps.getMinZoom();
- photo_capabilities->zoom->step = caps.getStepZoom();
-
- photo_capabilities->supports_torch = caps.getSupportsTorch();
- photo_capabilities->torch = caps.getTorch();
+ photo_capabilities->zoom->current =
+ caps.getDouble(PhotoCapabilities::PhotoCapabilityDouble::CURRENT_ZOOM);
+ photo_capabilities->zoom->max =
+ caps.getDouble(PhotoCapabilities::PhotoCapabilityDouble::MAX_ZOOM);
+ photo_capabilities->zoom->min =
+ caps.getDouble(PhotoCapabilities::PhotoCapabilityDouble::MIN_ZOOM);
+ photo_capabilities->zoom->step =
+ caps.getDouble(PhotoCapabilities::PhotoCapabilityDouble::STEP_ZOOM);
+
+ photo_capabilities->supports_torch =
+ caps.getBool(PhotoCapabilities::PhotoCapabilityBool::SUPPORTS_TORCH);
+ photo_capabilities->torch =
+ caps.getBool(PhotoCapabilities::PhotoCapabilityBool::TORCH);
photo_capabilities->red_eye_reduction =
- caps.getRedEyeReduction() ? mojom::RedEyeReduction::CONTROLLABLE
- : mojom::RedEyeReduction::NEVER;
+ caps.getBool(PhotoCapabilities::PhotoCapabilityBool::RED_EYE_REDUCTION)
+ ? mojom::RedEyeReduction::CONTROLLABLE
+ : mojom::RedEyeReduction::NEVER;
photo_capabilities->height = mojom::Range::New();
- photo_capabilities->height->current = caps.getCurrentHeight();
- photo_capabilities->height->max = caps.getMaxHeight();
- photo_capabilities->height->min = caps.getMinHeight();
- photo_capabilities->height->step = caps.getStepHeight();
+ photo_capabilities->height->current =
+ caps.getInt(PhotoCapabilities::PhotoCapabilityInt::CURRENT_HEIGHT);
+ photo_capabilities->height->max =
+ caps.getInt(PhotoCapabilities::PhotoCapabilityInt::MAX_HEIGHT);
+ photo_capabilities->height->min =
+ caps.getInt(PhotoCapabilities::PhotoCapabilityInt::MIN_HEIGHT);
+ photo_capabilities->height->step =
+ caps.getInt(PhotoCapabilities::PhotoCapabilityInt::STEP_HEIGHT);
photo_capabilities->width = mojom::Range::New();
- photo_capabilities->width->current = caps.getCurrentWidth();
- photo_capabilities->width->max = caps.getMaxWidth();
- photo_capabilities->width->min = caps.getMinWidth();
- photo_capabilities->width->step = caps.getStepWidth();
- const auto fill_light_modes = caps.getFillLightModes();
+ photo_capabilities->width->current =
+ caps.getInt(PhotoCapabilities::PhotoCapabilityInt::CURRENT_WIDTH);
+ photo_capabilities->width->max =
+ caps.getInt(PhotoCapabilities::PhotoCapabilityInt::MAX_WIDTH);
+ photo_capabilities->width->min =
+ caps.getInt(PhotoCapabilities::PhotoCapabilityInt::MIN_WIDTH);
+ photo_capabilities->width->step =
+ caps.getInt(PhotoCapabilities::PhotoCapabilityInt::STEP_WIDTH);
+ const auto fill_light_modes = caps.getFillLightModeArray();
std::vector<mojom::FillLightMode> modes;
for (const auto& fill_light_mode : fill_light_modes)
modes.push_back(ToMojomFillLightMode(fill_light_mode));
@@ -700,13 +736,13 @@ void VideoCaptureDeviceAndroid::DoSetPhotoOptions(
const double width = settings->has_width ? settings->width : 0.0;
const double height = settings->has_height ? settings->height : 0.0;
- std::vector<float> points_of_interest_marshalled;
+ std::vector<double> points_of_interest_marshalled;
for (const auto& point : settings->points_of_interest) {
points_of_interest_marshalled.push_back(point->x);
points_of_interest_marshalled.push_back(point->y);
}
- ScopedJavaLocalRef<jfloatArray> points_of_interest =
- base::android::ToJavaFloatArray(env, points_of_interest_marshalled);
+ ScopedJavaLocalRef<jdoubleArray> points_of_interest =
+ base::android::ToJavaDoubleArray(env, points_of_interest_marshalled);
const double exposure_compensation = settings->has_exposure_compensation
? settings->exposure_compensation
diff --git a/chromium/media/capture/video/chromeos/DEPS b/chromium/media/capture/video/chromeos/DEPS
index f1958045109..156f915bf69 100644
--- a/chromium/media/capture/video/chromeos/DEPS
+++ b/chromium/media/capture/video/chromeos/DEPS
@@ -1,4 +1,5 @@
include_rules = [
"+chromeos/dbus",
+ "+components/chromeos_camera",
"+third_party/libsync",
]
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.cc b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
index d0c61e8c6e8..3beb0dfe8a8 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
@@ -90,12 +90,22 @@ void TakePhotoCallbackBundle(VideoCaptureDevice::TakePhotoCallback callback,
} // namespace
+bool IsInputStream(StreamType stream_type) {
+ // Currently there is only one input stream. Modify this method if there is
+ // any other input streams.
+ return stream_type == StreamType::kYUVInput;
+}
+
StreamType StreamIdToStreamType(uint64_t stream_id) {
switch (stream_id) {
case 0:
- return StreamType::kPreview;
+ return StreamType::kPreviewOutput;
case 1:
- return StreamType::kStillCapture;
+ return StreamType::kJpegOutput;
+ case 2:
+ return StreamType::kYUVInput;
+ case 3:
+ return StreamType::kYUVOutput;
default:
return StreamType::kUnknown;
}
@@ -103,10 +113,14 @@ StreamType StreamIdToStreamType(uint64_t stream_id) {
std::string StreamTypeToString(StreamType stream_type) {
switch (stream_type) {
- case StreamType::kPreview:
- return std::string("StreamType::kPreview");
- case StreamType::kStillCapture:
- return std::string("StreamType::kStillCapture");
+ case StreamType::kPreviewOutput:
+ return std::string("StreamType::kPreviewOutput");
+ case StreamType::kJpegOutput:
+ return std::string("StreamType::kJpegOutput");
+ case StreamType::kYUVInput:
+ return std::string("StreamType::kYUVInput");
+ case StreamType::kYUVOutput:
+ return std::string("StreamType::kYUVOutput");
default:
return std::string("Unknown StreamType value: ") +
std::to_string(static_cast<int32_t>(stream_type));
@@ -179,6 +193,8 @@ void CameraDeviceDelegate::StopAndDeAllocate(
base::OnceClosure device_close_callback) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ reprocess_manager_->FlushReprocessOptions(device_descriptor_.device_id);
+
if (!device_context_ ||
device_context_->GetState() == CameraDeviceContext::State::kStopped ||
(device_context_->GetState() == CameraDeviceContext::State::kError &&
@@ -268,7 +284,7 @@ void CameraDeviceDelegate::SetPhotoOptions(
return;
}
- if (request_manager_->GetNumberOfStreams() < kMaxConfiguredStreams) {
+ if (!request_manager_->HasStreamsConfiguredForTakePhoto()) {
request_manager_->StopPreview(
base::BindOnce(&CameraDeviceDelegate::OnFlushed, GetWeakPtr()));
set_photo_option_callback_ = std::move(callback);
@@ -293,9 +309,9 @@ void CameraDeviceDelegate::TakePhotoImpl() {
auto construct_request_cb =
base::BindOnce(&CameraDeviceDelegate::ConstructDefaultRequestSettings,
- GetWeakPtr(), StreamType::kStillCapture);
+ GetWeakPtr(), StreamType::kJpegOutput);
- if (request_manager_->GetNumberOfStreams() >= kMaxConfiguredStreams) {
+ if (request_manager_->HasStreamsConfiguredForTakePhoto()) {
camera_3a_controller_->Stabilize3AForStillCapture(
std::move(construct_request_cb));
return;
@@ -392,6 +408,9 @@ void CameraDeviceDelegate::OnGotCameraInfo(
SortCameraMetadata(&camera_info->static_camera_characteristics);
static_metadata_ = std::move(camera_info->static_camera_characteristics);
+ reprocess_manager_->UpdateSupportedEffects(device_descriptor_.device_id,
+ static_metadata_);
+
const cros::mojom::CameraMetadataEntryPtr* sensor_orientation =
GetMetadataEntry(
static_metadata_,
@@ -491,7 +510,7 @@ void CameraDeviceDelegate::ConfigureStreams(bool require_photo) {
// Set up context for preview stream.
cros::mojom::Camera3StreamPtr preview_stream =
cros::mojom::Camera3Stream::New();
- preview_stream->id = static_cast<uint64_t>(StreamType::kPreview);
+ preview_stream->id = static_cast<uint64_t>(StreamType::kPreviewOutput);
preview_stream->stream_type =
cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
preview_stream->width =
@@ -521,7 +540,7 @@ void CameraDeviceDelegate::ConfigureStreams(bool require_photo) {
cros::mojom::Camera3StreamPtr still_capture_stream =
cros::mojom::Camera3Stream::New();
- still_capture_stream->id = static_cast<uint64_t>(StreamType::kStillCapture);
+ still_capture_stream->id = static_cast<uint64_t>(StreamType::kJpegOutput);
still_capture_stream->stream_type =
cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
still_capture_stream->width = max_blob_width;
@@ -532,6 +551,38 @@ void CameraDeviceDelegate::ConfigureStreams(bool require_photo) {
still_capture_stream->rotation =
cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
stream_config->streams.push_back(std::move(still_capture_stream));
+
+ int32_t max_yuv_width = 0, max_yuv_height = 0;
+ if (IsYUVReprocessingSupported(&max_yuv_width, &max_yuv_height)) {
+ auto reprocessing_stream_input = cros::mojom::Camera3Stream::New();
+ reprocessing_stream_input->id =
+ static_cast<uint64_t>(StreamType::kYUVInput);
+ reprocessing_stream_input->stream_type =
+ cros::mojom::Camera3StreamType::CAMERA3_STREAM_INPUT;
+ reprocessing_stream_input->width = max_yuv_width;
+ reprocessing_stream_input->height = max_yuv_height;
+ reprocessing_stream_input->format =
+ cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888;
+ reprocessing_stream_input->data_space = 0;
+ reprocessing_stream_input->rotation =
+ cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
+
+ auto reprocessing_stream_output = cros::mojom::Camera3Stream::New();
+ reprocessing_stream_output->id =
+ static_cast<uint64_t>(StreamType::kYUVOutput);
+ reprocessing_stream_output->stream_type =
+ cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
+ reprocessing_stream_output->width = max_yuv_width;
+ reprocessing_stream_output->height = max_yuv_height;
+ reprocessing_stream_output->format =
+ cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888;
+ reprocessing_stream_output->data_space = 0;
+ reprocessing_stream_output->rotation =
+ cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
+
+ stream_config->streams.push_back(std::move(reprocessing_stream_input));
+ stream_config->streams.push_back(std::move(reprocessing_stream_output));
+ }
}
stream_config->operation_mode = cros::mojom::Camera3StreamConfigurationMode::
@@ -562,7 +613,7 @@ void CameraDeviceDelegate::OnConfiguredStreams(
}
if (!updated_config ||
updated_config->streams.size() > kMaxConfiguredStreams ||
- updated_config->streams.size() < 1) {
+ updated_config->streams.size() < kMinConfiguredStreams) {
device_context_->SetErrorState(
media::VideoCaptureError::
kCrosHalV3DeviceDelegateWrongNumberOfStreamsConfigured,
@@ -578,7 +629,76 @@ void CameraDeviceDelegate::OnConfiguredStreams(
device_context_->SetState(CameraDeviceContext::State::kStreamConfigured);
// Kick off the preview stream.
- ConstructDefaultRequestSettings(StreamType::kPreview);
+ ConstructDefaultRequestSettings(StreamType::kPreviewOutput);
+}
+
+bool CameraDeviceDelegate::IsYUVReprocessingSupported(int* max_width,
+ int* max_height) {
+ bool has_yuv_reprocessing_capability = [&] {
+ auto capabilities = GetMetadataEntryAsSpan<uint8_t>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ auto capability_yuv_reprocessing = static_cast<uint8_t>(
+ cros::mojom::AndroidRequestAvailableCapabilities::
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
+ for (auto capability : capabilities) {
+ if (capability == capability_yuv_reprocessing) {
+ return true;
+ }
+ }
+ return false;
+ }();
+
+ if (!has_yuv_reprocessing_capability) {
+ return false;
+ }
+
+ bool has_yuv_input_blob_output = [&] {
+ auto formats_map = GetMetadataEntryAsSpan<int32_t>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::
+ ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP);
+ // The formats map looks like: [
+ // {INPUT_FORMAT, NUM_OF_OUTPUTS, OUTPUT_FORMAT_1, OUTPUT_FORMAT_2, ...},
+ // {...},
+ // ...
+ // ]
+ auto format_yuv = static_cast<int32_t>(
+ cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888);
+ auto format_blob = static_cast<int32_t>(
+ cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB);
+
+ size_t idx = 0;
+ while (idx < formats_map.size() && !has_yuv_input_blob_output) {
+ auto in_format = formats_map[idx++];
+ auto out_amount = formats_map[idx++];
+ if (in_format != format_yuv) {
+ idx += out_amount;
+ continue;
+ }
+ for (size_t idx_end = idx + out_amount; idx < idx_end; idx++) {
+ auto out_format = formats_map[idx];
+ if (out_format == format_blob) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }();
+
+ if (!has_yuv_input_blob_output) {
+ return false;
+ }
+
+ GetMaxStreamResolution(
+ static_metadata_, cros::mojom::Camera3StreamType::CAMERA3_STREAM_INPUT,
+ cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888, max_width,
+ max_height);
+ if (max_width == 0 || max_height == 0) {
+ return false;
+ }
+
+ return true;
}
void CameraDeviceDelegate::ConstructDefaultRequestSettings(
@@ -588,18 +708,20 @@ void CameraDeviceDelegate::ConstructDefaultRequestSettings(
CameraDeviceContext::State::kStreamConfigured ||
device_context_->GetState() == CameraDeviceContext::State::kCapturing);
- if (stream_type == StreamType::kPreview) {
+ if (stream_type == StreamType::kPreviewOutput) {
device_ops_->ConstructDefaultRequestSettings(
cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW,
base::BindOnce(
&CameraDeviceDelegate::OnConstructedDefaultPreviewRequestSettings,
GetWeakPtr()));
- } else { // stream_type == StreamType::kStillCapture
+ } else if (stream_type == StreamType::kJpegOutput) {
device_ops_->ConstructDefaultRequestSettings(
cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_STILL_CAPTURE,
base::BindOnce(&CameraDeviceDelegate::
OnConstructedDefaultStillCaptureRequestSettings,
GetWeakPtr()));
+ } else {
+ NOTREACHED() << "No default request settings for stream: " << stream_type;
}
}
@@ -638,12 +760,15 @@ void CameraDeviceDelegate::OnConstructedDefaultStillCaptureRequestSettings(
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
while (!take_photo_callbacks_.empty()) {
- request_manager_->TakePhoto(
- settings.Clone(),
+ reprocess_manager_->ConsumeReprocessOptions(
+ device_descriptor_.device_id,
base::BindOnce(
&TakePhotoCallbackBundle, std::move(take_photo_callbacks_.front()),
base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
- camera_3a_controller_->GetWeakPtr())));
+ camera_3a_controller_->GetWeakPtr())),
+ media::BindToCurrentLoop(base::BindOnce(&RequestManager::TakePhoto,
+ request_manager_->GetWeakPtr(),
+ settings.Clone())));
take_photo_callbacks_.pop();
}
}
@@ -695,11 +820,11 @@ bool CameraDeviceDelegate::SetPointsOfInterest(
// the closest allowed value.
// ref: https://www.w3.org/TR/image-capture/#points-of-interest
- float x = base::ClampToRange(points_of_interest[0]->x, 0.0f, 1.0f);
- float y = base::ClampToRange(points_of_interest[0]->y, 0.0f, 1.0f);
+ double x = base::ClampToRange(points_of_interest[0]->x, 0.0, 1.0);
+ double y = base::ClampToRange(points_of_interest[0]->y, 0.0, 1.0);
// Handle rotation, still in normalized square space.
- std::tie(x, y) = [&]() -> std::pair<float, float> {
+ std::tie(x, y) = [&]() -> std::pair<double, double> {
switch (device_context_->GetCameraFrameOrientation()) {
case 0:
return {x, y};
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.h b/chromium/media/capture/video/chromeos/camera_device_delegate.h
index 05fe7f59dc2..fb3a2ae4ed4 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.h
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.h
@@ -24,11 +24,16 @@ class ReprocessManager;
class RequestManager;
enum class StreamType : uint64_t {
- kPreview = 0,
- kStillCapture = 1,
+ kPreviewOutput = 0,
+ kJpegOutput = 1,
+ kYUVInput = 2,
+ kYUVOutput = 3,
kUnknown,
};
+// Returns true if the given stream type is an input stream.
+bool IsInputStream(StreamType stream_type);
+
StreamType StreamIdToStreamType(uint64_t stream_id);
std::string StreamTypeToString(StreamType stream_type);
@@ -131,6 +136,14 @@ class CAPTURE_EXPORT CameraDeviceDelegate final {
int32_t result,
cros::mojom::Camera3StreamConfigurationPtr updated_config);
+ // Checks metadata in |static_metadata_| to ensure field
+ // request.availableCapabilities contains YUV reprocessing and field
+ // scaler.availableInputOutputFormatsMap contains YUV => BLOB mapping.
+ // If above checks both pass, fill the max yuv width and height in
+ // |max_width| and |max_height| and return true if both width and height are
+ // positive numbers. Return false otherwise.
+ bool IsYUVReprocessingSupported(int* max_width, int* max_height);
+
// ConstructDefaultRequestSettings asks the camera HAL for the default request
// settings of the stream in |stream_context_|.
// OnConstructedDefaultRequestSettings sets the request settings in
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
index 427de1a4476..48b29f160ad 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
@@ -426,6 +426,7 @@ class CameraDeviceDelegateTest : public ::testing::Test {
}
protected:
+ base::test::ScopedTaskEnvironment scoped_task_environment_;
scoped_refptr<CameraHalDelegate> camera_hal_delegate_;
std::unique_ptr<CameraDeviceDelegate> camera_device_delegate_;
@@ -445,7 +446,6 @@ class CameraDeviceDelegateTest : public ::testing::Test {
size_t num_streams_;
private:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
base::Thread hal_delegate_thread_;
std::unique_ptr<base::RunLoop> run_loop_;
DISALLOW_COPY_AND_ASSIGN(CameraDeviceDelegateTest);
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
index 68bdbfeda5b..a4f2b17b50a 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
@@ -14,6 +14,7 @@
#include "base/bind_helpers.h"
#include "base/posix/safe_strerror.h"
#include "base/process/launch.h"
+#include "base/strings/strcat.h"
#include "base/strings/string_piece.h"
#include "base/strings/string_split.h"
#include "base/system/system_monitor.h"
@@ -99,7 +100,8 @@ CameraHalDelegate::CameraHalDelegate(
num_builtin_cameras_(0),
camera_buffer_factory_(new CameraBufferFactory()),
ipc_task_runner_(std::move(ipc_task_runner)),
- camera_module_callbacks_(this) {
+ camera_module_callbacks_(this),
+ vendor_tag_ops_delegate_(ipc_task_runner_) {
DETACH_FROM_SEQUENCE(sequence_checker_);
}
@@ -248,12 +250,32 @@ void CameraHalDelegate::GetDeviceDescriptors(
desc.facing = VideoFacingMode::MEDIA_VIDEO_FACING_USER;
desc.set_display_name("Front Camera");
break;
- case cros::mojom::CameraFacing::CAMERA_FACING_EXTERNAL:
+ case cros::mojom::CameraFacing::CAMERA_FACING_EXTERNAL: {
desc.facing = VideoFacingMode::MEDIA_VIDEO_FACING_NONE;
- desc.set_display_name("External Camera");
+
+ auto get_vendor_string = [&](const std::string& key) -> const char* {
+ const VendorTagInfo* info =
+ vendor_tag_ops_delegate_.GetInfoByName(key);
+ if (info == nullptr) {
+ return nullptr;
+ }
+ auto val = GetMetadataEntryAsSpan<char>(
+ camera_info->static_camera_characteristics, info->tag);
+ return val.empty() ? nullptr : val.data();
+ };
+
+ auto* name = get_vendor_string("com.google.usb.modelName");
+ desc.set_display_name(name != nullptr ? name : "External Camera");
+
+ auto* vid = get_vendor_string("com.google.usb.vendorId");
+ auto* pid = get_vendor_string("com.google.usb.productId");
+ if (vid != nullptr && pid != nullptr) {
+ desc.model_id = base::StrCat({vid, ":", pid});
+ }
break;
// Mojo validates the input parameters for us so we don't need to worry
// about malformed values.
+ }
}
device_descriptors->push_back(desc);
}
@@ -309,6 +331,7 @@ void CameraHalDelegate::ResetMojoInterfaceOnIpcThread() {
if (camera_module_callbacks_.is_bound()) {
camera_module_callbacks_.Close();
}
+ vendor_tag_ops_delegate_.Reset();
builtin_camera_info_updated_.Reset();
camera_module_has_been_set_.Reset();
has_camera_connected_.Reset();
@@ -365,6 +388,10 @@ void CameraHalDelegate::OnGotNumberOfCamerasOnIpcThread(int32_t num_cameras) {
camera_module_->SetCallbacks(
std::move(camera_module_callbacks_ptr),
base::BindOnce(&CameraHalDelegate::OnSetCallbacksOnIpcThread, this));
+
+ camera_module_->GetVendorTagOps(
+ vendor_tag_ops_delegate_.MakeRequest(),
+ base::BindOnce(&CameraHalDelegate::OnGotVendorTagOpsOnIpcThread, this));
}
void CameraHalDelegate::OnSetCallbacksOnIpcThread(int32_t result) {
@@ -390,6 +417,11 @@ void CameraHalDelegate::OnSetCallbacksOnIpcThread(int32_t result) {
}
}
+void CameraHalDelegate::OnGotVendorTagOpsOnIpcThread() {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ vendor_tag_ops_delegate_.Initialize();
+}
+
void CameraHalDelegate::GetCameraInfoOnIpcThread(
int32_t camera_id,
GetCameraInfoCallback callback) {
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.h b/chromium/media/capture/video/chromeos/camera_hal_delegate.h
index bad13be241f..395ff32ba26 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.h
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.h
@@ -17,6 +17,7 @@
#include "base/threading/thread.h"
#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
#include "media/capture/video/chromeos/mojo/camera_common.mojom.h"
+#include "media/capture/video/chromeos/vendor_tag_ops_delegate.h"
#include "media/capture/video/video_capture_device_factory.h"
#include "media/capture/video_capture_types.h"
#include "mojo/public/cpp/bindings/binding.h"
@@ -47,7 +48,7 @@ class CAPTURE_EXPORT CameraHalDelegate final
void SetCameraModule(cros::mojom::CameraModulePtrInfo camera_module_ptr_info);
- // Resets |camera_module_| and |camera_module_callbacks_|.
+ // Resets various mojo bindings, WaitableEvents, and cached information.
void Reset();
// Delegation methods for the VideoCaptureDeviceFactory interface. These
@@ -94,13 +95,20 @@ class CAPTURE_EXPORT CameraHalDelegate final
// GetDeviceDescriptors.
bool UpdateBuiltInCameraInfo();
void UpdateBuiltInCameraInfoOnIpcThread();
+
// Callback for GetNumberOfCameras Mojo IPC function. GetNumberOfCameras
// returns the number of built-in cameras on the device.
void OnGotNumberOfCamerasOnIpcThread(int32_t num_cameras);
+
// Callback for SetCallbacks Mojo IPC function. SetCallbacks is called after
// GetNumberOfCameras is called for the first time, and before any other calls
// to |camera_module_|.
void OnSetCallbacksOnIpcThread(int32_t result);
+
+ // Callback for GetVendorTagOps Mojo IPC function, which will initialize the
+ // |vendor_tag_ops_delegate_|.
+ void OnGotVendorTagOpsOnIpcThread();
+
void GetCameraInfoOnIpcThread(int32_t camera_id,
GetCameraInfoCallback callback);
void OnGotCameraInfoOnIpcThread(int32_t camera_id,
@@ -163,6 +171,10 @@ class CAPTURE_EXPORT CameraHalDelegate final
// |ipc_task_runner_|.
mojo::Binding<cros::mojom::CameraModuleCallbacks> camera_module_callbacks_;
+ // An internal delegate to handle VendorTagOps mojo connection and query
+ // information of vendor tags. Bound to |ipc_task_runner_|.
+ VendorTagOpsDelegate vendor_tag_ops_delegate_;
+
DISALLOW_COPY_AND_ASSIGN(CameraHalDelegate);
};
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
index 4f1f101aa1a..44dcb1e0b80 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
@@ -12,7 +12,9 @@
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
+#include "base/test/scoped_task_environment.h"
#include "media/capture/video/chromeos/mock_camera_module.h"
+#include "media/capture/video/chromeos/mock_vendor_tag_ops.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
#include "media/capture/video/mock_gpu_memory_buffer_manager.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -21,14 +23,13 @@
using testing::_;
using testing::A;
using testing::Invoke;
+using testing::Return;
namespace media {
class CameraHalDelegateTest : public ::testing::Test {
public:
- CameraHalDelegateTest()
- : message_loop_(new base::MessageLoop),
- hal_delegate_thread_("HalDelegateThread") {}
+ CameraHalDelegateTest() : hal_delegate_thread_("HalDelegateThread") {}
void SetUp() override {
VideoCaptureDeviceFactoryChromeOS::SetGpuBufferManager(
@@ -51,12 +52,13 @@ class CameraHalDelegateTest : public ::testing::Test {
}
protected:
+ base::test::ScopedTaskEnvironment scoped_task_environment_;
scoped_refptr<CameraHalDelegate> camera_hal_delegate_;
testing::StrictMock<unittest_internal::MockCameraModule> mock_camera_module_;
+ testing::StrictMock<unittest_internal::MockVendorTagOps> mock_vendor_tag_ops_;
unittest_internal::MockGpuMemoryBufferManager mock_gpu_memory_buffer_manager_;
private:
- std::unique_ptr<base::MessageLoop> message_loop_;
base::Thread hal_delegate_thread_;
std::unique_ptr<base::RunLoop> run_loop_;
DISALLOW_COPY_AND_ASSIGN(CameraHalDelegateTest);
@@ -118,6 +120,12 @@ TEST_F(CameraHalDelegateTest, GetBuiltinCameraInfo) {
std::move(cb).Run(0, std::move(camera_info));
};
+ auto get_vendor_tag_ops_cb =
+ [&](cros::mojom::VendorTagOpsRequest& vendor_tag_ops_request,
+ cros::mojom::CameraModule::GetVendorTagOpsCallback& cb) {
+ mock_vendor_tag_ops_.Bind(std::move(vendor_tag_ops_request));
+ };
+
EXPECT_CALL(mock_camera_module_, DoGetNumberOfCameras(_))
.Times(1)
.WillOnce(Invoke(get_number_of_cameras_cb));
@@ -127,6 +135,12 @@ TEST_F(CameraHalDelegateTest, GetBuiltinCameraInfo) {
A<cros::mojom::CameraModule::SetCallbacksCallback&>()))
.Times(1);
EXPECT_CALL(mock_camera_module_,
+ DoGetVendorTagOps(
+ A<cros::mojom::VendorTagOpsRequest&>(),
+ A<cros::mojom::CameraModule::GetVendorTagOpsCallback&>()))
+ .Times(1)
+ .WillOnce(Invoke(get_vendor_tag_ops_cb));
+ EXPECT_CALL(mock_camera_module_,
DoGetCameraInfo(
0, A<cros::mojom::CameraModule::GetCameraInfoCallback&>()))
.Times(1)
@@ -137,6 +151,29 @@ TEST_F(CameraHalDelegateTest, GetBuiltinCameraInfo) {
.Times(1)
.WillOnce(Invoke(get_camera_info_cb));
+ EXPECT_CALL(mock_vendor_tag_ops_, DoGetTagCount())
+ .Times(1)
+ .WillOnce(Return(1));
+
+ const uint32_t kFakeTag = 0x80000000;
+
+ EXPECT_CALL(mock_vendor_tag_ops_, DoGetAllTags())
+ .Times(1)
+ .WillOnce(Return(std::vector<uint32_t>{kFakeTag}));
+
+ EXPECT_CALL(mock_vendor_tag_ops_, DoGetSectionName(kFakeTag))
+ .Times(1)
+ .WillOnce(Return("com.google"));
+
+ EXPECT_CALL(mock_vendor_tag_ops_, DoGetTagName(kFakeTag))
+ .Times(1)
+ .WillOnce(Return("fake.foo.bar"));
+
+ EXPECT_CALL(mock_vendor_tag_ops_, DoGetTagType(kFakeTag))
+ .Times(1)
+ .WillOnce(
+ Return(static_cast<int32_t>(cros::mojom::EntryType::TYPE_BYTE)));
+
VideoCaptureDeviceDescriptors descriptors;
camera_hal_delegate_->GetDeviceDescriptors(&descriptors);
@@ -148,6 +185,9 @@ TEST_F(CameraHalDelegateTest, GetBuiltinCameraInfo) {
ASSERT_EQ(VideoFacingMode::MEDIA_VIDEO_FACING_ENVIRONMENT,
descriptors[1].facing);
+ // TODO(shik): Test external camera. Check the fields |display_name| and
+ // |model_id| are set properly according to the vendor tags.
+
EXPECT_CALL(mock_gpu_memory_buffer_manager_,
CreateGpuMemoryBuffer(_, gfx::BufferFormat::YUV_420_BIPLANAR,
gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
index c85897d2275..2edb18c8906 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
@@ -41,21 +41,6 @@ std::string GenerateRandomToken() {
return base::HexEncode(random_bytes, 16);
}
-// Creates a pipe. Returns true on success, otherwise false.
-// On success, |read_fd| will be set to the fd of the read side, and
-// |write_fd| will be set to the one of write side.
-bool CreatePipe(base::ScopedFD* read_fd, base::ScopedFD* write_fd) {
- int fds[2];
- if (pipe2(fds, O_NONBLOCK | O_CLOEXEC) < 0) {
- PLOG(ERROR) << "pipe2()";
- return false;
- }
-
- read_fd->reset(fds[0]);
- write_fd->reset(fds[1]);
- return true;
-}
-
// Waits until |raw_socket_fd| is readable. We signal |raw_cancel_fd| when we
// want to cancel the blocking wait and stop serving connections on
// |raw_socket_fd|. To notify such a situation, |raw_cancel_fd| is also passed
@@ -123,7 +108,7 @@ bool CameraHalDispatcherImpl::StartThreads() {
}
bool CameraHalDispatcherImpl::Start(
- MojoJpegDecodeAcceleratorFactoryCB jda_factory,
+ MojoMjpegDecodeAcceleratorFactoryCB jda_factory,
MojoJpegEncodeAcceleratorFactoryCB jea_factory) {
DCHECK(!IsStarted());
if (!StartThreads()) {
@@ -212,7 +197,7 @@ void CameraHalDispatcherImpl::RegisterClient(
}
void CameraHalDispatcherImpl::GetJpegDecodeAccelerator(
- media::mojom::JpegDecodeAcceleratorRequest jda_request) {
+ media::mojom::MjpegDecodeAcceleratorRequest jda_request) {
jda_factory_.Run(std::move(jda_request));
}
@@ -294,9 +279,9 @@ void CameraHalDispatcherImpl::StartServiceLoop(base::ScopedFD socket_fd,
DCHECK(socket_fd.is_valid());
base::ScopedFD cancel_fd;
- if (!CreatePipe(&cancel_fd, &cancel_pipe_)) {
+ if (!base::CreatePipe(&cancel_fd, &cancel_pipe_, true)) {
+ PLOG(ERROR) << "Failed to create cancel pipe";
started->Signal();
- LOG(ERROR) << "Failed to create cancel pipe";
return;
}
@@ -382,12 +367,10 @@ void CameraHalDispatcherImpl::OnCameraHalServerConnectionError() {
void CameraHalDispatcherImpl::OnCameraHalClientConnectionError(
CameraClientObserver* client_observer) {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
- for (auto& it : client_observers_) {
- if (it.get() == client_observer) {
- client_observers_.erase(it);
- VLOG(1) << "Camera HAL client connection lost";
- break;
- }
+ auto it = client_observers_.find(client_observer);
+ if (it != client_observers_.end()) {
+ client_observers_.erase(it);
+ VLOG(1) << "Camera HAL client connection lost";
}
}
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
index 02cd8fabda4..d36c792f218 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
+++ b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
@@ -8,9 +8,11 @@
#include <memory>
#include <set>
+#include "base/containers/unique_ptr_adapters.h"
#include "base/files/scoped_file.h"
#include "base/memory/singleton.h"
#include "base/threading/thread.h"
+#include "components/chromeos_camera/common/jpeg_encode_accelerator.mojom.h"
#include "media/capture/capture_export.h"
#include "media/capture/video/chromeos/mojo/cros_camera_service.mojom.h"
#include "media/capture/video/video_capture_device_factory.h"
@@ -27,6 +29,9 @@ class WaitableEvent;
namespace media {
+using MojoJpegEncodeAcceleratorFactoryCB =
+ base::RepeatingCallback<void(media::mojom::JpegEncodeAcceleratorRequest)>;
+
class CAPTURE_EXPORT CameraClientObserver {
public:
virtual ~CameraClientObserver();
@@ -49,7 +54,7 @@ class CAPTURE_EXPORT CameraHalDispatcherImpl final
public:
static CameraHalDispatcherImpl* GetInstance();
- bool Start(MojoJpegDecodeAcceleratorFactoryCB jda_factory,
+ bool Start(MojoMjpegDecodeAcceleratorFactoryCB jda_factory,
MojoJpegEncodeAcceleratorFactoryCB jea_factory);
void AddClientObserver(std::unique_ptr<CameraClientObserver> observer);
@@ -60,7 +65,7 @@ class CAPTURE_EXPORT CameraHalDispatcherImpl final
void RegisterServer(cros::mojom::CameraHalServerPtr server) final;
void RegisterClient(cros::mojom::CameraHalClientPtr client) final;
void GetJpegDecodeAccelerator(
- media::mojom::JpegDecodeAcceleratorRequest jda_request) final;
+ media::mojom::MjpegDecodeAcceleratorRequest jda_request) final;
void GetJpegEncodeAccelerator(
media::mojom::JpegEncodeAcceleratorRequest jea_request) final;
@@ -115,9 +120,10 @@ class CAPTURE_EXPORT CameraHalDispatcherImpl final
cros::mojom::CameraHalServerPtr camera_hal_server_;
- std::set<std::unique_ptr<CameraClientObserver>> client_observers_;
+ std::set<std::unique_ptr<CameraClientObserver>, base::UniquePtrComparator>
+ client_observers_;
- MojoJpegDecodeAcceleratorFactoryCB jda_factory_;
+ MojoMjpegDecodeAcceleratorFactoryCB jda_factory_;
MojoJpegEncodeAcceleratorFactoryCB jea_factory_;
diff --git a/chromium/media/capture/video/chromeos/cros_image_capture_impl.cc b/chromium/media/capture/video/chromeos/cros_image_capture_impl.cc
index 6574fef2712..1bad8a6eb8f 100644
--- a/chromium/media/capture/video/chromeos/cros_image_capture_impl.cc
+++ b/chromium/media/capture/video/chromeos/cros_image_capture_impl.cc
@@ -4,10 +4,12 @@
#include "media/capture/video/chromeos/cros_image_capture_impl.h"
+#include <string>
#include <utility>
#include <vector>
#include "base/task/post_task.h"
+#include "media/base/bind_to_current_loop.h"
namespace media {
@@ -22,16 +24,20 @@ void CrosImageCaptureImpl::BindRequest(
}
void CrosImageCaptureImpl::GetSupportedEffects(
+ const std::string& device_id,
GetSupportedEffectsCallback callback) {
reprocess_manager_->GetSupportedEffects(
- base::BindOnce(&CrosImageCaptureImpl::OnGetSupportedEffects,
- base::Unretained(this), std::move(callback)));
+ device_id, media::BindToCurrentLoop(base::BindOnce(
+ &CrosImageCaptureImpl::OnGetSupportedEffects,
+ base::Unretained(this), std::move(callback))));
}
void CrosImageCaptureImpl::SetReprocessOption(
+ const std::string& device_id,
cros::mojom::Effect effect,
SetReprocessOptionCallback callback) {
- reprocess_manager_->SetReprocessOption(effect, std::move(callback));
+ reprocess_manager_->SetReprocessOption(
+ device_id, effect, media::BindToCurrentLoop(std::move(callback)));
}
void CrosImageCaptureImpl::OnGetSupportedEffects(
diff --git a/chromium/media/capture/video/chromeos/cros_image_capture_impl.h b/chromium/media/capture/video/chromeos/cros_image_capture_impl.h
index cc9d80c41a7..278a403ffa0 100644
--- a/chromium/media/capture/video/chromeos/cros_image_capture_impl.h
+++ b/chromium/media/capture/video/chromeos/cros_image_capture_impl.h
@@ -5,6 +5,8 @@
#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CROS_IMAGE_CAPTURE_IMPL_H_
#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CROS_IMAGE_CAPTURE_IMPL_H_
+#include <string>
+
#include "base/containers/flat_set.h"
#include "media/capture/video/chromeos/mojo/cros_image_capture.mojom.h"
#include "media/capture/video/chromeos/reprocess_manager.h"
@@ -21,8 +23,10 @@ class CrosImageCaptureImpl : public cros::mojom::CrosImageCapture {
// cros::mojom::CrosImageCapture implementations.
- void GetSupportedEffects(GetSupportedEffectsCallback callback) override;
- void SetReprocessOption(cros::mojom::Effect effect,
+ void GetSupportedEffects(const std::string& device_id,
+ GetSupportedEffectsCallback callback) override;
+ void SetReprocessOption(const std::string& device_id,
+ cros::mojom::Effect effect,
SetReprocessOptionCallback callback) override;
private:
diff --git a/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc b/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc
index acc18fdbe74..27a6a7c7745 100644
--- a/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc
+++ b/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc
@@ -71,13 +71,12 @@ class GpuMemoryBufferImplGbm : public gfx::GpuMemoryBuffer {
handle_.type = gfx::NATIVE_PIXMAP;
// Set a dummy id since this is for testing only.
handle_.id = gfx::GpuMemoryBufferId(0);
- for (size_t i = 0; i < gbm_bo_get_num_planes(buffer_object); ++i) {
- handle_.native_pixmap_handle.fds.push_back(
- base::FileDescriptor(gbm_bo_get_plane_fd(buffer_object, i), true));
- handle_.native_pixmap_handle.planes.push_back(
- gfx::NativePixmapPlane(gbm_bo_get_plane_stride(buffer_object, i),
- gbm_bo_get_plane_offset(buffer_object, i),
- gbm_bo_get_plane_size(buffer_object, i)));
+ for (size_t i = 0; i < gbm_bo_get_plane_count(buffer_object); ++i) {
+ handle_.native_pixmap_handle.planes.push_back(gfx::NativePixmapPlane(
+ gbm_bo_get_stride_for_plane(buffer_object, i),
+ gbm_bo_get_offset(buffer_object, i),
+ gbm_bo_get_plane_size(buffer_object, i),
+ base::ScopedFD(gbm_bo_get_plane_fd(buffer_object, i))));
}
}
@@ -86,11 +85,6 @@ class GpuMemoryBufferImplGbm : public gfx::GpuMemoryBuffer {
Unmap();
}
- for (const auto& fd : handle_.native_pixmap_handle.fds) {
- // Close fds.
- DCHECK(fd.auto_close);
- close(fd.fd);
- }
gbm_bo_destroy(buffer_object_);
}
@@ -98,7 +92,7 @@ class GpuMemoryBufferImplGbm : public gfx::GpuMemoryBuffer {
if (mapped_) {
return true;
}
- size_t num_planes = gbm_bo_get_num_planes(buffer_object_);
+ size_t num_planes = gbm_bo_get_plane_count(buffer_object_);
uint32_t stride;
mapped_planes_.resize(num_planes);
for (size_t i = 0; i < num_planes; ++i) {
@@ -151,7 +145,7 @@ class GpuMemoryBufferImplGbm : public gfx::GpuMemoryBuffer {
gfx::BufferFormat GetFormat() const override { return format_; }
int stride(size_t plane) const override {
- return gbm_bo_get_plane_stride(buffer_object_, plane);
+ return gbm_bo_get_stride_for_plane(buffer_object_, plane);
}
void SetColorSpace(const gfx::ColorSpace& color_space) override {}
diff --git a/chromium/media/capture/video/chromeos/mock_camera_module.cc b/chromium/media/capture/video/chromeos/mock_camera_module.cc
index 86a840045dc..9f0d04e1550 100644
--- a/chromium/media/capture/video/chromeos/mock_camera_module.cc
+++ b/chromium/media/capture/video/chromeos/mock_camera_module.cc
@@ -59,6 +59,13 @@ void MockCameraModule::SetTorchMode(int32_t camera_id,
std::move(callback).Run(0);
}
+void MockCameraModule::GetVendorTagOps(
+ cros::mojom::VendorTagOpsRequest vendor_tag_ops_request,
+ GetVendorTagOpsCallback callback) {
+ DoGetVendorTagOps(vendor_tag_ops_request, callback);
+ std::move(callback).Run();
+}
+
cros::mojom::CameraModulePtrInfo MockCameraModule::GetInterfacePtrInfo() {
base::WaitableEvent done(base::WaitableEvent::ResetPolicy::MANUAL,
base::WaitableEvent::InitialState::NOT_SIGNALED);
diff --git a/chromium/media/capture/video/chromeos/mock_camera_module.h b/chromium/media/capture/video/chromeos/mock_camera_module.h
index cc482f5232d..2063b4c17f7 100644
--- a/chromium/media/capture/video/chromeos/mock_camera_module.h
+++ b/chromium/media/capture/video/chromeos/mock_camera_module.h
@@ -57,6 +57,12 @@ class MockCameraModule : public cros::mojom::CameraModule {
bool enabled,
SetTorchModeCallback& callback));
+ void GetVendorTagOps(cros::mojom::VendorTagOpsRequest vendor_tag_ops_request,
+ GetVendorTagOpsCallback callback) override;
+ MOCK_METHOD2(DoGetVendorTagOps,
+ void(cros::mojom::VendorTagOpsRequest& vendor_tag_ops_request,
+ GetVendorTagOpsCallback& callback));
+
cros::mojom::CameraModulePtrInfo GetInterfacePtrInfo();
private:
diff --git a/chromium/media/capture/video/chromeos/mock_vendor_tag_ops.cc b/chromium/media/capture/video/chromeos/mock_vendor_tag_ops.cc
new file mode 100644
index 00000000000..d4799904d81
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/mock_vendor_tag_ops.cc
@@ -0,0 +1,67 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/mock_vendor_tag_ops.h"
+
+#include "base/bind.h"
+#include "base/logging.h"
+#include "base/synchronization/waitable_event.h"
+
+namespace media {
+namespace unittest_internal {
+
+MockVendorTagOps::MockVendorTagOps()
+ : mock_vendor_tag_ops_thread_("MockVendorTagOpsThread"), binding_(this) {
+ CHECK(mock_vendor_tag_ops_thread_.Start());
+}
+
+MockVendorTagOps::~MockVendorTagOps() {
+ mock_vendor_tag_ops_thread_.task_runner()->PostTask(
+ FROM_HERE, base::BindOnce(&MockVendorTagOps::CloseBindingOnThread,
+ base::Unretained(this)));
+ mock_vendor_tag_ops_thread_.Stop();
+}
+
+void MockVendorTagOps::Bind(cros::mojom::VendorTagOpsRequest request) {
+ base::WaitableEvent done(base::WaitableEvent::ResetPolicy::MANUAL,
+ base::WaitableEvent::InitialState::NOT_SIGNALED);
+ cros::mojom::CameraModulePtrInfo ptr_info;
+ mock_vendor_tag_ops_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(&MockVendorTagOps::BindOnThread, base::Unretained(this),
+ base::Unretained(&done), std::move(request)));
+ done.Wait();
+}
+
+void MockVendorTagOps::GetTagCount(GetTagCountCallback callback) {
+ std::move(callback).Run(DoGetTagCount());
+}
+
+void MockVendorTagOps::GetAllTags(GetAllTagsCallback callback) {
+ std::move(callback).Run(DoGetAllTags());
+}
+
+void MockVendorTagOps::GetSectionName(uint32_t tag,
+ GetSectionNameCallback callback) {
+ std::move(callback).Run(DoGetSectionName(tag));
+}
+
+void MockVendorTagOps::GetTagName(uint32_t tag, GetTagNameCallback callback) {
+ std::move(callback).Run(DoGetTagName(tag));
+}
+
+void MockVendorTagOps::CloseBindingOnThread() {
+ if (binding_.is_bound()) {
+ binding_.Close();
+ }
+}
+
+void MockVendorTagOps::BindOnThread(base::WaitableEvent* done,
+ cros::mojom::VendorTagOpsRequest request) {
+ binding_.Bind(std::move(request));
+ done->Signal();
+}
+
+} // namespace unittest_internal
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/mock_vendor_tag_ops.h b/chromium/media/capture/video/chromeos/mock_vendor_tag_ops.h
new file mode 100644
index 00000000000..b39a9384a8c
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/mock_vendor_tag_ops.h
@@ -0,0 +1,57 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_MOCK_VENDOR_TAG_OPS_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_MOCK_VENDOR_TAG_OPS_H_
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "base/threading/thread.h"
+#include "media/capture/video/chromeos/mojo/camera_common.mojom.h"
+#include "mojo/public/cpp/bindings/binding.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace media {
+namespace unittest_internal {
+
+class MockVendorTagOps : public cros::mojom::VendorTagOps {
+ public:
+ MockVendorTagOps();
+ ~MockVendorTagOps();
+
+ void Bind(cros::mojom::VendorTagOpsRequest request);
+
+ MOCK_METHOD0(DoGetTagCount, int32_t());
+ void GetTagCount(GetTagCountCallback callback);
+
+ MOCK_METHOD0(DoGetAllTags, std::vector<uint32_t>());
+ void GetAllTags(GetAllTagsCallback callback);
+
+ MOCK_METHOD1(DoGetSectionName, base::Optional<std::string>(uint32_t tag));
+ void GetSectionName(uint32_t tag, GetSectionNameCallback callback);
+
+ MOCK_METHOD1(DoGetTagName, base::Optional<std::string>(uint32_t tag));
+ void GetTagName(uint32_t tag, GetTagNameCallback callback);
+
+ MOCK_METHOD1(DoGetTagType, int32_t(uint32_t tag));
+ void GetTagType(uint32_t tag, GetTagTypeCallback callback) {
+ std::move(callback).Run(DoGetTagType(tag));
+ }
+
+ private:
+ void CloseBindingOnThread();
+
+ void BindOnThread(base::WaitableEvent* done,
+ cros::mojom::VendorTagOpsRequest request);
+
+ base::Thread mock_vendor_tag_ops_thread_;
+ mojo::Binding<cros::mojom::VendorTagOps> binding_;
+};
+
+} // namespace unittest_internal
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_MOCK_VENDOR_TAG_OPS_H_
diff --git a/chromium/media/capture/video/chromeos/mock_video_capture_client.cc b/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
index 2d90502fcb0..04e26a48fba 100644
--- a/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
+++ b/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
@@ -90,6 +90,7 @@ void MockVideoCaptureClient::OnIncomingCapturedBuffer(
void MockVideoCaptureClient::OnIncomingCapturedBufferExt(
Buffer buffer,
const VideoCaptureFormat& format,
+ const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
base::TimeDelta timestamp,
gfx::Rect visible_rect,
diff --git a/chromium/media/capture/video/chromeos/mock_video_capture_client.h b/chromium/media/capture/video/chromeos/mock_video_capture_client.h
index 88039c7e492..7ab44a083a1 100644
--- a/chromium/media/capture/video/chromeos/mock_video_capture_client.h
+++ b/chromium/media/capture/video/chromeos/mock_video_capture_client.h
@@ -64,6 +64,7 @@ class MockVideoCaptureClient : public VideoCaptureDevice::Client {
void OnIncomingCapturedBufferExt(
Buffer buffer,
const VideoCaptureFormat& format,
+ const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
base::TimeDelta timestamp,
gfx::Rect visible_rect,
diff --git a/chromium/media/capture/video/chromeos/mojo/BUILD.gn b/chromium/media/capture/video/chromeos/mojo/BUILD.gn
index ccd75a95f57..43598d190d8 100644
--- a/chromium/media/capture/video/chromeos/mojo/BUILD.gn
+++ b/chromium/media/capture/video/chromeos/mojo/BUILD.gn
@@ -15,6 +15,7 @@ mojom("cros_camera") {
]
deps = [
+ "//components/chromeos_camera/common",
"//media/capture/mojom:image_capture",
"//media/mojo/interfaces",
]
diff --git a/chromium/media/capture/video/chromeos/mojo/camera_common.mojom b/chromium/media/capture/video/chromeos/mojo/camera_common.mojom
index 5e7f1c4330f..65689d61c0f 100644
--- a/chromium/media/capture/video/chromeos/mojo/camera_common.mojom
+++ b/chromium/media/capture/video/chromeos/mojo/camera_common.mojom
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-// Next min version: 2
+// Next min version: 3
module cros.mojom;
@@ -57,11 +57,37 @@ interface CameraModuleCallbacks {
TorchModeStatusChange@1(int32 camera_id, TorchModeStatus new_status);
};
+// VendorTagOps is a translation of the vendor_tag_ops_t API
+// (https://goo.gl/3aLWv3). This structure contains basic functions for
+// enumerating an immutable set of vendor-defined camera metadata tags, and
+// querying static information about their structure/type.
+//
+// Next method ID: 5
+// TODO(hywu): evaluate passing an array of vendor tags along with their
+// section names, tag names, and types
+interface VendorTagOps {
+ // Get the number of vendor tags supported on this platform. Return -1 on
+ // error.
+ GetTagCount@0() => (int32 result);
+
+ // Fill an array with all of the supported vendor tags on this platform.
+ GetAllTags@1() => (array<uint32> tag_array);
+
+ // Get the vendor section name for a vendor-specified entry tag.
+ GetSectionName@2(uint32 tag) => (string? name);
+
+ // Get the tag name for a vendor-specified entry tag.
+ GetTagName@3(uint32 tag) => (string? name);
+
+ // Get tag type for a vendor-specified entry tag. Return -1 on error.
+ GetTagType@4(uint32 tag) => (int32 type);
+};
+
// CameraModule is a translation of the camera_module_t API
// (https://goo.gl/8Hf8S8). CameraModule is the interface to interact with a
// camera HAL to query device info and open camera devices.
//
-// Next method ID: 6
+// Next method ID: 7
interface CameraModule {
// Opens the camera device specified by |camera_id|. On success, the camera
// device is accessible through the |device_ops| returned.
@@ -87,4 +113,10 @@ interface CameraModule {
// one-time operations.
[MinVersion=1]
Init@5() => (int32 result);
+
+ // Get methods to query for vendor extension metadata tag information. The HAL
+ // should fill in all the vendor tag operation methods, or leave ops unchanged
+ // if no vendor tags are defined.
+ [MinVersion=2]
+ GetVendorTagOps@6(VendorTagOps& vendor_tag_ops_request) => ();
};
diff --git a/chromium/media/capture/video/chromeos/mojo/cros_camera_service.mojom b/chromium/media/capture/video/chromeos/mojo/cros_camera_service.mojom
index 0d8b4ff1e72..db693175159 100644
--- a/chromium/media/capture/video/chromeos/mojo/cros_camera_service.mojom
+++ b/chromium/media/capture/video/chromeos/mojo/cros_camera_service.mojom
@@ -6,9 +6,9 @@
module cros.mojom;
+import "components/chromeos_camera/common/jpeg_encode_accelerator.mojom";
import "media/capture/video/chromeos/mojo/camera_common.mojom";
-import "media/mojo/interfaces/jpeg_decode_accelerator.mojom";
-import "media/mojo/interfaces/jpeg_encode_accelerator.mojom";
+import "media/mojo/interfaces/mjpeg_decode_accelerator.mojom";
// The CrOS camera HAL v3 Mojo dispatcher. The dispatcher acts as a proxy and
// waits for the server and the clients to register. There can only be one
@@ -29,7 +29,7 @@ interface CameraHalDispatcher {
// Get JpegDecodeAccelerator from dispatcher.
[MinVersion=1] GetJpegDecodeAccelerator@2(
- media.mojom.JpegDecodeAccelerator& jda_request);
+ media.mojom.MjpegDecodeAccelerator& jda_request);
// Get JpegEncodeAccelerator from dispatcher.
[MinVersion=2] GetJpegEncodeAccelerator@3(
diff --git a/chromium/media/capture/video/chromeos/mojo/cros_image_capture.mojom b/chromium/media/capture/video/chromeos/mojo/cros_image_capture.mojom
index 88b32ef9eb2..53a47fa4d27 100644
--- a/chromium/media/capture/video/chromeos/mojo/cros_image_capture.mojom
+++ b/chromium/media/capture/video/chromeos/mojo/cros_image_capture.mojom
@@ -8,21 +8,26 @@ import "media/capture/mojom/image_capture.mojom";
// Effect that recognized by Chrome OS.
enum Effect {
- PORTRAIT_MODE = 0,
- NO_EFFECT
+ NO_EFFECT = 0,
+ PORTRAIT_MODE = 1,
};
// Interface for Chrome OS specific Image Capture API which supports reprocess
-// mechanism.
+// mechanism. The |source_id| parameter in following methods might not be the
+// actual device id if it is called by renderer. It needs to be
+// translated to the actual video device id to be used in CrosImageCapture
+// implementation.
interface CrosImageCapture {
- // Gets supported effects for current active device.
- GetSupportedEffects() => (array<Effect> supported_effects);
+ // Gets supported effects that recognized by CrOS for device. The |source_id|
+ // might need translation to be actual video device id.
+ GetSupportedEffects(string source_id) => (array<Effect> supported_effects);
// Sets reprocess option to bind with the coming take photo request. When this
// method is called, the reprocess option will be queued. All reprocess
// options in the queue will be consumed when TakePhoto() method in Image
// Capture API is triggered and all the queued reprocess options will be bound
- // to that take photo request.
- SetReprocessOption(Effect effect)
+ // to that take photo request. The |source_id| might need translation to be
+ // actual video device id.
+ SetReprocessOption(string source_id, Effect effect)
=> (int32 status, media.mojom.Blob blob);
}; \ No newline at end of file
diff --git a/chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.cc b/chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.cc
new file mode 100644
index 00000000000..184c47ed45b
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.cc
@@ -0,0 +1,63 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/renderer_facing_cros_image_capture.h"
+
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "base/task/post_task.h"
+#include "media/base/bind_to_current_loop.h"
+
+namespace media {
+
+RendererFacingCrosImageCapture::RendererFacingCrosImageCapture(
+ cros::mojom::CrosImageCapturePtr api_ptr,
+ DeviceIdMappingCallback mapping_callback)
+ : cros_image_capture_(std::move(api_ptr)),
+ mapping_callback_(std::move(mapping_callback)),
+ weak_ptr_factory_(this) {}
+
+RendererFacingCrosImageCapture::~RendererFacingCrosImageCapture() = default;
+
+void RendererFacingCrosImageCapture::GetSupportedEffectsWithRealId(
+ GetSupportedEffectsCallback callback,
+ const base::Optional<std::string>& device_id) {
+ DCHECK(device_id.has_value());
+ cros_image_capture_->GetSupportedEffects(*device_id, std::move(callback));
+}
+
+void RendererFacingCrosImageCapture::SetReprocessOptionWithRealId(
+ cros::mojom::Effect effect,
+ SetReprocessOptionCallback callback,
+ const base::Optional<std::string>& device_id) {
+ DCHECK(device_id.has_value());
+ cros_image_capture_->SetReprocessOption(*device_id, effect,
+ std::move(callback));
+}
+
+void RendererFacingCrosImageCapture::GetSupportedEffects(
+ const std::string& source_id,
+ GetSupportedEffectsCallback callback) {
+ mapping_callback_.Run(
+ source_id,
+ media::BindToCurrentLoop(base::BindOnce(
+ &RendererFacingCrosImageCapture::GetSupportedEffectsWithRealId,
+ weak_ptr_factory_.GetWeakPtr(), std::move(callback))));
+}
+
+void RendererFacingCrosImageCapture::SetReprocessOption(
+ const std::string& source_id,
+ cros::mojom::Effect effect,
+ SetReprocessOptionCallback callback) {
+ mapping_callback_.Run(
+ source_id,
+ media::BindToCurrentLoop(base::BindOnce(
+ &RendererFacingCrosImageCapture::SetReprocessOptionWithRealId,
+ weak_ptr_factory_.GetWeakPtr(), effect, std::move(callback))));
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.h b/chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.h
new file mode 100644
index 00000000000..5eac4f9a1c6
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.h
@@ -0,0 +1,64 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_RENDERER_FACING_CROS_IMAGE_CAPTURE_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_RENDERER_FACING_CROS_IMAGE_CAPTURE_H_
+
+#include <string>
+
+#include "media/capture/capture_export.h"
+#include "media/capture/video/chromeos/mojo/cros_image_capture.mojom.h"
+#include "mojo/public/cpp/bindings/binding_set.h"
+
+namespace media {
+
+// Intermediate layer for communicating from renderer to CrosImageCapture
+// implementation. It will map the source id recognized by renderer to the
+// actual video device id.
+class CAPTURE_EXPORT RendererFacingCrosImageCapture
+ : public cros::mojom::CrosImageCapture {
+ public:
+ using WithRealIdCallback =
+ base::OnceCallback<void(const base::Optional<std::string>&)>;
+ using DeviceIdMappingCallback =
+ base::RepeatingCallback<void(const std::string&, WithRealIdCallback)>;
+
+ // Create an intermediate layer between renderer to the actual
+ // CrosImageCapture implementation. This class should use |api_ptr| to
+ // communicate with the actual CrosImageCapture implementation and use
+ // |mapping_callback| to map the device id for every calls that inputs device
+ // id.
+ RendererFacingCrosImageCapture(cros::mojom::CrosImageCapturePtr api_ptr,
+ DeviceIdMappingCallback mapping_callback);
+ ~RendererFacingCrosImageCapture() override;
+
+ void GetSupportedEffectsWithRealId(
+ GetSupportedEffectsCallback callback,
+ const base::Optional<std::string>& device_id);
+
+ void SetReprocessOptionWithRealId(
+ cros::mojom::Effect effect,
+ SetReprocessOptionCallback callback,
+ const base::Optional<std::string>& device_id);
+
+ // cros::mojom::CrosImageCapture implementations.
+ void GetSupportedEffects(const std::string& source_id,
+ GetSupportedEffectsCallback callback) override;
+ void SetReprocessOption(const std::string& source_id,
+ cros::mojom::Effect effect,
+ SetReprocessOptionCallback callback) override;
+
+ private:
+ cros::mojom::CrosImageCapturePtr cros_image_capture_;
+
+ DeviceIdMappingCallback mapping_callback_;
+
+ base::WeakPtrFactory<RendererFacingCrosImageCapture> weak_ptr_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(RendererFacingCrosImageCapture);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_RENDERER_FACING_CROS_IMAGE_CAPTURE_H_ \ No newline at end of file
diff --git a/chromium/media/capture/video/chromeos/reprocess_manager.cc b/chromium/media/capture/video/chromeos/reprocess_manager.cc
index d226b28fdb2..64a7b2607a6 100644
--- a/chromium/media/capture/video/chromeos/reprocess_manager.cc
+++ b/chromium/media/capture/video/chromeos/reprocess_manager.cc
@@ -2,11 +2,12 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include "media/capture/video/chromeos/reprocess_manager.h"
+
#include <functional>
#include <utility>
#include "media/capture/video/chromeos/camera_metadata_utils.h"
-#include "media/capture/video/chromeos/reprocess_manager.h"
namespace media {
@@ -32,7 +33,7 @@ ReprocessTask::~ReprocessTask() = default;
// static
int ReprocessManager::GetReprocessReturnCode(
cros::mojom::Effect effect,
- cros::mojom::CameraMetadataPtr* metadata) {
+ const cros::mojom::CameraMetadataPtr* metadata) {
if (effect == cros::mojom::Effect::PORTRAIT_MODE) {
auto* portrait_mode_segmentation_result = GetMetadataEntry(
*metadata, static_cast<cros::mojom::CameraMetadataTag>(
@@ -53,6 +54,7 @@ ReprocessManager::~ReprocessManager() {
}
void ReprocessManager::SetReprocessOption(
+ const std::string& device_id,
cros::mojom::Effect effect,
cros::mojom::CrosImageCapture::SetReprocessOptionCallback
reprocess_result_callback) {
@@ -60,45 +62,48 @@ void ReprocessManager::SetReprocessOption(
FROM_HERE,
base::BindOnce(
&ReprocessManager::ReprocessManagerImpl::SetReprocessOption,
- base::Unretained(impl.get()), effect,
+ base::Unretained(impl.get()), device_id, effect,
std::move(reprocess_result_callback)));
}
void ReprocessManager::ConsumeReprocessOptions(
+ const std::string& device_id,
media::mojom::ImageCapture::TakePhotoCallback take_photo_callback,
base::OnceCallback<void(ReprocessTaskQueue)> consumption_callback) {
sequenced_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(
&ReprocessManager::ReprocessManagerImpl::ConsumeReprocessOptions,
- base::Unretained(impl.get()), std::move(take_photo_callback),
- std::move(consumption_callback)));
+ base::Unretained(impl.get()), device_id,
+ std::move(take_photo_callback), std::move(consumption_callback)));
}
-void ReprocessManager::FlushReprocessOptions() {
+void ReprocessManager::FlushReprocessOptions(const std::string& device_id) {
sequenced_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(
&ReprocessManager::ReprocessManagerImpl::FlushReprocessOptions,
- base::Unretained(impl.get())));
+ base::Unretained(impl.get()), device_id));
}
void ReprocessManager::GetSupportedEffects(
+ const std::string& device_id,
GetSupportedEffectsCallback callback) {
sequenced_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(
&ReprocessManager::ReprocessManagerImpl::GetSupportedEffects,
- base::Unretained(impl.get()), std::move(callback)));
+ base::Unretained(impl.get()), device_id, std::move(callback)));
}
void ReprocessManager::UpdateSupportedEffects(
+ const std::string& device_id,
const cros::mojom::CameraMetadataPtr& metadata) {
sequenced_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(
&ReprocessManager::ReprocessManagerImpl::UpdateSupportedEffects,
- base::Unretained(impl.get()), std::cref(metadata)));
+ base::Unretained(impl.get()), device_id, metadata.Clone()));
}
ReprocessManager::ReprocessManagerImpl::ReprocessManagerImpl() {}
@@ -106,6 +111,7 @@ ReprocessManager::ReprocessManagerImpl::ReprocessManagerImpl() {}
ReprocessManager::ReprocessManagerImpl::~ReprocessManagerImpl() = default;
void ReprocessManager::ReprocessManagerImpl::SetReprocessOption(
+ const std::string& device_id,
cros::mojom::Effect effect,
cros::mojom::CrosImageCapture::SetReprocessOptionCallback
reprocess_result_callback) {
@@ -126,10 +132,11 @@ void ReprocessManager::ReprocessManagerImpl::SetReprocessOption(
task.extra_metadata.push_back(std::move(e));
}
- reprocess_task_queue_.push(std::move(task));
+ reprocess_task_queue_map_[device_id].push(std::move(task));
}
void ReprocessManager::ReprocessManagerImpl::ConsumeReprocessOptions(
+ const std::string& device_id,
media::mojom::ImageCapture::TakePhotoCallback take_photo_callback,
base::OnceCallback<void(ReprocessTaskQueue)> consumption_callback) {
ReprocessTaskQueue result_task_queue;
@@ -140,7 +147,7 @@ void ReprocessManager::ReprocessManagerImpl::ConsumeReprocessOptions(
base::BindOnce(&OnStillCaptureDone, std::move(take_photo_callback));
result_task_queue.push(std::move(still_capture_task));
- auto& task_queue = reprocess_task_queue_;
+ auto& task_queue = reprocess_task_queue_map_[device_id];
while (!task_queue.empty()) {
result_task_queue.push(std::move(task_queue.front()));
task_queue.pop();
@@ -148,26 +155,31 @@ void ReprocessManager::ReprocessManagerImpl::ConsumeReprocessOptions(
std::move(consumption_callback).Run(std::move(result_task_queue));
}
-void ReprocessManager::ReprocessManagerImpl::FlushReprocessOptions() {
+void ReprocessManager::ReprocessManagerImpl::FlushReprocessOptions(
+ const std::string& device_id) {
auto empty_queue = ReprocessTaskQueue();
- reprocess_task_queue_.swap(empty_queue);
+ reprocess_task_queue_map_[device_id].swap(empty_queue);
}
void ReprocessManager::ReprocessManagerImpl::GetSupportedEffects(
+ const std::string& device_id,
GetSupportedEffectsCallback callback) {
std::move(callback).Run(
- base::flat_set<cros::mojom::Effect>(supported_effects_));
+ base::flat_set<cros::mojom::Effect>(supported_effects_map_[device_id]));
}
void ReprocessManager::ReprocessManagerImpl::UpdateSupportedEffects(
- const cros::mojom::CameraMetadataPtr& metadata) {
+ const std::string& device_id,
+ const cros::mojom::CameraMetadataPtr metadata) {
const cros::mojom::CameraMetadataEntryPtr* portrait_mode =
media::GetMetadataEntry(
metadata,
static_cast<cros::mojom::CameraMetadataTag>(kPortraitModeVendorKey));
- supported_effects_.clear();
+ supported_effects_map_[device_id].clear();
+ supported_effects_map_[device_id].insert(cros::mojom::Effect::NO_EFFECT);
if (portrait_mode) {
- supported_effects_.insert(cros::mojom::Effect::PORTRAIT_MODE);
+ supported_effects_map_[device_id].insert(
+ cros::mojom::Effect::PORTRAIT_MODE);
}
}
diff --git a/chromium/media/capture/video/chromeos/reprocess_manager.h b/chromium/media/capture/video/chromeos/reprocess_manager.h
index 7150819251d..a9e2050b2ba 100644
--- a/chromium/media/capture/video/chromeos/reprocess_manager.h
+++ b/chromium/media/capture/video/chromeos/reprocess_manager.h
@@ -7,6 +7,7 @@
#include <queue>
#include <set>
+#include <string>
#include <vector>
#include "base/containers/flat_set.h"
@@ -32,6 +33,7 @@ struct ReprocessTask {
using ReprocessTaskQueue = base::queue<ReprocessTask>;
+// TODO(shik): Get the keys from VendorTagOps by names instead (b/130774415).
constexpr uint32_t kPortraitModeVendorKey = 0x80000000;
constexpr uint32_t kPortraitModeSegmentationResultVendorKey = 0x80000001;
constexpr int32_t kReprocessSuccess = 0;
@@ -53,35 +55,43 @@ class CAPTURE_EXPORT ReprocessManager {
~ReprocessManagerImpl();
void SetReprocessOption(
+ const std::string& device_id,
cros::mojom::Effect effect,
cros::mojom::CrosImageCapture::SetReprocessOptionCallback
reprocess_result_callback);
void ConsumeReprocessOptions(
+ const std::string& device_id,
media::mojom::ImageCapture::TakePhotoCallback take_photo_callback,
base::OnceCallback<void(ReprocessTaskQueue)> consumption_callback);
- void FlushReprocessOptions();
+ void FlushReprocessOptions(const std::string& device_id);
- void GetSupportedEffects(GetSupportedEffectsCallback callback);
+ void GetSupportedEffects(const std::string& device_id,
+ GetSupportedEffectsCallback callback);
- void UpdateSupportedEffects(const cros::mojom::CameraMetadataPtr& metadata);
+ void UpdateSupportedEffects(const std::string& device_id,
+ const cros::mojom::CameraMetadataPtr metadata);
private:
- base::queue<ReprocessTask> reprocess_task_queue_;
- base::flat_set<cros::mojom::Effect> supported_effects_;
+ base::flat_map<std::string, base::queue<ReprocessTask>>
+ reprocess_task_queue_map_;
+ base::flat_map<std::string, base::flat_set<cros::mojom::Effect>>
+ supported_effects_map_;
DISALLOW_COPY_AND_ASSIGN(ReprocessManagerImpl);
};
- static int GetReprocessReturnCode(cros::mojom::Effect effect,
- cros::mojom::CameraMetadataPtr* metadata);
+ static int GetReprocessReturnCode(
+ cros::mojom::Effect effect,
+ const cros::mojom::CameraMetadataPtr* metadata);
ReprocessManager();
~ReprocessManager();
- // Sets the reprocess option for given effect. Each reprocess
+ // Sets the reprocess option for given device id and effect. Each reprocess
// option has a corressponding callback.
void SetReprocessOption(
+ const std::string& device_id,
cros::mojom::Effect effect,
cros::mojom::CrosImageCapture::SetReprocessOptionCallback
reprocess_result_callback);
@@ -89,18 +99,21 @@ class CAPTURE_EXPORT ReprocessManager {
// Consumes all ReprocessTasks in the queue. A default NO_EFFECT task will be
// added on the top of the result queue.
void ConsumeReprocessOptions(
+ const std::string& device_id,
media::mojom::ImageCapture::TakePhotoCallback take_photo_callback,
base::OnceCallback<void(ReprocessTaskQueue)> consumption_callback);
- // Clears all remaining ReprocessTasks in the queue.
- void FlushReprocessOptions();
+ // Clears all remaining ReprocessTasks in the queue for given device id.
+ void FlushReprocessOptions(const std::string& device_id);
// Gets supported effects for current active device.
- void GetSupportedEffects(GetSupportedEffectsCallback callback);
+ void GetSupportedEffects(const std::string& device_id,
+ GetSupportedEffectsCallback callback);
- // Updates supported effects for given active device. This method should be
- // triggered whenever the camera characteristics is updated.
- void UpdateSupportedEffects(const cros::mojom::CameraMetadataPtr& metadata);
+ // Updates supported effects for given device. This method should be triggered
+ // whenever the camera characteristics is updated.
+ void UpdateSupportedEffects(const std::string& device_id,
+ const cros::mojom::CameraMetadataPtr& metadata);
private:
scoped_refptr<base::SequencedTaskRunner> sequenced_task_runner_;
diff --git a/chromium/media/capture/video/chromeos/request_builder.cc b/chromium/media/capture/video/chromeos/request_builder.cc
index b55a93a92bc..de7de42e984 100644
--- a/chromium/media/capture/video/chromeos/request_builder.cc
+++ b/chromium/media/capture/video/chromeos/request_builder.cc
@@ -15,17 +15,24 @@ namespace media {
RequestBuilder::RequestBuilder(CameraDeviceContext* device_context,
RequestBufferCallback request_buffer_callback)
: device_context_(device_context),
+ frame_number_(0),
request_buffer_callback_(std::move(request_buffer_callback)) {}
RequestBuilder::~RequestBuilder() = default;
cros::mojom::Camera3CaptureRequestPtr RequestBuilder::BuildRequest(
std::set<StreamType> stream_types,
- cros::mojom::CameraMetadataPtr settings) {
+ cros::mojom::CameraMetadataPtr settings,
+ base::Optional<uint64_t> input_buffer_id) {
auto capture_request = cros::mojom::Camera3CaptureRequest::New();
for (StreamType stream_type : stream_types) {
- base::Optional<BufferInfo> buffer_info =
- request_buffer_callback_.Run(stream_type);
+ base::Optional<BufferInfo> buffer_info;
+ if (IsInputStream(stream_type)) {
+ DCHECK(input_buffer_id.has_value());
+ buffer_info = request_buffer_callback_.Run(stream_type, input_buffer_id);
+ } else {
+ buffer_info = request_buffer_callback_.Run(stream_type, {});
+ }
if (!buffer_info) {
return capture_request;
}
@@ -33,10 +40,15 @@ cros::mojom::Camera3CaptureRequestPtr RequestBuilder::BuildRequest(
auto buffer_handle = CreateCameraBufferHandle(stream_type, *buffer_info);
auto stream_buffer =
CreateStreamBuffer(stream_type, buffer_id, std::move(buffer_handle));
- capture_request->output_buffers.push_back(std::move(stream_buffer));
+ if (IsInputStream(stream_type)) {
+ capture_request->input_buffer = std::move(stream_buffer);
+ } else {
+ capture_request->output_buffers.push_back(std::move(stream_buffer));
+ }
}
-
capture_request->settings = std::move(settings);
+ capture_request->frame_number = frame_number_++;
+
return capture_request;
}
@@ -55,16 +67,10 @@ cros::mojom::CameraBufferHandlePtr RequestBuilder::CreateCameraBufferHandle(
buffer_info.gpu_memory_buffer->CloneHandle().native_pixmap_handle;
size_t num_planes = native_pixmap_handle.planes.size();
- DCHECK_EQ(num_planes, native_pixmap_handle.fds.size());
- // Take ownership of fds.
- std::vector<base::ScopedFD> fds(num_planes);
- for (size_t i = 0; i < num_planes; ++i)
- fds[i] = base::ScopedFD(native_pixmap_handle.fds[i].fd);
-
std::vector<StreamCaptureInterface::Plane> planes(num_planes);
for (size_t i = 0; i < num_planes; ++i) {
- mojo::ScopedHandle mojo_fd =
- mojo::WrapPlatformHandle(mojo::PlatformHandle(std::move(fds[i])));
+ mojo::ScopedHandle mojo_fd = mojo::WrapPlatformHandle(
+ mojo::PlatformHandle(std::move(native_pixmap_handle.planes[i].fd)));
if (!mojo_fd.is_valid()) {
device_context_->SetErrorState(
media::VideoCaptureError::
diff --git a/chromium/media/capture/video/chromeos/request_builder.h b/chromium/media/capture/video/chromeos/request_builder.h
index 6dbf9010a20..b67d57063e3 100644
--- a/chromium/media/capture/video/chromeos/request_builder.h
+++ b/chromium/media/capture/video/chromeos/request_builder.h
@@ -32,8 +32,8 @@ struct BufferInfo {
// HAL process.
class CAPTURE_EXPORT RequestBuilder {
public:
- using RequestBufferCallback =
- base::RepeatingCallback<base::Optional<BufferInfo>(StreamType)>;
+ using RequestBufferCallback = base::RepeatingCallback<
+ base::Optional<BufferInfo>(StreamType, base::Optional<uint64_t>)>;
RequestBuilder(CameraDeviceContext* device_context,
// Callback to request buffer from StreamBufferManager. Having
@@ -42,10 +42,12 @@ class CAPTURE_EXPORT RequestBuilder {
RequestBufferCallback request_buffer_callback);
~RequestBuilder();
- // Builds a capture request by given streams and settings.
+ // Builds a capture request by given streams and settings. The
+ // |input_buffer_id| is used for reprocess request.
cros::mojom::Camera3CaptureRequestPtr BuildRequest(
std::set<StreamType> stream_types,
- cros::mojom::CameraMetadataPtr settings);
+ cros::mojom::CameraMetadataPtr settings,
+ base::Optional<uint64_t> input_buffer_id);
private:
cros::mojom::CameraBufferHandlePtr CreateCameraBufferHandle(
@@ -59,6 +61,9 @@ class CAPTURE_EXPORT RequestBuilder {
CameraDeviceContext* device_context_;
+ // The frame number. Increased by one for each capture request sent.
+ uint32_t frame_number_;
+
RequestBufferCallback request_buffer_callback_;
};
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/request_manager.cc b/chromium/media/capture/video/chromeos/request_manager.cc
index 3a165f79822..3f3331b0b34 100644
--- a/chromium/media/capture/video/chromeos/request_manager.cc
+++ b/chromium/media/capture/video/chromeos/request_manager.cc
@@ -5,6 +5,7 @@
#include "media/capture/video/chromeos/request_manager.h"
#include <sync/sync.h>
+#include <initializer_list>
#include <map>
#include <set>
#include <string>
@@ -17,6 +18,7 @@
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
+#include "media/capture/video/chromeos/mojo/cros_image_capture.mojom.h"
#include "mojo/public/cpp/platform/platform_handle.h"
#include "mojo/public/cpp/system/platform_handle.h"
@@ -25,8 +27,15 @@ namespace media {
namespace {
constexpr uint32_t kUndefinedFrameNumber = 0xFFFFFFFF;
+
+constexpr std::initializer_list<StreamType> kYUVReprocessStreams = {
+ StreamType::kYUVInput, StreamType::kJpegOutput};
} // namespace
+ReprocessTasksInfo::ReprocessTasksInfo() = default;
+
+ReprocessTasksInfo::~ReprocessTasksInfo() = default;
+
RequestManager::RequestManager(
cros::mojom::Camera3CallbackOpsRequest callback_ops_request,
std::unique_ptr<StreamCaptureInterface> capture_interface,
@@ -43,7 +52,6 @@ RequestManager::RequestManager(
blobify_callback_(std::move(blobify_callback)),
ipc_task_runner_(std::move(ipc_task_runner)),
capturing_(false),
- frame_number_(0),
partial_result_count_(1),
first_frame_shutter_time_(base::TimeTicks()),
weak_ptr_factory_(this) {
@@ -94,6 +102,17 @@ cros::mojom::Camera3StreamPtr RequestManager::GetStreamConfiguration(
return stream_buffer_manager_->GetStreamConfiguration(stream_type);
}
+bool RequestManager::HasStreamsConfiguredForTakePhoto() {
+ if (stream_buffer_manager_->IsReprocessSupported()) {
+ return stream_buffer_manager_->HasStreamsConfigured(
+ {StreamType::kPreviewOutput, StreamType::kJpegOutput,
+ StreamType::kYUVInput, StreamType::kYUVOutput});
+ } else {
+ return stream_buffer_manager_->HasStreamsConfigured(
+ {StreamType::kPreviewOutput, StreamType::kJpegOutput});
+ }
+}
+
void RequestManager::StartPreview(
cros::mojom::CameraMetadataPtr preview_settings) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
@@ -116,9 +135,24 @@ void RequestManager::StopPreview(base::OnceCallback<void(int32_t)> callback) {
}
void RequestManager::TakePhoto(cros::mojom::CameraMetadataPtr settings,
- VideoCaptureDevice::TakePhotoCallback callback) {
+ ReprocessTaskQueue reprocess_tasks) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ if (stream_buffer_manager_->IsReprocessSupported()) {
+ pending_reprocess_tasks_queue_.push(std::move(reprocess_tasks));
+ } else {
+ // There should be only one reprocess task in the queue which is format
+ // conversion task.
+ DCHECK_EQ(reprocess_tasks.size(), 1lu);
+
+ take_photo_callback_queue_.push(
+ std::move(reprocess_tasks.front().callback));
+ }
+ take_photo_settings_queue_.push(std::move(settings));
+}
+
+void RequestManager::SetJpegOrientation(
+ cros::mojom::CameraMetadataPtr* settings) {
std::vector<uint8_t> frame_orientation(sizeof(int32_t));
*reinterpret_cast<int32_t*>(frame_orientation.data()) =
base::checked_cast<int32_t>(device_context_->GetCameraFrameOrientation());
@@ -128,10 +162,7 @@ void RequestManager::TakePhoto(cros::mojom::CameraMetadataPtr settings,
e->type = cros::mojom::EntryType::TYPE_INT32;
e->count = 1;
e->data = std::move(frame_orientation);
- AddOrUpdateMetadataEntry(&settings, std::move(e));
-
- oneshot_request_settings_.push(std::move(settings));
- take_photo_callback_queue_.push(std::move(callback));
+ AddOrUpdateMetadataEntry(settings, std::move(e));
}
void RequestManager::PrepareCaptureRequest() {
@@ -140,57 +171,165 @@ void RequestManager::PrepareCaptureRequest() {
if (!capturing_) {
return;
}
+
+ // There are two types of devices, each has several possible combinations of
+ // streams.
+ //
+ // For device with reprocess capability:
+ // 1. Preview
+ // 2. Capture (YuvOutput)
+ // 3. Preview + Capture (YuvOutput)
+ // 4. Reprocess (YuvInput + BlobOutput)
+ //
+ // For device without reprocess capability:
+ // 1. Preview
+ // 2. Capture (BlobOutput)
+ // 3. Preview + Capture (BlobOutput)
std::set<StreamType> stream_types;
cros::mojom::CameraMetadataPtr settings;
-
- // Reqular request should always have repeating request if the preview is
- // on.
- stream_types.insert(StreamType::kPreview);
- if (!stream_buffer_manager_->HasFreeBuffers(stream_types)) {
- return;
- }
- bool has_still_capture_streams = false;
- if (!oneshot_request_settings_.empty() &&
- stream_buffer_manager_->HasFreeBuffers({StreamType::kStillCapture})) {
- stream_types.insert(StreamType::kStillCapture);
- settings = std::move(oneshot_request_settings_.front());
- oneshot_request_settings_.pop();
- has_still_capture_streams = true;
- } else {
- settings = repeating_request_settings_.Clone();
- }
-
- auto capture_request = request_builder_->BuildRequest(std::move(stream_types),
- std::move(settings));
- if (has_still_capture_streams) {
- SendCaptureRequest(std::move(capture_request),
- std::move(take_photo_callback_queue_.front()));
- take_photo_callback_queue_.pop();
- } else {
- SendCaptureRequest(std::move(capture_request), base::DoNothing());
+ TakePhotoCallback callback = base::DoNothing();
+ base::Optional<uint64_t> input_buffer_id;
+ cros::mojom::Effect reprocess_effect = cros::mojom::Effect::NO_EFFECT;
+
+ bool is_reprocess_request = false;
+ bool is_preview_request = false;
+ bool is_oneshot_request = false;
+
+ // First, check if there are pending reprocess tasks.
+ is_reprocess_request = TryPrepareReprocessRequest(
+ &stream_types, &settings, &callback, &input_buffer_id, &reprocess_effect);
+
+ // If there is no pending reprocess task, then check if there are pending
+ // one-shot requests. And also try to put preview in the request.
+ if (!is_reprocess_request) {
+ is_preview_request = TryPreparePreviewRequest(&stream_types, &settings);
+
+ // Order matters here. If the preview request and oneshot request are both
+ // added in single capture request, the settings will be overridden by the
+ // later.
+ is_oneshot_request =
+ TryPrepareOneShotRequest(&stream_types, &settings, &callback);
}
-}
-void RequestManager::SendCaptureRequest(
- cros::mojom::Camera3CaptureRequestPtr capture_request,
- VideoCaptureDevice::TakePhotoCallback take_photo_callback) {
- DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- if (!capturing_) {
+ if (!is_reprocess_request && !is_oneshot_request && !is_preview_request) {
return;
}
- CaptureResult& pending_result = pending_results_[frame_number_];
- pending_result.still_capture_callback = std::move(take_photo_callback);
+ auto capture_request = request_builder_->BuildRequest(
+ std::move(stream_types), std::move(settings), input_buffer_id);
+
+ CaptureResult& pending_result =
+ pending_results_[capture_request->frame_number];
pending_result.unsubmitted_buffer_count =
capture_request->output_buffers.size();
+ pending_result.input_buffer_id = input_buffer_id;
+ pending_result.reprocess_effect = reprocess_effect;
+ pending_result.still_capture_callback = std::move(callback);
+
+ // For reprocess supported devices, bind the ReprocessTaskQueue with this
+ // frame number. Once the shot result is returned, we will rebind the
+ // ReprocessTaskQueue with the id of YUV buffer which contains the result.
+ if (is_oneshot_request && stream_buffer_manager_->IsReprocessSupported() &&
+ !pending_reprocess_tasks_queue_.empty()) {
+ frame_number_reprocess_tasks_map_[capture_request->frame_number] =
+ std::move(pending_reprocess_tasks_queue_.front());
+ pending_reprocess_tasks_queue_.pop();
+ }
UpdateCaptureSettings(&capture_request->settings);
- capture_request->frame_number = frame_number_;
capture_interface_->ProcessCaptureRequest(
std::move(capture_request),
- base::BindOnce(&RequestManager::OnProcessedCaptureRequest,
- weak_ptr_factory_.GetWeakPtr()));
- frame_number_++;
+ base::BindOnce(&RequestManager::OnProcessedCaptureRequest, GetWeakPtr()));
+}
+
+bool RequestManager::TryPrepareReprocessRequest(
+ std::set<StreamType>* stream_types,
+ cros::mojom::CameraMetadataPtr* settings,
+ TakePhotoCallback* callback,
+ base::Optional<uint64_t>* input_buffer_id,
+ cros::mojom::Effect* reprocess_effect) {
+ if (buffer_id_reprocess_tasks_map_.empty() ||
+ !stream_buffer_manager_->HasFreeBuffers(kYUVReprocessStreams)) {
+ return false;
+ }
+
+ // Consume reprocess task.
+ ReprocessTaskQueue* reprocess_task_queue;
+ for (auto& it : buffer_id_reprocess_tasks_map_) {
+ if (processing_buffer_ids_.count(it.first) == 0) {
+ *input_buffer_id = it.first;
+ reprocess_task_queue = &it.second;
+ break;
+ }
+ }
+
+ if (!*input_buffer_id) {
+ return false;
+ }
+
+ ReprocessTask task = std::move(reprocess_task_queue->front());
+ reprocess_task_queue->pop();
+
+ stream_types->insert(kYUVReprocessStreams);
+ // Prepare metadata by adding extra metadata.
+ *settings = repeating_request_settings_.Clone();
+ SetJpegOrientation(settings);
+ for (auto& metadata : task.extra_metadata) {
+ AddOrUpdateMetadataEntry(settings, std::move(metadata));
+ }
+ *callback = std::move(task.callback);
+ *reprocess_effect = task.effect;
+ processing_buffer_ids_.insert(**input_buffer_id);
+
+ // Remove the mapping from map if all tasks consumed.
+ if (reprocess_task_queue->empty()) {
+ buffer_id_reprocess_tasks_map_.erase(**input_buffer_id);
+ }
+ return true;
+}
+
+bool RequestManager::TryPreparePreviewRequest(
+ std::set<StreamType>* stream_types,
+ cros::mojom::CameraMetadataPtr* settings) {
+ if (!stream_buffer_manager_->HasFreeBuffers({StreamType::kPreviewOutput})) {
+ return false;
+ }
+
+ stream_types->insert({StreamType::kPreviewOutput});
+ *settings = repeating_request_settings_.Clone();
+ return true;
+}
+
+bool RequestManager::TryPrepareOneShotRequest(
+ std::set<StreamType>* stream_types,
+ cros::mojom::CameraMetadataPtr* settings,
+ TakePhotoCallback* callback) {
+ if (stream_buffer_manager_->IsReprocessSupported()) {
+ // For devices that support reprocess, fill the frame data in YUV buffer and
+ // reprocess on that YUV buffer.
+ if (take_photo_settings_queue_.empty() ||
+ !stream_buffer_manager_->HasFreeBuffers({StreamType::kYUVOutput})) {
+ return false;
+ }
+ stream_types->insert({StreamType::kYUVOutput});
+ *settings = std::move(take_photo_settings_queue_.front());
+ } else {
+ // For devices that do not support reprocess, fill the frame data in BLOB
+ // buffer and fill the callback.
+ if (take_photo_settings_queue_.empty() ||
+ take_photo_callback_queue_.empty() ||
+ !stream_buffer_manager_->HasFreeBuffers({StreamType::kJpegOutput})) {
+ return false;
+ }
+ stream_types->insert({StreamType::kJpegOutput});
+ *callback = std::move(take_photo_callback_queue_.front());
+ take_photo_callback_queue_.pop();
+
+ *settings = std::move(take_photo_settings_queue_.front());
+ SetJpegOrientation(settings);
+ }
+ take_photo_settings_queue_.pop();
+ return true;
}
void RequestManager::OnProcessedCaptureRequest(int32_t result) {
@@ -517,16 +656,17 @@ void RequestManager::SubmitCaptureResult(
}
}
+ bool should_release_buffer = true;
// Deliver the captured data to client.
if (stream_buffer->status ==
cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_OK) {
gfx::GpuMemoryBuffer* buffer =
stream_buffer_manager_->GetBufferById(stream_type, buffer_id);
- if (stream_type == StreamType::kPreview) {
+ if (stream_type == StreamType::kPreviewOutput) {
device_context_->SubmitCapturedData(
buffer, stream_buffer_manager_->GetStreamCaptureFormat(stream_type),
pending_result.reference_time, pending_result.timestamp);
- } else if (stream_type == StreamType::kStillCapture) {
+ } else if (stream_type == StreamType::kJpegOutput) {
DCHECK(pending_result.still_capture_callback);
const Camera3JpegBlob* header = reinterpret_cast<Camera3JpegBlob*>(
reinterpret_cast<uintptr_t>(buffer->memory(0)) +
@@ -543,14 +683,43 @@ void RequestManager::SubmitCaptureResult(
reinterpret_cast<uint8_t*>(buffer->memory(0)), header->jpeg_size,
stream_buffer_manager_->GetStreamCaptureFormat(stream_type), 0);
if (blob) {
- std::move(pending_result.still_capture_callback).Run(std::move(blob));
+ int task_status = kReprocessSuccess;
+ if (stream_buffer_manager_->IsReprocessSupported()) {
+ task_status = ReprocessManager::GetReprocessReturnCode(
+ pending_result.reprocess_effect, &pending_result.metadata);
+ }
+ std::move(pending_result.still_capture_callback)
+ .Run(task_status, std::move(blob));
} else {
// TODO(wtlee): If it is fatal, we should set error state here.
LOG(ERROR) << "Failed to blobify the captured JPEG image";
}
+
+ if (pending_result.input_buffer_id) {
+ // Remove the id from processing list to run next reprocess task.
+ processing_buffer_ids_.erase(*pending_result.input_buffer_id);
+
+ // If all reprocess tasks are done for this buffer, release the buffer.
+ if (!base::ContainsKey(buffer_id_reprocess_tasks_map_,
+ *pending_result.input_buffer_id)) {
+ stream_buffer_manager_->ReleaseBuffer(
+ StreamType::kYUVOutput, *pending_result.input_buffer_id);
+ }
+ }
+ } else if (stream_type == StreamType::kYUVOutput) {
+ buffer_id_reprocess_tasks_map_[buffer_id] =
+ std::move(frame_number_reprocess_tasks_map_[frame_number]);
+ frame_number_reprocess_tasks_map_.erase(frame_number);
+
+ // Don't release the buffer since we will need it as input buffer for
+ // reprocessing. We will release it until all reprocess tasks for this
+ // buffer are done.
+ should_release_buffer = false;
}
}
- stream_buffer_manager_->ReleaseBuffer(stream_type, buffer_id);
+ if (should_release_buffer) {
+ stream_buffer_manager_->ReleaseBuffer(stream_type, buffer_id);
+ }
pending_result.unsubmitted_buffer_count--;
if (pending_result.unsubmitted_buffer_count == 0) {
@@ -561,8 +730,8 @@ void RequestManager::SubmitCaptureResult(
PrepareCaptureRequest();
}
-size_t RequestManager::GetNumberOfStreams() {
- return stream_buffer_manager_->GetNumberOfStreams();
+base::WeakPtr<RequestManager> RequestManager::GetWeakPtr() {
+ return weak_ptr_factory_.GetWeakPtr();
}
void RequestManager::AddResultMetadataObserver(
diff --git a/chromium/media/capture/video/chromeos/request_manager.h b/chromium/media/capture/video/chromeos/request_manager.h
index fb9110b419a..d6532c7bef1 100644
--- a/chromium/media/capture/video/chromeos/request_manager.h
+++ b/chromium/media/capture/video/chromeos/request_manager.h
@@ -10,14 +10,14 @@
#include <memory>
#include <queue>
#include <set>
-#include <unordered_map>
-#include <unordered_set>
#include <vector>
#include "base/memory/weak_ptr.h"
+#include "base/optional.h"
#include "media/capture/mojom/image_capture.mojom.h"
#include "media/capture/video/chromeos/camera_device_delegate.h"
#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
+#include "media/capture/video/chromeos/reprocess_manager.h"
#include "media/capture/video/chromeos/request_builder.h"
#include "media/capture/video/chromeos/stream_buffer_manager.h"
#include "media/capture/video_capture_types.h"
@@ -36,7 +36,18 @@ struct Camera3JpegBlob {
uint32_t jpeg_size;
};
-static const int kMaxConfiguredStreams = 2;
+// Minimum configured streams should at least contain kPreviewOutput.
+constexpr int32_t kMinConfiguredStreams = 1;
+
+// Maximum configured streams could contain two optional YUV streams.
+constexpr int32_t kMaxConfiguredStreams = 4;
+
+struct ReprocessTasksInfo {
+ ReprocessTasksInfo();
+ ~ReprocessTasksInfo();
+ uint64_t input_buffer_id;
+ ReprocessTaskQueue task_queue;
+};
// Interface that provides API to let Camera3AController to update the metadata
// that will be sent with capture request.
@@ -78,6 +89,8 @@ class CAPTURE_EXPORT RequestManager final
const uint32_t bytesused,
const VideoCaptureFormat& capture_format,
int screen_rotation)>;
+ using TakePhotoCallback =
+ base::OnceCallback<void(int status, media::mojom::BlobPtr blob_result)>;
// CaptureResult is used to hold the pending capture results for each frame.
struct CaptureResult {
@@ -103,7 +116,12 @@ class CAPTURE_EXPORT RequestManager final
size_t unsubmitted_buffer_count;
// The callback used to return the captured still capture JPEG buffer. Set
// if and only if the capture request was sent with a still capture buffer.
- VideoCaptureDevice::TakePhotoCallback still_capture_callback;
+ TakePhotoCallback still_capture_callback;
+ // The reprocess effect that this capture request is used for. Will be set
+ // to NO_EFFECT if it is not a reprocess request.
+ cros::mojom::Effect reprocess_effect;
+ // The input buffer id for this capture request.
+ base::Optional<uint64_t> input_buffer_id;
};
RequestManager(cros::mojom::Camera3CallbackOpsRequest callback_ops_request,
@@ -123,26 +141,33 @@ class CAPTURE_EXPORT RequestManager final
cros::mojom::Camera3StreamPtr GetStreamConfiguration(StreamType stream_type);
+ bool HasStreamsConfiguredForTakePhoto();
+
// StartPreview is the entry point to starting the video capture. The way
// the video capture loop works is:
//
- // (1) Preparing capture request by mixing repeating request with still
- // capture request if it exists. And build the capture request by
+ // (1) Preparing capture request by mixing preview request, one-shot request
+ // and reprocess request if they exist, and build the capture request by
// RequestBuilder.
- // (2) Once the capture request is built, it triggers SendCaptureRequest() to
- // send the capture request and it will go back to (1) to generate next
- // request.
+ // (2) Once the capture request is built, it sends the capture request and
+ // it will go back to (1) to generate next capture request.
// (3) The camera HAL returns the shutter time of a capture request through
- // Notify, and the filled buffer through ProcessCaptureResult.
- // (4) Once all the result metadata are collected, TrySubmitPendingBuffers()
- // is passed and trigger SubmitCaptureResult() to deliver the filled
- // buffer to Chrome. After the buffer is consumed by Chrome it is
- // enqueued back to the free buffer queue. Goto (1) to start another
- // capture loop.
+ // Notify(), and the filled buffer through ProcessCaptureResult().
+ // (4) Once all the result metadata are collected, it would pass
+ // TrySubmitPendingBuffers() and SubmitCaptureResult() will be triggered
+ // to deliver the filled buffer to Chrome. After the buffer is consumed
+ // by Chrome it is enqueued back to the free buffer queue. Goto (1) to
+ // start another capture loop.
//
- // When TakePhoto() is called, an additional BLOB buffer is queued in step (2)
- // to let the HAL fill the still capture JPEG image. When the JPEG image is
- // returned in (4), it's passed to upper layer through the TakePhotoCallback.
+ // When TakePhoto() is called, an additional YUV buffer is queued in step (2)
+ // to let the HAL fill the photo result in YUV format. If it is a regular
+ // capture, only one reprocess task will be added into the queue which asks
+ // HAL to convert YUV photo to JPEG format. If it is a request with
+ // special effect (e.g. Portrait mode shot), there will be more than one
+ // reprocess task added in the queue and it will be processed sequentially.
+ //
+ // For every reprocess task, there is a corresponding callback which will
+ // return the photo result in JPEG format.
void StartPreview(cros::mojom::CameraMetadataPtr preview_settings);
// Stops the capture loop. After StopPreview is called |callback_ops_| is
@@ -151,9 +176,9 @@ class CAPTURE_EXPORT RequestManager final
void StopPreview(base::OnceCallback<void(int32_t)> callback);
void TakePhoto(cros::mojom::CameraMetadataPtr settings,
- VideoCaptureDevice::TakePhotoCallback callback);
+ ReprocessTaskQueue reprocess_tasks);
- size_t GetNumberOfStreams();
+ base::WeakPtr<RequestManager> GetWeakPtr();
// CaptureMetadataDispatcher implementations.
void AddResultMetadataObserver(ResultMetadataObserver* observer) override;
@@ -177,15 +202,26 @@ class CAPTURE_EXPORT RequestManager final
private:
friend class RequestManagerTest;
- // Prepares a capture request by mixing repeating request with still capture
- // request if it exists.
+ // Puts Jpeg orientation information into the metadata.
+ void SetJpegOrientation(cros::mojom::CameraMetadataPtr* settings);
+
+ // Prepares a capture request by mixing repeating request with one-shot
+ // request if it exists. If there are reprocess requests in the queue, just
+ // build the reprocess capture request without mixing the repeating request.
void PrepareCaptureRequest();
- // Decorates the frame number and settings for the capture request and send it
- // to HAL.
- void SendCaptureRequest(
- cros::mojom::Camera3CaptureRequestPtr capture_request,
- VideoCaptureDevice::TakePhotoCallback take_photo_callback);
+ bool TryPrepareReprocessRequest(std::set<StreamType>* stream_types,
+ cros::mojom::CameraMetadataPtr* settings,
+ TakePhotoCallback* callback,
+ base::Optional<uint64_t>* input_buffer_id,
+ cros::mojom::Effect* reprocess_effect);
+
+ bool TryPreparePreviewRequest(std::set<StreamType>* stream_types,
+ cros::mojom::CameraMetadataPtr* settings);
+
+ bool TryPrepareOneShotRequest(std::set<StreamType>* stream_types,
+ cros::mojom::CameraMetadataPtr* settings,
+ TakePhotoCallback* callback);
// Callback for ProcessCaptureRequest().
void OnProcessedCaptureRequest(int32_t result);
@@ -243,10 +279,6 @@ class CAPTURE_EXPORT RequestManager final
// A flag indicating whether the capture loops is running.
bool capturing_;
- // The frame number. Increased by one for each capture request sent; reset
- // to zero in AllocateAndStart.
- uint32_t frame_number_;
-
// The number of partial stages. |partial_result_count_| is learned by
// querying |static_metadata_|. In case the result count is absent in
// |static_metadata_|, it defaults to one which means all the result
@@ -284,9 +316,29 @@ class CAPTURE_EXPORT RequestManager final
// Stores the pending capture results of the current in-flight frames.
std::map<uint32_t, CaptureResult> pending_results_;
+ std::queue<cros::mojom::CameraMetadataPtr> take_photo_settings_queue_;
+
+ // Queue that contains ReprocessTaskQueue that will be consumed by
+ // reprocess-supported devices.
+ std::queue<ReprocessTaskQueue> pending_reprocess_tasks_queue_;
+
// Callback for TakePhoto(). When preparing capture request, the callback will
// be popped and moved to CaptureResult.
- std::queue<VideoCaptureDevice::TakePhotoCallback> take_photo_callback_queue_;
+ std::queue<base::OnceCallback<void(int, mojom::BlobPtr)>>
+ take_photo_callback_queue_;
+
+ // Map that maps buffer id to reprocess task queue. Once all reprocess tasks
+ // for a specific buffer id are consumed, release that buffer.
+ std::map<uint64_t, ReprocessTaskQueue> buffer_id_reprocess_tasks_map_;
+
+ // Map that maps frame number to reprocess task queue. We should consume the
+ // content inside this map when preparing capture request.
+ std::map<uint32_t, ReprocessTaskQueue> frame_number_reprocess_tasks_map_;
+
+ // Buffer ids that are currently being processed. When preparing a capture
+ // request, we will ignore a reprocess task if its corresponding buffer id
+ // is in the set.
+ std::set<uint64_t> processing_buffer_ids_;
// Map for retrieving the last received frame number. It is used to check for
// duplicate or out of order of frames.
diff --git a/chromium/media/capture/video/chromeos/request_manager_unittest.cc b/chromium/media/capture/video/chromeos/request_manager_unittest.cc
index f79620d1514..893dd2fcc73 100644
--- a/chromium/media/capture/video/chromeos/request_manager_unittest.cc
+++ b/chromium/media/capture/video/chromeos/request_manager_unittest.cc
@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include "media/capture/video/chromeos/request_manager.h"
+#include "media/capture/video/chromeos/reprocess_manager.h"
#include "media/capture/video/chromeos/stream_buffer_manager.h"
#include <map>
@@ -184,7 +185,7 @@ class RequestManagerTest : public ::testing::Test {
std::vector<cros::mojom::Camera3StreamPtr> streams;
auto preview_stream = cros::mojom::Camera3Stream::New();
- preview_stream->id = static_cast<uint64_t>(StreamType::kPreview);
+ preview_stream->id = static_cast<uint64_t>(StreamType::kPreviewOutput);
preview_stream->stream_type =
cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
preview_stream->width = kDefaultCaptureFormat.frame_size.width();
@@ -199,7 +200,7 @@ class RequestManagerTest : public ::testing::Test {
streams.push_back(std::move(preview_stream));
auto still_capture_stream = cros::mojom::Camera3Stream::New();
- still_capture_stream->id = static_cast<uint64_t>(StreamType::kStillCapture);
+ still_capture_stream->id = static_cast<uint64_t>(StreamType::kJpegOutput);
still_capture_stream->stream_type =
cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
still_capture_stream->width = kDefaultCaptureFormat.frame_size.width();
@@ -222,7 +223,8 @@ class RequestManagerTest : public ::testing::Test {
auto error_msg = cros::mojom::Camera3ErrorMsg::New();
error_msg->frame_number = frame_number;
// There is only the preview stream.
- error_msg->error_stream_id = static_cast<uint64_t>(StreamType::kPreview);
+ error_msg->error_stream_id =
+ static_cast<uint64_t>(StreamType::kPreviewOutput);
error_msg->error_code = error_code;
auto notify_msg = cros::mojom::Camera3NotifyMsg::New();
notify_msg->message = cros::mojom::Camera3NotifyMsgMessage::New();
@@ -488,24 +490,6 @@ TEST_F(RequestManagerTest, BufferErrorTest) {
}
// Test that preview and still capture buffers can be correctly submitted.
-TEST_F(RequestManagerTest, TakePhotoTest) {
- EXPECT_CALL(*GetMockCaptureInterface(), DoProcessCaptureRequest(_, _))
- .Times(AtLeast(1))
- .WillRepeatedly(Invoke(this, &RequestManagerTest::ProcessCaptureRequest));
-
- request_manager_->SetUpStreamsAndBuffers(
- kDefaultCaptureFormat,
- GetFakeStaticMetadata(/* partial_result_count */ 1),
- PrepareCaptureStream(/* max_buffers */ 1));
- request_manager_->StartPreview(cros::mojom::CameraMetadata::New());
- request_manager_->TakePhoto(
- GetFakeStaticMetadata(/* partial_result_count */ 1),
- base::BindOnce([](RequestManagerTest* test,
- mojom::BlobPtr blob) { test->QuitCaptureLoop(); },
- base::Unretained(this)));
-
- // Wait until a captured frame is received by MockVideoCaptureClient.
- DoLoop();
-}
+// TODO(crbug.com/917574): Add reprocess tests and take photo test.
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
index 4420ec935a9..6ba6adfcddb 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
@@ -27,15 +27,7 @@ StreamBufferManager::StreamBufferManager(
weak_ptr_factory_(this) {}
StreamBufferManager::~StreamBufferManager() {
- for (const auto& iter : stream_context_) {
- if (iter.second) {
- for (const auto& buf : iter.second->buffers) {
- if (buf) {
- buf->Unmap();
- }
- }
- }
- }
+ DestroyCurrentStreamsAndBuffers();
}
gfx::GpuMemoryBuffer* StreamBufferManager::GetBufferById(StreamType stream_type,
@@ -54,9 +46,26 @@ VideoCaptureFormat StreamBufferManager::GetStreamCaptureFormat(
return stream_context_[stream_type]->capture_format;
}
+void StreamBufferManager::DestroyCurrentStreamsAndBuffers() {
+ for (const auto& iter : stream_context_) {
+ if (iter.second) {
+ for (const auto& buf : iter.second->buffers) {
+ if (buf) {
+ buf->Unmap();
+ }
+ }
+ iter.second->buffers.clear();
+ }
+ }
+ stream_context_.clear();
+}
+
bool StreamBufferManager::HasFreeBuffers(
const std::set<StreamType>& stream_types) {
for (auto stream_type : stream_types) {
+ if (IsInputStream(stream_type)) {
+ continue;
+ }
if (stream_context_[stream_type]->free_buffers.empty()) {
return false;
}
@@ -64,10 +73,22 @@ bool StreamBufferManager::HasFreeBuffers(
return true;
}
+bool StreamBufferManager::HasStreamsConfigured(
+ std::initializer_list<StreamType> stream_types) {
+ for (auto stream_type : stream_types) {
+ if (stream_context_.find(stream_type) == stream_context_.end()) {
+ return false;
+ }
+ }
+ return true;
+}
+
void StreamBufferManager::SetUpStreamsAndBuffers(
VideoCaptureFormat capture_format,
const cros::mojom::CameraMetadataPtr& static_metadata,
std::vector<cros::mojom::Camera3StreamPtr> streams) {
+ DestroyCurrentStreamsAndBuffers();
+
for (auto& stream : streams) {
DVLOG(2) << "Stream " << stream->id
<< " stream_type: " << stream->stream_type
@@ -100,17 +121,23 @@ void StreamBufferManager::SetUpStreamsAndBuffers(
stream_context_[stream_type]->capture_format.pixel_format =
stream_format.video_format;
+ // For input stream, there is no need to allocate buffers.
+ if (IsInputStream(stream_type)) {
+ continue;
+ }
+
// Allocate buffers.
size_t num_buffers = stream_context_[stream_type]->stream->max_buffers;
stream_context_[stream_type]->buffers.resize(num_buffers);
int32_t buffer_width, buffer_height;
switch (stream_type) {
- case StreamType::kPreview: {
+ case StreamType::kPreviewOutput:
+ case StreamType::kYUVOutput: {
buffer_width = stream_context_[stream_type]->stream->width;
buffer_height = stream_context_[stream_type]->stream->height;
break;
}
- case StreamType::kStillCapture: {
+ case StreamType::kJpegOutput: {
const cros::mojom::CameraMetadataEntryPtr* jpeg_max_size =
GetMetadataEntry(
static_metadata,
@@ -160,7 +187,8 @@ cros::mojom::Camera3StreamPtr StreamBufferManager::GetStreamConfiguration(
}
base::Optional<BufferInfo> StreamBufferManager::RequestBuffer(
- StreamType stream_type) {
+ StreamType stream_type,
+ base::Optional<uint64_t> buffer_id) {
VideoPixelFormat buffer_format =
stream_context_[stream_type]->capture_format.pixel_format;
uint32_t drm_format = PixFormatVideoToDrm(buffer_format);
@@ -175,11 +203,25 @@ base::Optional<BufferInfo> StreamBufferManager::RequestBuffer(
}
BufferInfo buffer_info;
- buffer_info.id = stream_context_[stream_type]->free_buffers.front();
- stream_context_[stream_type]->free_buffers.pop();
- buffer_info.gpu_memory_buffer = stream_context_[stream_type]
- ->buffers[GetBufferIndex(buffer_info.id)]
- .get();
+ if (buffer_id.has_value()) {
+ // Currently, only kYUVInput has an associated output buffer which is
+ // kYUVOutput.
+ if (stream_type != StreamType::kYUVInput) {
+ return {};
+ }
+ buffer_info.id = *buffer_id;
+ buffer_info.gpu_memory_buffer =
+ stream_context_[StreamType::kYUVOutput]
+ ->buffers[GetBufferIndex(buffer_info.id)]
+ .get();
+ } else {
+ buffer_info.id = stream_context_[stream_type]->free_buffers.front();
+ stream_context_[stream_type]->free_buffers.pop();
+ buffer_info.gpu_memory_buffer =
+ stream_context_[stream_type]
+ ->buffers[GetBufferIndex(buffer_info.id)]
+ .get();
+ }
buffer_info.hal_pixel_format = stream_context_[stream_type]->stream->format;
buffer_info.drm_format = drm_format;
return buffer_info;
@@ -187,11 +229,14 @@ base::Optional<BufferInfo> StreamBufferManager::RequestBuffer(
void StreamBufferManager::ReleaseBuffer(StreamType stream_type,
uint64_t buffer_id) {
+ if (IsInputStream(stream_type)) {
+ return;
+ }
stream_context_[stream_type]->free_buffers.push(buffer_id);
}
-size_t StreamBufferManager::GetNumberOfStreams() {
- return stream_context_.size();
+bool StreamBufferManager::IsReprocessSupported() {
+ return stream_context_.find(StreamType::kYUVOutput) != stream_context_.end();
}
// static
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.h b/chromium/media/capture/video/chromeos/stream_buffer_manager.h
index a71db102a30..b53026582a2 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.h
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.h
@@ -51,9 +51,12 @@ class CAPTURE_EXPORT StreamBufferManager final {
VideoCaptureFormat GetStreamCaptureFormat(StreamType stream_type);
// Checks if all streams are available. For output stream, it is available if
- // it has free buffers.
+ // it has free buffers. For input stream, it is always available.
bool HasFreeBuffers(const std::set<StreamType>& stream_types);
+ // Checks if the target stream types have been configured or not.
+ bool HasStreamsConfigured(std::initializer_list<StreamType> stream_types);
+
// Sets up the stream context and allocate buffers according to the
// configuration specified in |stream|.
void SetUpStreamsAndBuffers(
@@ -63,13 +66,16 @@ class CAPTURE_EXPORT StreamBufferManager final {
cros::mojom::Camera3StreamPtr GetStreamConfiguration(StreamType stream_type);
- // Requests buffer for specific stream type.
- base::Optional<BufferInfo> RequestBuffer(StreamType stream_type);
+ // Requests a buffer for a specific stream type. If |buffer_id| is provided,
+ // it will be used as the buffer id rather than taking an id from the free
+ // buffers.
+ base::Optional<BufferInfo> RequestBuffer(StreamType stream_type,
+ base::Optional<uint64_t> buffer_id);
// Releases buffer by marking it as free buffer.
void ReleaseBuffer(StreamType stream_type, uint64_t buffer_id);
- size_t GetNumberOfStreams();
+ bool IsReprocessSupported();
private:
friend class RequestManagerTest;
@@ -78,6 +84,9 @@ class CAPTURE_EXPORT StreamBufferManager final {
static size_t GetBufferIndex(uint64_t buffer_id);
+ // Destroy current streams and unmap mapped buffers.
+ void DestroyCurrentStreamsAndBuffers();
+
struct StreamContext {
StreamContext();
~StreamContext();
diff --git a/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.cc b/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.cc
new file mode 100644
index 00000000000..24f47f12087
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.cc
@@ -0,0 +1,154 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/vendor_tag_ops_delegate.h"
+
+#include <utility>
+
+#include "base/bind.h"
+#include "base/strings/strcat.h"
+
+namespace media {
+
+VendorTagOpsDelegate::VendorTagOpsDelegate(
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
+ : ipc_task_runner_(ipc_task_runner) {}
+
+VendorTagOpsDelegate::~VendorTagOpsDelegate() = default;
+
+cros::mojom::VendorTagOpsRequest VendorTagOpsDelegate::MakeRequest() {
+ DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
+ auto request = mojo::MakeRequest(&vendor_tag_ops_);
+ vendor_tag_ops_.set_connection_error_handler(
+ base::BindOnce(&VendorTagOpsDelegate::Reset, base::Unretained(this)));
+ return request;
+}
+
+void VendorTagOpsDelegate::Initialize() {
+ DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
+ vendor_tag_ops_->GetTagCount(base::BindOnce(
+ &VendorTagOpsDelegate::OnGotTagCount, base::Unretained(this)));
+}
+
+void VendorTagOpsDelegate::Reset() {
+ DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
+ vendor_tag_ops_.reset();
+ pending_info_.clear();
+ name_map_.clear();
+ tag_map_.clear();
+ initialized_.Reset();
+}
+
+void VendorTagOpsDelegate::RemovePending(uint32_t tag) {
+ DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
+ size_t removed = pending_info_.erase(tag);
+ DCHECK_EQ(removed, 1u);
+ if (pending_info_.empty()) {
+ DVLOG(1) << "VendorTagOpsDelegate initialized";
+ initialized_.Signal();
+ }
+}
+
+void VendorTagOpsDelegate::OnGotTagCount(int32_t tag_count) {
+ DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
+ if (tag_count == -1) {
+ LOG(ERROR) << "Failed to get tag count";
+ initialized_.Signal();
+ return;
+ }
+
+ if (tag_count == 0) {
+ // There is no vendor tag, we are done here.
+ initialized_.Signal();
+ return;
+ }
+
+ vendor_tag_ops_->GetAllTags(base::BindOnce(
+ &VendorTagOpsDelegate::OnGotAllTags, base::Unretained(this), tag_count));
+}
+
+void VendorTagOpsDelegate::OnGotAllTags(size_t tag_count,
+ const std::vector<uint32_t>& tags) {
+ DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
+ DCHECK_EQ(tags.size(), tag_count);
+
+ for (uint32_t tag : tags) {
+ pending_info_[tag].tag = static_cast<cros::mojom::CameraMetadataTag>(tag);
+ vendor_tag_ops_->GetSectionName(
+ tag, base::BindOnce(&VendorTagOpsDelegate::OnGotSectionName,
+ base::Unretained(this), tag));
+ }
+}
+
+void VendorTagOpsDelegate::OnGotSectionName(
+ uint32_t tag,
+ const base::Optional<std::string>& section_name) {
+ DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
+ if (!section_name.has_value()) {
+ LOG(ERROR) << "Failed to get section name of tag " << std::hex
+ << std::showbase << tag;
+ RemovePending(tag);
+ return;
+ }
+
+ pending_info_[tag].section_name = *section_name;
+ vendor_tag_ops_->GetTagName(
+ tag, base::BindOnce(&VendorTagOpsDelegate::OnGotTagName,
+ base::Unretained(this), tag));
+}
+
+void VendorTagOpsDelegate::OnGotTagName(
+ uint32_t tag,
+ const base::Optional<std::string>& tag_name) {
+ DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
+ if (!tag_name.has_value()) {
+ LOG(ERROR) << "Failed to get tag name of tag " << std::hex << std::showbase
+ << tag;
+ RemovePending(tag);
+ return;
+ }
+
+ pending_info_[tag].tag_name = *tag_name;
+ vendor_tag_ops_->GetTagType(
+ tag, base::BindOnce(&VendorTagOpsDelegate::OnGotTagType,
+ base::Unretained(this), tag));
+}
+
+void VendorTagOpsDelegate::OnGotTagType(uint32_t tag, int32_t type) {
+ DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
+ if (type == -1) {
+ LOG(ERROR) << "Failed to get tag type of tag " << std::hex << std::showbase
+ << tag;
+ RemovePending(tag);
+ return;
+ }
+
+ VendorTagInfo& info = pending_info_[tag];
+ info.type = static_cast<cros::mojom::EntryType>(type);
+ std::string full_name = base::StrCat({info.section_name, ".", info.tag_name});
+ name_map_[full_name] = info;
+ RemovePending(tag);
+}
+
+const VendorTagInfo* VendorTagOpsDelegate::GetInfoByName(
+ const std::string& full_name) {
+ initialized_.Wait();
+ auto it = name_map_.find(full_name);
+ if (it == name_map_.end()) {
+ return nullptr;
+ }
+ return &it->second;
+}
+
+const VendorTagInfo* VendorTagOpsDelegate::GetInfoByTag(
+ cros::mojom::CameraMetadataTag tag) {
+ initialized_.Wait();
+ auto it = tag_map_.find(tag);
+ if (it == tag_map_.end()) {
+ return nullptr;
+ }
+ return &it->second;
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h b/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h
new file mode 100644
index 00000000000..c343b6c656c
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h
@@ -0,0 +1,68 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_VENDOR_TAG_OPS_DELEGATE_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_VENDOR_TAG_OPS_DELEGATE_H_
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "media/capture/video/chromeos/mojo/camera_common.mojom.h"
+
+namespace media {
+
+struct VendorTagInfo {
+ cros::mojom::CameraMetadataTag tag;
+ std::string section_name;
+ std::string tag_name;
+ cros::mojom::EntryType type;
+};
+
+class VendorTagOpsDelegate {
+ public:
+ VendorTagOpsDelegate(
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner);
+ ~VendorTagOpsDelegate();
+
+ // Sets up / tears down the VendorTagOpsDelegate instance. All methods here
+ // should be called on |ipc_task_runner_|.
+ cros::mojom::VendorTagOpsRequest MakeRequest();
+ void Initialize();
+ void Reset();
+
+ // Gets the info by name or tag after |initialized_| is signaled. The returned
+ // pointer is still owned by VendorTagOpsDelegate. Returns nullptr if not
+ // found. These functions can be called concurrently on different threads.
+ const VendorTagInfo* GetInfoByName(const std::string& full_name);
+ const VendorTagInfo* GetInfoByTag(cros::mojom::CameraMetadataTag tag);
+
+ private:
+ void RemovePending(uint32_t tag);
+
+ void OnGotTagCount(int32_t tag_count);
+ void OnGotAllTags(size_t tag_count, const std::vector<uint32_t>& tags);
+ void OnGotSectionName(uint32_t tag,
+ const base::Optional<std::string>& section_name);
+ void OnGotTagName(uint32_t tag, const base::Optional<std::string>& tag_name);
+ void OnGotTagType(uint32_t tag, int32_t type);
+
+ scoped_refptr<base::SequencedTaskRunner> ipc_task_runner_;
+ cros::mojom::VendorTagOpsPtr vendor_tag_ops_;
+
+ // The partially initialized tags. A tag with its info would be moved to
+ // |name_map_| and |tag_map_| once it is fully initialized. The |initialized_|
+ // event would be signaled when |pending_info_| is empty.
+ std::map<uint32_t, VendorTagInfo> pending_info_;
+
+ // These maps are read-only after |initialized_| is signaled.
+ std::map<std::string, VendorTagInfo> name_map_;
+ std::map<cros::mojom::CameraMetadataTag, VendorTagInfo> tag_map_;
+
+ base::WaitableEvent initialized_;
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_VENDOR_TAG_OPS_DELEGATE_H_
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
index 7a28990cb02..5d14a389c30 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
@@ -14,7 +14,6 @@
#include "base/synchronization/waitable_event.h"
#include "base/threading/platform_thread.h"
#include "base/trace_event/trace_event.h"
-#include "chromeos/dbus/dbus_thread_manager.h"
#include "media/base/bind_to_current_loop.h"
#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_device_delegate.h"
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
index 492755d88cc..559907ae5c5 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
@@ -11,7 +11,7 @@
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread.h"
-#include "chromeos/dbus/power_manager_client.h"
+#include "chromeos/dbus/power/power_manager_client.h"
#include "media/capture/video/chromeos/display_rotation_observer.h"
#include "media/capture/video/video_capture_device.h"
#include "media/capture/video/video_capture_device_descriptor.h"
diff --git a/chromium/media/capture/video/fake_video_capture_device.h b/chromium/media/capture/video/fake_video_capture_device.h
index 164054d13b5..6154d232386 100644
--- a/chromium/media/capture/video/fake_video_capture_device.h
+++ b/chromium/media/capture/video/fake_video_capture_device.h
@@ -102,9 +102,9 @@ class FakeVideoCaptureDevice : public VideoCaptureDevice {
// This is a separate struct because read-access to it is shared with several
// collaborating classes.
struct FakeDeviceState {
- FakeDeviceState(float zoom,
- float exposure_time,
- float focus_distance,
+ FakeDeviceState(double zoom,
+ double exposure_time,
+ double focus_distance,
float frame_rate,
VideoPixelFormat pixel_format)
: zoom(zoom),
@@ -117,10 +117,10 @@ struct FakeDeviceState {
: mojom::MeteringMode::CONTINUOUS;
}
- uint32_t zoom;
- uint32_t exposure_time;
+ double zoom;
+ double exposure_time;
mojom::MeteringMode exposure_mode;
- uint32_t focus_distance;
+ double focus_distance;
mojom::MeteringMode focus_mode;
VideoCaptureFormat format;
};
diff --git a/chromium/media/capture/video/fake_video_capture_device_factory.cc b/chromium/media/capture/video/fake_video_capture_device_factory.cc
index 6f33ce38435..b4fada4de86 100644
--- a/chromium/media/capture/video/fake_video_capture_device_factory.cc
+++ b/chromium/media/capture/video/fake_video_capture_device_factory.cc
@@ -16,8 +16,6 @@
namespace {
-static const size_t kDepthDeviceIndex = 1;
-
// Cap the frame rate command line input to reasonable values.
static const float kFakeCaptureMinFrameRate = 5.0f;
static const float kFakeCaptureMaxFrameRate = 60.0f;
@@ -218,18 +216,6 @@ void FakeVideoCaptureDeviceFactory::GetDeviceDescriptors(
);
entry_index++;
}
-
- // Video device on index |kDepthDeviceIndex| is depth video capture device.
- // Fill the camera calibration information only for it.
- if (device_descriptors->size() <= kDepthDeviceIndex)
- return;
- VideoCaptureDeviceDescriptor& depth_device(
- (*device_descriptors)[kDepthDeviceIndex]);
- depth_device.camera_calibration.emplace();
- depth_device.camera_calibration->focal_length_x = 135.0;
- depth_device.camera_calibration->focal_length_y = 135.6;
- depth_device.camera_calibration->depth_near = 0.0;
- depth_device.camera_calibration->depth_far = 65.535;
}
void FakeVideoCaptureDeviceFactory::GetSupportedFormats(
diff --git a/chromium/media/capture/video/fake_video_capture_device_unittest.cc b/chromium/media/capture/video/fake_video_capture_device_unittest.cc
index 9e0bf3c1320..c5b3d806ba6 100644
--- a/chromium/media/capture/video/fake_video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/fake_video_capture_device_unittest.cc
@@ -175,12 +175,12 @@ class FakeVideoCaptureDeviceTestBase : public ::testing::Test {
const media::VideoCaptureFormat& frame_format,
base::TimeTicks,
base::TimeDelta) { OnFrameCaptured(frame_format); }));
- ON_CALL(*result, DoOnIncomingCapturedBufferExt(_, _, _, _, _, _))
- .WillByDefault(
- Invoke([this](media::VideoCaptureDevice::Client::Buffer&,
- const media::VideoCaptureFormat& frame_format,
- base::TimeTicks, base::TimeDelta, gfx::Rect,
- const media::VideoFrameMetadata&) {
+ ON_CALL(*result, DoOnIncomingCapturedBufferExt(_, _, _, _, _, _, _))
+ .WillByDefault(Invoke(
+ [this](media::VideoCaptureDevice::Client::Buffer&,
+ const media::VideoCaptureFormat& frame_format,
+ const gfx::ColorSpace&, base::TimeTicks, base::TimeDelta,
+ gfx::Rect, const media::VideoFrameMetadata&) {
OnFrameCaptured(frame_format);
}));
return result;
@@ -312,21 +312,6 @@ TEST_F(FakeVideoCaptureDeviceTest, GetDeviceSupportedFormats) {
}
}
-TEST_F(FakeVideoCaptureDeviceTest, GetCameraCalibration) {
- const size_t device_count = 2;
- video_capture_device_factory_->SetToDefaultDevicesConfig(device_count);
- video_capture_device_factory_->GetDeviceDescriptors(descriptors_.get());
- ASSERT_EQ(device_count, descriptors_->size());
- ASSERT_FALSE(descriptors_->at(0).camera_calibration);
- const VideoCaptureDeviceDescriptor& depth_device = descriptors_->at(1);
- EXPECT_EQ("/dev/video1", depth_device.device_id);
- ASSERT_TRUE(depth_device.camera_calibration);
- EXPECT_EQ(135.0, depth_device.camera_calibration->focal_length_x);
- EXPECT_EQ(135.6, depth_device.camera_calibration->focal_length_y);
- EXPECT_EQ(0.0, depth_device.camera_calibration->depth_near);
- EXPECT_EQ(65.535, depth_device.camera_calibration->depth_far);
-}
-
TEST_F(FakeVideoCaptureDeviceTest, ErrorDeviceReportsError) {
auto device = FakeVideoCaptureDeviceFactory::CreateErrorDevice();
ASSERT_TRUE(device);
diff --git a/chromium/media/capture/video/mac/DEPS b/chromium/media/capture/video/mac/DEPS
index 58a10036ee4..577e795b73b 100644
--- a/chromium/media/capture/video/mac/DEPS
+++ b/chromium/media/capture/video/mac/DEPS
@@ -1,3 +1,4 @@
include_rules = [
"+third_party/decklink",
+ "+services/video_capture/public/uma",
]
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
index fa4301ef5a9..132a1c3de1f 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
@@ -18,8 +18,10 @@
#include "base/strings/string_util.h"
#include "base/strings/sys_string_conversions.h"
#include "media/base/timestamp_constants.h"
+#include "media/capture/video/mac/video_capture_device_factory_mac.h"
#include "media/capture/video/mac/video_capture_device_mac.h"
#include "media/capture/video_capture_types.h"
+#include "services/video_capture/public/uma/video_capture_service_event.h"
#include "ui/gfx/geometry/size.h"
// Prefer MJPEG if frame width or height is larger than this.
@@ -86,16 +88,64 @@ MacBookVersions GetMacBookModel(const std::string& model) {
// investigating crbug/582931.
void MaybeWriteUma(int number_of_devices, int number_of_suspended_devices) {
std::string model = base::mac::GetModelIdentifier();
- if (base::StartsWith(model, "MacBook",
- base::CompareCase::INSENSITIVE_ASCII)) {
- UMA_HISTOGRAM_COUNTS_1M("Media.VideoCapture.MacBook.NumberOfDevices",
- number_of_devices + number_of_suspended_devices);
- if (number_of_devices + number_of_suspended_devices == 0) {
- UMA_HISTOGRAM_ENUMERATION(
- "Media.VideoCapture.MacBook.HardwareVersionWhenNoCamera",
- GetMacBookModel(model), MAX_MACBOOK_VERSION + 1);
+ if (!base::StartsWith(model, "MacBook",
+ base::CompareCase::INSENSITIVE_ASCII)) {
+ return;
+ }
+ static int attempt_since_process_start_counter = 0;
+ static int device_count_at_last_attempt = 0;
+ static bool has_seen_zero_device_count = false;
+ const int attempt_count_since_process_start =
+ ++attempt_since_process_start_counter;
+ const int retry_count =
+ media::VideoCaptureDeviceFactoryMac::GetGetDeviceDescriptorsRetryCount();
+ const int device_count = number_of_devices + number_of_suspended_devices;
+ UMA_HISTOGRAM_COUNTS_1M("Media.VideoCapture.MacBook.NumberOfDevices",
+ device_count);
+ if (device_count == 0) {
+ UMA_HISTOGRAM_ENUMERATION(
+ "Media.VideoCapture.MacBook.HardwareVersionWhenNoCamera",
+ GetMacBookModel(model), MAX_MACBOOK_VERSION + 1);
+ if (!has_seen_zero_device_count) {
+ UMA_HISTOGRAM_COUNTS_1M(
+ "Media.VideoCapture.MacBook.AttemptCountWhenNoCamera",
+ attempt_count_since_process_start);
+ has_seen_zero_device_count = true;
}
}
+
+ if (attempt_count_since_process_start == 1) {
+ if (retry_count == 0) {
+ video_capture::uma::LogMacbookRetryGetDeviceInfosEvent(
+ device_count == 0
+ ? video_capture::uma::
+ AVF_RECEIVED_ZERO_INFOS_FIRST_TRY_FIRST_ATTEMPT
+ : video_capture::uma::
+ AVF_RECEIVED_NONZERO_INFOS_FIRST_TRY_FIRST_ATTEMPT);
+ } else {
+ video_capture::uma::LogMacbookRetryGetDeviceInfosEvent(
+ device_count == 0
+ ? video_capture::uma::AVF_RECEIVED_ZERO_INFOS_RETRY
+ : video_capture::uma::AVF_RECEIVED_NONZERO_INFOS_RETRY);
+ }
+ // attempt count > 1
+ } else if (retry_count == 0) {
+ video_capture::uma::LogMacbookRetryGetDeviceInfosEvent(
+ device_count == 0
+ ? video_capture::uma::
+ AVF_RECEIVED_ZERO_INFOS_FIRST_TRY_NONFIRST_ATTEMPT
+ : video_capture::uma::
+ AVF_RECEIVED_NONZERO_INFOS_FIRST_TRY_NONFIRST_ATTEMPT);
+ }
+ if (attempt_count_since_process_start > 1 &&
+ device_count != device_count_at_last_attempt) {
+ video_capture::uma::LogMacbookRetryGetDeviceInfosEvent(
+ device_count == 0
+ ? video_capture::uma::AVF_DEVICE_COUNT_CHANGED_FROM_POSITIVE_TO_ZERO
+ : video_capture::uma::
+ AVF_DEVICE_COUNT_CHANGED_FROM_ZERO_TO_POSITIVE);
+ }
+ device_count_at_last_attempt = device_count;
}
// This function translates Mac Core Video pixel formats to Chromium pixel
diff --git a/chromium/media/capture/video/mac/video_capture_device_factory_mac.h b/chromium/media/capture/video/mac/video_capture_device_factory_mac.h
index ccf31ddd290..be8a3a9c0a7 100644
--- a/chromium/media/capture/video/mac/video_capture_device_factory_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_factory_mac.h
@@ -19,6 +19,9 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryMac
VideoCaptureDeviceFactoryMac();
~VideoCaptureDeviceFactoryMac() override;
+ static void SetGetDeviceDescriptorsRetryCount(int count);
+ static int GetGetDeviceDescriptorsRetryCount();
+
std::unique_ptr<VideoCaptureDevice> CreateDevice(
const VideoCaptureDeviceDescriptor& device_descriptor) override;
void GetDeviceDescriptors(
diff --git a/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm b/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm
index 30055787fdf..551ee4ef116 100644
--- a/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm
@@ -18,6 +18,7 @@
#import "media/capture/video/mac/video_capture_device_avfoundation_mac.h"
#import "media/capture/video/mac/video_capture_device_decklink_mac.h"
#include "media/capture/video/mac/video_capture_device_mac.h"
+#include "services/video_capture/public/uma/video_capture_service_event.h"
namespace {
@@ -37,6 +38,8 @@ void EnsureRunsOnCFRunLoopEnabledThread() {
// uniqueId. At the moment these are just Blackmagic devices.
const char* kBlacklistedCamerasIdSignature[] = {"-01FDA82C8A9C"};
+int32_t get_device_descriptors_retry_count = 0;
+
} // anonymous namespace
namespace media {
@@ -64,6 +67,17 @@ VideoCaptureDeviceFactoryMac::VideoCaptureDeviceFactoryMac() {
VideoCaptureDeviceFactoryMac::~VideoCaptureDeviceFactoryMac() {
}
+// static
+void VideoCaptureDeviceFactoryMac::SetGetDeviceDescriptorsRetryCount(
+ int count) {
+ get_device_descriptors_retry_count = count;
+}
+
+// static
+int VideoCaptureDeviceFactoryMac::GetGetDeviceDescriptorsRetryCount() {
+ return get_device_descriptors_retry_count;
+}
+
std::unique_ptr<VideoCaptureDevice> VideoCaptureDeviceFactoryMac::CreateDevice(
const VideoCaptureDeviceDescriptor& descriptor) {
DCHECK(thread_checker_.CalledOnValidThread());
@@ -116,6 +130,11 @@ void VideoCaptureDeviceFactoryMac::GetDeviceDescriptors(
}
// Also retrieve Blackmagic devices, if present, via DeckLink SDK API.
VideoCaptureDeviceDeckLinkMac::EnumerateDevices(device_descriptors);
+
+ if ([capture_devices count] > 0 && device_descriptors->empty()) {
+ video_capture::uma::LogMacbookRetryGetDeviceInfosEvent(
+ video_capture::uma::AVF_DROPPED_DESCRIPTORS_AT_FACTORY);
+ }
}
void VideoCaptureDeviceFactoryMac::GetSupportedFormats(
diff --git a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
index b07328eb05f..2759d69f348 100644
--- a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
+++ b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
@@ -9,6 +9,9 @@
#include "build/build_config.h"
#if defined(OS_CHROMEOS)
+#include <fcntl.h>
+#include <sys/stat.h>
+#include <sys/types.h>
#include "media/capture/video/chromeos/request_manager.h"
#endif
@@ -19,6 +22,14 @@ namespace unittest_internal {
namespace {
+#if defined(OS_CHROMEOS)
+base::ScopedFD GetDummyFD() {
+ base::ScopedFD fd(open("/dev/zero", O_RDONLY));
+ DCHECK(fd.is_valid());
+ return fd;
+}
+#endif
+
class FakeGpuMemoryBuffer : public gfx::GpuMemoryBuffer {
public:
FakeGpuMemoryBuffer(const gfx::Size& size, gfx::BufferFormat format)
@@ -37,14 +48,12 @@ class FakeGpuMemoryBuffer : public gfx::GpuMemoryBuffer {
#if defined(OS_CHROMEOS)
// Set a dummy fd since this is for testing only.
- handle_.native_pixmap_handle.fds.push_back(base::FileDescriptor(0, true));
handle_.native_pixmap_handle.planes.push_back(
- gfx::NativePixmapPlane(size_.width(), 0, y_plane_size));
+ gfx::NativePixmapPlane(size_.width(), 0, y_plane_size, GetDummyFD()));
if (format == gfx::BufferFormat::YUV_420_BIPLANAR) {
- handle_.native_pixmap_handle.fds.push_back(base::FileDescriptor(0, true));
handle_.native_pixmap_handle.planes.push_back(gfx::NativePixmapPlane(
size_.width(), handle_.native_pixmap_handle.planes[0].size,
- uv_plane_size));
+ uv_plane_size, GetDummyFD()));
}
// For faking a valid JPEG blob buffer.
diff --git a/chromium/media/capture/video/mock_video_capture_device_client.cc b/chromium/media/capture/video/mock_video_capture_device_client.cc
index 74e1fdac07b..b113c6a4acc 100644
--- a/chromium/media/capture/video/mock_video_capture_device_client.cc
+++ b/chromium/media/capture/video/mock_video_capture_device_client.cc
@@ -19,12 +19,13 @@ void MockVideoCaptureDeviceClient::OnIncomingCapturedBuffer(
void MockVideoCaptureDeviceClient::OnIncomingCapturedBufferExt(
Buffer buffer,
const media::VideoCaptureFormat& format,
+ const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
base::TimeDelta timestamp,
gfx::Rect visible_rect,
const media::VideoFrameMetadata& additional_metadata) {
- DoOnIncomingCapturedBufferExt(buffer, format, reference_time, timestamp,
- visible_rect, additional_metadata);
+ DoOnIncomingCapturedBufferExt(buffer, format, color_space, reference_time,
+ timestamp, visible_rect, additional_metadata);
}
} // namespace media
diff --git a/chromium/media/capture/video/mock_video_capture_device_client.h b/chromium/media/capture/video/mock_video_capture_device_client.h
index a7f83a3de09..b0e2d76f5e0 100644
--- a/chromium/media/capture/video/mock_video_capture_device_client.h
+++ b/chromium/media/capture/video/mock_video_capture_device_client.h
@@ -47,6 +47,7 @@ class MockVideoCaptureDeviceClient : public VideoCaptureDevice::Client {
void OnIncomingCapturedBufferExt(
Buffer buffer,
const media::VideoCaptureFormat& format,
+ const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
base::TimeDelta timestamp,
gfx::Rect visible_rect,
@@ -57,9 +58,10 @@ class MockVideoCaptureDeviceClient : public VideoCaptureDevice::Client {
const media::VideoCaptureFormat&,
base::TimeTicks,
base::TimeDelta));
- MOCK_METHOD6(DoOnIncomingCapturedBufferExt,
+ MOCK_METHOD7(DoOnIncomingCapturedBufferExt,
void(Buffer& buffer,
const media::VideoCaptureFormat& format,
+ const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
base::TimeDelta timestamp,
gfx::Rect visible_rect,
diff --git a/chromium/media/capture/video/video_capture_device.h b/chromium/media/capture/video/video_capture_device.h
index d4ad22bcd46..3cef0193e4d 100644
--- a/chromium/media/capture/video/video_capture_device.h
+++ b/chromium/media/capture/video/video_capture_device.h
@@ -199,6 +199,7 @@ class CAPTURE_EXPORT VideoCaptureDevice
virtual void OnIncomingCapturedBufferExt(
Buffer buffer,
const VideoCaptureFormat& format,
+ const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
base::TimeDelta timestamp,
gfx::Rect visible_rect,
diff --git a/chromium/media/capture/video/video_capture_device_client.cc b/chromium/media/capture/video/video_capture_device_client.cc
index ef84b21e1e5..6c7cf02e6df 100644
--- a/chromium/media/capture/video/video_capture_device_client.cc
+++ b/chromium/media/capture/video/video_capture_device_client.cc
@@ -202,7 +202,8 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
int crop_x = 0;
int crop_y = 0;
- libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY;
+ libyuv::FourCC fourcc_format = libyuv::FOURCC_ANY;
+ gfx::ColorSpace color_space;
bool flip = false;
switch (format.pixel_format) {
@@ -210,27 +211,27 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
break;
case PIXEL_FORMAT_I420:
DCHECK(!chopped_width && !chopped_height);
- origin_colorspace = libyuv::FOURCC_I420;
+ fourcc_format = libyuv::FOURCC_I420;
break;
case PIXEL_FORMAT_YV12:
DCHECK(!chopped_width && !chopped_height);
- origin_colorspace = libyuv::FOURCC_YV12;
+ fourcc_format = libyuv::FOURCC_YV12;
break;
case PIXEL_FORMAT_NV12:
DCHECK(!chopped_width && !chopped_height);
- origin_colorspace = libyuv::FOURCC_NV12;
+ fourcc_format = libyuv::FOURCC_NV12;
break;
case PIXEL_FORMAT_NV21:
DCHECK(!chopped_width && !chopped_height);
- origin_colorspace = libyuv::FOURCC_NV21;
+ fourcc_format = libyuv::FOURCC_NV21;
break;
case PIXEL_FORMAT_YUY2:
DCHECK(!chopped_width && !chopped_height);
- origin_colorspace = libyuv::FOURCC_YUY2;
+ fourcc_format = libyuv::FOURCC_YUY2;
break;
case PIXEL_FORMAT_UYVY:
DCHECK(!chopped_width && !chopped_height);
- origin_colorspace = libyuv::FOURCC_UYVY;
+ fourcc_format = libyuv::FOURCC_UYVY;
break;
case PIXEL_FORMAT_RGB24:
// Linux RGB24 defines red at lowest byte address,
@@ -238,9 +239,9 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
// Windows RGB24 defines blue at lowest byte,
// see https://msdn.microsoft.com/en-us/library/windows/desktop/dd407253
#if defined(OS_LINUX)
- origin_colorspace = libyuv::FOURCC_RAW;
+ fourcc_format = libyuv::FOURCC_RAW;
#elif defined(OS_WIN)
- origin_colorspace = libyuv::FOURCC_24BG;
+ fourcc_format = libyuv::FOURCC_24BG;
#else
NOTREACHED() << "RGB24 is only available in Linux and Windows platforms";
#endif
@@ -251,6 +252,11 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
// that vertical flipping is needed.
flip = true;
#endif
+ // We don't actually know, for sure, what the source color space is. It's
+ // probably safe to assume its sRGB, though, and so it would be valid to
+ // assume libyuv::ConvertToI420() is going to produce results in Rec601
+ // (or very close to it).
+ color_space = gfx::ColorSpace::CreateREC601();
break;
case PIXEL_FORMAT_RGB32:
// Fallback to PIXEL_FORMAT_ARGB setting |flip| in Windows
@@ -260,10 +266,11 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
FALLTHROUGH;
#endif
case PIXEL_FORMAT_ARGB:
- origin_colorspace = libyuv::FOURCC_ARGB;
+ fourcc_format = libyuv::FOURCC_ARGB;
+ color_space = gfx::ColorSpace::CreateREC601();
break;
case PIXEL_FORMAT_MJPEG:
- origin_colorspace = libyuv::FOURCC_MJPG;
+ fourcc_format = libyuv::FOURCC_MJPG;
break;
default:
NOTREACHED();
@@ -289,12 +296,13 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
}
}
+ // libyuv::ConvertToI420 use Rec601 to convert RGB to YUV.
if (libyuv::ConvertToI420(
data, length, y_plane_data, yplane_stride, u_plane_data,
uv_plane_stride, v_plane_data, uv_plane_stride, crop_x, crop_y,
format.frame_size.width(),
(flip ? -1 : 1) * format.frame_size.height(), new_unrotated_width,
- new_unrotated_height, rotation_mode, origin_colorspace) != 0) {
+ new_unrotated_height, rotation_mode, fourcc_format) != 0) {
DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from "
<< VideoPixelFormatToString(format.pixel_format);
receiver_->OnFrameDropped(
@@ -304,8 +312,9 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
const VideoCaptureFormat output_format =
VideoCaptureFormat(dimensions, format.frame_rate, PIXEL_FORMAT_I420);
- OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time,
- timestamp);
+ OnIncomingCapturedBufferExt(std::move(buffer), output_format, color_space,
+ reference_time, timestamp, gfx::Rect(dimensions),
+ VideoFrameMetadata());
}
void VideoCaptureDeviceClient::OnIncomingCapturedGfxBuffer(
@@ -443,14 +452,15 @@ void VideoCaptureDeviceClient::OnIncomingCapturedBuffer(
base::TimeTicks reference_time,
base::TimeDelta timestamp) {
DFAKE_SCOPED_RECURSIVE_LOCK(call_from_producer_);
- OnIncomingCapturedBufferExt(std::move(buffer), format, reference_time,
- timestamp, gfx::Rect(format.frame_size),
- VideoFrameMetadata());
+ OnIncomingCapturedBufferExt(
+ std::move(buffer), format, gfx::ColorSpace(), reference_time, timestamp,
+ gfx::Rect(format.frame_size), VideoFrameMetadata());
}
void VideoCaptureDeviceClient::OnIncomingCapturedBufferExt(
Buffer buffer,
const VideoCaptureFormat& format,
+ const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
base::TimeDelta timestamp,
gfx::Rect visible_rect,
@@ -465,6 +475,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedBufferExt(
mojom::VideoFrameInfoPtr info = mojom::VideoFrameInfo::New();
info->timestamp = timestamp;
info->pixel_format = format.pixel_format;
+ info->color_space = color_space;
info->coded_size = format.frame_size;
info->visible_rect = visible_rect;
info->metadata = metadata.GetInternalValues().Clone();
diff --git a/chromium/media/capture/video/video_capture_device_client.h b/chromium/media/capture/video/video_capture_device_client.h
index f93e44b85c2..dfea6ac3e0a 100644
--- a/chromium/media/capture/video/video_capture_device_client.h
+++ b/chromium/media/capture/video/video_capture_device_client.h
@@ -80,6 +80,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
void OnIncomingCapturedBufferExt(
Buffer buffer,
const VideoCaptureFormat& format,
+ const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
base::TimeDelta timestamp,
gfx::Rect visible_rect,
diff --git a/chromium/media/capture/video/video_capture_device_descriptor.h b/chromium/media/capture/video/video_capture_device_descriptor.h
index c53274df3bf..6d690a4e192 100644
--- a/chromium/media/capture/video/video_capture_device_descriptor.h
+++ b/chromium/media/capture/video/video_capture_device_descriptor.h
@@ -8,7 +8,6 @@
#include <string>
#include <vector>
-#include "base/optional.h"
#include "media/base/video_facing.h"
#include "media/capture/capture_export.h"
@@ -94,21 +93,6 @@ struct CAPTURE_EXPORT VideoCaptureDeviceDescriptor {
VideoCaptureApi capture_api;
VideoCaptureTransportType transport_type;
- // Contains camera calibration parameters.
- // These parameters apply to both RGB and depth video devices. See also
- // https://w3c.github.io/mediacapture-depth/#mediatracksettings-dictionary
- // TODO(aleksandar.stojiljkovic): Add principal point and camera distortion
- // model and coefficients. See also https://crbug.com/616098
- struct CameraCalibration {
- double focal_length_x = 0.0;
- double focal_length_y = 0.0;
- // depth near and far are used only for depth cameras.
- double depth_near = 0.0;
- double depth_far = 0.0;
- };
-
- base::Optional<CameraCalibration> camera_calibration;
-
private:
std::string display_name_; // Name that is intended for display in the UI
};
diff --git a/chromium/media/capture/video/video_capture_device_factory.h b/chromium/media/capture/video/video_capture_device_factory.h
index 9ec274265c1..2f95f55c936 100644
--- a/chromium/media/capture/video/video_capture_device_factory.h
+++ b/chromium/media/capture/video/video_capture_device_factory.h
@@ -10,15 +10,12 @@
#include "base/threading/thread_checker.h"
#include "gpu/command_buffer/client/gpu_memory_buffer_manager.h"
#include "media/capture/video/video_capture_device.h"
-#include "media/mojo/interfaces/jpeg_decode_accelerator.mojom.h"
-#include "media/mojo/interfaces/jpeg_encode_accelerator.mojom.h"
+#include "media/mojo/interfaces/mjpeg_decode_accelerator.mojom.h"
namespace media {
-using MojoJpegDecodeAcceleratorFactoryCB =
- base::RepeatingCallback<void(media::mojom::JpegDecodeAcceleratorRequest)>;
-using MojoJpegEncodeAcceleratorFactoryCB =
- base::RepeatingCallback<void(media::mojom::JpegEncodeAcceleratorRequest)>;
+using MojoMjpegDecodeAcceleratorFactoryCB =
+ base::RepeatingCallback<void(media::mojom::MjpegDecodeAcceleratorRequest)>;
// VideoCaptureDeviceFactory is the base class for creation of video capture
// devices in the different platforms. VCDFs are created by MediaStreamManager
diff --git a/chromium/media/capture/video/video_capture_device_unittest.cc b/chromium/media/capture/video/video_capture_device_unittest.cc
index 31631646fc3..e5aceeaec31 100644
--- a/chromium/media/capture/video/video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_unittest.cc
@@ -47,8 +47,6 @@
#endif
#if defined(OS_CHROMEOS)
-#include "chromeos/dbus/dbus_thread_manager.h"
-#include "chromeos/dbus/fake_power_manager_client.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
#include "media/capture/video/chromeos/local_gpu_memory_buffer_manager.h"
@@ -63,12 +61,9 @@
DISABLED_UsingRealWebcam_AllocateBadSize
// We will always get YUYV from the Mac AVFoundation implementations.
#define MAYBE_UsingRealWebcam_CaptureMjpeg DISABLED_UsingRealWebcam_CaptureMjpeg
-// TODO(crbug.com/923874).
-#define MAYBE_UsingRealWebcam_TakePhoto DISABLED_UsingRealWebcam_TakePhoto
- // TODO(crbug.com/923874).
-#define MAYBE_UsingRealWebcam_GetPhotoState DISABLED_UsingRealWebcam_GetPhotoState
- // TODO(crbug.com/923874).
-#define MAYBE_UsingRealWebcam_CaptureWithSize DISABLED_UsingRealWebcam_CaptureWithSize
+#define MAYBE_UsingRealWebcam_TakePhoto UsingRealWebcam_TakePhoto
+#define MAYBE_UsingRealWebcam_GetPhotoState UsingRealWebcam_GetPhotoState
+#define MAYBE_UsingRealWebcam_CaptureWithSize UsingRealWebcam_CaptureWithSize
#define MAYBE_UsingRealWebcam_CheckPhotoCallbackRelease \
UsingRealWebcam_CheckPhotoCallbackRelease
#elif defined(OS_WIN)
@@ -262,13 +257,12 @@ class VideoCaptureDeviceTest
#if defined(OS_CHROMEOS)
local_gpu_memory_buffer_manager_ =
std::make_unique<LocalGpuMemoryBufferManager>();
- dbus_setter_ = chromeos::DBusThreadManager::GetSetterForTesting();
VideoCaptureDeviceFactoryChromeOS::SetGpuBufferManager(
local_gpu_memory_buffer_manager_.get());
if (!CameraHalDispatcherImpl::GetInstance()->IsStarted()) {
CameraHalDispatcherImpl::GetInstance()->Start(
base::DoNothing::Repeatedly<
- media::mojom::JpegDecodeAcceleratorRequest>(),
+ media::mojom::MjpegDecodeAcceleratorRequest>(),
base::DoNothing::Repeatedly<
media::mojom::JpegEncodeAcceleratorRequest>());
}
@@ -279,7 +273,7 @@ class VideoCaptureDeviceTest
void SetUp() override {
#if defined(OS_CHROMEOS)
- chromeos::PowerManagerClient::Initialize();
+ chromeos::PowerManagerClient::InitializeFake();
#endif
#if defined(OS_ANDROID)
static_cast<VideoCaptureDeviceFactoryAndroid*>(
@@ -309,7 +303,7 @@ class VideoCaptureDeviceTest
ON_CALL(*result, OnError(_, _, _)).WillByDefault(Invoke(DumpError));
EXPECT_CALL(*result, ReserveOutputBuffer(_, _, _, _)).Times(0);
EXPECT_CALL(*result, DoOnIncomingCapturedBuffer(_, _, _, _)).Times(0);
- EXPECT_CALL(*result, DoOnIncomingCapturedBufferExt(_, _, _, _, _, _))
+ EXPECT_CALL(*result, DoOnIncomingCapturedBufferExt(_, _, _, _, _, _, _))
.Times(0);
ON_CALL(*result, OnIncomingCapturedData(_, _, _, _, _, _, _))
.WillByDefault(
@@ -472,7 +466,6 @@ class VideoCaptureDeviceTest
VideoCaptureFormat last_format_;
#if defined(OS_CHROMEOS)
std::unique_ptr<LocalGpuMemoryBufferManager> local_gpu_memory_buffer_manager_;
- std::unique_ptr<chromeos::DBusThreadManagerSetter> dbus_setter_;
#endif
std::unique_ptr<VideoCaptureDeviceFactory> video_capture_device_factory_;
};
diff --git a/chromium/media/capture/video/video_capture_jpeg_decoder_impl.cc b/chromium/media/capture/video/video_capture_jpeg_decoder_impl.cc
index efbfd88e3f1..fa59e2f3866 100644
--- a/chromium/media/capture/video/video_capture_jpeg_decoder_impl.cc
+++ b/chromium/media/capture/video/video_capture_jpeg_decoder_impl.cc
@@ -11,7 +11,7 @@
namespace media {
VideoCaptureJpegDecoderImpl::VideoCaptureJpegDecoderImpl(
- MojoJpegDecodeAcceleratorFactoryCB jpeg_decoder_factory,
+ MojoMjpegDecodeAcceleratorFactoryCB jpeg_decoder_factory,
scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
DecodeDoneCB decode_done_cb,
base::RepeatingCallback<void(const std::string&)> send_log_message_cb)
@@ -21,7 +21,7 @@ VideoCaptureJpegDecoderImpl::VideoCaptureJpegDecoderImpl(
send_log_message_cb_(std::move(send_log_message_cb)),
has_received_decoded_frame_(false),
next_bitstream_buffer_id_(0),
- in_buffer_id_(media::JpegDecodeAccelerator::kInvalidBitstreamBufferId),
+ in_buffer_id_(media::MjpegDecodeAccelerator::kInvalidBitstreamBufferId),
decoder_status_(INIT_PENDING),
weak_ptr_factory_(this) {}
@@ -146,7 +146,7 @@ void VideoCaptureJpegDecoderImpl::DecodeCapturedData(
// base::Unretained is safe because |decoder_| is deleted on
// |decoder_task_runner_|.
decoder_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&media::JpegDecodeAccelerator::Decode,
+ FROM_HERE, base::BindOnce(&media::MjpegDecodeAccelerator::Decode,
base::Unretained(decoder_.get()), in_buffer,
std::move(out_frame)));
}
@@ -170,7 +170,7 @@ void VideoCaptureJpegDecoderImpl::VideoFrameReady(int32_t bitstream_buffer_id) {
<< ", expected " << in_buffer_id_;
return;
}
- in_buffer_id_ = media::JpegDecodeAccelerator::kInvalidBitstreamBufferId;
+ in_buffer_id_ = media::MjpegDecodeAccelerator::kInvalidBitstreamBufferId;
std::move(decode_done_closure_).Run();
@@ -180,7 +180,7 @@ void VideoCaptureJpegDecoderImpl::VideoFrameReady(int32_t bitstream_buffer_id) {
void VideoCaptureJpegDecoderImpl::NotifyError(
int32_t bitstream_buffer_id,
- media::JpegDecodeAccelerator::Error error) {
+ media::MjpegDecodeAccelerator::Error error) {
DCHECK(decoder_task_runner_->RunsTasksInCurrentSequence());
LOG(ERROR) << "Decode error, bitstream_buffer_id=" << bitstream_buffer_id
<< ", error=" << error;
@@ -194,11 +194,11 @@ void VideoCaptureJpegDecoderImpl::FinishInitialization() {
TRACE_EVENT0("gpu", "VideoCaptureJpegDecoderImpl::FinishInitialization");
DCHECK(decoder_task_runner_->RunsTasksInCurrentSequence());
- media::mojom::JpegDecodeAcceleratorPtr remote_decoder;
+ media::mojom::MjpegDecodeAcceleratorPtr remote_decoder;
jpeg_decoder_factory_.Run(mojo::MakeRequest(&remote_decoder));
base::AutoLock lock(lock_);
- decoder_ = std::make_unique<media::MojoJpegDecodeAccelerator>(
+ decoder_ = std::make_unique<media::MojoMjpegDecodeAccelerator>(
decoder_task_runner_, remote_decoder.PassInterface());
decoder_->InitializeAsync(
diff --git a/chromium/media/capture/video/video_capture_jpeg_decoder_impl.h b/chromium/media/capture/video/video_capture_jpeg_decoder_impl.h
index a8d099ac208..1b446f1f5bc 100644
--- a/chromium/media/capture/video/video_capture_jpeg_decoder_impl.h
+++ b/chromium/media/capture/video/video_capture_jpeg_decoder_impl.h
@@ -19,12 +19,12 @@
#include "media/capture/capture_export.h"
#include "media/capture/video/video_capture_device_factory.h"
#include "media/capture/video/video_capture_jpeg_decoder.h"
-#include "media/mojo/clients/mojo_jpeg_decode_accelerator.h"
+#include "media/mojo/clients/mojo_mjpeg_decode_accelerator.h"
namespace media {
// Implementation of media::VideoCaptureJpegDecoder that delegates to a
-// media::mojom::JpegDecodeAccelerator. When a frame is received in
+// media::mojom::MjpegDecodeAccelerator. When a frame is received in
// DecodeCapturedData(), it is copied to |in_shared_memory| for IPC transport
// to |decoder_|. When the decoder is finished with the frame, |decode_done_cb_|
// is invoked. Until |decode_done_cb_| is invoked, subsequent calls to
@@ -34,10 +34,10 @@ namespace media {
// media::VideoCaptureJpegDecoder methods may be called from any thread.
class CAPTURE_EXPORT VideoCaptureJpegDecoderImpl
: public VideoCaptureJpegDecoder,
- public JpegDecodeAccelerator::Client {
+ public MjpegDecodeAccelerator::Client {
public:
VideoCaptureJpegDecoderImpl(
- MojoJpegDecodeAcceleratorFactoryCB jpeg_decoder_factory,
+ MojoMjpegDecodeAcceleratorFactoryCB jpeg_decoder_factory,
scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
DecodeDoneCB decode_done_cb,
base::RepeatingCallback<void(const std::string&)> send_log_message_cb);
@@ -54,11 +54,11 @@ class CAPTURE_EXPORT VideoCaptureJpegDecoderImpl
base::TimeDelta timestamp,
media::VideoCaptureDevice::Client::Buffer out_buffer) override;
- // JpegDecodeAccelerator::Client implementation.
+ // MjpegDecodeAccelerator::Client implementation.
// These will be called on |decoder_task_runner|.
void VideoFrameReady(int32_t buffer_id) override;
void NotifyError(int32_t buffer_id,
- media::JpegDecodeAccelerator::Error error) override;
+ media::MjpegDecodeAccelerator::Error error) override;
private:
void FinishInitialization();
@@ -72,11 +72,11 @@ class CAPTURE_EXPORT VideoCaptureJpegDecoderImpl
void DestroyDecoderOnIOThread(base::WaitableEvent* event);
- MojoJpegDecodeAcceleratorFactoryCB jpeg_decoder_factory_;
+ MojoMjpegDecodeAcceleratorFactoryCB jpeg_decoder_factory_;
scoped_refptr<base::SequencedTaskRunner> decoder_task_runner_;
// The underlying JPEG decode accelerator.
- std::unique_ptr<media::JpegDecodeAccelerator> decoder_;
+ std::unique_ptr<media::MjpegDecodeAccelerator> decoder_;
// The callback to run when decode succeeds.
const DecodeDoneCB decode_done_cb_;
diff --git a/chromium/media/capture/video/video_frame_receiver.h b/chromium/media/capture/video/video_frame_receiver.h
index baffd359e3c..aaa69b78828 100644
--- a/chromium/media/capture/video/video_frame_receiver.h
+++ b/chromium/media/capture/video/video_frame_receiver.h
@@ -13,7 +13,12 @@
namespace media {
// Callback interface for VideoCaptureDeviceClient to communicate with its
-// clients.
+// clients. On some platforms, VideoCaptureDeviceClient calls these methods from
+// OS or capture driver provided threads which do not have a task runner and
+// cannot be posted back to. The mostly equivalent interface
+// video_capture::mojom::Receiver cannot be used by VideoCaptureDeviceClient
+// directly, because creating a video_catpure::mojom::ScopedAccessPermissionPtr
+// for passing into OnFrameReadyInBuffer() requires a thread with a task runner.
class CAPTURE_EXPORT VideoFrameReceiver {
public:
virtual ~VideoFrameReceiver() {}
@@ -46,9 +51,9 @@ class CAPTURE_EXPORT VideoFrameReceiver {
// while the receiver is still holding |buffer_read_permission| from a call to
// OnFrameReadInBuffer() for the same buffer. In that case, it means that the
// caller is asking the VideoFrameReceiver to release the read permission and
- // buffer handle at its earliest convenience.
- // After this call, a producer may immediately reuse the retired |buffer_id|
- // with a new buffer via a call to OnNewBufferHandle().
+ // buffer handle at its earliest convenience. After this call, a producer may
+ // immediately reuse the retired |buffer_id| with a new buffer via a call to
+ // OnNewBuffer().
virtual void OnBufferRetired(int buffer_id) = 0;
virtual void OnError(VideoCaptureError error) = 0;
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.cc b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
index 21f1b71c446..673eeb541c5 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
@@ -87,7 +87,10 @@ const char* const kModelIdsBlacklistedForMediaFoundation[] = {
// Sensoray 2253
"1943:2253",
// Dell E5440
- "0c45:64d0", "0c45:64d2"};
+ "0c45:64d0", "0c45:64d2",
+ // Lenovo Thinkpad Model 20CG0006FMZ front and rear cameras, see
+ // also https://crbug.com/924528
+ "04ca:7047", "04ca:7048"};
const std::pair<VideoCaptureApi, std::vector<std::pair<GUID, GUID>>>
kMfAttributes[] = {{VideoCaptureApi::WIN_MEDIA_FOUNDATION,
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
index 8ad8f3fa0d1..f8c252f2a9e 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
@@ -53,6 +53,7 @@ class MockClient : public VideoCaptureDevice::Client {
void OnIncomingCapturedBufferExt(
Buffer buffer,
const VideoCaptureFormat& format,
+ const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
base::TimeDelta timestamp,
gfx::Rect visible_rect,